diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ac1c24d10..3fcbd79dd 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -a8f547d3728fba835fbdda301e846829c5cbbef5 \ No newline at end of file +95e08d8bc631ac93d68e3846aea3cb04a2913495 \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 6bd12e9f3..5c690bd54 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,25 @@ ### Internal Changes ### API Changes +* Added [w.ai_builder](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/aibuilder/ai_builder.html) workspace-level service. +* Added [w.feature_store](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/feature_store.html) workspace-level service. +* Added `expiration_time` field for `databricks.sdk.service.database.DatabaseCredential`. +* Added `effective_stopped` field for `databricks.sdk.service.database.DatabaseInstance`. +* Added `existing_pipeline_id` field for `databricks.sdk.service.database.SyncedTableSpec`. +* Added `pipeline_id` field for `databricks.sdk.service.database.SyncedTableStatus`. +* Added `dbt_platform_output` field for `databricks.sdk.service.jobs.RunOutput`. +* Added `dbt_platform_task` field for `databricks.sdk.service.jobs.RunTask`. +* Added `dbt_platform_task` field for `databricks.sdk.service.jobs.SubmitTask`. +* Added `dbt_platform_task` field for `databricks.sdk.service.jobs.Task`. +* Added `environment` field for `databricks.sdk.service.pipelines.CreatePipeline`. +* Added `environment` field for `databricks.sdk.service.pipelines.EditPipeline`. +* Added `environment` field for `databricks.sdk.service.pipelines.PipelineSpec`. +* Added `description` field for `databricks.sdk.service.serving.ServingEndpoint`. +* Added `description` field for `databricks.sdk.service.serving.ServingEndpointDetailed`. 
+* Added `cancelled`, `error`, `queued`, `running`, `starting` and `success` enum values for `databricks.sdk.service.jobs.DbtPlatformRunStatus`. +* [Breaking] Changed `status` field for `databricks.sdk.service.jobs.DbtCloudJobRunStep` to type `databricks.sdk.service.jobs.DbtPlatformRunStatus` enum. +* [Breaking] Removed [w.custom_llms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/aibuilder/custom_llms.html) workspace-level service. +* [Breaking] Removed `table_serving_url` field for `databricks.sdk.service.database.DatabaseTable`. +* [Breaking] Removed `table_serving_url` field for `databricks.sdk.service.database.SyncedDatabaseTable`. +* [Breaking] Removed `pipeline_id` field for `databricks.sdk.service.database.SyncedTableSpec`. +* [Breaking] Removed `cancelled`, `error`, `queued`, `running`, `starting` and `success` enum values for `databricks.sdk.service.jobs.DbtCloudRunStatus`. diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 612d1dd1b..5e0fb4c62 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -36,7 +36,7 @@ from databricks.sdk.service import sql as pkg_sql from databricks.sdk.service import vectorsearch as pkg_vectorsearch from databricks.sdk.service import workspace as pkg_workspace -from databricks.sdk.service.aibuilder import CustomLlmsAPI +from databricks.sdk.service.aibuilder import AiBuilderAPI from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, BudgetsAPI, LogDeliveryAPI, @@ -87,8 +87,8 @@ ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI, ProviderListingsAPI, ProviderPersonalizationRequestsAPI, ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI) -from databricks.sdk.service.ml import (ExperimentsAPI, ForecastingAPI, - ModelRegistryAPI) +from databricks.sdk.service.ml import (ExperimentsAPI, FeatureStoreAPI, + ForecastingAPI, ModelRegistryAPI) from databricks.sdk.service.oauth2 
import (AccountFederationPolicyAPI, CustomAppIntegrationAPI, OAuthPublishedAppsAPI, @@ -235,6 +235,7 @@ def __init__( serving_endpoints = ServingEndpointsExt(self._api_client) self._access_control = pkg_iam.AccessControlAPI(self._api_client) self._account_access_control_proxy = pkg_iam.AccountAccessControlProxyAPI(self._api_client) + self._ai_builder = pkg_aibuilder.AiBuilderAPI(self._api_client) self._alerts = pkg_sql.AlertsAPI(self._api_client) self._alerts_legacy = pkg_sql.AlertsLegacyAPI(self._api_client) self._alerts_v2 = pkg_sql.AlertsV2API(self._api_client) @@ -256,7 +257,6 @@ def __init__( self._credentials = pkg_catalog.CredentialsAPI(self._api_client) self._credentials_manager = pkg_settings.CredentialsManagerAPI(self._api_client) self._current_user = pkg_iam.CurrentUserAPI(self._api_client) - self._custom_llms = pkg_aibuilder.CustomLlmsAPI(self._api_client) self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client) self._dashboards = pkg_sql.DashboardsAPI(self._api_client) self._data_sources = pkg_sql.DataSourcesAPI(self._api_client) @@ -265,6 +265,7 @@ def __init__( self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client) self._experiments = pkg_ml.ExperimentsAPI(self._api_client) self._external_locations = pkg_catalog.ExternalLocationsAPI(self._api_client) + self._feature_store = pkg_ml.FeatureStoreAPI(self._api_client) self._files = _make_files_client(self._api_client, self._config) self._functions = pkg_catalog.FunctionsAPI(self._api_client) self._genie = pkg_dashboards.GenieAPI(self._api_client) @@ -366,6 +367,11 @@ def account_access_control_proxy(self) -> pkg_iam.AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account.""" return self._account_access_control_proxy + @property + def ai_builder(self) -> pkg_aibuilder.AiBuilderAPI: + """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" + return self._ai_builder + @property def alerts(self) -> 
pkg_sql.AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" @@ -471,11 +477,6 @@ def current_user(self) -> pkg_iam.CurrentUserAPI: """This API allows retrieving information about currently authenticated user or service principal.""" return self._current_user - @property - def custom_llms(self) -> pkg_aibuilder.CustomLlmsAPI: - """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" - return self._custom_llms - @property def dashboard_widgets(self) -> pkg_sql.DashboardWidgetsAPI: """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.""" @@ -516,6 +517,11 @@ def external_locations(self) -> pkg_catalog.ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.""" return self._external_locations + @property + def feature_store(self) -> pkg_ml.FeatureStoreAPI: + """A feature store is a centralized repository that enables data scientists to find and share features.""" + return self._feature_store + @property def files(self) -> pkg_files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py index 3b37a2070..2d2633622 100755 --- a/databricks/sdk/service/aibuilder.py +++ b/databricks/sdk/service/aibuilder.py @@ -21,23 +21,85 @@ class CancelCustomLlmOptimizationRunRequest: @dataclass -class CancelResponse: +class CancelOptimizeResponse: def as_dict(self) -> dict: - """Serializes the CancelResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the CancelOptimizeResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the 
CancelResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the CancelOptimizeResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: - """Deserializes the CancelResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CancelOptimizeResponse: + """Deserializes the CancelOptimizeResponse from a dictionary.""" return cls() +@dataclass +class CreateCustomLlmRequest: + name: str + """Name of the custom LLM. Only alphanumeric characters and dashes allowed.""" + + instructions: str + """Instructions for the custom LLM to follow""" + + agent_artifact_path: Optional[str] = None + """Optional: UC path for agent artifacts. If you are using a dataset that you only have read + permissions, please provide a destination path where you have write permissions. Please provide + this in catalog.schema format.""" + + datasets: Optional[List[Dataset]] = None + """Datasets used for training and evaluating the model, not for inference. 
Currently, only 1 + dataset is accepted.""" + + guidelines: Optional[List[str]] = None + """Guidelines for the custom LLM to adhere to""" + + def as_dict(self) -> dict: + """Serializes the CreateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.agent_artifact_path is not None: + body["agent_artifact_path"] = self.agent_artifact_path + if self.datasets: + body["datasets"] = [v.as_dict() for v in self.datasets] + if self.guidelines: + body["guidelines"] = [v for v in self.guidelines] + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.agent_artifact_path is not None: + body["agent_artifact_path"] = self.agent_artifact_path + if self.datasets: + body["datasets"] = self.datasets + if self.guidelines: + body["guidelines"] = self.guidelines + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateCustomLlmRequest: + """Deserializes the CreateCustomLlmRequest from a dictionary.""" + return cls( + agent_artifact_path=d.get("agent_artifact_path", None), + datasets=_repeated_dict(d, "datasets", Dataset), + guidelines=d.get("guidelines", None), + instructions=d.get("instructions", None), + name=d.get("name", None), + ) + + @dataclass class CustomLlm: name: str @@ -159,6 +221,24 @@ def from_dict(cls, d: Dict[str, Any]) -> Dataset: return cls(table=_from_dict(d, "table", Table)) +@dataclass +class DeleteCustomLlmResponse: + def as_dict(self) -> dict: + """Serializes the DeleteCustomLlmResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + 
"""Serializes the DeleteCustomLlmResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomLlmResponse: + """Deserializes the DeleteCustomLlmResponse from a dictionary.""" + return cls() + + @dataclass class StartCustomLlmOptimizationRunRequest: id: Optional[str] = None @@ -273,13 +353,13 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest: ) -class CustomLlmsAPI: +class AiBuilderAPI: """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" def __init__(self, api_client): self._api = api_client - def cancel(self, id: str): + def cancel_optimize(self, id: str): """Cancel a Custom LLM Optimization Run. :param id: str @@ -294,24 +374,68 @@ def cancel(self, id: str): self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize/cancel", headers=headers) - def create(self, id: str) -> CustomLlm: - """Start a Custom LLM Optimization Run. - - :param id: str - The Id of the tile. + def create_custom_llm( + self, + name: str, + instructions: str, + *, + agent_artifact_path: Optional[str] = None, + datasets: Optional[List[Dataset]] = None, + guidelines: Optional[List[str]] = None, + ) -> CustomLlm: + """Create a Custom LLM. + + :param name: str + Name of the custom LLM. Only alphanumeric characters and dashes allowed. + :param instructions: str + Instructions for the custom LLM to follow + :param agent_artifact_path: str (optional) + Optional: UC path for agent artifacts. If you are using a dataset that you only have read + permissions, please provide a destination path where you have write permissions. Please provide this + in catalog.schema format. + :param datasets: List[:class:`Dataset`] (optional) + Datasets used for training and evaluating the model, not for inference. Currently, only 1 dataset is + accepted. 
+ :param guidelines: List[str] (optional) + Guidelines for the custom LLM to adhere to :returns: :class:`CustomLlm` """ - + body = {} + if agent_artifact_path is not None: + body["agent_artifact_path"] = agent_artifact_path + if datasets is not None: + body["datasets"] = [v.as_dict() for v in datasets] + if guidelines is not None: + body["guidelines"] = [v for v in guidelines] + if instructions is not None: + body["instructions"] = instructions + if name is not None: + body["name"] = name headers = { "Accept": "application/json", "Content-Type": "application/json", } - res = self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize", headers=headers) + res = self._api.do("POST", "/api/2.0/custom-llms", body=body, headers=headers) return CustomLlm.from_dict(res) - def get(self, id: str) -> CustomLlm: + def delete_custom_llm(self, id: str): + """Delete a Custom LLM. + + :param id: str + The id of the custom llm + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/custom-llms/{id}", headers=headers) + + def get_custom_llm(self, id: str) -> CustomLlm: """Get a Custom LLM. :param id: str @@ -327,7 +451,24 @@ def get(self, id: str) -> CustomLlm: res = self._api.do("GET", f"/api/2.0/custom-llms/{id}", headers=headers) return CustomLlm.from_dict(res) - def update(self, id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm: + def start_optimize(self, id: str) -> CustomLlm: + """Start a Custom LLM Optimization Run. + + :param id: str + The Id of the tile. + + :returns: :class:`CustomLlm` + """ + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize", headers=headers) + return CustomLlm.from_dict(res) + + def update_custom_llm(self, id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm: """Update a Custom LLM. 
:param id: str diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index e0ca7d9a4..9f9e38c4a 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -1232,9 +1232,7 @@ def wait_get_app_stopped( raise TimeoutError(f"timed out after {timeout}: {status_message}") def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: - """Create an app. - - Creates a new app. + """Creates a new app. :param app: :class:`App` :param no_compute: bool (optional) @@ -1260,9 +1258,7 @@ def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeou return self.create(app=app, no_compute=no_compute).result(timeout=timeout) def delete(self, name: str) -> App: - """Delete an app. - - Deletes an app. + """Deletes an app. :param name: str The name of the app. @@ -1278,9 +1274,7 @@ def delete(self, name: str) -> App: return App.from_dict(res) def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]: - """Create an app deployment. - - Creates an app deployment for the app with the supplied name. + """Creates an app deployment for the app with the supplied name. :param app_name: str The name of the app. @@ -1310,9 +1304,7 @@ def deploy_and_wait( return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout) def get(self, name: str) -> App: - """Get an app. - - Retrieves information for the app with the supplied name. + """Retrieves information for the app with the supplied name. :param name: str The name of the app. @@ -1328,9 +1320,7 @@ def get(self, name: str) -> App: return App.from_dict(res) def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: - """Get an app deployment. - - Retrieves information for the app deployment with the supplied name and deployment id. + """Retrieves information for the app deployment with the supplied name and deployment id. :param app_name: str The name of the app. 
@@ -1348,9 +1338,7 @@ def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: return AppDeployment.from_dict(res) def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse: - """Get app permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param app_name: str The app for which to get or manage permissions. @@ -1366,9 +1354,7 @@ def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse return GetAppPermissionLevelsResponse.from_dict(res) def get_permissions(self, app_name: str) -> AppPermissions: - """Get app permissions. - - Gets the permissions of an app. Apps can inherit permissions from their root object. + """Gets the permissions of an app. Apps can inherit permissions from their root object. :param app_name: str The app for which to get or manage permissions. @@ -1384,9 +1370,7 @@ def get_permissions(self, app_name: str) -> AppPermissions: return AppPermissions.from_dict(res) def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: - """List apps. - - Lists all apps in the workspace. + """Lists all apps in the workspace. :param page_size: int (optional) Upper bound for items returned. @@ -1417,9 +1401,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N def list_deployments( self, app_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[AppDeployment]: - """List app deployments. - - Lists all app deployments for the app with the supplied name. + """Lists all app deployments for the app with the supplied name. :param app_name: str The name of the app. @@ -1452,9 +1434,7 @@ def list_deployments( def set_permissions( self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None ) -> AppPermissions: - """Set app permissions. 
- - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param app_name: str @@ -1475,9 +1455,7 @@ def set_permissions( return AppPermissions.from_dict(res) def start(self, name: str) -> Wait[App]: - """Start an app. - - Start the last active deployment of the app in the workspace. + """Start the last active deployment of the app in the workspace. :param name: str The name of the app. @@ -1499,9 +1477,7 @@ def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: return self.start(name=name).result(timeout=timeout) def stop(self, name: str) -> Wait[App]: - """Stop an app. - - Stops the active deployment of the app in the workspace. + """Stops the active deployment of the app in the workspace. :param name: str The name of the app. @@ -1523,9 +1499,7 @@ def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: return self.stop(name=name).result(timeout=timeout) def update(self, name: str, app: App) -> App: - """Update an app. - - Updates the app with the supplied name. + """Updates the app with the supplied name. :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It @@ -1546,9 +1520,7 @@ def update(self, name: str, app: App) -> App: def update_permissions( self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None ) -> AppPermissions: - """Update app permissions. - - Updates the permissions on an app. Apps can inherit permissions from their root object. + """Updates the permissions on an app. Apps can inherit permissions from their root object. :param app_name: str The app for which to get or manage permissions. 
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 4dc535891..763cc0501 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1719,9 +1719,7 @@ def __init__(self, api_client): self._api = api_client def download(self, start_month: str, end_month: str, *, personal_data: Optional[bool] = None) -> DownloadResponse: - """Return billable usage logs. - - Returns billable usage logs in CSV format for the specified account and date range. For the data + """Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of @@ -1766,9 +1764,7 @@ def __init__(self, api_client): self._api = api_client def create(self, *, policy: Optional[BudgetPolicy] = None, request_id: Optional[str] = None) -> BudgetPolicy: - """Create a budget policy. - - Creates a new policy. + """Creates a new policy. :param policy: :class:`BudgetPolicy` (optional) The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must be @@ -1796,9 +1792,7 @@ def create(self, *, policy: Optional[BudgetPolicy] = None, request_id: Optional[ return BudgetPolicy.from_dict(res) def delete(self, policy_id: str): - """Delete a budget policy. - - Deletes a policy + """Deletes a policy :param policy_id: str The Id of the policy. @@ -1813,9 +1807,7 @@ def delete(self, policy_id: str): self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", headers=headers) def get(self, policy_id: str) -> BudgetPolicy: - """Get a budget policy. - - Retrieves a policy by it's ID. + """Retrieves a policy by it's ID. :param policy_id: str The Id of the policy. 
@@ -1840,9 +1832,7 @@ def list( page_token: Optional[str] = None, sort_spec: Optional[SortSpec] = None, ) -> Iterator[BudgetPolicy]: - """List policies. - - Lists all policies. Policies are returned in the alphabetically ascending order of their names. + """Lists all policies. Policies are returned in the alphabetically ascending order of their names. :param filter_by: :class:`Filter` (optional) A filter to apply to the list of policies. @@ -1888,9 +1878,7 @@ def list( def update( self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None ) -> BudgetPolicy: - """Update a budget policy. - - Updates a policy + """Updates a policy :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. @@ -1929,9 +1917,7 @@ def __init__(self, api_client): self._api = api_client def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse: - """Create new budget. - - Create a new budget configuration for an account. For full details, see + """Create a new budget configuration for an account. For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html. :param budget: :class:`CreateBudgetConfigurationBudget` @@ -1951,9 +1937,7 @@ def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigu return CreateBudgetConfigurationResponse.from_dict(res) def delete(self, budget_id: str): - """Delete budget. - - Deletes a budget configuration for an account. Both account and budget configuration are specified by + """Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. :param budget_id: str @@ -1969,9 +1953,7 @@ def delete(self, budget_id: str): self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", headers=headers) def get(self, budget_id: str) -> GetBudgetConfigurationResponse: - """Get budget. 
- - Gets a budget configuration for an account. Both account and budget configuration are specified by ID. + """Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str The budget configuration ID @@ -1987,9 +1969,7 @@ def get(self, budget_id: str) -> GetBudgetConfigurationResponse: return GetBudgetConfigurationResponse.from_dict(res) def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: - """Get all budgets. - - Gets all budgets associated with this account. + """Gets all budgets associated with this account. :param page_token: str (optional) A page token received from a previous get all budget configurations call. This token can be used to @@ -2017,9 +1997,7 @@ def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfigurat query["page_token"] = json["next_page_token"] def update(self, budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse: - """Modify budget. - - Updates a budget configuration for an account. Both account and budget configuration are specified by + """Updates a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str @@ -2053,9 +2031,7 @@ def __init__(self, api_client): def create( self, log_delivery_configuration: CreateLogDeliveryConfigurationParams ) -> WrappedLogDeliveryConfiguration: - """Create a new log delivery configuration. - - Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs + """Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket). 
@@ -2093,9 +2069,7 @@ def create( return WrappedLogDeliveryConfiguration.from_dict(res) def get(self, log_delivery_configuration_id: str) -> GetLogDeliveryConfigurationResponse: - """Get log delivery configuration. - - Gets a Databricks log delivery configuration object for an account, both specified by ID. + """Gets a Databricks log delivery configuration object for an account, both specified by ID. :param log_delivery_configuration_id: str The log delivery configuration id of customer @@ -2122,9 +2096,7 @@ def list( status: Optional[LogDeliveryConfigStatus] = None, storage_configuration_id: Optional[str] = None, ) -> Iterator[LogDeliveryConfiguration]: - """Get all log delivery configurations. - - Gets all Databricks log delivery configurations associated with an account specified by ID. + """Gets all Databricks log delivery configurations associated with an account specified by ID. :param credentials_id: str (optional) The Credentials id to filter the search results with @@ -2164,9 +2136,7 @@ def list( query["page_token"] = json["next_page_token"] def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryConfigStatus): - """Enable or disable log delivery configuration. - - Enables or disables a log delivery configuration. Deletion of delivery configurations is not + """Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). @@ -2208,9 +2178,7 @@ def __init__(self, api_client): def create( self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None ) -> CreateBillingUsageDashboardResponse: - """Create new usage dashboard. - - Create a usage dashboard specified by workspaceId, accountId, and dashboard type. 
+ """Create a usage dashboard specified by workspaceId, accountId, and dashboard type. :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage @@ -2236,9 +2204,7 @@ def create( def get( self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None ) -> GetBillingUsageDashboardResponse: - """Get usage dashboard. - - Get a usage dashboard specified by workspaceId, accountId, and dashboard type. + """Get a usage dashboard specified by workspaceId, accountId, and dashboard type. :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index f1819bf54..3cd01d05b 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -520,8 +520,10 @@ def from_dict(cls, d: Dict[str, Any]) -> AwsIamRole: @dataclass class AwsIamRoleRequest: + """The AWS IAM role configuration""" + role_arn: str - """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" + """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials.""" def as_dict(self) -> dict: """Serializes the AwsIamRoleRequest into a dictionary suitable for use as a JSON request body.""" @@ -545,11 +547,13 @@ def from_dict(cls, d: Dict[str, Any]) -> AwsIamRoleRequest: @dataclass class AwsIamRoleResponse: + """The AWS IAM role configuration""" + role_arn: str - """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" + """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials.""" external_id: Optional[str] = None - """The external ID used in role assumption to prevent confused deputy problem..""" + """The external ID used in role assumption to prevent the confused deputy problem.""" 
unity_catalog_iam_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity @@ -659,9 +663,7 @@ class AzureManagedIdentity: `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity. This field is only used to - persist the credential_id once it is fetched from the credentials manager - as we only use the - protobuf serializer to store credentials, this ID gets persisted to the database. .""" + """The Databricks internal ID that represents this managed identity.""" managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format, @@ -704,16 +706,18 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentity: @dataclass class AzureManagedIdentityRequest: + """The Azure managed identity configuration.""" + access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format - /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" + `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" managed_identity_id: Optional[str] = None - """The Azure resource ID of the managed identity. Use the format - /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. + """The Azure resource ID of the managed identity. Use the format, + `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` This is only available for user-assgined identities. For system-assigned identities, the access_connector_id is used to identify the identity. 
If this field is not provided, then we - assume the AzureManagedIdentity is for a system-assigned identity.""" + assume the AzureManagedIdentity is using the system-assigned identity.""" def as_dict(self) -> dict: """Serializes the AzureManagedIdentityRequest into a dictionary suitable for use as a JSON request body.""" @@ -744,19 +748,21 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentityRequest: @dataclass class AzureManagedIdentityResponse: + """The Azure managed identity configuration.""" + access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format - /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" + `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" credential_id: Optional[str] = None """The Databricks internal ID that represents this managed identity.""" managed_identity_id: Optional[str] = None - """The Azure resource ID of the managed identity. Use the format - /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. + """The Azure resource ID of the managed identity. Use the format, + `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` This is only available for user-assgined identities. For system-assigned identities, the access_connector_id is used to identify the identity. If this field is not provided, then we - assume the AzureManagedIdentity is for a system-assigned identity.""" + assume the AzureManagedIdentity is using the system-assigned identity.""" def as_dict(self) -> dict: """Serializes the AzureManagedIdentityResponse into a dictionary suitable for use as a JSON request body.""" @@ -1162,14 +1168,17 @@ class CatalogType(Enum): @dataclass class CloudflareApiToken: + """The Cloudflare API token configuration. 
Read more at + https://developers.cloudflare.com/r2/api/s3/tokens/""" + access_key_id: str - """The Cloudflare access key id of the token.""" + """The access key ID associated with the API token.""" secret_access_key: str - """The secret access token generated for the access key id""" + """The secret access token generated for the above access key ID.""" account_id: str - """The account id associated with the API token.""" + """The ID of the account associated with the API token.""" def as_dict(self) -> dict: """Serializes the CloudflareApiToken into a dictionary suitable for use as a JSON request body.""" @@ -1542,7 +1551,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 31""" + """Next Id: 33""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -1773,19 +1782,19 @@ class CreateCredentialRequest: metastore.""" aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration""" + """The AWS IAM role configuration.""" azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration. Only applicable when purpose is **STORAGE**.""" + """The Azure service principal configuration.""" comment: Optional[str] = None """Comment associated with the credential.""" databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + """The Databricks managed GCP service account configuration.""" purpose: Optional[CredentialPurpose] = None """Indicates the purpose of the credential.""" @@ -2563,7 +2572,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateSchema: @dataclass class CreateStorageCredential: name: str - """The credential name. The name must be unique within the metastore.""" + """The credential name. 
The name must be unique among storage and service credentials within the + metastore.""" aws_iam_role: Optional[AwsIamRoleRequest] = None """The AWS IAM role configuration.""" @@ -2584,7 +2594,8 @@ class CreateStorageCredential: """The Databricks managed GCP service account configuration.""" read_only: Optional[bool] = None - """Whether the storage credential is only usable for read operations.""" + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" skip_validation: Optional[bool] = None """Supplying true to this argument skips validation of the created credential.""" @@ -2760,13 +2771,13 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent: @dataclass class CredentialInfo: aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration""" + """The AWS IAM role configuration.""" azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration. Only applicable when purpose is **STORAGE**.""" + """The Azure service principal configuration.""" comment: Optional[str] = None """Comment associated with the credential.""" @@ -2778,7 +2789,7 @@ class CredentialInfo: """Username of credential creator.""" databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + """The Databricks managed GCP service account configuration.""" full_name: Optional[str] = None """The full name of the credential.""" @@ -3011,9 +3022,7 @@ class DatabricksGcpServiceAccount: """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity. 
This field is only used to - persist the credential_id once it is fetched from the credentials manager - as we only use the - protobuf serializer to store credentials, this ID gets persisted to the database""" + """The Databricks internal ID that represents this managed identity.""" email: Optional[str] = None """The email of the service account.""" @@ -3055,6 +3064,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount: @dataclass class DatabricksGcpServiceAccountRequest: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + def as_dict(self) -> dict: """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" body = {} @@ -3073,11 +3084,13 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest: @dataclass class DatabricksGcpServiceAccountResponse: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + credential_id: Optional[str] = None - """The Databricks internal ID that represents this service account. This is an output-only field.""" + """The Databricks internal ID that represents this managed identity.""" email: Optional[str] = None - """The email of the service account. 
This is an output-only field.""" + """The email of the service account.""" def as_dict(self) -> dict: """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" @@ -7834,7 +7847,7 @@ class StorageCredentialInfo: """Comment associated with the credential.""" created_at: Optional[int] = None - """Time at which this Credential was created, in epoch milliseconds.""" + """Time at which this credential was created, in epoch milliseconds.""" created_by: Optional[str] = None """Username of credential creator.""" @@ -7849,18 +7862,21 @@ class StorageCredentialInfo: """The unique identifier of the credential.""" isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" + """Unique identifier of the parent metastore.""" name: Optional[str] = None - """The credential name. The name must be unique within the metastore.""" + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" owner: Optional[str] = None """Username of current owner of credential.""" read_only: Optional[bool] = None - """Whether the storage credential is only usable for read operations.""" + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" updated_at: Optional[int] = None """Time at which this credential was last modified, in epoch milliseconds.""" @@ -7869,7 +7885,8 @@ class StorageCredentialInfo: """Username of user who last modified the credential.""" used_for_managed_storage: Optional[bool] = None - """Whether this credential is the current metastore's root storage credential.""" + """Whether this credential is the current metastore's root storage credential. 
Only applicable when + purpose is **STORAGE**.""" def as_dict(self) -> dict: """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" @@ -8808,19 +8825,19 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateConnection: @dataclass class UpdateCredentialRequest: aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration""" + """The AWS IAM role configuration.""" azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration. Only applicable when purpose is **STORAGE**.""" + """The Azure service principal configuration.""" comment: Optional[str] = None """Comment associated with the credential.""" databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" + """The Databricks managed GCP service account configuration.""" force: Optional[bool] = None """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent @@ -9609,6 +9626,7 @@ class UpdateStorageCredential: """Force update even if there are dependent external locations or external tables.""" isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" name: Optional[str] = None """Name of the storage credential.""" @@ -9620,7 +9638,8 @@ class UpdateStorageCredential: """Username of current owner of credential.""" read_only: Optional[bool] = None - """Whether the storage credential is only usable for read operations.""" + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" skip_validation: Optional[bool] = None """Supplying true to this argument skips validation of the updated credential.""" @@ -10067,7 +10086,7 @@ class ValidateStorageCredential: """Whether the storage credential is only usable for read operations.""" storage_credential_name: Optional[str] = None - """The name of the storage credential to validate.""" + """Required. The name of an existing credential or long-lived cloud credential to validate.""" url: Optional[str] = None """The external location url to validate.""" @@ -10212,7 +10231,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ValidationResult: class ValidationResultOperation(Enum): - """The operation tested.""" + """A enum represents the file operation performed on the external location with the storage + credential""" DELETE = "DELETE" LIST = "LIST" @@ -10222,7 +10242,7 @@ class ValidationResultOperation(Enum): class ValidationResultResult(Enum): - """The results of the tested operation.""" + """A enum represents the result of the file operation""" FAIL = "FAIL" PASS = "PASS" @@ -10449,9 +10469,7 @@ def __init__(self, api_client): def create( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None ): - """Assigns a workspace to a metastore. - - Creates an assignment to a metastore for a workspace + """Creates an assignment to a metastore for a workspace :param workspace_id: int Workspace ID. @@ -10477,9 +10495,7 @@ def create( ) def delete(self, workspace_id: int, metastore_id: str): - """Delete a metastore assignment. - - Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. + """Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int Workspace ID. 
@@ -10500,9 +10516,7 @@ def delete(self, workspace_id: int, metastore_id: str): ) def get(self, workspace_id: int) -> AccountsMetastoreAssignment: - """Gets the metastore assignment for a workspace. - - Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned + """Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 returned. @@ -10522,9 +10536,7 @@ def get(self, workspace_id: int) -> AccountsMetastoreAssignment: return AccountsMetastoreAssignment.from_dict(res) def list(self, metastore_id: str) -> Iterator[int]: - """Get all workspaces assigned to a metastore. - - Gets a list of all Databricks workspace IDs that have been assigned to given metastore. + """Gets a list of all Databricks workspace IDs that have been assigned to given metastore. :param metastore_id: str Unity Catalog metastore ID @@ -10545,9 +10557,7 @@ def list(self, metastore_id: str) -> Iterator[int]: def update( self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[UpdateMetastoreAssignment] = None ): - """Updates a metastore assignment to a workspaces. - - Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be + """Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. :param workspace_id: int @@ -10582,9 +10592,7 @@ def __init__(self, api_client): self._api = api_client def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: - """Create metastore. - - Creates a Unity Catalog metastore. + """Creates a Unity Catalog metastore. 
:param metastore_info: :class:`CreateMetastore` (optional) @@ -10602,9 +10610,7 @@ def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> Account return AccountsMetastoreInfo.from_dict(res) def delete(self, metastore_id: str, *, force: Optional[bool] = None): - """Delete a metastore. - - Deletes a Unity Catalog metastore for an account, both specified by ID. + """Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID @@ -10629,9 +10635,7 @@ def delete(self, metastore_id: str, *, force: Optional[bool] = None): ) def get(self, metastore_id: str) -> AccountsMetastoreInfo: - """Get a metastore. - - Gets a Unity Catalog metastore from an account, both specified by ID. + """Gets a Unity Catalog metastore from an account, both specified by ID. :param metastore_id: str Unity Catalog metastore ID @@ -10649,9 +10653,7 @@ def get(self, metastore_id: str) -> AccountsMetastoreInfo: return AccountsMetastoreInfo.from_dict(res) def list(self) -> Iterator[MetastoreInfo]: - """Get all metastores associated with an account. - - Gets all Unity Catalog metastores associated with an account specified by ID. + """Gets all Unity Catalog metastores associated with an account specified by ID. :returns: Iterator over :class:`MetastoreInfo` """ @@ -10665,9 +10667,7 @@ def list(self) -> Iterator[MetastoreInfo]: return parsed if parsed is not None else [] def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo: - """Update a metastore. - - Updates an existing Unity Catalog metastore. + """Updates an existing Unity Catalog metastore. :param metastore_id: str Unity Catalog metastore ID @@ -10698,9 +10698,7 @@ def __init__(self, api_client): def create( self, metastore_id: str, *, credential_info: Optional[CreateStorageCredential] = None ) -> AccountsStorageCredentialInfo: - """Create a storage credential. - - Creates a new storage credential. 
The request object is specific to the cloud: + """Creates a new storage credential. The request object is specific to the cloud: * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * **GcpServiceAcountKey** for GCP credentials. @@ -10731,9 +10729,7 @@ def create( return AccountsStorageCredentialInfo.from_dict(res) def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None): - """Delete a storage credential. - - Deletes a storage credential from the metastore. The caller must be an owner of the storage + """Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. :param metastore_id: str @@ -10761,9 +10757,7 @@ def delete(self, metastore_id: str, storage_credential_name: str, *, force: Opti ) def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo: - """Gets the named storage credential. - - Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the + """Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on the storage credential. :param metastore_id: str @@ -10786,9 +10780,7 @@ def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorag return AccountsStorageCredentialInfo.from_dict(res) def list(self, metastore_id: str) -> Iterator[StorageCredentialInfo]: - """Get all storage credentials assigned to a metastore. - - Gets a list of all storage credentials that have been assigned to given metastore. + """Gets a list of all storage credentials that have been assigned to given metastore. :param metastore_id: str Unity Catalog metastore ID @@ -10815,9 +10807,7 @@ def update( *, credential_info: Optional[UpdateStorageCredential] = None, ) -> AccountsStorageCredentialInfo: - """Updates a storage credential. 
- - Updates a storage credential on the metastore. The caller must be the owner of the storage credential. + """Updates a storage credential on the metastore. The caller must be the owner of the storage credential. If the caller is a metastore admin, only the __owner__ credential can be changed. :param metastore_id: str @@ -10853,9 +10843,7 @@ def __init__(self, api_client): self._api = api_client def get(self, artifact_type: ArtifactType) -> ArtifactAllowlistInfo: - """Get an artifact allowlist. - - Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have + """Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. :param artifact_type: :class:`ArtifactType` @@ -10880,9 +10868,7 @@ def update( created_by: Optional[str] = None, metastore_id: Optional[str] = None, ) -> ArtifactAllowlistInfo: - """Set an artifact allowlist. - - Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with + """Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. @@ -10942,9 +10928,7 @@ def create( share_name: Optional[str] = None, storage_root: Optional[str] = None, ) -> CatalogInfo: - """Create a catalog. - - Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the + """Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. :param name: str @@ -10994,9 +10978,7 @@ def create( return CatalogInfo.from_dict(res) def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a catalog. - - Deletes the catalog that matches the supplied name. 
The caller must be a metastore admin or the owner + """Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. :param name: str @@ -11017,9 +10999,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/catalogs/{name}", query=query, headers=headers) def get(self, name: str, *, include_browse: Optional[bool] = None) -> CatalogInfo: - """Get a catalog. - - Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the + """Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. :param name: str @@ -11048,9 +11028,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[CatalogInfo]: - """List catalogs. - - Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be + """Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. @@ -11104,9 +11082,7 @@ def update( owner: Optional[str] = None, properties: Optional[Dict[str, str]] = None, ) -> CatalogInfo: - """Update a catalog. - - Updates the catalog that matches the supplied name. The caller must be either the owner of the + """Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). :param name: str @@ -11175,9 +11151,7 @@ def create( properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None, ) -> ConnectionInfo: - """Create a connection. 
- - Creates a new connection + """Creates a new connection Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the external server. @@ -11219,9 +11193,7 @@ def create( return ConnectionInfo.from_dict(res) def delete(self, name: str): - """Delete a connection. - - Deletes the connection that matches the supplied name. + """Deletes the connection that matches the supplied name. :param name: str The name of the connection to be deleted. @@ -11236,9 +11208,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/connections/{name}", headers=headers) def get(self, name: str) -> ConnectionInfo: - """Get a connection. - - Gets a connection from it's name. + """Gets a connection from it's name. :param name: str Name of the connection. @@ -11254,9 +11224,7 @@ def get(self, name: str) -> ConnectionInfo: return ConnectionInfo.from_dict(res) def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ConnectionInfo]: - """List connections. - - List all connections. + """List all connections. :param max_results: int (optional) Maximum number of connections to return. - If not set, all connections are returned (not @@ -11290,9 +11258,7 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = def update( self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None ) -> ConnectionInfo: - """Update a connection. - - Updates the connection that matches the supplied name. + """Updates the connection that matches the supplied name. :param name: str Name of the connection. @@ -11346,9 +11312,7 @@ def create_credential( read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, ) -> CredentialInfo: - """Create a credential. - - Creates a new credential. 
The type of credential to be created is determined by the **purpose** field, + """Creates a new credential. The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for @@ -11358,15 +11322,15 @@ def create_credential( The credential name. The name must be unique among storage and service credentials within the metastore. :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration + The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) - The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + The Azure service principal configuration. :param comment: str (optional) Comment associated with the credential. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. + The Databricks managed GCP service account configuration. :param purpose: :class:`CredentialPurpose` (optional) Indicates the purpose of the credential. :param read_only: bool (optional) @@ -11405,9 +11369,7 @@ def create_credential( return CredentialInfo.from_dict(res) def delete_credential(self, name_arg: str, *, force: Optional[bool] = None): - """Delete a credential. - - Deletes a service or storage credential from the metastore. The caller must be an owner of the + """Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. 
:param name_arg: str @@ -11435,9 +11397,7 @@ def generate_temporary_service_credential( azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None, ) -> TemporaryCredentials: - """Generate a temporary service credential. - - Returns a set of temporary credentials generated using the specified service credential. The caller + """Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. :param credential_name: str @@ -11465,9 +11425,7 @@ def generate_temporary_service_credential( return TemporaryCredentials.from_dict(res) def get_credential(self, name_arg: str) -> CredentialInfo: - """Get a credential. - - Gets a service or storage credential from the metastore. The caller must be a metastore admin, the + """Gets a service or storage credential from the metastore. The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. :param name_arg: str @@ -11490,9 +11448,7 @@ def list_credentials( page_token: Optional[str] = None, purpose: Optional[CredentialPurpose] = None, ) -> Iterator[CredentialInfo]: - """List credentials. - - Gets an array of credentials (as __CredentialInfo__ objects). + """Gets an array of credentials (as __CredentialInfo__ objects). The array is limited to only the credentials that the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific @@ -11547,9 +11503,7 @@ def update_credential( read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, ) -> CredentialInfo: - """Update a credential. - - Updates a service or storage credential on the metastore. + """Updates a service or storage credential on the metastore. 
The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. @@ -11557,15 +11511,15 @@ def update_credential( :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration + The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) - The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + The Azure service principal configuration. :param comment: str (optional) Comment associated with the credential. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. + The Databricks managed GCP service account configuration. :param force: bool (optional) Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). @@ -11626,9 +11580,7 @@ def validate_credential( read_only: Optional[bool] = None, url: Optional[str] = None, ) -> ValidateCredentialResponse: - """Validate a credential. - - Validates a credential. + """Validates a credential. For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific credential must be provided. @@ -11718,9 +11670,7 @@ def create( read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, ) -> ExternalLocationInfo: - """Create an external location. - - Creates a new external location entry in the metastore. The caller must be a metastore admin or have + """Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage credential. @@ -11779,9 +11729,7 @@ def create( return ExternalLocationInfo.from_dict(res) def delete(self, name: str, *, force: Optional[bool] = None): - """Delete an external location. - - Deletes the specified external location from the metastore. The caller must be the owner of the + """Deletes the specified external location from the metastore. The caller must be the owner of the external location. :param name: str @@ -11802,9 +11750,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/external-locations/{name}", query=query, headers=headers) def get(self, name: str, *, include_browse: Optional[bool] = None) -> ExternalLocationInfo: - """Get an external location. - - Gets an external location from the metastore. The caller must be either a metastore admin, the owner + """Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. :param name: str @@ -11833,9 +11779,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[ExternalLocationInfo]: - """List external locations. - - Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller + """Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. @@ -11891,9 +11835,7 @@ def update( skip_validation: Optional[bool] = None, url: Optional[str] = None, ) -> ExternalLocationInfo: - """Update an external location. - - Updates an external location in the metastore. 
The caller must be the owner of the external location, + """Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external location. @@ -11976,9 +11918,7 @@ def __init__(self, api_client): self._api = api_client def create(self, function_info: CreateFunction) -> FunctionInfo: - """Create a function. - - **WARNING: This API is experimental and will change in future versions** + """**WARNING: This API is experimental and will change in future versions** Creates a new function @@ -12003,9 +11943,7 @@ def create(self, function_info: CreateFunction) -> FunctionInfo: return FunctionInfo.from_dict(res) def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a function. - - Deletes the function that matches the supplied name. For the deletion to succeed, the user must + """Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog @@ -12030,9 +11968,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) def get(self, name: str, *, include_browse: Optional[bool] = None) -> FunctionInfo: - """Get a function. - - Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must + """Gets a function from within a parent catalog and schema. 
For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the @@ -12068,9 +12004,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[FunctionInfo]: - """List functions. - - List functions within the specified parent catalog and schema. If the user is a metastore admin, all + """List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is @@ -12119,9 +12053,7 @@ def list( query["page_token"] = json["next_page_token"] def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: - """Update a function. - - Updates the function that matches the supplied name. Only the owner of the function can be updated. If + """Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of @@ -12171,9 +12103,7 @@ def get( page_token: Optional[str] = None, principal: Optional[str] = None, ) -> GetPermissionsResponse: - """Get permissions. - - Gets the permissions for a securable. Does not include inherited permissions. + """Gets the permissions for a securable. 
Does not include inherited permissions. :param securable_type: str Type of securable. @@ -12222,9 +12152,7 @@ def get_effective( page_token: Optional[str] = None, principal: Optional[str] = None, ) -> EffectivePermissionsList: - """Get effective permissions. - - Gets the effective permissions for a securable. Includes inherited permissions from any parent + """Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. :param securable_type: str @@ -12273,9 +12201,7 @@ def get_effective( def update( self, securable_type: str, full_name: str, *, changes: Optional[List[PermissionsChange]] = None ) -> UpdatePermissionsResponse: - """Update permissions. - - Updates the permissions for a securable. + """Updates the permissions for a securable. :param securable_type: str Type of securable. @@ -12317,9 +12243,7 @@ def __init__(self, api_client): self._api = api_client def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str): - """Create an assignment. - - Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be + """Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. @@ -12346,9 +12270,7 @@ def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str self._api.do("PUT", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", body=body, headers=headers) def create(self, name: str, *, region: Optional[str] = None, storage_root: Optional[str] = None) -> MetastoreInfo: - """Create a metastore. - - Creates a new metastore based on a provided name and optional storage root path. By default (if the + """Creates a new metastore based on a provided name and optional storage root path. 
By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is assigned to the System User instead. @@ -12378,9 +12300,7 @@ def create(self, name: str, *, region: Optional[str] = None, storage_root: Optio return MetastoreInfo.from_dict(res) def current(self) -> MetastoreAssignment: - """Get metastore assignment for workspace. - - Gets the metastore assignment for the workspace being accessed. + """Gets the metastore assignment for the workspace being accessed. :returns: :class:`MetastoreAssignment` """ @@ -12393,9 +12313,7 @@ def current(self) -> MetastoreAssignment: return MetastoreAssignment.from_dict(res) def delete(self, id: str, *, force: Optional[bool] = None): - """Delete a metastore. - - Deletes a metastore. The caller must be a metastore admin. + """Deletes a metastore. The caller must be a metastore admin. :param id: str Unique ID of the metastore. @@ -12415,9 +12333,7 @@ def delete(self, id: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/metastores/{id}", query=query, headers=headers) def get(self, id: str) -> MetastoreInfo: - """Get a metastore. - - Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this + """Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. :param id: str @@ -12434,9 +12350,7 @@ def get(self, id: str) -> MetastoreInfo: return MetastoreInfo.from_dict(res) def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[MetastoreInfo]: - """List metastores. - - Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin + """Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. 
There is no guarantee of a specific ordering of the elements in the array. :param max_results: int (optional) @@ -12472,9 +12386,7 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = query["page_token"] = json["next_page_token"] def summary(self) -> GetMetastoreSummaryResponse: - """Get a metastore summary. - - Gets information about a metastore. This summary includes the storage credential, the cloud vendor, + """Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. :returns: :class:`GetMetastoreSummaryResponse` @@ -12488,9 +12400,7 @@ def summary(self) -> GetMetastoreSummaryResponse: return GetMetastoreSummaryResponse.from_dict(res) def unassign(self, workspace_id: int, metastore_id: str): - """Delete an assignment. - - Deletes a metastore assignment. The caller must be an account administrator. + """Deletes a metastore assignment. The caller must be an account administrator. :param workspace_id: int A workspace ID. @@ -12523,9 +12433,7 @@ def update( privilege_model_version: Optional[str] = None, storage_root_credential_id: Optional[str] = None, ) -> MetastoreInfo: - """Update a metastore. - - Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ + """Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. :param id: str @@ -12576,9 +12484,7 @@ def update( def update_assignment( self, workspace_id: int, *, default_catalog_name: Optional[str] = None, metastore_id: Optional[str] = None ): - """Update an assignment. - - Updates a metastore assignment. This operation can be used to update __metastore_id__ or + """Updates a metastore assignment. 
This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a Workspace admin. @@ -12618,9 +12524,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, full_name: str, version: int): - """Delete a Model Version. - - Deletes a model version from the specified registered model. Any aliases assigned to the model version + """Deletes a model version from the specified registered model. Any aliases assigned to the model version will also be deleted. The caller must be a metastore admin or an owner of the parent registered model. For the latter case, @@ -12647,9 +12551,7 @@ def get( include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None, ) -> ModelVersionInfo: - """Get a Model Version. - - Get a model version. + """Get a model version. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** @@ -12683,9 +12585,7 @@ def get( return ModelVersionInfo.from_dict(res) def get_by_alias(self, full_name: str, alias: str, *, include_aliases: Optional[bool] = None) -> ModelVersionInfo: - """Get Model Version By Alias. - - Get a model version by alias. + """Get a model version by alias. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** @@ -12721,9 +12621,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[ModelVersionInfo]: - """List Model Versions. - - List model versions. You can list model versions under a particular schema, or list all model versions + """List model versions. 
You can list model versions under a particular schema, or list all model versions in the current metastore. The returned models are filtered based on the privileges of the calling user. For example, the @@ -12775,9 +12673,7 @@ def list( query["page_token"] = json["next_page_token"] def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo: - """Update a Model Version. - - Updates the specified model version. + """Updates the specified model version. The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the @@ -12844,9 +12740,7 @@ def wait_get_online_table_active( raise TimeoutError(f"timed out after {timeout}: {status_message}") def create(self, table: OnlineTable) -> Wait[OnlineTable]: - """Create an Online Table. - - Create a new Online Table. + """Create a new Online Table. :param table: :class:`OnlineTable` Online Table information. @@ -12870,9 +12764,7 @@ def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> return self.create(table=table).result(timeout=timeout) def delete(self, name: str): - """Delete an Online Table. - - Delete an online table. Warning: This will delete all the data in the online table. If the source + """Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! @@ -12889,9 +12781,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.0/online-tables/{name}", headers=headers) def get(self, name: str) -> OnlineTable: - """Get an Online Table. - - Get information about an existing online table and its status. + """Get information about an existing online table and its status. :param name: str Full three-part (catalog, schema, table) name of the table. 
@@ -12919,9 +12809,7 @@ def __init__(self, api_client): self._api = api_client def cancel_refresh(self, table_name: str, refresh_id: str): - """Cancel refresh. - - Cancel an active monitor refresh for the given refresh ID. + """Cancel an active monitor refresh for the given refresh ID. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: @@ -12962,9 +12850,7 @@ def create( time_series: Optional[MonitorTimeSeries] = None, warehouse_id: Optional[str] = None, ) -> MonitorInfo: - """Create a table monitor. - - Creates a new monitor for the specified table. + """Creates a new monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's @@ -13046,9 +12932,7 @@ def create( return MonitorInfo.from_dict(res) def delete(self, table_name: str): - """Delete a table monitor. - - Deletes a monitor for the specified table. + """Deletes a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: @@ -13071,9 +12955,7 @@ def delete(self, table_name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", headers=headers) def get(self, table_name: str) -> MonitorInfo: - """Get a table monitor. - - Gets a monitor for the specified table. + """Gets a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. 
have the following @@ -13098,9 +12980,7 @@ def get(self, table_name: str) -> MonitorInfo: return MonitorInfo.from_dict(res) def get_refresh(self, table_name: str, refresh_id: str) -> MonitorRefreshInfo: - """Get refresh. - - Gets info about a specific monitor refresh using the given refresh ID. + """Gets info about a specific monitor refresh using the given refresh ID. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: @@ -13127,9 +13007,7 @@ def get_refresh(self, table_name: str, refresh_id: str) -> MonitorRefreshInfo: return MonitorRefreshInfo.from_dict(res) def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse: - """List refreshes. - - Gets an array containing the history of the most recent refreshes (up to 25) for this table. + """Gets an array containing the history of the most recent refreshes (up to 25) for this table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: @@ -13154,9 +13032,7 @@ def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse: def regenerate_dashboard( self, table_name: str, *, warehouse_id: Optional[str] = None ) -> RegenerateDashboardResponse: - """Regenerate a monitoring dashboard. - - Regenerates the monitoring dashboard for the specified table. + """Regenerates the monitoring dashboard for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. 
have the following permissions: @@ -13188,9 +13064,7 @@ def regenerate_dashboard( return RegenerateDashboardResponse.from_dict(res) def run_refresh(self, table_name: str) -> MonitorRefreshInfo: - """Queue a metric refresh for a monitor. - - Queues a metric refresh on the monitor for the specified table. The refresh will execute in the + """Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -13229,9 +13103,7 @@ def update( snapshot: Optional[MonitorSnapshot] = None, time_series: Optional[MonitorTimeSeries] = None, ) -> MonitorInfo: - """Update a table monitor. - - Updates a monitor for the specified table. + """Updates a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: @@ -13345,9 +13217,7 @@ def create( comment: Optional[str] = None, storage_location: Optional[str] = None, ) -> RegisteredModelInfo: - """Create a Registered Model. - - Creates a new registered model in Unity Catalog. + """Creates a new registered model in Unity Catalog. File storage for model versions in the registered model will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. @@ -13390,9 +13260,7 @@ def create( return RegisteredModelInfo.from_dict(res) def delete(self, full_name: str): - """Delete a Registered Model. - - Deletes a registered model and all its model versions from the specified parent catalog and schema. + """Deletes a registered model and all its model versions from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the registered model. 
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the @@ -13409,9 +13277,7 @@ def delete(self, full_name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}", headers=headers) def delete_alias(self, full_name: str, alias: str): - """Delete a Registered Model Alias. - - Deletes a registered model alias. + """Deletes a registered model alias. The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the @@ -13432,9 +13298,7 @@ def delete_alias(self, full_name: str, alias: str): def get( self, full_name: str, *, include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None ) -> RegisteredModelInfo: - """Get a Registered Model. - - Get a registered model. + """Get a registered model. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** @@ -13472,9 +13336,7 @@ def list( page_token: Optional[str] = None, schema_name: Optional[str] = None, ) -> Iterator[RegisteredModelInfo]: - """List Registered Models. - - List registered models. You can list registered models under a particular schema, or list all + """List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. The returned models are filtered based on the privileges of the calling user. For example, the @@ -13539,9 +13401,7 @@ def list( query["page_token"] = json["next_page_token"] def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredModelAlias: - """Set a Registered Model Alias. - - Set an alias on the specified registered model. + """Set an alias on the specified registered model. 
The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the @@ -13577,9 +13437,7 @@ def update( new_name: Optional[str] = None, owner: Optional[str] = None, ) -> RegisteredModelInfo: - """Update a Registered Model. - - Updates the specified registered model. + """Updates the specified registered model. The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the @@ -13627,9 +13485,7 @@ def __init__(self, api_client): self._api = api_client def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: - """Get information for a single resource quota. - - The GetQuota API returns usage information for a single resource quota, defined as a child-parent + """The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. @@ -13657,9 +13513,7 @@ def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_nam def list_quotas( self, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[QuotaInfo]: - """List all resource quotas under a metastore. - - ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the + """ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. :param max_results: int (optional) @@ -13709,9 +13563,7 @@ def create( properties: Optional[Dict[str, str]] = None, storage_root: Optional[str] = None, ) -> SchemaInfo: - """Create a schema. 
- - Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the + """Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. :param name: str @@ -13747,9 +13599,7 @@ def create( return SchemaInfo.from_dict(res) def delete(self, full_name: str, *, force: Optional[bool] = None): - """Delete a schema. - - Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an + """Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog. :param full_name: str @@ -13770,9 +13620,7 @@ def delete(self, full_name: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/schemas/{full_name}", query=query, headers=headers) def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo: - """Get a schema. - - Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the + """Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. :param full_name: str @@ -13802,9 +13650,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[SchemaInfo]: - """List schemas. - - Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the + """Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. 
@@ -13857,9 +13703,7 @@ def update( owner: Optional[str] = None, properties: Optional[Dict[str, str]] = None, ) -> SchemaInfo: - """Update a schema. - - Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If + """Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on the parent catalog. @@ -13927,12 +13771,14 @@ def create( read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, ) -> StorageCredentialInfo: - """Create a storage credential. + """Creates a new storage credential. - Creates a new storage credential. + The caller must be a metastore admin or have the **CREATE_STORAGE_CREDENTIAL** privilege on the + metastore. :param name: str - The credential name. The name must be unique within the metastore. + The credential name. The name must be unique among storage and service credentials within the + metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -13946,7 +13792,8 @@ def create( :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional) The Databricks managed GCP service account configuration. :param read_only: bool (optional) - Whether the storage credential is only usable for read operations. + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. @@ -13980,15 +13827,14 @@ def create( return StorageCredentialInfo.from_dict(res) def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a credential. 
- - Deletes a storage credential from the metastore. The caller must be an owner of the storage + """Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. :param name: str Name of the storage credential. :param force: bool (optional) - Force deletion even if there are dependent external locations or external tables. + Force an update even if there are dependent external locations or external tables (when purpose is + **STORAGE**) or dependent services (when purpose is **SERVICE**). """ @@ -14003,9 +13849,7 @@ def delete(self, name: str, *, force: Optional[bool] = None): self._api.do("DELETE", f"/api/2.1/unity-catalog/storage-credentials/{name}", query=query, headers=headers) def get(self, name: str) -> StorageCredentialInfo: - """Get a credential. - - Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the + """Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. :param name: str @@ -14024,9 +13868,7 @@ def get(self, name: str) -> StorageCredentialInfo: def list( self, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[StorageCredentialInfo]: - """List credentials. - - Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to + """Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. 
@@ -14052,8 +13894,6 @@ def list( "Accept": "application/json", } - if "max_results" not in query: - query["max_results"] = 0 while True: json = self._api.do("GET", "/api/2.1/unity-catalog/storage-credentials", query=query, headers=headers) if "storage_credentials" in json: @@ -14080,9 +13920,10 @@ def update( read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, ) -> StorageCredentialInfo: - """Update a credential. + """Updates a storage credential on the metastore. - Updates a storage credential on the metastore. + The caller must be the owner of the storage credential or a metastore admin. If the caller is a + metastore admin, only the **owner** field can be changed. :param name: str Name of the storage credential. @@ -14101,12 +13942,14 @@ def update( :param force: bool (optional) Force update even if there are dependent external locations or external tables. :param isolation_mode: :class:`IsolationMode` (optional) + Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the storage credential. :param owner: str (optional) Username of current owner of credential. :param read_only: bool (optional) - Whether the storage credential is only usable for read operations. + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. @@ -14158,9 +14001,7 @@ def validate( storage_credential_name: Optional[str] = None, url: Optional[str] = None, ) -> ValidateStorageCredentialResponse: - """Validate a storage credential. - - Validates a storage credential. At least one of __external_location_name__ and __url__ need to be + """Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. 
And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. @@ -14185,7 +14026,7 @@ def validate( :param read_only: bool (optional) Whether the storage credential is only usable for read operations. :param storage_credential_name: str (optional) - The name of the storage credential to validate. + Required. The name of an existing credential or long-lived cloud credential to validate. :param url: str (optional) The external location url to validate. @@ -14227,9 +14068,7 @@ def __init__(self, api_client): self._api = api_client def disable(self, metastore_id: str, schema_name: str): - """Disable a system schema. - - Disables the system schema and removes it from the system catalog. The caller must be an account admin + """Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. :param metastore_id: str @@ -14249,9 +14088,7 @@ def disable(self, metastore_id: str, schema_name: str): ) def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[str] = None): - """Enable a system schema. - - Enables the system schema and adds it to the system catalog. The caller must be an account admin or a + """Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. :param metastore_id: str @@ -14281,9 +14118,7 @@ def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[ def list( self, metastore_id: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[SystemSchemaInfo]: - """List system schemas. - - Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore + """Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. 
:param metastore_id: str @@ -14336,9 +14171,7 @@ def __init__(self, api_client): self._api = api_client def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstraint: - """Create a table constraint. - - Creates a new table constraint. + """Creates a new table constraint. For the table constraint creation to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** @@ -14369,9 +14202,7 @@ def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstr return TableConstraint.from_dict(res) def delete(self, full_name: str, constraint_name: str, cascade: bool): - """Delete a table constraint. - - Deletes a table constraint. + """Deletes a table constraint. For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** @@ -14417,9 +14248,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, full_name: str): - """Delete a table. - - Deletes a table from the specified parent catalog and schema. The caller must be the owner of the + """Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. @@ -14437,9 +14266,7 @@ def delete(self, full_name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{full_name}", headers=headers) def exists(self, full_name: str) -> TableExistsResponse: - """Get boolean reflecting if table exists. - - Gets if a table exists in the metastore for a specific catalog and schema. 
The caller must satisfy one + """Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, @@ -14467,9 +14294,7 @@ def get( include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None, ) -> TableInfo: - """Get a table. - - Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the + """Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be @@ -14516,9 +14341,7 @@ def list( omit_username: Optional[bool] = None, page_token: Optional[str] = None, ) -> Iterator[TableInfo]: - """List tables. - - Gets an array of all tables for the current metastore under the parent catalog and schema. The caller + """Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific @@ -14599,9 +14422,7 @@ def list_summaries( schema_name_pattern: Optional[str] = None, table_name_pattern: Optional[str] = None, ) -> Iterator[TableSummary]: - """List table summaries. 
- - Gets an array of summaries for tables for a schema and catalog within the metastore. The table + """Gets an array of summaries for tables for a schema and catalog within the metastore. The table summaries returned are either: * summaries for tables (within the current metastore and parent catalog and schema), when the user is @@ -14661,9 +14482,7 @@ def list_summaries( query["page_token"] = json["next_page_token"] def update(self, full_name: str, *, owner: Optional[str] = None): - """Update a table owner. - - Change the owner of the table. The caller must be the owner of the parent catalog, have the + """Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. @@ -14705,9 +14524,7 @@ def __init__(self, api_client): def generate_temporary_table_credentials( self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None ) -> GenerateTemporaryTableCredentialResponse: - """Generate a temporary table credential. - - Get a short-lived credential for directly accessing the table data on cloud storage. The metastore + """Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have external_access_enabled flag set to true (default false). The caller must have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog owners. @@ -14755,9 +14572,7 @@ def create( comment: Optional[str] = None, storage_location: Optional[str] = None, ) -> VolumeInfo: - """Create a Volume. - - Creates a new volume. + """Creates a new volume. The user could create either an external volume or a managed volume. 
An external volume will be created in the specified external location, while a managed volume will be located in the default @@ -14814,9 +14629,7 @@ def create( return VolumeInfo.from_dict(res) def delete(self, name: str): - """Delete a Volume. - - Deletes a volume from the specified parent catalog and schema. + """Deletes a volume from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** @@ -14841,9 +14654,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[VolumeInfo]: - """List Volumes. - - Gets an array of volumes for the current metastore under the parent catalog and schema. + """Gets an array of volumes for the current metastore under the parent catalog and schema. The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the @@ -14904,9 +14715,7 @@ def list( query["page_token"] = json["next_page_token"] def read(self, name: str, *, include_browse: Optional[bool] = None) -> VolumeInfo: - """Get a Volume. - - Gets a volume from the metastore for a specific catalog and schema. + """Gets a volume from the metastore for a specific catalog and schema. The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege @@ -14934,9 +14743,7 @@ def read(self, name: str, *, include_browse: Optional[bool] = None) -> VolumeInf def update( self, name: str, *, comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None ) -> VolumeInfo: - """Update a Volume. - - Updates the specified volume under the specified parent catalog and schema. 
+ """Updates the specified volume under the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** @@ -14991,9 +14798,7 @@ def __init__(self, api_client): self._api = api_client def get(self, name: str) -> GetCatalogWorkspaceBindingsResponse: - """Get catalog workspace bindings. - - Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the + """Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. :param name: str @@ -15017,9 +14822,7 @@ def get_bindings( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[WorkspaceBinding]: - """Get securable workspace bindings. - - Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the + """Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. :param securable_type: str @@ -15068,9 +14871,7 @@ def update( assign_workspaces: Optional[List[int]] = None, unassign_workspaces: Optional[List[int]] = None, ) -> UpdateCatalogWorkspaceBindingsResponse: - """Update catalog workspace bindings. - - Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the + """Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. :param name: str @@ -15105,9 +14906,7 @@ def update_bindings( add: Optional[List[WorkspaceBinding]] = None, remove: Optional[List[WorkspaceBinding]] = None, ) -> UpdateWorkspaceBindingsResponse: - """Update securable workspace bindings. - - Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the + """Updates workspace bindings of the securable. 
The caller must be a metastore admin or an owner of the securable. :param securable_type: str diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 490f7711e..8dd2c1359 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -1222,9 +1222,7 @@ def __init__(self, api_client): self._api = api_client def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: - """Create an asset. - - Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC + """Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to access the asset. Typically, you should use a group as the clean room owner. @@ -1246,9 +1244,7 @@ def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: return CleanRoomAsset.from_dict(res) def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str): - """Delete an asset. - - Delete a clean room asset - unshare/remove the asset from the clean room + """Delete a clean room asset - unshare/remove the asset from the clean room :param clean_room_name: str Name of the clean room. @@ -1269,9 +1265,7 @@ def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name ) def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) -> CleanRoomAsset: - """Get an asset. - - Get the details of a clean room asset by its type and full name. + """Get the details of a clean room asset by its type and full name. :param clean_room_name: str Name of the clean room. 
@@ -1322,9 +1316,7 @@ def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Ite def update( self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset ) -> CleanRoomAsset: - """Update an asset. - - Update a clean room asset. For example, updating the content of a notebook; changing the shared + """Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. :param clean_room_name: str @@ -1373,9 +1365,7 @@ def list( page_size: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[CleanRoomNotebookTaskRun]: - """List notebook task runs. - - List all the historical notebook task runs in a clean room. + """List all the historical notebook task runs in a clean room. :param clean_room_name: str Name of the clean room. @@ -1419,9 +1409,7 @@ def __init__(self, api_client): self._api = api_client def create(self, clean_room: CleanRoom) -> CleanRoom: - """Create a clean room. - - Create a new clean room with the specified collaborators. This method is asynchronous; the returned + """Create a new clean room with the specified collaborators. This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean room will be usable @@ -1445,9 +1433,7 @@ def create(self, clean_room: CleanRoom) -> CleanRoom: def create_output_catalog( self, clean_room_name: str, output_catalog: CleanRoomOutputCatalog ) -> CreateCleanRoomOutputCatalogResponse: - """Create an output catalog. - - Create the output catalog of the clean room. + """Create the output catalog of the clean room. :param clean_room_name: str Name of the clean room. 
@@ -1467,9 +1453,7 @@ def create_output_catalog( return CreateCleanRoomOutputCatalogResponse.from_dict(res) def delete(self, name: str): - """Delete a clean room. - - Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other + """Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. @@ -1486,9 +1470,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.0/clean-rooms/{name}", headers=headers) def get(self, name: str) -> CleanRoom: - """Get a clean room. - - Get the details of a clean room given its name. + """Get the details of a clean room given its name. :param name: str @@ -1503,9 +1485,7 @@ def get(self, name: str) -> CleanRoom: return CleanRoom.from_dict(res) def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoom]: - """List clean rooms. - - Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are + """Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. :param page_size: int (optional) @@ -1535,9 +1515,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N query["page_token"] = json["next_page_token"] def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom: - """Update a clean room. - - Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** + """Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** privilege, or be metastore admin. When the caller is a metastore admin, only the __owner__ field can be updated. 
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 46d940a04..e04a09fca 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -9834,9 +9834,7 @@ def create( policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None, ) -> CreatePolicyResponse: - """Create a new policy. - - Creates a new policy with prescribed settings. + """Creates a new policy with prescribed settings. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -9894,9 +9892,7 @@ def create( return CreatePolicyResponse.from_dict(res) def delete(self, policy_id: str): - """Delete a cluster policy. - - Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. + """Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. :param policy_id: str The ID of the policy to delete. @@ -9925,9 +9921,7 @@ def edit( policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None, ): - """Update a cluster policy. - - Update an existing policy for cluster. This operation may make some clusters governed by the previous + """Update an existing policy for cluster. This operation may make some clusters governed by the previous policy invalid. :param policy_id: str @@ -9989,9 +9983,7 @@ def edit( self._api.do("POST", "/api/2.0/policies/clusters/edit", body=body, headers=headers) def get(self, policy_id: str) -> Policy: - """Get a cluster policy. - - Get a cluster policy entity. Creation and editing is available to admins only. + """Get a cluster policy entity. Creation and editing is available to admins only. :param policy_id: str Canonical unique identifier for the Cluster Policy. 
@@ -10010,9 +10002,7 @@ def get(self, policy_id: str) -> Policy: return Policy.from_dict(res) def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse: - """Get cluster policy permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param cluster_policy_id: str The cluster policy for which to get or manage permissions. @@ -10030,9 +10020,7 @@ def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermi return GetClusterPolicyPermissionLevelsResponse.from_dict(res) def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions: - """Get cluster policy permissions. - - Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root + """Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root object. :param cluster_policy_id: str @@ -10051,9 +10039,7 @@ def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions: def list( self, *, sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None ) -> Iterator[Policy]: - """List cluster policies. - - Returns a list of policies accessible by the requesting user. + """Returns a list of policies accessible by the requesting user. :param sort_column: :class:`ListSortColumn` (optional) The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy @@ -10081,9 +10067,7 @@ def list( def set_permissions( self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None ) -> ClusterPolicyPermissions: - """Set cluster policy permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. 
Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param cluster_policy_id: str @@ -10108,9 +10092,7 @@ def set_permissions( def update_permissions( self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None ) -> ClusterPolicyPermissions: - """Update cluster policy permissions. - - Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root + """Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root object. :param cluster_policy_id: str @@ -10226,9 +10208,7 @@ def wait_get_cluster_terminated( raise TimeoutError(f"timed out after {timeout}: {status_message}") def change_owner(self, cluster_id: str, owner_username: str): - """Change cluster owner. - - Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform + """Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. @@ -10285,9 +10265,7 @@ def create( use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, ) -> Wait[ClusterDetails]: - """Create new cluster. - - Creates a new Spark cluster. This method will acquire new instances from the cloud provider if + """Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to acquire some of the @@ -10615,9 +10593,7 @@ def create_and_wait( ).result(timeout=timeout) def delete(self, cluster_id: str) -> Wait[ClusterDetails]: - """Terminate cluster. 
- - Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the + """Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. @@ -10681,9 +10657,7 @@ def edit( use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, ) -> Wait[ClusterDetails]: - """Update cluster configuration. - - Updates the configuration of a cluster to match the provided attributes and size. A cluster can be + """Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes @@ -11018,9 +10992,7 @@ def events( page_token: Optional[str] = None, start_time: Optional[int] = None, ) -> Iterator[ClusterEvent]: - """List cluster activity events. - - Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more + """Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the parameters necessary to request the next page of events. :param cluster_id: str @@ -11088,9 +11060,7 @@ def events( body = json["next_page"] def get(self, cluster_id: str) -> ClusterDetails: - """Get cluster info. - - Retrieves the information for a cluster given its identifier. Clusters can be described while they are + """Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. 
:param cluster_id: str @@ -11110,9 +11080,7 @@ def get(self, cluster_id: str) -> ClusterDetails: return ClusterDetails.from_dict(res) def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: - """Get cluster permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param cluster_id: str The cluster for which to get or manage permissions. @@ -11128,9 +11096,7 @@ def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsRe return GetClusterPermissionLevelsResponse.from_dict(res) def get_permissions(self, cluster_id: str) -> ClusterPermissions: - """Get cluster permissions. - - Gets the permissions of a cluster. Clusters can inherit permissions from their root object. + """Gets the permissions of a cluster. Clusters can inherit permissions from their root object. :param cluster_id: str The cluster for which to get or manage permissions. @@ -11153,9 +11119,7 @@ def list( page_token: Optional[str] = None, sort_by: Optional[ListClustersSortBy] = None, ) -> Iterator[ClusterDetails]: - """List clusters. - - Return information about all pinned and active clusters, and all clusters terminated within the last + """Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. :param filter_by: :class:`ListClustersFilterBy` (optional) @@ -11195,9 +11159,7 @@ def list( query["page_token"] = json["next_page_token"] def list_node_types(self) -> ListNodeTypesResponse: - """List node types. - - Returns a list of supported Spark node types. These node types can be used to launch a cluster. + """Returns a list of supported Spark node types. These node types can be used to launch a cluster. 
:returns: :class:`ListNodeTypesResponse` """ @@ -11210,9 +11172,7 @@ def list_node_types(self) -> ListNodeTypesResponse: return ListNodeTypesResponse.from_dict(res) def list_zones(self) -> ListAvailableZonesResponse: - """List availability zones. - - Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These + """Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These zones can be used to launch a cluster. :returns: :class:`ListAvailableZonesResponse` @@ -11226,9 +11186,7 @@ def list_zones(self) -> ListAvailableZonesResponse: return ListAvailableZonesResponse.from_dict(res) def permanent_delete(self, cluster_id: str): - """Permanently delete cluster. - - Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously + """Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. In addition, users will no longer see permanently deleted clusters in the cluster list, and API users @@ -11250,9 +11208,7 @@ def permanent_delete(self, cluster_id: str): self._api.do("POST", "/api/2.1/clusters/permanent-delete", body=body, headers=headers) def pin(self, cluster_id: str): - """Pin cluster. - - Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a + """Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. :param cluster_id: str @@ -11272,9 +11228,7 @@ def pin(self, cluster_id: str): def resize( self, cluster_id: str, *, autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None ) -> Wait[ClusterDetails]: - """Resize cluster. - - Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a + """Resizes a cluster to have a desired number of workers. 
This will fail unless the cluster is in a `RUNNING` state. :param cluster_id: str @@ -11324,9 +11278,7 @@ def resize_and_wait( return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout) def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]: - """Restart cluster. - - Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, + """Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, nothing will happen. :param cluster_id: str @@ -11360,9 +11312,7 @@ def restart_and_wait( def set_permissions( self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None ) -> ClusterPermissions: - """Set cluster permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param cluster_id: str @@ -11383,9 +11333,7 @@ def set_permissions( return ClusterPermissions.from_dict(res) def spark_versions(self) -> GetSparkVersionsResponse: - """List available Spark versions. - - Returns the list of available Spark versions. These versions can be used to launch a cluster. + """Returns the list of available Spark versions. These versions can be used to launch a cluster. :returns: :class:`GetSparkVersionsResponse` """ @@ -11398,9 +11346,7 @@ def spark_versions(self) -> GetSparkVersionsResponse: return GetSparkVersionsResponse.from_dict(res) def start(self, cluster_id: str) -> Wait[ClusterDetails]: - """Start terminated cluster. - - Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except: + """Starts a terminated Spark cluster with the supplied ID. 
This works similar to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not currently in a ``TERMINATED`` state, nothing will @@ -11430,9 +11376,7 @@ def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> Clus return self.start(cluster_id=cluster_id).result(timeout=timeout) def unpin(self, cluster_id: str): - """Unpin cluster. - - Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. + """Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. @@ -11453,9 +11397,7 @@ def unpin(self, cluster_id: str): def update( self, cluster_id: str, update_mask: str, *, cluster: Optional[UpdateClusterResource] = None ) -> Wait[ClusterDetails]: - """Update cluster configuration (partial). - - Updates the configuration of a cluster to match the partial set of attributes and size. Denote which + """Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED` @@ -11516,9 +11458,7 @@ def update_and_wait( def update_permissions( self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None ) -> ClusterPermissions: - """Update cluster permissions. - - Updates the permissions on a cluster. Clusters can inherit permissions from their root object. + """Updates the permissions on a cluster. 
Clusters can inherit permissions from their root object. :param cluster_id: str The cluster for which to get or manage permissions. @@ -11657,9 +11597,7 @@ def wait_command_status_command_execution_finished_or_error( def cancel( self, *, cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None ) -> Wait[CommandStatusResponse]: - """Cancel a command. - - Cancels a currently running command within an execution context. + """Cancels a currently running command within an execution context. The command ID is obtained from a prior successful call to __execute__. @@ -11703,9 +11641,7 @@ def cancel_and_wait( return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout) def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse: - """Get command info. - - Gets the status of and, if available, the results from a currently executing command. + """Gets the status of and, if available, the results from a currently executing command. The command ID is obtained from a prior successful call to __execute__. @@ -11731,9 +11667,7 @@ def command_status(self, cluster_id: str, context_id: str, command_id: str) -> C return CommandStatusResponse.from_dict(res) def context_status(self, cluster_id: str, context_id: str) -> ContextStatusResponse: - """Get status. - - Gets the status for an execution context. + """Gets the status for an execution context. :param cluster_id: str :param context_id: str @@ -11756,9 +11690,7 @@ def context_status(self, cluster_id: str, context_id: str) -> ContextStatusRespo def create( self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None ) -> Wait[ContextStatusResponse]: - """Create an execution context. - - Creates an execution context for running cluster commands. + """Creates an execution context for running cluster commands. If successful, this method returns the ID of the new execution context. 
@@ -11794,9 +11726,7 @@ def create_and_wait( return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) def destroy(self, cluster_id: str, context_id: str): - """Delete an execution context. - - Deletes an execution context. + """Deletes an execution context. :param cluster_id: str :param context_id: str @@ -11823,9 +11753,7 @@ def execute( context_id: Optional[str] = None, language: Optional[Language] = None, ) -> Wait[CommandStatusResponse]: - """Run a command. - - Runs a cluster command in the given execution context, using the provided language. + """Runs a cluster command in the given execution context, using the provided language. If successful, it returns an ID for tracking the status of the command's execution. @@ -11893,9 +11821,7 @@ def __init__(self, api_client): def create( self, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None ) -> CreateResponse: - """Create init script. - - Creates a new global init script in this workspace. + """Creates a new global init script in this workspace. :param name: str The name of the script @@ -11934,9 +11860,7 @@ def create( return CreateResponse.from_dict(res) def delete(self, script_id: str): - """Delete init script. - - Deletes a global init script. + """Deletes a global init script. :param script_id: str The ID of the global init script. @@ -11951,9 +11875,7 @@ def delete(self, script_id: str): self._api.do("DELETE", f"/api/2.0/global-init-scripts/{script_id}", headers=headers) def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: - """Get an init script. - - Gets all the details of a script, including its Base64-encoded contents. + """Gets all the details of a script, including its Base64-encoded contents. :param script_id: str The ID of the global init script. 
@@ -11969,9 +11891,7 @@ def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: return GlobalInitScriptDetailsWithContent.from_dict(res) def list(self) -> Iterator[GlobalInitScriptDetails]: - """Get init scripts. - - Get a list of all global init scripts for this workspace. This returns all properties for each script + """Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. @@ -11989,9 +11909,7 @@ def list(self) -> Iterator[GlobalInitScriptDetails]: def update( self, script_id: str, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None ): - """Update init script. - - Updates a global init script, specifying only the fields to change. All fields are optional. + """Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. :param script_id: str @@ -12068,9 +11986,7 @@ def create( preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None, ) -> CreateInstancePoolResponse: - """Create a new instance pool. - - Creates a new instance pool using idle and ready-to-use cloud instances. + """Creates a new instance pool using idle and ready-to-use cloud instances. :param instance_pool_name: str Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 @@ -12157,9 +12073,7 @@ def create( return CreateInstancePoolResponse.from_dict(res) def delete(self, instance_pool_id: str): - """Delete an instance pool. - - Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. + """Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. 
:param instance_pool_id: str The instance pool to be terminated. @@ -12187,9 +12101,7 @@ def edit( max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, ): - """Edit an existing instance pool. - - Modifies the configuration of an existing instance pool. + """Modifies the configuration of an existing instance pool. :param instance_pool_id: str Instance pool ID @@ -12244,9 +12156,7 @@ def edit( self._api.do("POST", "/api/2.0/instance-pools/edit", body=body, headers=headers) def get(self, instance_pool_id: str) -> GetInstancePool: - """Get instance pool information. - - Retrieve the information for an instance pool based on its identifier. + """Retrieve the information for an instance pool based on its identifier. :param instance_pool_id: str The canonical unique identifier for the instance pool. @@ -12265,9 +12175,7 @@ def get(self, instance_pool_id: str) -> GetInstancePool: return GetInstancePool.from_dict(res) def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse: - """Get instance pool permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param instance_pool_id: str The instance pool for which to get or manage permissions. @@ -12285,9 +12193,7 @@ def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermiss return GetInstancePoolPermissionLevelsResponse.from_dict(res) def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: - """Get instance pool permissions. - - Gets the permissions of an instance pool. Instance pools can inherit permissions from their root + """Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. 
:param instance_pool_id: str @@ -12304,9 +12210,7 @@ def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: return InstancePoolPermissions.from_dict(res) def list(self) -> Iterator[InstancePoolAndStats]: - """List instance pool info. - - Gets a list of instance pools with their statistics. + """Gets a list of instance pools with their statistics. :returns: Iterator over :class:`InstancePoolAndStats` """ @@ -12322,9 +12226,7 @@ def list(self) -> Iterator[InstancePoolAndStats]: def set_permissions( self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None ) -> InstancePoolPermissions: - """Set instance pool permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param instance_pool_id: str @@ -12347,9 +12249,7 @@ def set_permissions( def update_permissions( self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None ) -> InstancePoolPermissions: - """Update instance pool permissions. - - Updates the permissions on an instance pool. Instance pools can inherit permissions from their root + """Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. :param instance_pool_id: str @@ -12391,9 +12291,7 @@ def add( is_meta_instance_profile: Optional[bool] = None, skip_validation: Optional[bool] = None, ): - """Register an instance profile. - - Registers an instance profile in Databricks. In the UI, you can then give users the permission to use + """Registers an instance profile in Databricks. In the UI, you can then give users the permission to use this instance profile when launching clusters. This API is only available to admin users. 
@@ -12445,9 +12343,7 @@ def edit( iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None, ): - """Edit an instance profile. - - The only supported field to change is the optional IAM role ARN associated with the instance profile. + """The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: * Your role name and instance profile name do not match. The name is the part after the last slash in @@ -12493,9 +12389,7 @@ def edit( self._api.do("POST", "/api/2.0/instance-profiles/edit", body=body, headers=headers) def list(self) -> Iterator[InstanceProfile]: - """List available instance profiles. - - List the instance profiles that the calling user can use to launch a cluster. + """List the instance profiles that the calling user can use to launch a cluster. This API is available to all users. @@ -12511,9 +12405,7 @@ def list(self) -> Iterator[InstanceProfile]: return parsed if parsed is not None else [] def remove(self, instance_profile_arn: str): - """Remove the instance profile. - - Remove the instance profile with the provided ARN. Existing clusters with this instance profile will + """Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. This API is only accessible to admin users. @@ -12553,9 +12445,7 @@ def __init__(self, api_client): self._api = api_client def all_cluster_statuses(self) -> Iterator[ClusterLibraryStatuses]: - """Get all statuses. - - Get the status of all libraries on all clusters. A status is returned for all libraries installed on + """Get the status of all libraries on all clusters. A status is returned for all libraries installed on this cluster via the API or the libraries UI. 
:returns: Iterator over :class:`ClusterLibraryStatuses` @@ -12570,9 +12460,7 @@ def all_cluster_statuses(self) -> Iterator[ClusterLibraryStatuses]: return parsed if parsed is not None else [] def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: - """Get status. - - Get the status of libraries on a cluster. A status is returned for all libraries installed on this + """Get the status of libraries on a cluster. A status is returned for all libraries installed on this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries set to be installed on this cluster, in the order that the libraries were added to the cluster, are returned first. 2. Libraries that were previously requested to be installed on this cluster or, but @@ -12596,9 +12484,7 @@ def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: return parsed if parsed is not None else [] def install(self, cluster_id: str, libraries: List[Library]): - """Add a library. - - Add libraries to install on a cluster. The installation is asynchronous; it happens in the background + """Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. :param cluster_id: str @@ -12621,9 +12507,7 @@ def install(self, cluster_id: str, libraries: List[Library]): self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) def uninstall(self, cluster_id: str, libraries: List[Library]): - """Uninstall libraries. - - Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is + """Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. 
:param cluster_id: str @@ -12662,9 +12546,7 @@ def __init__(self, api_client): def enforce_compliance( self, cluster_id: str, *, validate_only: Optional[bool] = None ) -> EnforceClusterComplianceResponse: - """Enforce cluster policy compliance. - - Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if + """Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes @@ -12698,9 +12580,7 @@ def enforce_compliance( return EnforceClusterComplianceResponse.from_dict(res) def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse: - """Get cluster policy compliance. - - Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy + """Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy was updated after the cluster was last edited. :param cluster_id: str @@ -12722,9 +12602,7 @@ def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse: def list_compliance( self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ClusterCompliance]: - """List cluster policy compliance. - - Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of + """Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. :param policy_id: str @@ -12775,9 +12653,7 @@ def __init__(self, api_client): self._api = api_client def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily: - """Get policy family information. 
- - Retrieve the information for an policy family based on its identifier and version + """Retrieve the information for a policy family based on its identifier and version :param policy_family_id: str The family ID about which to retrieve information. @@ -12798,9 +12674,7 @@ def get(self, policy_family_id: str, *, version: Optional[int] = None) -> Policy return PolicyFamily.from_dict(res) def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PolicyFamily]: - """List policy families. - - Returns the list of policy definition types available to use at their latest version. This API is + """Returns the list of policy definition types available to use at their latest version. This API is paginated. :param max_results: int (optional) diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index eb0bfbf16..44cb76800 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1758,9 +1758,7 @@ def wait_get_message_genie_completed( raise TimeoutError(f"timed out after {timeout}: {status_message}") def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: - """Create conversation message. - - Create new message in a [conversation](:method:genie/startconversation). The AI response uses all + """Create new message in a [conversation](:method:genie/startconversation). The AI response uses all previously created messages in the conversation to respond. :param space_id: str @@ -1806,9 +1804,7 @@ def create_message_and_wait( def execute_message_attachment_query( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: - """Execute message attachment SQL query. - - Execute the SQL for a message query attachment. Use this API when the query attachment has expired and + """Execute the SQL for a message query attachment. 
Use this API when the query attachment has expired and needs to be re-executed. :param space_id: str @@ -1837,9 +1833,7 @@ def execute_message_attachment_query( def execute_message_query( self, space_id: str, conversation_id: str, message_id: str ) -> GenieGetMessageQueryResultResponse: - """[Deprecated] Execute SQL query in a conversation message. - - Execute the SQL query in the message. + """Execute the SQL query in the message. :param space_id: str Genie space ID @@ -1865,9 +1859,7 @@ def execute_message_query( def generate_download_full_query_result( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGenerateDownloadFullQueryResultResponse: - """Generate full query result download. - - Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of + """Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of the download. The query result is stored in an external link and can be retrieved using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. @@ -1899,9 +1891,7 @@ def generate_download_full_query_result( def get_download_full_query_result( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str ) -> GenieGetDownloadFullQueryResultResponse: - """Get download full query result. - - After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and + """After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and successfully receiving a `download_id`, use this API to poll the download progress. When the download is complete, the API returns one or more external links to the query result files. 
Warning: Databricks strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. @@ -1936,9 +1926,7 @@ def get_download_full_query_result( return GenieGetDownloadFullQueryResultResponse.from_dict(res) def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: - """Get conversation message. - - Get message from conversation. + """Get message from conversation. :param space_id: str The ID associated with the Genie space where the target conversation is located. @@ -1964,9 +1952,7 @@ def get_message(self, space_id: str, conversation_id: str, message_id: str) -> G def get_message_attachment_query_result( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: - """Get message attachment SQL query result. - - Get the result of SQL query if the message has a query attachment. This is only available if a message + """Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. :param space_id: str @@ -1995,9 +1981,7 @@ def get_message_attachment_query_result( def get_message_query_result( self, space_id: str, conversation_id: str, message_id: str ) -> GenieGetMessageQueryResultResponse: - """[Deprecated] Get conversation message SQL query result. - - Get the result of SQL query if the message has a query attachment. This is only available if a message + """Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY`. 
:param space_id: str @@ -2024,9 +2008,7 @@ def get_message_query_result( def get_message_query_result_by_attachment( self, space_id: str, conversation_id: str, message_id: str, attachment_id: str ) -> GenieGetMessageQueryResultResponse: - """[Deprecated] Get conversation message SQL query result. - - Get the result of SQL query if the message has a query attachment. This is only available if a message + """Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. :param space_id: str @@ -2053,9 +2035,7 @@ def get_message_query_result_by_attachment( return GenieGetMessageQueryResultResponse.from_dict(res) def get_space(self, space_id: str) -> GenieSpace: - """Get Genie Space. - - Get details of a Genie Space. + """Get details of a Genie Space. :param space_id: str The ID associated with the Genie space @@ -2073,9 +2053,7 @@ def get_space(self, space_id: str) -> GenieSpace: def list_spaces( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> GenieListSpacesResponse: - """List Genie spaces. - - Get list of Genie Spaces. + """Get list of Genie Spaces. :param page_size: int (optional) Maximum number of spaces to return per page @@ -2098,9 +2076,7 @@ def list_spaces( return GenieListSpacesResponse.from_dict(res) def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: - """Start conversation. - - Start a new conversation. + """Start a new conversation. :param space_id: str The ID associated with the Genie space where you want to start a conversation. @@ -2142,9 +2118,7 @@ def __init__(self, api_client): self._api = api_client def create(self, dashboard: Dashboard) -> Dashboard: - """Create dashboard. - - Create a draft dashboard. + """Create a draft dashboard. 
:param dashboard: :class:`Dashboard` @@ -2263,9 +2237,7 @@ def delete_subscription( ) def get(self, dashboard_id: str) -> Dashboard: - """Get dashboard. - - Get a draft dashboard. + """Get a draft dashboard. :param dashboard_id: str UUID identifying the dashboard. @@ -2281,9 +2253,7 @@ def get(self, dashboard_id: str) -> Dashboard: return Dashboard.from_dict(res) def get_published(self, dashboard_id: str) -> PublishedDashboard: - """Get published dashboard. - - Get the current published dashboard. + """Get the current published dashboard. :param dashboard_id: str UUID identifying the published dashboard. @@ -2473,9 +2443,7 @@ def migrate( parent_path: Optional[str] = None, update_parameter_syntax: Optional[bool] = None, ) -> Dashboard: - """Migrate dashboard. - - Migrates a classic SQL dashboard to Lakeview. + """Migrates a classic SQL dashboard to Lakeview. :param source_dashboard_id: str UUID of the dashboard to be migrated. @@ -2509,9 +2477,7 @@ def migrate( def publish( self, dashboard_id: str, *, embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None ) -> PublishedDashboard: - """Publish dashboard. - - Publish the current draft dashboard. + """Publish the current draft dashboard. :param dashboard_id: str UUID identifying the dashboard to be published. @@ -2537,9 +2503,7 @@ def publish( return PublishedDashboard.from_dict(res) def trash(self, dashboard_id: str): - """Trash dashboard. - - Trash a dashboard. + """Trash a dashboard. :param dashboard_id: str UUID identifying the dashboard. @@ -2554,9 +2518,7 @@ def trash(self, dashboard_id: str): self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers) def unpublish(self, dashboard_id: str): - """Unpublish dashboard. - - Unpublish the dashboard. + """Unpublish the dashboard. :param dashboard_id: str UUID identifying the published dashboard. 
@@ -2571,9 +2533,7 @@ def unpublish(self, dashboard_id: str): self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers) def update(self, dashboard_id: str, dashboard: Dashboard) -> Dashboard: - """Update dashboard. - - Update a draft dashboard. + """Update a draft dashboard. :param dashboard_id: str UUID identifying the dashboard. @@ -2622,9 +2582,7 @@ def __init__(self, api_client): def get_published_dashboard_token_info( self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None ) -> GetPublishedDashboardTokenInfoResponse: - """Read an information of a published dashboard to mint an OAuth token. - - Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The + """Get the required authorization details and scopes of a published dashboard to mint an OAuth token. The `authorization_details` can be enriched to apply additional restriction. Example: Adding the following `authorization_details` object to downscope the viewer permission to diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index c9a9c0ced..186447595 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -74,11 +74,15 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog: @dataclass class DatabaseCredential: + expiration_time: Optional[str] = None + token: Optional[str] = None def as_dict(self) -> dict: """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body.""" body = {} + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time if self.token is not None: body["token"] = self.token return body @@ -86,6 +90,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes.""" body = {} + if self.expiration_time is not None: +
body["expiration_time"] = self.expiration_time if self.token is not None: body["token"] = self.token return body @@ -93,7 +99,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential: """Deserializes the DatabaseCredential from a dictionary.""" - return cls(token=d.get("token", None)) + return cls(expiration_time=d.get("expiration_time", None), token=d.get("token", None)) @dataclass @@ -113,6 +119,12 @@ class DatabaseInstance: creator: Optional[str] = None """The email of the creator of the instance.""" + effective_stopped: Optional[bool] = None + """xref AIP-129. `stopped` is owned by the client, while `effective_stopped` is owned by the + server. `stopped` will only be set in Create/Update response messages if and only if the user + provides the field via the request. `effective_stopped` on the other hand will always be set in + all response messages (Create/Update/Get/List).""" + pg_version: Optional[str] = None """The version of Postgres running on the instance.""" @@ -137,6 +149,8 @@ def as_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_stopped is not None: + body["effective_stopped"] = self.effective_stopped if self.name is not None: body["name"] = self.name if self.pg_version is not None: @@ -160,6 +174,8 @@ def as_shallow_dict(self) -> dict: body["creation_time"] = self.creation_time if self.creator is not None: body["creator"] = self.creator + if self.effective_stopped is not None: + body["effective_stopped"] = self.effective_stopped if self.name is not None: body["name"] = self.name if self.pg_version is not None: @@ -181,6 +197,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: capacity=d.get("capacity", None), creation_time=d.get("creation_time", None), creator=d.get("creator", None), + effective_stopped=d.get("effective_stopped", None), name=d.get("name", None),
pg_version=d.get("pg_version", None), read_write_dns=d.get("read_write_dns", None), @@ -227,9 +244,6 @@ class DatabaseTable: postgres database. Note that this has implications for the `create_database_objects_is_missing` field in `spec`.""" - table_serving_url: Optional[str] = None - """Data serving REST API URL for this table""" - def as_dict(self) -> dict: """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} @@ -239,8 +253,6 @@ def as_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url return body def as_shallow_dict(self) -> dict: @@ -252,8 +264,6 @@ def as_shallow_dict(self) -> dict: body["logical_database_name"] = self.logical_database_name if self.name is not None: body["name"] = self.name - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url return body @classmethod @@ -263,7 +273,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable: database_instance_name=d.get("database_instance_name", None), logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), - table_serving_url=d.get("table_serving_url", None), ) @@ -487,9 +496,6 @@ class SyncedDatabaseTable: spec: Optional[SyncedTableSpec] = None """Specification of a synced database table.""" - table_serving_url: Optional[str] = None - """Data serving REST API URL for this table""" - unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline @@ -508,8 +514,6 @@ def as_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec.as_dict() - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body @@ -527,8 +531,6 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.spec: body["spec"] = self.spec - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url if self.unity_catalog_provisioning_state is not None: body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @@ -542,7 +544,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: logical_database_name=d.get("logical_database_name", None), name=d.get("name", None), spec=_from_dict(d, "spec", SyncedTableSpec), - table_serving_url=d.get("table_serving_url", None), unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), ) @@ -744,11 +745,14 @@ class SyncedTableSpec: """If true, the synced table's logical database and schema resources in PG will be created if they do not already exist.""" - new_pipeline_spec: Optional[NewPipelineSpec] = None - """Spec of new pipeline. Should be empty if pipeline_id is set""" + existing_pipeline_id: Optional[str] = None + """User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be + empty if new_pipeline_spec is set. This field will only be set by the server in response + messages if it is specified in the request. The SyncedTableStatus message will always contain + the effective pipeline ID (either client provided or server generated), however.""" - pipeline_id: Optional[str] = None - """ID of the associated pipeline. 
Should be empty if new_pipeline_spec is set""" + new_pipeline_spec: Optional[NewPipelineSpec] = None + """Spec of new pipeline. Should be empty if pipeline_id / existing_pipeline_id is set""" primary_key_columns: Optional[List[str]] = None """Primary Key columns to be used for data insert/update in the destination.""" @@ -767,10 +771,10 @@ def as_dict(self) -> dict: body = {} if self.create_database_objects_if_missing is not None: body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.existing_pipeline_id is not None: + body["existing_pipeline_id"] = self.existing_pipeline_id if self.new_pipeline_spec: body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict() - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id if self.primary_key_columns: body["primary_key_columns"] = [v for v in self.primary_key_columns] if self.scheduling_policy is not None: @@ -786,10 +790,10 @@ def as_shallow_dict(self) -> dict: body = {} if self.create_database_objects_if_missing is not None: body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.existing_pipeline_id is not None: + body["existing_pipeline_id"] = self.existing_pipeline_id if self.new_pipeline_spec: body["new_pipeline_spec"] = self.new_pipeline_spec - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id if self.primary_key_columns: body["primary_key_columns"] = self.primary_key_columns if self.scheduling_policy is not None: @@ -805,8 +809,8 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: """Deserializes the SyncedTableSpec from a dictionary.""" return cls( create_database_objects_if_missing=d.get("create_database_objects_if_missing", None), + existing_pipeline_id=d.get("existing_pipeline_id", None), new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec), - pipeline_id=d.get("pipeline_id", None), primary_key_columns=d.get("primary_key_columns", None), 
scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy), source_table_full_name=d.get("source_table_full_name", None), @@ -848,6 +852,10 @@ class SyncedTableStatus: message: Optional[str] = None """A text description of the current state of the synced table.""" + pipeline_id: Optional[str] = None + """ID of the associated pipeline. The pipeline ID may have been provided by the client (in the case + of bin packing), or generated by the server (when creating a new pipeline).""" + provisioning_status: Optional[SyncedTableProvisioningStatus] = None """Detailed status of a synced table. Shown if the synced table is in the PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" @@ -867,6 +875,8 @@ def as_dict(self) -> dict: body["failed_status"] = self.failed_status.as_dict() if self.message is not None: body["message"] = self.message + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id if self.provisioning_status: body["provisioning_status"] = self.provisioning_status.as_dict() if self.triggered_update_status: @@ -884,6 +894,8 @@ def as_shallow_dict(self) -> dict: body["failed_status"] = self.failed_status if self.message is not None: body["message"] = self.message + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id if self.provisioning_status: body["provisioning_status"] = self.provisioning_status if self.triggered_update_status: @@ -898,6 +910,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus: detailed_state=_enum(d, "detailed_state", SyncedTableState), failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus), message=d.get("message", None), + pipeline_id=d.get("pipeline_id", None), provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus), triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus), ) @@ -1047,10 +1060,12 @@ def delete_database_instance(self, name: str, *, force: 
Optional[bool] = None, p By default, a instance cannot be deleted if it has descendant instances created via PITR. If this flag is specified as true, all descendent instances will be deleted as well. :param purge: bool (optional) - If false, the database instance is soft deleted. Soft deleted instances behave as if they are - deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by - calling the undelete API for a limited time. If true, the database instance is hard deleted and - cannot be undeleted. + Note purge=false is in development. If false, the database instance is soft deleted (implementation + pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD + operations nor connected to. However they can be undeleted by calling the undelete API for a limited + time (implementation pending). If true, the database instance is hard deleted and cannot be + undeleted. For the time being, setting this value to true is required to delete an instance (soft + delete is not yet supported). """ diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index a50e83a30..f912a510f 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -739,9 +739,7 @@ def __init__(self, api_client): self._api = api_client def add_block(self, handle: int, data: str): - """Append data block. - - Appends a block of data to the stream specified by the input handle. If the handle does not exist, + """Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``. @@ -766,9 +764,7 @@ def add_block(self, handle: int, data: str): self._api.do("POST", "/api/2.0/dbfs/add-block", body=body, headers=headers) def close(self, handle: int): - """Close the stream. 
- - Closes the stream specified by the input handle. If the handle does not exist, this call throws an + """Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. :param handle: int @@ -787,9 +783,7 @@ def close(self, handle: int): self._api.do("POST", "/api/2.0/dbfs/close", body=body, headers=headers) def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateResponse: - """Open a stream. - - Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle + """Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. @@ -819,9 +813,7 @@ def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateRespon return CreateResponse.from_dict(res) def delete(self, path: str, *, recursive: Optional[bool] = None): - """Delete a file/directory. - - Delete the file or directory (optionally recursively delete all files in the directory). This call + """Delete the file or directory (optionally recursively delete all files in the directory). This call throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to `false` or on other similar errors. @@ -857,9 +849,7 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): self._api.do("POST", "/api/2.0/dbfs/delete", body=body, headers=headers) def get_status(self, path: str) -> FileInfo: - """Get the information of a file or directory. - - Gets the file information for a file or directory. If the file or directory does not exist, this call + """Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. 
:param path: str @@ -879,9 +869,7 @@ def get_status(self, path: str) -> FileInfo: return FileInfo.from_dict(res) def list(self, path: str) -> Iterator[FileInfo]: - """List directory contents or file details. - - List the contents of a directory, or details of the file. If the file or directory does not exist, + """List the contents of a directory, or details of the file. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. When calling list on a large directory, the list operation will time out after approximately 60 @@ -909,9 +897,7 @@ def list(self, path: str) -> Iterator[FileInfo]: return parsed if parsed is not None else [] def mkdirs(self, path: str): - """Create a directory. - - Creates the given directory and necessary parent directories if they do not exist. If a file (not a + """Creates the given directory and necessary parent directories if they do not exist. If a file (not a directory) exists at any prefix of the input path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some of the necessary parent directories. @@ -932,9 +918,7 @@ def mkdirs(self, path: str): self._api.do("POST", "/api/2.0/dbfs/mkdirs", body=body, headers=headers) def move(self, source_path: str, destination_path: str): - """Move a file. - - Moves a file from one location to another location within DBFS. If the source file does not exist, + """Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source path is a directory, this call always recursively moves all files. 
@@ -959,9 +943,7 @@ def move(self, source_path: str, destination_path: str): self._api.do("POST", "/api/2.0/dbfs/move", body=body, headers=headers) def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[bool] = None): - """Upload a file. - - Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but + """Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. Alternatively you can pass contents as base64 string. @@ -996,9 +978,7 @@ def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[ self._api.do("POST", "/api/2.0/dbfs/put", body=body, headers=headers) def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse: - """Get the contents of a file. - - Returns the contents of a file. If the file does not exist, this call throws an exception with + """Returns the contents of a file. If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. @@ -1057,9 +1037,7 @@ def __init__(self, api_client): self._api = api_client def create_directory(self, directory_path: str): - """Create a directory. - - Creates an empty directory. If necessary, also creates any parent directories of the new, empty + """Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). 
@@ -1076,9 +1054,7 @@ def create_directory(self, directory_path: str): ) def delete(self, file_path: str): - """Delete a file. - - Deletes a file. If the request is successful, there is no response body. + """Deletes a file. If the request is successful, there is no response body. :param file_path: str The absolute path of the file. @@ -1091,9 +1067,7 @@ def delete(self, file_path: str): self._api.do("DELETE", f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", headers=headers) def delete_directory(self, directory_path: str): - """Delete a directory. - - Deletes an empty directory. + """Deletes an empty directory. To delete a non-empty directory, first delete all of its contents. This can be done by listing the directory contents and deleting each file and subdirectory recursively. @@ -1111,9 +1085,7 @@ def delete_directory(self, directory_path: str): ) def download(self, file_path: str) -> DownloadResponse: - """Download a file. - - Downloads a file. The file contents are the response body. This is a standard HTTP file download, not + """Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. :param file_path: str @@ -1140,9 +1112,7 @@ def download(self, file_path: str) -> DownloadResponse: return DownloadResponse.from_dict(res) def get_directory_metadata(self, directory_path: str): - """Get directory metadata. - - Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response + """Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response body. This method is useful to check if a directory exists and the caller has access to it. @@ -1163,9 +1133,7 @@ def get_directory_metadata(self, directory_path: str): ) def get_metadata(self, file_path: str) -> GetMetadataResponse: - """Get file metadata. - - Get the metadata of a file. 
The response HTTP headers contain the metadata. There is no response body. + """Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. :param file_path: str The absolute path of the file. @@ -1190,9 +1158,7 @@ def get_metadata(self, file_path: str) -> GetMetadataResponse: def list_directory_contents( self, directory_path: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[DirectoryEntry]: - """List directory contents. - - Returns the contents of a directory. If there is no directory at the specified path, the API returns a + """Returns the contents of a directory. If there is no directory at the specified path, the API returns a HTTP 404 error. :param directory_path: str @@ -1242,9 +1208,7 @@ def list_directory_contents( query["page_token"] = json["next_page_token"] def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None): - """Upload a file. - - Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an + """Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there is no response body. diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 3b43f74b0..a0f21da6e 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -2163,9 +2163,7 @@ def __init__(self, api_client): self._api = api_client def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: - """Get assignable roles for a resource. - - Gets all the roles that can be granted on an account level resource. A role is grantable if the rule + """Gets all the roles that can be granted on an account level resource. 
A role is grantable if the rule set on the resource can contain an access rule of the role. :param resource: str @@ -2195,9 +2193,7 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles return GetAssignableRolesForResourceResponse.from_dict(res) def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: - """Get a rule set. - - Get a rule set by its name. A rule set is always attached to a resource and contains a list of access + """Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. :param name: str @@ -2241,9 +2237,7 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: return RuleSetResponse.from_dict(res) def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: - """Update a rule set. - - Replace the rules of a rule set. First, use get to read the current version of the rule set before + """Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. :param name: str @@ -2280,9 +2274,7 @@ def __init__(self, api_client): self._api = api_client def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: - """Get assignable roles for a resource. - - Gets all the roles that can be granted on an account level resource. A role is grantable if the rule + """Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. :param resource: str @@ -2309,9 +2301,7 @@ def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRoles return GetAssignableRolesForResourceResponse.from_dict(res) def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: - """Get a rule set. 
- - Get a rule set by its name. A rule set is always attached to a resource and contains a list of access + """Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. :param name: str @@ -2350,9 +2340,7 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: return RuleSetResponse.from_dict(res) def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: - """Update a rule set. - - Replace the rules of a rule set. First, use get to read the current version of the rule set before + """Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. :param name: str @@ -2399,9 +2387,7 @@ def create( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None, ) -> Group: - """Create a new group. - - Creates a group in the Databricks account with a unique name, using the supplied group details. + """Creates a group in the Databricks account with a unique name, using the supplied group details. :param display_name: str (optional) String that represents a human-readable group name @@ -2454,9 +2440,7 @@ def create( return Group.from_dict(res) def delete(self, id: str): - """Delete a group. - - Deletes a group from the Databricks account. + """Deletes a group from the Databricks account. :param id: str Unique ID for a group in the Databricks account. @@ -2469,9 +2453,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) def get(self, id: str) -> Group: - """Get group details. - - Gets the information for a specific group in the Databricks account. + """Gets the information for a specific group in the Databricks account. 
:param id: str Unique ID for a group in the Databricks account. @@ -2497,9 +2479,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[Group]: - """List group details. - - Gets all details of the groups associated with the Databricks account. + """Gets all details of the groups associated with the Databricks account. :param attributes: str (optional) Comma-separated list of attributes to return in response. @@ -2564,9 +2544,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update group details. - - Partially updates the details of a group. + """Partially updates the details of a group. :param id: str Unique ID in the Databricks workspace. @@ -2602,9 +2580,7 @@ def update( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None, ): - """Replace a group. - - Updates the details of a group by replacing the entire group entity. + """Updates the details of a group by replacing the entire group entity. :param id: str Databricks group ID @@ -2674,9 +2650,7 @@ def create( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None, ) -> ServicePrincipal: - """Create a service principal. - - Creates a new service principal in the Databricks account. + """Creates a new service principal in the Databricks account. :param active: bool (optional) If this user is active @@ -2730,9 +2704,7 @@ def create( return ServicePrincipal.from_dict(res) def delete(self, id: str): - """Delete a service principal. - - Delete a single service principal in the Databricks account. + """Delete a single service principal in the Databricks account. :param id: str Unique ID for a service principal in the Databricks account. 
@@ -2747,9 +2719,7 @@ def delete(self, id: str): ) def get(self, id: str) -> ServicePrincipal: - """Get service principal details. - - Gets the details for a single service principal define in the Databricks account. + """Gets the details for a single service principal defined in the Databricks account. :param id: str Unique ID for a service principal in the Databricks account. @@ -2777,9 +2747,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[ServicePrincipal]: - """List service principals. - - Gets the set of service principals associated with a Databricks account. + """Gets the set of service principals associated with a Databricks account. :param attributes: str (optional) Comma-separated list of attributes to return in response. @@ -2847,9 +2815,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update service principal details. - - Partially updates the details of a single service principal in the Databricks account. + """Partially updates the details of a single service principal in the Databricks account. :param id: str Unique ID in the Databricks workspace. @@ -2888,9 +2854,7 @@ def update( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None, ): - """Replace service principal. - - Updates the details of a single service principal. + """Updates the details of a single service principal. This action replaces the existing service principal with the same name. @@ -2974,9 +2938,7 @@ def create( schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None, ) -> User: - """Create a new user. - - Creates a new user in the Databricks account. This new user will also be added to the Databricks + """Creates a new user in the Databricks account. This new user will also be added to the Databricks account.
:param active: bool (optional) @@ -3042,9 +3004,7 @@ def create( return User.from_dict(res) def delete(self, id: str): - """Delete a user. - - Deletes a user. Deleting a user from a Databricks account also removes objects associated with the + """Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. :param id: str @@ -3069,9 +3029,7 @@ def get( sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None, ) -> User: - """Get user details. - - Gets information for a specific user in Databricks account. + """Gets information for a specific user in Databricks account. :param id: str Unique ID for a user in the Databricks account. @@ -3134,9 +3092,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[User]: - """List users. - - Gets details for all the users associated with a Databricks account. + """Gets details for all the users associated with a Databricks account. :param attributes: str (optional) Comma-separated list of attributes to return in response. @@ -3202,9 +3158,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update user details. - - Partially updates a user resource by applying the supplied operations on specific user attributes. + """Partially updates a user resource by applying the supplied operations on specific user attributes. :param id: str Unique ID in the Databricks workspace. @@ -3242,9 +3196,7 @@ def update( schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None, ): - """Replace a user. - - Replaces a user's information with the data supplied in request. + """Replaces a user's information with the data supplied in request. :param id: str Databricks user ID. 
@@ -3310,9 +3262,7 @@ def __init__(self, api_client): self._api = api_client def me(self) -> User: - """Get current user info. - - Get details about the current method caller's identity. + """Get details about the current method caller's identity. :returns: :class:`User` """ @@ -3349,9 +3299,7 @@ def create( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None, ) -> Group: - """Create a new group. - - Creates a group in the Databricks workspace with a unique name, using the supplied group details. + """Creates a group in the Databricks workspace with a unique name, using the supplied group details. :param display_name: str (optional) String that represents a human-readable group name @@ -3402,9 +3350,7 @@ def create( return Group.from_dict(res) def delete(self, id: str): - """Delete a group. - - Deletes a group from the Databricks workspace. + """Deletes a group from the Databricks workspace. :param id: str Unique ID for a group in the Databricks workspace. @@ -3417,9 +3363,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) def get(self, id: str) -> Group: - """Get group details. - - Gets the information for a specific group in the Databricks workspace. + """Gets the information for a specific group in the Databricks workspace. :param id: str Unique ID for a group in the Databricks workspace. @@ -3445,9 +3389,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[Group]: - """List group details. - - Gets all details of the groups associated with the Databricks workspace. + """Gets all details of the groups associated with the Databricks workspace. :param attributes: str (optional) Comma-separated list of attributes to return in response. 
@@ -3510,9 +3452,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update group details. - - Partially updates the details of a group. + """Partially updates the details of a group. :param id: str Unique ID in the Databricks workspace. @@ -3546,9 +3486,7 @@ def update( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None, ): - """Replace a group. - - Updates the details of a group by replacing the entire group entity. + """Updates the details of a group by replacing the entire group entity. :param id: str Databricks group ID @@ -3668,9 +3606,7 @@ def __init__(self, api_client): self._api = api_client def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: - """Get object permissions. - - Gets the permissions of an object. Objects can inherit permissions from their parent objects or root + """Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. :param request_object_type: str @@ -3691,9 +3627,7 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss return ObjectPermissions.from_dict(res) def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: - """Get object permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, @@ -3720,9 +3654,7 @@ def set( *, access_control_list: Optional[List[AccessControlRequest]] = None, ) -> ObjectPermissions: - """Set object permissions. - - Sets permissions on an object, replacing existing permissions if they exist. 
Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. @@ -3756,9 +3688,7 @@ def update( *, access_control_list: Optional[List[AccessControlRequest]] = None, ) -> ObjectPermissions: - """Update object permissions. - - Updates the permissions on an object. Objects can inherit permissions from their parent objects or + """Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. :param request_object_type: str @@ -3808,9 +3738,7 @@ def create( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None, ) -> ServicePrincipal: - """Create a service principal. - - Creates a new service principal in the Databricks workspace. + """Creates a new service principal in the Databricks workspace. :param active: bool (optional) If this user is active @@ -3862,9 +3790,7 @@ def create( return ServicePrincipal.from_dict(res) def delete(self, id: str): - """Delete a service principal. - - Delete a single service principal in the Databricks workspace. + """Delete a single service principal in the Databricks workspace. :param id: str Unique ID for a service principal in the Databricks workspace. @@ -3877,9 +3803,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) def get(self, id: str) -> ServicePrincipal: - """Get service principal details. - - Gets the details for a single service principal define in the Databricks workspace. + """Gets the details for a single service principal define in the Databricks workspace. :param id: str Unique ID for a service principal in the Databricks workspace. 
@@ -3905,9 +3829,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[ServicePrincipal]: - """List service principals. - - Gets the set of service principals associated with a Databricks workspace. + """Gets the set of service principals associated with a Databricks workspace. :param attributes: str (optional) Comma-separated list of attributes to return in response. @@ -3970,9 +3892,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update service principal details. - - Partially updates the details of a single service principal in the Databricks workspace. + """Partially updates the details of a single service principal in the Databricks workspace. :param id: str Unique ID in the Databricks workspace. @@ -4006,9 +3926,7 @@ def update( roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None, ): - """Replace service principal. - - Updates the details of a single service principal. + """Updates the details of a single service principal. This action replaces the existing service principal with the same name. @@ -4087,9 +4005,7 @@ def create( schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None, ) -> User: - """Create a new user. - - Creates a new user in the Databricks workspace. This new user will also be added to the Databricks + """Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. :param active: bool (optional) @@ -4153,9 +4069,7 @@ def create( return User.from_dict(res) def delete(self, id: str): - """Delete a user. - - Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the + """Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. 
:param id: str @@ -4180,9 +4094,7 @@ def get( sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None, ) -> User: - """Get user details. - - Gets information for a specific user in Databricks workspace. + """Gets information for a specific user in Databricks workspace. :param id: str Unique ID for a user in the Databricks workspace. @@ -4233,9 +4145,7 @@ def get( return User.from_dict(res) def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: - """Get password permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :returns: :class:`GetPasswordPermissionLevelsResponse` """ @@ -4248,9 +4158,7 @@ def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: return GetPasswordPermissionLevelsResponse.from_dict(res) def get_permissions(self) -> PasswordPermissions: - """Get password permissions. - - Gets the permissions of all passwords. Passwords can inherit permissions from their root object. + """Gets the permissions of all passwords. Passwords can inherit permissions from their root object. :returns: :class:`PasswordPermissions` """ @@ -4273,9 +4181,7 @@ def list( sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None, ) -> Iterator[User]: - """List users. - - Gets details for all the users associated with a Databricks workspace. + """Gets details for all the users associated with a Databricks workspace. :param attributes: str (optional) Comma-separated list of attributes to return in response. @@ -4339,9 +4245,7 @@ def list( query["startIndex"] += len(json["Resources"]) def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update user details. - - Partially updates a user resource by applying the supplied operations on specific user attributes. 
+ """Partially updates a user resource by applying the supplied operations on specific user attributes. :param id: str Unique ID in the Databricks workspace. @@ -4365,9 +4269,7 @@ def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: O def set_permissions( self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None ) -> PasswordPermissions: - """Set password permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) @@ -4400,9 +4302,7 @@ def update( schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None, ): - """Replace a user. - - Replaces a user's information with the data supplied in request. + """Replaces a user's information with the data supplied in request. :param id: str Databricks user ID. @@ -4463,9 +4363,7 @@ def update( def update_permissions( self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None ) -> PasswordPermissions: - """Update password permissions. - - Updates the permissions on all passwords. Passwords can inherit permissions from their root object. + """Updates the permissions on all passwords. Passwords can inherit permissions from their root object. :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) @@ -4491,9 +4389,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, workspace_id: int, principal_id: int): - """Delete permissions assignment. - - Deletes the workspace permissions assignment in a given account and workspace for the specified + """Deletes the workspace permissions assignment in a given account and workspace for the specified principal. 
:param workspace_id: int @@ -4515,9 +4411,7 @@ def delete(self, workspace_id: int, principal_id: int): ) def get(self, workspace_id: int) -> WorkspacePermissions: - """List workspace permissions. - - Get an array of workspace permissions for the specified account and workspace. + """Get an array of workspace permissions for the specified account and workspace. :param workspace_id: int The workspace ID. @@ -4537,9 +4431,7 @@ def get(self, workspace_id: int) -> WorkspacePermissions: return WorkspacePermissions.from_dict(res) def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: - """Get permission assignments. - - Get the permission assignments for the specified Databricks account and Databricks workspace. + """Get the permission assignments for the specified Databricks account and Databricks workspace. :param workspace_id: int The workspace ID for the account. @@ -4562,9 +4454,7 @@ def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: def update( self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None ) -> PermissionAssignment: - """Create or update permissions assignment. - - Creates or updates the workspace permissions assignment in a given account and workspace for the + """Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. 
:param workspace_id: int diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 3d3635e2a..37099ef24 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -1399,7 +1399,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DashboardTaskOutput: @dataclass class DbtCloudJobRunStep: - """Format of response retrieved from dbt Cloud, for inclusion in output""" + """Format of response retrieved from dbt Cloud, for inclusion in output Deprecated in favor of + DbtPlatformJobRunStep""" index: Optional[int] = None """Orders the steps in the job""" @@ -1410,7 +1411,7 @@ class DbtCloudJobRunStep: name: Optional[str] = None """Name of the step in the job""" - status: Optional[DbtCloudRunStatus] = None + status: Optional[DbtPlatformRunStatus] = None """State of the step""" def as_dict(self) -> dict: @@ -1446,23 +1447,14 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtCloudJobRunStep: index=d.get("index", None), logs=d.get("logs", None), name=d.get("name", None), - status=_enum(d, "status", DbtCloudRunStatus), + status=_enum(d, "status", DbtPlatformRunStatus), ) -class DbtCloudRunStatus(Enum): - """Response enumeration from calling the dbt Cloud API, for inclusion in output""" - - CANCELLED = "CANCELLED" - ERROR = "ERROR" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - STARTING = "STARTING" - SUCCESS = "SUCCESS" - - @dataclass class DbtCloudTask: + """Deprecated in favor of DbtPlatformTask""" + connection_resource_name: Optional[str] = None """The resource name of the UC connection that authenticates the dbt Cloud for this task""" @@ -1498,6 +1490,8 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTask: @dataclass class DbtCloudTaskOutput: + """Deprecated in favor of DbtPlatformTaskOutput""" + dbt_cloud_job_run_id: Optional[int] = None """Id of the job run in dbt Cloud""" @@ -1572,6 +1566,176 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtOutput: return cls(artifacts_headers=d.get("artifacts_headers", None), 
artifacts_link=d.get("artifacts_link", None)) +@dataclass +class DbtPlatformJobRunStep: + """Format of response retrieved from dbt platform, for inclusion in output""" + + index: Optional[int] = None + """Orders the steps in the job""" + + logs: Optional[str] = None + """Output of the step""" + + logs_truncated: Optional[bool] = None + """Whether the logs of this step have been truncated. If true, the logs has been truncated to 10000 + characters.""" + + name: Optional[str] = None + """Name of the step in the job""" + + name_truncated: Optional[bool] = None + """Whether the name of the job has been truncated. If true, the name has been truncated to 100 + characters.""" + + status: Optional[DbtPlatformRunStatus] = None + """State of the step""" + + def as_dict(self) -> dict: + """Serializes the DbtPlatformJobRunStep into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.index is not None: + body["index"] = self.index + if self.logs is not None: + body["logs"] = self.logs + if self.logs_truncated is not None: + body["logs_truncated"] = self.logs_truncated + if self.name is not None: + body["name"] = self.name + if self.name_truncated is not None: + body["name_truncated"] = self.name_truncated + if self.status is not None: + body["status"] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtPlatformJobRunStep into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index is not None: + body["index"] = self.index + if self.logs is not None: + body["logs"] = self.logs + if self.logs_truncated is not None: + body["logs_truncated"] = self.logs_truncated + if self.name is not None: + body["name"] = self.name + if self.name_truncated is not None: + body["name_truncated"] = self.name_truncated + if self.status is not None: + body["status"] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformJobRunStep: + """Deserializes the 
DbtPlatformJobRunStep from a dictionary.""" + return cls( + index=d.get("index", None), + logs=d.get("logs", None), + logs_truncated=d.get("logs_truncated", None), + name=d.get("name", None), + name_truncated=d.get("name_truncated", None), + status=_enum(d, "status", DbtPlatformRunStatus), + ) + + +class DbtPlatformRunStatus(Enum): + """Response enumeration from calling the dbt platform API, for inclusion in output""" + + CANCELLED = "CANCELLED" + ERROR = "ERROR" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + STARTING = "STARTING" + SUCCESS = "SUCCESS" + + +@dataclass +class DbtPlatformTask: + connection_resource_name: Optional[str] = None + """The resource name of the UC connection that authenticates the dbt platform for this task""" + + dbt_platform_job_id: Optional[str] = None + """Id of the dbt platform job to be triggered. Specified as a string for maximum compatibility with + clients.""" + + def as_dict(self) -> dict: + """Serializes the DbtPlatformTask into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.dbt_platform_job_id is not None: + body["dbt_platform_job_id"] = self.dbt_platform_job_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtPlatformTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.dbt_platform_job_id is not None: + body["dbt_platform_job_id"] = self.dbt_platform_job_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformTask: + """Deserializes the DbtPlatformTask from a dictionary.""" + return cls( + connection_resource_name=d.get("connection_resource_name", None), + dbt_platform_job_id=d.get("dbt_platform_job_id", None), + ) + + +@dataclass +class DbtPlatformTaskOutput: + 
dbt_platform_job_run_id: Optional[str] = None + """Id of the job run in dbt platform. Specified as a string for maximum compatibility with clients.""" + + dbt_platform_job_run_output: Optional[List[DbtPlatformJobRunStep]] = None + """Steps of the job run as received from dbt platform""" + + dbt_platform_job_run_url: Optional[str] = None + """Url where full run details can be viewed""" + + steps_truncated: Optional[bool] = None + """Whether the number of steps in the output has been truncated. If true, the output will contain + the first 20 steps of the output.""" + + def as_dict(self) -> dict: + """Serializes the DbtPlatformTaskOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dbt_platform_job_run_id is not None: + body["dbt_platform_job_run_id"] = self.dbt_platform_job_run_id + if self.dbt_platform_job_run_output: + body["dbt_platform_job_run_output"] = [v.as_dict() for v in self.dbt_platform_job_run_output] + if self.dbt_platform_job_run_url is not None: + body["dbt_platform_job_run_url"] = self.dbt_platform_job_run_url + if self.steps_truncated is not None: + body["steps_truncated"] = self.steps_truncated + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtPlatformTaskOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_platform_job_run_id is not None: + body["dbt_platform_job_run_id"] = self.dbt_platform_job_run_id + if self.dbt_platform_job_run_output: + body["dbt_platform_job_run_output"] = self.dbt_platform_job_run_output + if self.dbt_platform_job_run_url is not None: + body["dbt_platform_job_run_url"] = self.dbt_platform_job_run_url + if self.steps_truncated is not None: + body["steps_truncated"] = self.steps_truncated + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtPlatformTaskOutput: + """Deserializes the DbtPlatformTaskOutput from a dictionary.""" + return cls( + dbt_platform_job_run_id=d.get("dbt_platform_job_run_id", 
None), + dbt_platform_job_run_output=_repeated_dict(d, "dbt_platform_job_run_output", DbtPlatformJobRunStep), + dbt_platform_job_run_url=d.get("dbt_platform_job_run_url", None), + steps_truncated=d.get("steps_truncated", None), + ) + + @dataclass class DbtTask: commands: List[str] @@ -5955,10 +6119,13 @@ class RunOutput: """The output of a dashboard task, if available""" dbt_cloud_output: Optional[DbtCloudTaskOutput] = None + """Deprecated in favor of the new dbt_platform_output""" dbt_output: Optional[DbtOutput] = None """The output of a dbt task, if available.""" + dbt_platform_output: Optional[DbtPlatformTaskOutput] = None + error: Optional[str] = None """An error message indicating why a task failed or why output is not available. The message is unstructured, and its exact format is subject to change.""" @@ -6008,6 +6175,8 @@ def as_dict(self) -> dict: body["dbt_cloud_output"] = self.dbt_cloud_output.as_dict() if self.dbt_output: body["dbt_output"] = self.dbt_output.as_dict() + if self.dbt_platform_output: + body["dbt_platform_output"] = self.dbt_platform_output.as_dict() if self.error is not None: body["error"] = self.error if self.error_trace is not None: @@ -6039,6 +6208,8 @@ def as_shallow_dict(self) -> dict: body["dbt_cloud_output"] = self.dbt_cloud_output if self.dbt_output: body["dbt_output"] = self.dbt_output + if self.dbt_platform_output: + body["dbt_platform_output"] = self.dbt_platform_output if self.error is not None: body["error"] = self.error if self.error_trace is not None: @@ -6069,6 +6240,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RunOutput: dashboard_output=_from_dict(d, "dashboard_output", DashboardTaskOutput), dbt_cloud_output=_from_dict(d, "dbt_cloud_output", DbtCloudTaskOutput), dbt_output=_from_dict(d, "dbt_output", DbtOutput), + dbt_platform_output=_from_dict(d, "dbt_platform_output", DbtPlatformTaskOutput), error=d.get("error", None), error_trace=d.get("error_trace", None), info=d.get("info", None), @@ -6388,7 +6560,9 @@ class 
RunTask: """The task refreshes a dashboard and sends a snapshot to subscribers.""" dbt_cloud_task: Optional[DbtCloudTask] = None - """Task type for dbt cloud""" + """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task""" + + dbt_platform_task: Optional[DbtPlatformTask] = None dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task @@ -6572,6 +6746,8 @@ def as_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task.as_dict() if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task.as_dict() if self.dbt_task: body["dbt_task"] = self.dbt_task.as_dict() if self.depends_on: @@ -6669,6 +6845,8 @@ def as_shallow_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task if self.dbt_task: body["dbt_task"] = self.dbt_task if self.depends_on: @@ -6760,6 +6938,7 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTask: condition_task=_from_dict(d, "condition_task", RunConditionTask), dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask), dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), @@ -7784,7 +7963,9 @@ class SubmitTask: """The task refreshes a dashboard and sends a snapshot to subscribers.""" dbt_cloud_task: Optional[DbtCloudTask] = None - """Task type for dbt cloud""" + """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task""" + + dbt_platform_task: Optional[DbtPlatformTask] = None dbt_task: Optional[DbtTask] = None """The task runs one or more dbt 
commands when the `dbt_task` field is present. The dbt task @@ -7898,6 +8079,8 @@ def as_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task.as_dict() if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task.as_dict() if self.dbt_task: body["dbt_task"] = self.dbt_task.as_dict() if self.depends_on: @@ -7961,6 +8144,8 @@ def as_shallow_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task if self.dbt_task: body["dbt_task"] = self.dbt_task if self.depends_on: @@ -8021,6 +8206,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: condition_task=_from_dict(d, "condition_task", ConditionTask), dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask), dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), @@ -8202,7 +8388,9 @@ class Task: """The task refreshes a dashboard and sends a snapshot to subscribers.""" dbt_cloud_task: Optional[DbtCloudTask] = None - """Task type for dbt cloud""" + """Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task""" + + dbt_platform_task: Optional[DbtPlatformTask] = None dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. 
The dbt task @@ -8341,6 +8529,8 @@ def as_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task.as_dict() if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task.as_dict() if self.dbt_task: body["dbt_task"] = self.dbt_task.as_dict() if self.depends_on: @@ -8414,6 +8604,8 @@ def as_shallow_dict(self) -> dict: body["dashboard_task"] = self.dashboard_task if self.dbt_cloud_task: body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_platform_task: + body["dbt_platform_task"] = self.dbt_platform_task if self.dbt_task: body["dbt_task"] = self.dbt_task if self.depends_on: @@ -8484,6 +8676,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Task: condition_task=_from_dict(d, "condition_task", ConditionTask), dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_platform_task=_from_dict(d, "dbt_platform_task", DbtPlatformTask), dbt_task=_from_dict(d, "dbt_task", DbtTask), depends_on=_repeated_dict(d, "depends_on", TaskDependency), description=d.get("description", None), @@ -9274,9 +9467,7 @@ def wait_get_run_job_terminated_or_skipped( raise TimeoutError(f"timed out after {timeout}: {status_message}") def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None): - """Cancel all runs of a job. - - Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs + """Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. :param all_queued_runs: bool (optional) @@ -9299,9 +9490,7 @@ def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Opt self._api.do("POST", "/api/2.2/jobs/runs/cancel-all", body=body, headers=headers) def cancel_run(self, run_id: int) -> Wait[Run]: - """Cancel a run. - - Cancels a job run or a task run. 
The run is canceled asynchronously, so it may still be running when + """Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. :param run_id: int @@ -9359,8 +9548,6 @@ def create( ) -> CreateResponse: """Create a new job. - Create a new job. - :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -9518,9 +9705,7 @@ def create( return CreateResponse.from_dict(res) def delete(self, job_id: int): - """Delete a job. - - Deletes a job. + """Deletes a job. :param job_id: int The canonical identifier of the job to delete. This field is required. @@ -9537,9 +9722,7 @@ def delete(self, job_id: int): self._api.do("POST", "/api/2.2/jobs/delete", body=body, headers=headers) def delete_run(self, run_id: int): - """Delete a job run. - - Deletes a non-active run. Returns an error if the run is active. + """Deletes a non-active run. Returns an error if the run is active. :param run_id: int ID of the run to delete. @@ -9556,9 +9739,7 @@ def delete_run(self, run_id: int): self._api.do("POST", "/api/2.2/jobs/runs/delete", body=body, headers=headers) def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput: - """Export and retrieve a job run. - - Export and retrieve the job run task. + """Export and retrieve the job run task. :param run_id: int The canonical identifier for the run. This field is required. @@ -9581,9 +9762,7 @@ def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = return ExportRunOutput.from_dict(res) def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: - """Get a single job. - - Retrieves the details for a single job. + """Retrieves the details for a single job. Large arrays in the results will be paginated when they exceed 100 elements. 
A request for a single job will return all properties for that job, and the first 100 elements of array properties (`tasks`, @@ -9614,9 +9793,7 @@ def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: return Job.from_dict(res) def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse: - """Get job permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param job_id: str The job for which to get or manage permissions. @@ -9632,9 +9809,7 @@ def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse: return GetJobPermissionLevelsResponse.from_dict(res) def get_permissions(self, job_id: str) -> JobPermissions: - """Get job permissions. - - Gets the permissions of a job. Jobs can inherit permissions from their root object. + """Gets the permissions of a job. Jobs can inherit permissions from their root object. :param job_id: str The job for which to get or manage permissions. @@ -9657,9 +9832,7 @@ def get_run( include_resolved_values: Optional[bool] = None, page_token: Optional[str] = None, ) -> Run: - """Get a single job run. - - Retrieves the metadata of a run. + """Retrieves the metadata of a run. Large arrays in the results will be paginated when they exceed 100 elements. A request for a single run will return all properties for that run, and the first 100 elements of array properties (`tasks`, @@ -9698,9 +9871,7 @@ def get_run( return Run.from_dict(res) def get_run_output(self, run_id: int) -> RunOutput: - """Get the output for a single run. - - Retrieve the output and metadata of a single task run. When a notebook task returns a value through + """Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. 
To return a larger result, you can store job results in a cloud storage service. @@ -9734,9 +9905,7 @@ def list( offset: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[BaseJob]: - """List jobs. - - Retrieves a list of jobs. + """Retrieves a list of jobs. :param expand_tasks: bool (optional) Whether to include task and cluster details in the response. Note that only the first 100 elements @@ -9794,9 +9963,7 @@ def list_runs( start_time_from: Optional[int] = None, start_time_to: Optional[int] = None, ) -> Iterator[BaseRun]: - """List job runs. - - List runs in descending order by start time. + """List runs in descending order by start time. :param active_only: bool (optional) If active_only is `true`, only active runs are included in the results; otherwise, lists both active @@ -9884,9 +10051,7 @@ def repair_run( spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str, str]] = None, ) -> Wait[Run]: - """Repair a job run. - - Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job + """Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history for the original job run. :param run_id: int @@ -10064,9 +10229,7 @@ def repair_run_and_wait( ).result(timeout=timeout) def reset(self, job_id: int, new_settings: JobSettings): - """Update all job settings (reset). - - Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update + """Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. :param job_id: int @@ -10108,9 +10271,7 @@ def run_now( spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str, str]] = None, ) -> Wait[Run]: - """Trigger a new job run. - - Run a job and return the `run_id` of the triggered run. 
+ """Run a job and return the `run_id` of the triggered run. :param job_id: int The ID of the job to be executed @@ -10293,9 +10454,7 @@ def run_now_and_wait( def set_permissions( self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None ) -> JobPermissions: - """Set job permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param job_id: str @@ -10333,9 +10492,7 @@ def submit( timeout_seconds: Optional[int] = None, webhook_notifications: Optional[WebhookNotifications] = None, ) -> Wait[Run]: - """Create and trigger a one-time run. - - Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. + """Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. @@ -10472,9 +10629,7 @@ def submit_and_wait( def update( self, job_id: int, *, fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None ): - """Update job settings partially. - - Add, update, or remove specific settings of an existing job. Use the [_Reset_ + """Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. :param job_id: int @@ -10512,9 +10667,7 @@ def update( def update_permissions( self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None ) -> JobPermissions: - """Update job permissions. - - Updates the permissions on a job. Jobs can inherit permissions from their root object. + """Updates the permissions on a job. 
Jobs can inherit permissions from their root object. :param job_id: str The job for which to get or manage permissions. @@ -10552,9 +10705,7 @@ def __init__(self, api_client): def enforce_compliance( self, job_id: int, *, validate_only: Optional[bool] = None ) -> EnforcePolicyComplianceResponse: - """Enforce job policy compliance. - - Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) + """Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) are compliant with the current versions of their respective cluster policies. All-purpose clusters used in the job will not be updated. @@ -10579,9 +10730,7 @@ def enforce_compliance( return EnforcePolicyComplianceResponse.from_dict(res) def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse: - """Get job policy compliance. - - Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy + """Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and some of its job clusters no longer comply with their updated policies. @@ -10604,9 +10753,7 @@ def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse: def list_compliance( self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[JobCompliance]: - """List job policy compliance. - - Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of + """Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and its job clusters no longer comply with the updated policy. 
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 6d3a8815b..9ac4c153a 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -3512,9 +3512,7 @@ def __init__(self, api_client): def get( self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[SharedDataObject]: - """Get listing content metadata. - - Get a high level preview of the metadata of listing installable content. + """Get a high level preview of the metadata of listing installable content. :param listing_id: str :param page_size: int (optional) @@ -3546,9 +3544,7 @@ def get( def list( self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ListingFulfillment]: - """List all listing fulfillments. - - Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation. + """Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation. Standard installations contain metadata about the attached share or git repo. Only one of these fields will be present. Personalized installations contain metadata about the attached share or git repo, as well as the Delta Sharing recipient type. @@ -3597,9 +3593,7 @@ def create( repo_detail: Optional[RepoInstallation] = None, share_name: Optional[str] = None, ) -> Installation: - """Install from a listing. - - Install payload associated with a Databricks Marketplace listing. + """Install payload associated with a Databricks Marketplace listing. :param listing_id: str :param accepted_consumer_terms: :class:`ConsumerTerms` (optional) @@ -3633,9 +3627,7 @@ def create( return Installation.from_dict(res) def delete(self, listing_id: str, installation_id: str): - """Uninstall from a listing. - - Uninstall an installation associated with a Databricks Marketplace listing. 
+ """Uninstall an installation associated with a Databricks Marketplace listing. :param listing_id: str :param installation_id: str @@ -3656,9 +3648,7 @@ def delete(self, listing_id: str, installation_id: str): def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[InstallationDetail]: - """List all installations. - - List all installations across all listings. + """List all installations across all listings. :param page_size: int (optional) :param page_token: str (optional) @@ -3687,9 +3677,7 @@ def list( def list_listing_installations( self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[InstallationDetail]: - """List installations for a listing. - - List all installations for a particular listing. + """List all installations for a particular listing. :param listing_id: str :param page_size: int (optional) @@ -3729,9 +3717,7 @@ def update( *, rotate_token: Optional[bool] = None, ) -> UpdateInstallationResponse: - """Update an installation. - - This is a update API that will update the part of the fields defined in the installation table as well + """This is a update API that will update the part of the fields defined in the installation table as well as interact with external services according to the fields not included in the installation table 1. the token will be rotate if the rotateToken flag is true 2. the token will be forcibly rotate if the rotateToken flag is true and the tokenInfo field is empty @@ -3770,9 +3756,7 @@ def __init__(self, api_client): self._api = api_client def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetListingsResponse: - """Get one batch of listings. One may specify up to 50 IDs per request. - - Batch get a published listing in the Databricks Marketplace that the consumer has access to. + """Batch get a published listing in the Databricks Marketplace that the consumer has access to. 
:param ids: List[str] (optional) @@ -3790,9 +3774,7 @@ def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetListingsRespo return BatchGetListingsResponse.from_dict(res) def get(self, id: str) -> GetListingResponse: - """Get listing. - - Get a published listing in the Databricks Marketplace that the consumer has access to. + """Get a published listing in the Databricks Marketplace that the consumer has access to. :param id: str @@ -3819,9 +3801,7 @@ def list( provider_ids: Optional[List[str]] = None, tags: Optional[List[ListingTag]] = None, ) -> Iterator[Listing]: - """List listings. - - List all published listings in the Databricks Marketplace that the consumer has access to. + """List all published listings in the Databricks Marketplace that the consumer has access to. :param assets: List[:class:`AssetType`] (optional) Matches any of the following asset types @@ -3887,9 +3867,7 @@ def search( page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None, ) -> Iterator[Listing]: - """Search listings. - - Search published listings in the Databricks Marketplace that the consumer has access to. This query + """Search published listings in the Databricks Marketplace that the consumer has access to. This query supports a variety of different search parameters and performs fuzzy matching. :param query: str @@ -3958,9 +3936,7 @@ def create( last_name: Optional[str] = None, recipient_type: Optional[DeltaSharingRecipientType] = None, ) -> CreatePersonalizationRequestResponse: - """Create a personalization request. - - Create a personalization request for a listing. + """Create a personalization request for a listing. :param listing_id: str :param intended_use: str @@ -4005,9 +3981,7 @@ def create( return CreatePersonalizationRequestResponse.from_dict(res) def get(self, listing_id: str) -> GetPersonalizationRequestResponse: - """Get the personalization request for a listing. - - Get the personalization request for a listing. 
Each consumer can make at *most* one personalization + """Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. :param listing_id: str @@ -4027,9 +4001,7 @@ def get(self, listing_id: str) -> GetPersonalizationRequestResponse: def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[PersonalizationRequest]: - """List all personalization requests. - - List personalization requests for a consumer across all listings. + """List personalization requests for a consumer across all listings. :param page_size: int (optional) :param page_token: str (optional) @@ -4065,9 +4037,7 @@ def __init__(self, api_client): self._api = api_client def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetProvidersResponse: - """Get one batch of providers. One may specify up to 50 IDs per request. - - Batch get a provider in the Databricks Marketplace with at least one visible listing. + """Batch get a provider in the Databricks Marketplace with at least one visible listing. :param ids: List[str] (optional) @@ -4085,9 +4055,7 @@ def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetProvidersResp return BatchGetProvidersResponse.from_dict(res) def get(self, id: str) -> GetProviderResponse: - """Get a provider. - - Get a provider in the Databricks Marketplace with at least one visible listing. + """Get a provider in the Databricks Marketplace with at least one visible listing. :param id: str @@ -4104,9 +4072,7 @@ def get(self, id: str) -> GetProviderResponse: def list( self, *, is_featured: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ProviderInfo]: - """List providers. - - List all providers in the Databricks Marketplace with at least one visible listing. + """List all providers in the Databricks Marketplace with at least one visible listing. 
:param is_featured: bool (optional) :param page_size: int (optional) @@ -4143,9 +4109,7 @@ def __init__(self, api_client): self._api = api_client def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse: - """Create a new exchange filter. - - Add an exchange filter. + """Add an exchange filter. :param filter: :class:`ExchangeFilter` @@ -4163,9 +4127,7 @@ def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse: return CreateExchangeFilterResponse.from_dict(res) def delete(self, id: str): - """Delete an exchange filter. - - Delete an exchange filter + """Delete an exchange filter :param id: str @@ -4181,9 +4143,7 @@ def delete(self, id: str): def list( self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ExchangeFilter]: - """List exchange filters. - - List exchange filter + """List exchange filter :param exchange_id: str :param page_size: int (optional) @@ -4213,9 +4173,7 @@ def list( query["page_token"] = json["next_page_token"] def update(self, id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse: - """Update exchange filter. - - Update an exchange filter. + """Update an exchange filter. :param id: str :param filter: :class:`ExchangeFilter` @@ -4241,9 +4199,7 @@ def __init__(self, api_client): self._api = api_client def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExchangeForListingResponse: - """Add an exchange for listing. - - Associate an exchange with a listing + """Associate an exchange with a listing :param listing_id: str :param exchange_id: str @@ -4264,9 +4220,7 @@ def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExcha return AddExchangeForListingResponse.from_dict(res) def create(self, exchange: Exchange) -> CreateExchangeResponse: - """Create an exchange. 
- - Create an exchange + """Create an exchange :param exchange: :class:`Exchange` @@ -4284,9 +4238,7 @@ def create(self, exchange: Exchange) -> CreateExchangeResponse: return CreateExchangeResponse.from_dict(res) def delete(self, id: str): - """Delete an exchange. - - This removes a listing from marketplace. + """This removes a listing from marketplace. :param id: str @@ -4300,9 +4252,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/marketplace-exchange/exchanges/{id}", headers=headers) def delete_listing_from_exchange(self, id: str): - """Remove an exchange for listing. - - Disassociate an exchange with a listing + """Disassociate an exchange with a listing :param id: str @@ -4318,8 +4268,6 @@ def delete_listing_from_exchange(self, id: str): def get(self, id: str) -> GetExchangeResponse: """Get an exchange. - Get an exchange. - :param id: str :returns: :class:`GetExchangeResponse` @@ -4333,9 +4281,7 @@ def get(self, id: str) -> GetExchangeResponse: return GetExchangeResponse.from_dict(res) def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Exchange]: - """List exchanges. - - List exchanges visible to provider + """List exchanges visible to provider :param page_size: int (optional) :param page_token: str (optional) @@ -4364,9 +4310,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N def list_exchanges_for_listing( self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ExchangeListing]: - """List exchanges for listing. - - List exchanges associated with a listing + """List exchanges associated with a listing :param listing_id: str :param page_size: int (optional) @@ -4400,9 +4344,7 @@ def list_exchanges_for_listing( def list_listings_for_exchange( self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ExchangeListing]: - """List listings for exchange. 
- - List listings associated with an exchange + """List listings associated with an exchange :param exchange_id: str :param page_size: int (optional) @@ -4434,9 +4376,7 @@ def list_listings_for_exchange( query["page_token"] = json["next_page_token"] def update(self, id: str, exchange: Exchange) -> UpdateExchangeResponse: - """Update exchange. - - Update an exchange + """Update an exchange :param id: str :param exchange: :class:`Exchange` @@ -4469,9 +4409,7 @@ def create( *, display_name: Optional[str] = None, ) -> CreateFileResponse: - """Create a file. - - Create a file. Currently, only provider icons and attached notebooks are supported. + """Create a file. Currently, only provider icons and attached notebooks are supported. :param file_parent: :class:`FileParent` :param marketplace_file_type: :class:`MarketplaceFileType` @@ -4498,9 +4436,7 @@ def create( return CreateFileResponse.from_dict(res) def delete(self, file_id: str): - """Delete a file. - - Delete a file + """Delete a file :param file_id: str @@ -4514,9 +4450,7 @@ def delete(self, file_id: str): self._api.do("DELETE", f"/api/2.0/marketplace-provider/files/{file_id}", headers=headers) def get(self, file_id: str) -> GetFileResponse: - """Get a file. - - Get a file + """Get a file :param file_id: str @@ -4533,9 +4467,7 @@ def get(self, file_id: str) -> GetFileResponse: def list( self, file_parent: FileParent, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[FileInfo]: - """List files. - - List files attached to a parent entity. + """List files attached to a parent entity. :param file_parent: :class:`FileParent` :param page_size: int (optional) @@ -4573,9 +4505,7 @@ def __init__(self, api_client): self._api = api_client def create(self, listing: Listing) -> CreateListingResponse: - """Create a listing. 
- - Create a new listing + """Create a new listing :param listing: :class:`Listing` @@ -4593,9 +4523,7 @@ def create(self, listing: Listing) -> CreateListingResponse: return CreateListingResponse.from_dict(res) def delete(self, id: str): - """Delete a listing. - - Delete a listing + """Delete a listing :param id: str @@ -4609,9 +4537,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/marketplace-provider/listings/{id}", headers=headers) def get(self, id: str) -> GetListingResponse: - """Get a listing. - - Get a listing + """Get a listing :param id: str @@ -4626,9 +4552,7 @@ def get(self, id: str) -> GetListingResponse: return GetListingResponse.from_dict(res) def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]: - """List listings. - - List listings owned by this provider + """List listings owned by this provider :param page_size: int (optional) :param page_token: str (optional) @@ -4655,9 +4579,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N query["page_token"] = json["next_page_token"] def update(self, id: str, listing: Listing) -> UpdateListingResponse: - """Update listing. - - Update a listing + """Update a listing :param id: str :param listing: :class:`Listing` @@ -4686,9 +4608,7 @@ def __init__(self, api_client): def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[PersonalizationRequest]: - """All personalization requests across all listings. - - List personalization requests to this provider. This will return all personalization requests, + """List personalization requests to this provider. This will return all personalization requests, regardless of which listing they are for. :param page_size: int (optional) @@ -4726,9 +4646,7 @@ def update( reason: Optional[str] = None, share: Optional[ShareInfo] = None, ) -> UpdatePersonalizationRequestResponse: - """Update personalization request status. 
- - Update personalization request. This method only permits updating the status of the request. + """Update personalization request. This method only permits updating the status of the request. :param listing_id: str :param request_id: str @@ -4766,9 +4684,7 @@ def __init__(self, api_client): self._api = api_client def create(self) -> ProviderAnalyticsDashboard: - """Create provider analytics dashboard. - - Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the + """Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. :returns: :class:`ProviderAnalyticsDashboard` @@ -4784,8 +4700,6 @@ def create(self) -> ProviderAnalyticsDashboard: def get(self) -> ListProviderAnalyticsDashboardResponse: """Get provider analytics dashboard. - Get provider analytics dashboard. - :returns: :class:`ListProviderAnalyticsDashboardResponse` """ @@ -4799,8 +4713,6 @@ def get(self) -> ListProviderAnalyticsDashboardResponse: def get_latest_version(self) -> GetLatestVersionProviderAnalyticsDashboardResponse: """Get latest version of provider analytics dashboard. - Get latest version of provider analytics dashboard. - :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` """ @@ -4814,8 +4726,6 @@ def get_latest_version(self) -> GetLatestVersionProviderAnalyticsDashboardRespon def update(self, id: str, *, version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse: """Update provider analytics dashboard. - Update provider analytics dashboard. - :param id: str id is immutable property and can't be updated. :param version: int (optional) @@ -4843,9 +4753,7 @@ def __init__(self, api_client): self._api = api_client def create(self, provider: ProviderInfo) -> CreateProviderResponse: - """Create a provider. 
- - Create a provider + """Create a provider :param provider: :class:`ProviderInfo` @@ -4863,9 +4771,7 @@ def create(self, provider: ProviderInfo) -> CreateProviderResponse: return CreateProviderResponse.from_dict(res) def delete(self, id: str): - """Delete provider. - - Delete provider + """Delete provider :param id: str @@ -4879,9 +4785,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/marketplace-provider/providers/{id}", headers=headers) def get(self, id: str) -> GetProviderResponse: - """Get provider. - - Get provider profile + """Get provider profile :param id: str @@ -4896,9 +4800,7 @@ def get(self, id: str) -> GetProviderResponse: return GetProviderResponse.from_dict(res) def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: - """List providers. - - List provider profiles for account. + """List provider profiles for account. :param page_size: int (optional) :param page_token: str (optional) @@ -4925,9 +4827,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N query["page_token"] = json["next_page_token"] def update(self, id: str, provider: ProviderInfo) -> UpdateProviderResponse: - """Update provider. 
- - Update provider profile + """Update provider profile :param id: str :param provider: :class:`ProviderInfo` diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index b5a58078d..c8acbfd6d 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1502,6 +1502,24 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionTagResponse: return cls() +@dataclass +class DeleteOnlineStoreResponse: + def as_dict(self) -> dict: + """Serializes the DeleteOnlineStoreResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteOnlineStoreResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteOnlineStoreResponse: + """Deserializes the DeleteOnlineStoreResponse from a dictionary.""" + return cls() + + @dataclass class DeleteRun: run_id: str @@ -2875,6 +2893,41 @@ def from_dict(cls, d: Dict[str, Any]) -> ListModelsResponse: ) +@dataclass +class ListOnlineStoresResponse: + next_page_token: Optional[str] = None + """Pagination token to request the next page of results for this query.""" + + online_stores: Optional[List[OnlineStore]] = None + """List of online stores.""" + + def as_dict(self) -> dict: + """Serializes the ListOnlineStoresResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.online_stores: + body["online_stores"] = [v.as_dict() for v in self.online_stores] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListOnlineStoresResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.online_stores: + body["online_stores"] = self.online_stores + return body + + 
@classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListOnlineStoresResponse: + """Deserializes the ListOnlineStoresResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + online_stores=_repeated_dict(d, "online_stores", OnlineStore), + ) + + @dataclass class ListRegistryWebhooks: next_page_token: Optional[str] = None @@ -4324,6 +4377,77 @@ def from_dict(cls, d: Dict[str, Any]) -> ModelVersionTag: return cls(key=d.get("key", None), value=d.get("value", None)) +@dataclass +class OnlineStore: + """An OnlineStore is a logical database instance that stores and serves features online.""" + + name: str + """The name of the online store. This is the unique identifier for the online store.""" + + capacity: Optional[str] = None + """The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" + + creation_time: Optional[str] = None + """The timestamp when the online store was created.""" + + creator: Optional[str] = None + """The email of the creator of the online store.""" + + state: Optional[OnlineStoreState] = None + """The current state of the online store.""" + + def as_dict(self) -> dict: + """Serializes the OnlineStore into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.capacity is not None: + body["capacity"] = self.capacity + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.name is not None: + body["name"] = self.name + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the OnlineStore into a shallow dictionary of its immediate attributes.""" + body = {} + if self.capacity is not None: + body["capacity"] = self.capacity + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.name is 
not None: + body["name"] = self.name + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OnlineStore: + """Deserializes the OnlineStore from a dictionary.""" + return cls( + capacity=d.get("capacity", None), + creation_time=d.get("creation_time", None), + creator=d.get("creator", None), + name=d.get("name", None), + state=_enum(d, "state", OnlineStoreState), + ) + + +class OnlineStoreState(Enum): + + AVAILABLE = "AVAILABLE" + DELETING = "DELETING" + FAILING_OVER = "FAILING_OVER" + STARTING = "STARTING" + STOPPED = "STOPPED" + UPDATING = "UPDATING" + + @dataclass class Param: """Param associated with a run.""" @@ -4369,6 +4493,124 @@ class PermissionLevel(Enum): CAN_READ = "CAN_READ" +@dataclass +class PublishSpec: + online_store: str + """The name of the target online store.""" + + online_table_name: Optional[str] = None + """The full three-part (catalog, schema, table) name of the online table. Auto-generated if not + specified.""" + + publish_mode: Optional[PublishSpecPublishMode] = None + """The publish mode of the pipeline that syncs the online table with the source table. Defaults to + TRIGGERED if not specified. 
All publish modes require the source table to have Change Data Feed + (CDF) enabled.""" + + def as_dict(self) -> dict: + """Serializes the PublishSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.online_store is not None: + body["online_store"] = self.online_store + if self.online_table_name is not None: + body["online_table_name"] = self.online_table_name + if self.publish_mode is not None: + body["publish_mode"] = self.publish_mode.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PublishSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.online_store is not None: + body["online_store"] = self.online_store + if self.online_table_name is not None: + body["online_table_name"] = self.online_table_name + if self.publish_mode is not None: + body["publish_mode"] = self.publish_mode + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PublishSpec: + """Deserializes the PublishSpec from a dictionary.""" + return cls( + online_store=d.get("online_store", None), + online_table_name=d.get("online_table_name", None), + publish_mode=_enum(d, "publish_mode", PublishSpecPublishMode), + ) + + +class PublishSpecPublishMode(Enum): + + CONTINUOUS = "CONTINUOUS" + TRIGGERED = "TRIGGERED" + + +@dataclass +class PublishTableRequest: + publish_spec: PublishSpec + """The specification for publishing the online table from the source table.""" + + source_table_name: Optional[str] = None + """The full three-part (catalog, schema, table) name of the source table.""" + + def as_dict(self) -> dict: + """Serializes the PublishTableRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.publish_spec: + body["publish_spec"] = self.publish_spec.as_dict() + if self.source_table_name is not None: + body["source_table_name"] = self.source_table_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PublishTableRequest 
into a shallow dictionary of its immediate attributes.""" + body = {} + if self.publish_spec: + body["publish_spec"] = self.publish_spec + if self.source_table_name is not None: + body["source_table_name"] = self.source_table_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PublishTableRequest: + """Deserializes the PublishTableRequest from a dictionary.""" + return cls( + publish_spec=_from_dict(d, "publish_spec", PublishSpec), source_table_name=d.get("source_table_name", None) + ) + + +@dataclass +class PublishTableResponse: + online_table_name: Optional[str] = None + """The full three-part (catalog, schema, table) name of the online table.""" + + pipeline_id: Optional[str] = None + """The ID of the pipeline that syncs the online table with the source table.""" + + def as_dict(self) -> dict: + """Serializes the PublishTableResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.online_table_name is not None: + body["online_table_name"] = self.online_table_name + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PublishTableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.online_table_name is not None: + body["online_table_name"] = self.online_table_name + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PublishTableResponse: + """Deserializes the PublishTableResponse from a dictionary.""" + return cls(online_table_name=d.get("online_table_name", None), pipeline_id=d.get("pipeline_id", None)) + + @dataclass class RegisteredModelAccessControlRequest: group_name: Optional[str] = None @@ -6869,9 +7111,7 @@ def __init__(self, api_client): def create_experiment( self, name: str, *, artifact_location: Optional[str] = None, tags: Optional[List[ExperimentTag]] = None ) -> 
CreateExperimentResponse: - """Create experiment. - - Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that + """Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. @@ -6962,9 +7202,7 @@ def create_run( tags: Optional[List[RunTag]] = None, user_id: Optional[str] = None, ) -> CreateRunResponse: - """Create a run. - - Creates a new run within an experiment. A run is usually a single execution of a machine learning or + """Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. @@ -7002,9 +7240,7 @@ def create_run( return CreateRunResponse.from_dict(res) def delete_experiment(self, experiment_id: str): - """Delete an experiment. - - Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the + """Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. :param experiment_id: str @@ -7055,9 +7291,7 @@ def delete_logged_model_tag(self, model_id: str, tag_key: str): self._api.do("DELETE", f"/api/2.0/mlflow/logged-models/{model_id}/tags/{tag_key}", headers=headers) def delete_run(self, run_id: str): - """Delete a run. - - Marks a run for deletion. + """Marks a run for deletion. :param run_id: str ID of the run to delete. @@ -7077,9 +7311,7 @@ def delete_run(self, run_id: str): def delete_runs( self, experiment_id: str, max_timestamp_millis: int, *, max_runs: Optional[int] = None ) -> DeleteRunsResponse: - """Delete runs by creation time. 
- - Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at + """Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on @@ -7110,9 +7342,7 @@ def delete_runs( return DeleteRunsResponse.from_dict(res) def delete_tag(self, run_id: str, key: str): - """Delete a tag on a run. - - Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run + """Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. :param run_id: str @@ -7157,9 +7387,7 @@ def finalize_logged_model(self, model_id: str, status: LoggedModelStatus) -> Fin return FinalizeLoggedModelResponse.from_dict(res) def get_by_name(self, experiment_name: str) -> GetExperimentByNameResponse: - """Get an experiment by name. - - Gets metadata for an experiment. + """Gets metadata for an experiment. This endpoint will return deleted experiments, but prefers the active experiment if an active and deleted experiment share the same name. If multiple deleted experiments share the same name, the API @@ -7184,9 +7412,7 @@ def get_by_name(self, experiment_name: str) -> GetExperimentByNameResponse: return GetExperimentByNameResponse.from_dict(res) def get_experiment(self, experiment_id: str) -> GetExperimentResponse: - """Get an experiment. - - Gets metadata for an experiment. This method works on deleted experiments. + """Gets metadata for an experiment. This method works on deleted experiments. :param experiment_id: str ID of the associated experiment. @@ -7213,9 +7439,7 @@ def get_history( run_id: Optional[str] = None, run_uuid: Optional[str] = None, ) -> Iterator[Metric]: - """Get metric history for a run. - - Gets a list of all values for the specified metric for a given run. 
+ """Gets a list of all values for the specified metric for a given run. :param metric_key: str Name of the metric. @@ -7274,9 +7498,7 @@ def get_logged_model(self, model_id: str) -> GetLoggedModelResponse: return GetLoggedModelResponse.from_dict(res) def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: - """Get experiment permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param experiment_id: str The experiment for which to get or manage permissions. @@ -7292,9 +7514,7 @@ def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLe return GetExperimentPermissionLevelsResponse.from_dict(res) def get_permissions(self, experiment_id: str) -> ExperimentPermissions: - """Get experiment permissions. - - Gets the permissions of an experiment. Experiments can inherit permissions from their root object. + """Gets the permissions of an experiment. Experiments can inherit permissions from their root object. :param experiment_id: str The experiment for which to get or manage permissions. @@ -7310,9 +7530,7 @@ def get_permissions(self, experiment_id: str) -> ExperimentPermissions: return ExperimentPermissions.from_dict(res) def get_run(self, run_id: str, *, run_uuid: Optional[str] = None) -> GetRunResponse: - """Get a run. - - Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the + """Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. If there are multiple values with the latest timestamp, return the maximum of these values. @@ -7346,9 +7564,7 @@ def list_artifacts( run_id: Optional[str] = None, run_uuid: Optional[str] = None, ) -> Iterator[FileInfo]: - """List artifacts. - - List artifacts for a run. 
Takes an optional `artifact_path` prefix which if specified, the response + """List artifacts for a run. Takes an optional `artifact_path` prefix which if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports pagination. See [List directory contents | Files @@ -7400,9 +7616,7 @@ def list_experiments( page_token: Optional[str] = None, view_type: Optional[ViewType] = None, ) -> Iterator[Experiment]: - """List experiments. - - Gets a list of all experiments. + """Gets a list of all experiments. :param max_results: int (optional) Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If @@ -7444,9 +7658,7 @@ def log_batch( run_id: Optional[str] = None, tags: Optional[List[RunTag]] = None, ): - """Log a batch of metrics/params/tags for a run. - - Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server + """Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). In case of error (due to internal server error or an invalid request), partial data may be written. @@ -7520,11 +7732,7 @@ def log_batch( def log_inputs( self, run_id: str, *, datasets: Optional[List[DatasetInput]] = None, models: Optional[List[ModelInput]] = None ): - """Log inputs to a run. - - **NOTE:** Experimental: This API may change or be removed in a future release without warning. - - Logs inputs, such as datasets and models, to an MLflow Run. + """Logs inputs, such as datasets and models, to an MLflow Run. 
:param run_id: str ID of the run to log under @@ -7550,9 +7758,7 @@ def log_inputs( self._api.do("POST", "/api/2.0/mlflow/runs/log-inputs", body=body, headers=headers) def log_logged_model_params(self, model_id: str, *, params: Optional[List[LoggedModelParameter]] = None): - """Log params for a logged model. - - Logs params for a logged model. A param is a key-value pair (string key, string value). Examples + """Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error @@ -7586,9 +7792,7 @@ def log_metric( run_uuid: Optional[str] = None, step: Optional[int] = None, ): - """Log a metric for a run. - - Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated + """Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. @@ -7643,9 +7847,10 @@ def log_metric( self._api.do("POST", "/api/2.0/mlflow/runs/log-metric", body=body, headers=headers) def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = None): - """Log a model. + """**Note:** the [Create a logged model](/api/workspace/experiments/createloggedmodel) API replaces this + endpoint. - **NOTE:** Experimental: This API may change or be removed in a future release without warning. + Log a model to an MLflow Run. :param model_json: str (optional) MLmodel file in json format. 
@@ -7667,11 +7872,7 @@ def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = self._api.do("POST", "/api/2.0/mlflow/runs/log-model", body=body, headers=headers) def log_outputs(self, run_id: str, *, models: Optional[List[ModelOutput]] = None): - """Log outputs from a run. - - **NOTE**: Experimental: This API may change or be removed in a future release without warning. - - Logs outputs, such as models, from an MLflow Run. + """Logs outputs, such as models, from an MLflow Run. :param run_id: str The ID of the Run from which to log outputs. @@ -7693,9 +7894,7 @@ def log_outputs(self, run_id: str, *, models: Optional[List[ModelOutput]] = None self._api.do("POST", "/api/2.0/mlflow/runs/outputs", body=body, headers=headers) def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): - """Log a param for a run. - - Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include + """Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. @@ -7728,9 +7927,7 @@ def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_u self._api.do("POST", "/api/2.0/mlflow/runs/log-parameter", body=body, headers=headers) def restore_experiment(self, experiment_id: str): - """Restore an experiment. - - Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, + """Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. 
@@ -7752,9 +7949,7 @@ def restore_experiment(self, experiment_id: str): self._api.do("POST", "/api/2.0/mlflow/experiments/restore", body=body, headers=headers) def restore_run(self, run_id: str): - """Restore a run. - - Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. + """Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. @@ -7776,9 +7971,7 @@ def restore_run(self, run_id: str): def restore_runs( self, experiment_id: str, min_timestamp_millis: int, *, max_runs: Optional[int] = None ) -> RestoreRunsResponse: - """Restore runs by deletion time. - - Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores + """Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on @@ -7817,9 +8010,7 @@ def search_experiments( page_token: Optional[str] = None, view_type: Optional[ViewType] = None, ) -> Iterator[Experiment]: - """Search experiments. - - Searches for experiments that satisfy specified search criteria. + """Searches for experiments that satisfy specified search criteria. :param filter: str (optional) String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") @@ -7871,9 +8062,7 @@ def search_logged_models( order_by: Optional[List[SearchLoggedModelsOrderBy]] = None, page_token: Optional[str] = None, ) -> SearchLoggedModelsResponse: - """Search logged models. - - Search for Logged Models that satisfy specified search criteria. + """Search for Logged Models that satisfy specified search criteria. :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) List of datasets on which to apply the metrics filter clauses. 
For example, a filter with @@ -7928,9 +8117,7 @@ def search_runs( page_token: Optional[str] = None, run_view_type: Optional[ViewType] = None, ) -> Iterator[Run]: - """Search for runs. - - Searches for runs that satisfy expressions. + """Searches for runs that satisfy expressions. Search expressions can use `mlflowMetric` and `mlflowParam` keys. @@ -7990,9 +8177,7 @@ def search_runs( body["page_token"] = json["next_page_token"] def set_experiment_tag(self, experiment_id: str, key: str, value: str): - """Set a tag for an experiment. - - Sets a tag on an experiment. Experiment tags are metadata that can be updated. + """Sets a tag on an experiment. Experiment tags are metadata that can be updated. :param experiment_id: str ID of the experiment under which to log the tag. Must be provided. @@ -8018,7 +8203,7 @@ def set_experiment_tag(self, experiment_id: str, key: str, value: str): self._api.do("POST", "/api/2.0/mlflow/experiments/set-experiment-tag", body=body, headers=headers) def set_logged_model_tags(self, model_id: str, *, tags: Optional[List[LoggedModelTag]] = None): - """Set a tag for a logged model. + """Set tags for a logged model. :param model_id: str The ID of the logged model to set the tags on. @@ -8040,9 +8225,7 @@ def set_logged_model_tags(self, model_id: str, *, tags: Optional[List[LoggedMode def set_permissions( self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None ) -> ExperimentPermissions: - """Set experiment permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. 
:param experiment_id: str @@ -8063,9 +8246,7 @@ def set_permissions( return ExperimentPermissions.from_dict(res) def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): - """Set a tag for a run. - - Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. + """Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. :param key: str Name of the tag. Keys up to 250 bytes in size are supported. @@ -8096,9 +8277,7 @@ def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uui self._api.do("POST", "/api/2.0/mlflow/runs/set-tag", body=body, headers=headers) def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = None): - """Update an experiment. - - Updates experiment metadata. + """Updates experiment metadata. :param experiment_id: str ID of the associated experiment. @@ -8122,9 +8301,7 @@ def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = Non def update_permissions( self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None ) -> ExperimentPermissions: - """Update experiment permissions. - - Updates the permissions on an experiment. Experiments can inherit permissions from their root object. + """Updates the permissions on an experiment. Experiments can inherit permissions from their root object. :param experiment_id: str The experiment for which to get or manage permissions. @@ -8152,9 +8329,7 @@ def update_run( run_uuid: Optional[str] = None, status: Optional[UpdateRunStatus] = None, ) -> UpdateRunResponse: - """Update a run. - - Updates run metadata. + """Updates run metadata. :param end_time: int (optional) Unix timestamp in milliseconds of when the run ended. 
@@ -8190,6 +8365,146 @@ def update_run( return UpdateRunResponse.from_dict(res) +class FeatureStoreAPI: + """A feature store is a centralized repository that enables data scientists to find and share features. Using + a feature store also ensures that the code used to compute feature values is the same during model + training and when the model is used for inference. + + An online store is a low-latency database used for feature lookup during real-time model inference or + serve feature for real-time applications.""" + + def __init__(self, api_client): + self._api = api_client + + def create_online_store(self, online_store: OnlineStore) -> OnlineStore: + """Create an Online Feature Store. + + :param online_store: :class:`OnlineStore` + An OnlineStore is a logical database instance that stores and serves features online. + + :returns: :class:`OnlineStore` + """ + body = online_store.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/feature-store/online-stores", body=body, headers=headers) + return OnlineStore.from_dict(res) + + def delete_online_store(self, name: str): + """Delete an Online Feature Store. + + :param name: str + Name of the online store to delete. + + + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/feature-store/online-stores/{name}", headers=headers) + + def get_online_store(self, name: str) -> OnlineStore: + """Get an Online Feature Store. + + :param name: str + Name of the online store to get. + + :returns: :class:`OnlineStore` + """ + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/feature-store/online-stores/{name}", headers=headers) + return OnlineStore.from_dict(res) + + def list_online_stores( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[OnlineStore]: + """List Online Feature Stores. 
+ + :param page_size: int (optional) + The maximum number of results to return. Defaults to 100 if not specified. + :param page_token: str (optional) + Pagination token to go to the next page based on a previous query. + + :returns: Iterator over :class:`OnlineStore` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do("GET", "/api/2.0/feature-store/online-stores", query=query, headers=headers) + if "online_stores" in json: + for v in json["online_stores"]: + yield OnlineStore.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def publish_table(self, source_table_name: str, publish_spec: PublishSpec) -> PublishTableResponse: + """Publish features. + + :param source_table_name: str + The full three-part (catalog, schema, table) name of the source table. + :param publish_spec: :class:`PublishSpec` + The specification for publishing the online table from the source table. + + :returns: :class:`PublishTableResponse` + """ + body = {} + if publish_spec is not None: + body["publish_spec"] = publish_spec.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/feature-store/tables/{source_table_name}/publish", body=body, headers=headers + ) + return PublishTableResponse.from_dict(res) + + def update_online_store(self, name: str, online_store: OnlineStore, update_mask: str) -> OnlineStore: + """Update an Online Feature Store. + + :param name: str + The name of the online store. This is the unique identifier for the online store. + :param online_store: :class:`OnlineStore` + An OnlineStore is a logical database instance that stores and serves features online. + :param update_mask: str + The list of fields to update. 
+ + :returns: :class:`OnlineStore` + """ + body = online_store.as_dict() + query = {} + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/feature-store/online-stores/{name}", query=query, body=body, headers=headers + ) + return OnlineStore.from_dict(res) + + class ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments""" @@ -8252,9 +8567,7 @@ def create_experiment( timeseries_identifier_columns: Optional[List[str]] = None, training_frameworks: Optional[List[str]] = None, ) -> Wait[ForecastingExperiment]: - """Create a forecasting experiment. - - Creates a serverless forecasting experiment. Returns the experiment ID. + """Creates a serverless forecasting experiment. Returns the experiment ID. :param train_data_path: str The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, @@ -8399,9 +8712,7 @@ def create_experiment_and_wait( ).result(timeout=timeout) def get_experiment(self, experiment_id: str) -> ForecastingExperiment: - """Get a forecasting experiment. - - Public RPC to get forecasting experiment + """Public RPC to get forecasting experiment :param experiment_id: str The unique ID of a forecasting experiment @@ -8432,9 +8743,7 @@ def __init__(self, api_client): def approve_transition_request( self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None ) -> ApproveTransitionRequestResponse: - """Approve transition request. - - Approves a model version stage transition request. + """Approves a model version stage transition request. :param name: str Name of the model. 
@@ -8477,9 +8786,7 @@ def approve_transition_request( return ApproveTransitionRequestResponse.from_dict(res) def create_comment(self, name: str, version: str, comment: str) -> CreateCommentResponse: - """Post a comment. - - Posts a comment on a model version. A comment can be submitted either by a user or programmatically to + """Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. :param name: str @@ -8509,9 +8816,7 @@ def create_comment(self, name: str, version: str, comment: str) -> CreateComment def create_model( self, name: str, *, description: Optional[str] = None, tags: Optional[List[ModelTag]] = None ) -> CreateModelResponse: - """Create a model. - - Creates a new registered model with the name specified in the request body. + """Creates a new registered model with the name specified in the request body. Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. @@ -8549,9 +8854,7 @@ def create_model_version( run_link: Optional[str] = None, tags: Optional[List[ModelVersionTag]] = None, ) -> CreateModelVersionResponse: - """Create a model version. - - Creates a model version. + """Creates a model version. :param name: str Register model under this name @@ -8594,9 +8897,7 @@ def create_model_version( def create_transition_request( self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None ) -> CreateTransitionRequestResponse: - """Make a transition request. - - Creates a model version stage transition request. + """Creates a model version stage transition request. :param name: str Name of the model. @@ -8644,9 +8945,7 @@ def create_webhook( model_name: Optional[str] = None, status: Optional[RegistryWebhookStatus] = None, ) -> CreateWebhookResponse: - """Create a webhook. - - **NOTE**: This endpoint is in Public Preview. + """**NOTE**: This endpoint is in Public Preview. 
Creates a registry webhook. @@ -8719,9 +9018,7 @@ def create_webhook( return CreateWebhookResponse.from_dict(res) def delete_comment(self, id: str): - """Delete a comment. - - Deletes a comment on a model version. + """Deletes a comment on a model version. :param id: str Unique identifier of an activity @@ -8739,9 +9036,7 @@ def delete_comment(self, id: str): self._api.do("DELETE", "/api/2.0/mlflow/comments/delete", query=query, headers=headers) def delete_model(self, name: str): - """Delete a model. - - Deletes a registered model. + """Deletes a registered model. :param name: str Registered model unique name identifier. @@ -8759,9 +9054,7 @@ def delete_model(self, name: str): self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete", query=query, headers=headers) def delete_model_tag(self, name: str, key: str): - """Delete a model tag. - - Deletes the tag for a registered model. + """Deletes the tag for a registered model. :param name: str Name of the registered model that the tag was logged under. @@ -8784,9 +9077,7 @@ def delete_model_tag(self, name: str, key: str): self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete-tag", query=query, headers=headers) def delete_model_version(self, name: str, version: str): - """Delete a model version. - - Deletes a model version. + """Deletes a model version. :param name: str Name of the registered model @@ -8808,9 +9099,7 @@ def delete_model_version(self, name: str, version: str): self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete", query=query, headers=headers) def delete_model_version_tag(self, name: str, version: str, key: str): - """Delete a model version tag. - - Deletes a model version tag. + """Deletes a model version tag. :param name: str Name of the registered model that the tag was logged under. @@ -8845,9 +9134,7 @@ def delete_transition_request( *, comment: Optional[str] = None, ): - """Delete a transition request. - - Cancels a model version stage transition request. 
+ """Cancels a model version stage transition request. :param name: str Name of the model. @@ -8890,9 +9177,7 @@ def delete_transition_request( self._api.do("DELETE", "/api/2.0/mlflow/transition-requests/delete", query=query, headers=headers) def delete_webhook(self, *, id: Optional[str] = None): - """Delete a webhook. - - **NOTE:** This endpoint is in Public Preview. + """**NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. @@ -8912,9 +9197,7 @@ def delete_webhook(self, *, id: Optional[str] = None): self._api.do("DELETE", "/api/2.0/mlflow/registry-webhooks/delete", query=query, headers=headers) def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) -> Iterator[ModelVersion]: - """Get the latest version. - - Gets the latest version of a registered model. + """Gets the latest version of a registered model. :param name: str Registered model unique name identifier. @@ -8938,9 +9221,7 @@ def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) return parsed if parsed is not None else [] def get_model(self, name: str) -> GetModelResponse: - """Get model. - - Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also + """Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. @@ -8965,8 +9246,6 @@ def get_model(self, name: str) -> GetModelResponse: def get_model_version(self, name: str, version: str) -> GetModelVersionResponse: """Get a model version. - Get a model version. 
- :param name: str Name of the registered model :param version: str @@ -8988,9 +9267,7 @@ def get_model_version(self, name: str, version: str) -> GetModelVersionResponse: return GetModelVersionResponse.from_dict(res) def get_model_version_download_uri(self, name: str, version: str) -> GetModelVersionDownloadUriResponse: - """Get a model version URI. - - Gets a URI to download the model version. + """Gets a URI to download the model version. :param name: str Name of the registered model @@ -9013,9 +9290,7 @@ def get_model_version_download_uri(self, name: str, version: str) -> GetModelVer return GetModelVersionDownloadUriResponse.from_dict(res) def get_permission_levels(self, registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse: - """Get registered model permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param registered_model_id: str The registered model for which to get or manage permissions. @@ -9033,9 +9308,7 @@ def get_permission_levels(self, registered_model_id: str) -> GetRegisteredModelP return GetRegisteredModelPermissionLevelsResponse.from_dict(res) def get_permissions(self, registered_model_id: str) -> RegisteredModelPermissions: - """Get registered model permissions. - - Gets the permissions of a registered model. Registered models can inherit permissions from their root + """Gets the permissions of a registered model. Registered models can inherit permissions from their root object. :param registered_model_id: str @@ -9052,9 +9325,7 @@ def get_permissions(self, registered_model_id: str) -> RegisteredModelPermission return RegisteredModelPermissions.from_dict(res) def list_models(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Model]: - """List models. - - Lists all available registered models, up to the limit specified in __max_results__. 
+ """Lists all available registered models, up to the limit specified in __max_results__. :param max_results: int (optional) Maximum number of registered models desired. Max threshold is 1000. @@ -9083,9 +9354,7 @@ def list_models(self, *, max_results: Optional[int] = None, page_token: Optional query["page_token"] = json["next_page_token"] def list_transition_requests(self, name: str, version: str) -> Iterator[Activity]: - """List transition requests. - - Gets a list of all open stage transition requests for the model version. + """Gets a list of all open stage transition requests for the model version. :param name: str Name of the model. @@ -9115,9 +9384,7 @@ def list_webhooks( model_name: Optional[str] = None, page_token: Optional[str] = None, ) -> Iterator[RegistryWebhook]: - """List registry webhooks. - - **NOTE:** This endpoint is in Public Preview. + """**NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. @@ -9156,9 +9423,7 @@ def list_webhooks( def reject_transition_request( self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None ) -> RejectTransitionRequestResponse: - """Reject a transition request. - - Rejects a model version stage transition request. + """Rejects a model version stage transition request. :param name: str Name of the model. @@ -9197,9 +9462,7 @@ def reject_transition_request( return RejectTransitionRequestResponse.from_dict(res) def rename_model(self, name: str, *, new_name: Optional[str] = None) -> RenameModelResponse: - """Rename a model. - - Renames a registered model. + """Renames a registered model. :param name: str Registered model unique name identifier. @@ -9229,9 +9492,7 @@ def search_model_versions( order_by: Optional[List[str]] = None, page_token: Optional[str] = None, ) -> Iterator[ModelVersion]: - """Searches model versions. - - Searches for specific model versions based on the supplied __filter__. + """Searches for specific model versions based on the supplied __filter__. 
:param filter: str (optional) String filter condition, like "name='my-model-name'". Must be a single boolean condition, with @@ -9278,9 +9539,7 @@ def search_models( order_by: Optional[List[str]] = None, page_token: Optional[str] = None, ) -> Iterator[Model]: - """Search models. - - Search for registered models based on the specified __filter__. + """Search for registered models based on the specified __filter__. :param filter: str (optional) String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically @@ -9321,9 +9580,7 @@ def search_models( query["page_token"] = json["next_page_token"] def set_model_tag(self, name: str, key: str, value: str): - """Set a tag. - - Sets a tag on a registered model. + """Sets a tag on a registered model. :param name: str Unique name of the model. @@ -9352,9 +9609,7 @@ def set_model_tag(self, name: str, key: str, value: str): self._api.do("POST", "/api/2.0/mlflow/registered-models/set-tag", body=body, headers=headers) def set_model_version_tag(self, name: str, version: str, key: str, value: str): - """Set a version tag. - - Sets a model version tag. + """Sets a model version tag. :param name: str Unique name of the model. @@ -9392,9 +9647,7 @@ def set_permissions( *, access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, ) -> RegisteredModelPermissions: - """Set registered model permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param registered_model_id: str @@ -9419,9 +9672,7 @@ def set_permissions( def test_registry_webhook( self, id: str, *, event: Optional[RegistryWebhookEvent] = None ) -> TestRegistryWebhookResponse: - """Test a webhook. - - **NOTE:** This endpoint is in Public Preview. 
+ """**NOTE:** This endpoint is in Public Preview. Tests a registry webhook. @@ -9449,9 +9700,7 @@ def test_registry_webhook( def transition_stage( self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None ) -> TransitionStageResponse: - """Transition a stage. - - Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] + """Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage @@ -9499,9 +9748,7 @@ def transition_stage( return TransitionStageResponse.from_dict(res) def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: - """Update a comment. - - Post an edit to a comment on a model version. + """Post an edit to a comment on a model version. :param id: str Unique identifier of an activity @@ -9524,9 +9771,7 @@ def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: return UpdateCommentResponse.from_dict(res) def update_model(self, name: str, *, description: Optional[str] = None): - """Update model. - - Updates a registered model. + """Updates a registered model. :param name: str Registered model unique name identifier. @@ -9548,9 +9793,7 @@ def update_model(self, name: str, *, description: Optional[str] = None): self._api.do("PATCH", "/api/2.0/mlflow/registered-models/update", body=body, headers=headers) def update_model_version(self, name: str, version: str, *, description: Optional[str] = None): - """Update model version. - - Updates the model version. + """Updates the model version. 
:param name: str Name of the registered model @@ -9581,9 +9824,7 @@ def update_permissions( *, access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, ) -> RegisteredModelPermissions: - """Update registered model permissions. - - Updates the permissions on a registered model. Registered models can inherit permissions from their + """Updates the permissions on a registered model. Registered models can inherit permissions from their root object. :param registered_model_id: str @@ -9615,9 +9856,7 @@ def update_webhook( job_spec: Optional[JobSpec] = None, status: Optional[RegistryWebhookStatus] = None, ): - """Update a webhook. - - **NOTE:** This endpoint is in Public Preview. + """**NOTE:** This endpoint is in Public Preview. Updates a registry webhook. diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 030633eb8..174ee21a7 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -1355,8 +1355,6 @@ def create( ) -> CreateCustomAppIntegrationOutput: """Create Custom OAuth App Integration. - Create Custom OAuth App Integration. - You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param confidential: bool (optional) @@ -1403,9 +1401,7 @@ def create( return CreateCustomAppIntegrationOutput.from_dict(res) def delete(self, integration_id: str): - """Delete Custom OAuth App Integration. - - Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via + """Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str @@ -1424,9 +1420,7 @@ def delete(self, integration_id: str): ) def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: - """Get OAuth Custom App Integration. - - Gets the Custom OAuth App Integration for the given integration id. 
+ """Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str The OAuth app integration ID. @@ -1452,9 +1446,7 @@ def list( page_size: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[GetCustomAppIntegrationOutput]: - """Get custom oauth app integrations. - - Get the list of custom OAuth app integrations for the specified Databricks account + """Get the list of custom OAuth app integrations for the specified Databricks account :param include_creator_username: bool (optional) :param page_size: int (optional) @@ -1497,9 +1489,7 @@ def update( token_access_policy: Optional[TokenAccessPolicy] = None, user_authorized_scopes: Optional[List[str]] = None, ): - """Updates Custom OAuth App Integration. - - Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration + """Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str @@ -1549,9 +1539,7 @@ def __init__(self, api_client): def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[PublishedAppOutput]: - """Get all the published OAuth apps. - - Get all the available published OAuth apps in Databricks. + """Get all the available published OAuth apps in Databricks. :param page_size: int (optional) The max number of OAuth published apps to return in one page. @@ -1594,8 +1582,6 @@ def create( ) -> CreatePublishedAppIntegrationOutput: """Create Published OAuth App Integration. - Create Published OAuth App Integration. - You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param app_id: str (optional) @@ -1624,9 +1610,7 @@ def create( return CreatePublishedAppIntegrationOutput.from_dict(res) def delete(self, integration_id: str): - """Delete Published OAuth App Integration. - - Delete an existing Published OAuth App Integration. 
You can retrieve the published OAuth app + """Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str @@ -1645,9 +1629,7 @@ def delete(self, integration_id: str): ) def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: - """Get OAuth Published App Integration. - - Gets the Published OAuth App Integration for the given integration id. + """Gets the Published OAuth App Integration for the given integration id. :param integration_id: str @@ -1668,9 +1650,7 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[GetPublishedAppIntegrationOutput]: - """Get published oauth app integrations. - - Get the list of published OAuth app integrations for the specified Databricks account + """Get the list of published OAuth app integrations for the specified Databricks account :param page_size: int (optional) :param page_token: str (optional) @@ -1702,9 +1682,7 @@ def list( query["page_token"] = json["next_page_token"] def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None): - """Updates Published OAuth App Integration. - - Updates an existing published OAuth App Integration. You can retrieve the published OAuth app + """Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str @@ -1777,7 +1755,7 @@ def __init__(self, api_client): def create( self, service_principal_id: int, policy: FederationPolicy, *, policy_id: Optional[str] = None ) -> FederationPolicy: - """Create service principal federation policy. + """Create account federation policy. :param service_principal_id: int The service principal id for the federation policy. 
@@ -1807,7 +1785,7 @@ def create( return FederationPolicy.from_dict(res) def delete(self, service_principal_id: int, policy_id: str): - """Delete service principal federation policy. + """Delete account federation policy. :param service_principal_id: int The service principal id for the federation policy. @@ -1828,7 +1806,7 @@ def delete(self, service_principal_id: int, policy_id: str): ) def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy: - """Get service principal federation policy. + """Get account federation policy. :param service_principal_id: int The service principal id for the federation policy. @@ -1852,7 +1830,7 @@ def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy: def list( self, service_principal_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[FederationPolicy]: - """List service principal federation policies. + """List account federation policies. :param service_principal_id: int The service principal id for the federation policy. @@ -1888,7 +1866,7 @@ def list( def update( self, service_principal_id: int, policy_id: str, policy: FederationPolicy, *, update_mask: Optional[str] = None ) -> FederationPolicy: - """Update service principal federation policy. + """Update account federation policy. :param service_principal_id: int The service principal id for the federation policy. @@ -1943,9 +1921,7 @@ def __init__(self, api_client): def create( self, service_principal_id: int, *, lifetime: Optional[str] = None ) -> CreateServicePrincipalSecretResponse: - """Create service principal secret. - - Create a secret for the given service principal. + """Create a secret for the given service principal. :param service_principal_id: int The service principal ID. @@ -1972,9 +1948,7 @@ def create( return CreateServicePrincipalSecretResponse.from_dict(res) def delete(self, service_principal_id: int, secret_id: str): - """Delete service principal secret. 
- - Delete a secret from the given service principal. + """Delete a secret from the given service principal. :param service_principal_id: int The service principal ID. @@ -1993,9 +1967,7 @@ def delete(self, service_principal_id: int, secret_id: str): ) def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]: - """List service principal secrets. - - List all secrets associated with the given service principal. This operation only returns information + """List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. :param service_principal_id: int diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ef4363bb8..ca0a7604e 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -58,6 +58,9 @@ class CreatePipeline: edition: Optional[str] = None """Pipeline product edition.""" + environment: Optional[PipelinesEnvironment] = None + """Environment specification for this pipeline used to install dependencies.""" + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" @@ -149,6 +152,8 @@ def as_dict(self) -> dict: body["dry_run"] = self.dry_run if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment.as_dict() if self.event_log: body["event_log"] = self.event_log.as_dict() if self.filters: @@ -212,6 +217,8 @@ def as_shallow_dict(self) -> dict: body["dry_run"] = self.dry_run if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment if self.event_log: body["event_log"] = self.event_log if self.filters: @@ -265,6 +272,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline: development=d.get("development", None), dry_run=d.get("dry_run", None), edition=d.get("edition", None), + 
environment=_from_dict(d, "environment", PipelinesEnvironment), event_log=_from_dict(d, "event_log", EventLogSpec), filters=_from_dict(d, "filters", Filters), gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), @@ -455,6 +463,9 @@ class EditPipeline: edition: Optional[str] = None """Pipeline product edition.""" + environment: Optional[PipelinesEnvironment] = None + """Environment specification for this pipeline used to install dependencies.""" + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" @@ -551,6 +562,8 @@ def as_dict(self) -> dict: body["development"] = self.development if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment.as_dict() if self.event_log: body["event_log"] = self.event_log.as_dict() if self.expected_last_modified is not None: @@ -616,6 +629,8 @@ def as_shallow_dict(self) -> dict: body["development"] = self.development if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment if self.event_log: body["event_log"] = self.event_log if self.expected_last_modified is not None: @@ -672,6 +687,7 @@ def from_dict(cls, d: Dict[str, Any]) -> EditPipeline: deployment=_from_dict(d, "deployment", PipelineDeployment), development=d.get("development", None), edition=d.get("edition", None), + environment=_from_dict(d, "environment", PipelinesEnvironment), event_log=_from_dict(d, "event_log", EventLogSpec), expected_last_modified=d.get("expected_last_modified", None), filters=_from_dict(d, "filters", Filters), @@ -2362,6 +2378,9 @@ class PipelineSpec: edition: Optional[str] = None """Pipeline product edition.""" + environment: Optional[PipelinesEnvironment] = None + """Environment specification for this pipeline used to install dependencies.""" + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" @@ -2441,6 
+2460,8 @@ def as_dict(self) -> dict: body["development"] = self.development if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment.as_dict() if self.event_log: body["event_log"] = self.event_log.as_dict() if self.filters: @@ -2498,6 +2519,8 @@ def as_shallow_dict(self) -> dict: body["development"] = self.development if self.edition is not None: body["edition"] = self.edition + if self.environment: + body["environment"] = self.environment if self.event_log: body["event_log"] = self.event_log if self.filters: @@ -2547,6 +2570,7 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec: deployment=_from_dict(d, "deployment", PipelineDeployment), development=d.get("development", None), edition=d.get("edition", None), + environment=_from_dict(d, "environment", PipelinesEnvironment), event_log=_from_dict(d, "event_log", EventLogSpec), filters=_from_dict(d, "filters", Filters), gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), @@ -2702,6 +2726,39 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineTrigger: return cls(cron=_from_dict(d, "cron", CronTrigger), manual=_from_dict(d, "manual", ManualTrigger)) +@dataclass +class PipelinesEnvironment: + """The environment entity used to preserve serverless environment side panel, jobs' environment for + non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal + environment spec, only pip dependencies are supported.""" + + dependencies: Optional[List[str]] = None + """List of pip dependencies, as supported by the version of pip in this environment. 
 Each + dependency is a pip requirement file line + https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be + <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in + Databricks), <vcs project url>""" + + def as_dict(self) -> dict: + """Serializes the PipelinesEnvironment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dependencies: + body["dependencies"] = [v for v in self.dependencies] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PipelinesEnvironment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dependencies: + body["dependencies"] = self.dependencies + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PipelinesEnvironment: + """Deserializes the PipelinesEnvironment from a dictionary.""" + return cls(dependencies=d.get("dependencies", None)) + + @dataclass class ReportSpec: source_url: str @@ -2915,7 +2972,7 @@ def from_dict(cls, d: Dict[str, Any]) -> SchemaSpec: @dataclass class Sequencing: control_plane_seq_no: Optional[int] = None - """A sequence number, unique and increasing within the control plane.""" + """A sequence number, unique and increasing per pipeline.""" data_plane_id: Optional[DataPlaneId] = None """the ID assigned by the data plane.""" @@ -3584,6 +3641,7 @@ def create( development: Optional[bool] = None, dry_run: Optional[bool] = None, edition: Optional[str] = None, + environment: Optional[PipelinesEnvironment] = None, event_log: Optional[EventLogSpec] = None, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, @@ -3603,9 +3661,7 @@ def create( target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, ) -> CreatePipelineResponse: - """Create a pipeline. - - Creates a new data processing pipeline based on the requested configuration. 
If successful, this method returns the ID of the new pipeline. :param allow_duplicate_names: bool (optional) @@ -3631,6 +3687,8 @@ def create( :param dry_run: bool (optional) :param edition: str (optional) Pipeline product edition. + :param environment: :class:`PipelinesEnvironment` (optional) + Environment specification for this pipeline used to install dependencies. :param event_log: :class:`EventLogSpec` (optional) Event log configuration for this pipeline :param filters: :class:`Filters` (optional) @@ -3703,6 +3761,8 @@ def create( body["dry_run"] = dry_run if edition is not None: body["edition"] = edition + if environment is not None: + body["environment"] = environment.as_dict() if event_log is not None: body["event_log"] = event_log.as_dict() if filters is not None: @@ -3748,9 +3808,7 @@ def create( return CreatePipelineResponse.from_dict(res) def delete(self, pipeline_id: str): - """Delete a pipeline. - - Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and + """Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and its tables. You cannot undo this action. :param pipeline_id: str @@ -3780,9 +3838,7 @@ def get(self, pipeline_id: str) -> GetPipelineResponse: return GetPipelineResponse.from_dict(res) def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse: - """Get pipeline permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param pipeline_id: str The pipeline for which to get or manage permissions. @@ -3798,9 +3854,7 @@ def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevels return GetPipelinePermissionLevelsResponse.from_dict(res) def get_permissions(self, pipeline_id: str) -> PipelinePermissions: - """Get pipeline permissions. - - Gets the permissions of a pipeline. 
Pipelines can inherit permissions from their root object. + """Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. :param pipeline_id: str The pipeline for which to get or manage permissions. @@ -3816,9 +3870,7 @@ def get_permissions(self, pipeline_id: str) -> PipelinePermissions: return PipelinePermissions.from_dict(res) def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse: - """Get a pipeline update. - - Gets an update from an active pipeline. + """Gets an update from an active pipeline. :param pipeline_id: str The ID of the pipeline. @@ -3844,9 +3896,7 @@ def list_pipeline_events( order_by: Optional[List[str]] = None, page_token: Optional[str] = None, ) -> Iterator[PipelineEvent]: - """List pipeline events. - - Retrieves events for a pipeline. + """Retrieves events for a pipeline. :param pipeline_id: str The pipeline to return events for. @@ -3902,9 +3952,7 @@ def list_pipelines( order_by: Optional[List[str]] = None, page_token: Optional[str] = None, ) -> Iterator[PipelineStateInfo]: - """List pipelines. - - Lists pipelines defined in the Delta Live Tables system. + """Lists pipelines defined in the Delta Live Tables system. :param filter: str (optional) Select a subset of results based on the specified criteria. The supported filters are: @@ -3958,9 +4006,7 @@ def list_updates( page_token: Optional[str] = None, until_update_id: Optional[str] = None, ) -> ListUpdatesResponse: - """List pipeline updates. - - List updates for an active pipeline. + """List updates for an active pipeline. :param pipeline_id: str The pipeline to return updates for. @@ -3991,9 +4037,7 @@ def list_updates( def set_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: - """Set pipeline permissions. - - Sets permissions on an object, replacing existing permissions if they exist. 
Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param pipeline_id: str @@ -4023,9 +4067,7 @@ def start_update( refresh_selection: Optional[List[str]] = None, validate_only: Optional[bool] = None, ) -> StartUpdateResponse: - """Start a pipeline. - - Starts a new update for the pipeline. If there is already an active update for the pipeline, the + """Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. :param pipeline_id: str @@ -4067,9 +4109,7 @@ def start_update( return StartUpdateResponse.from_dict(res) def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]: - """Stop a pipeline. - - Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this + """Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. :param pipeline_id: str @@ -4105,6 +4145,7 @@ def update( deployment: Optional[PipelineDeployment] = None, development: Optional[bool] = None, edition: Optional[str] = None, + environment: Optional[PipelinesEnvironment] = None, event_log: Optional[EventLogSpec] = None, expected_last_modified: Optional[int] = None, filters: Optional[Filters] = None, @@ -4125,9 +4166,7 @@ def update( target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None, ): - """Edit a pipeline. - - Updates a pipeline with the supplied configuration. + """Updates a pipeline with the supplied configuration. :param pipeline_id: str Unique identifier for this pipeline. @@ -4153,6 +4192,8 @@ def update( Whether the pipeline is in Development mode. Defaults to false. :param edition: str (optional) Pipeline product edition. 
+ :param environment: :class:`PipelinesEnvironment` (optional) + Environment specification for this pipeline used to install dependencies. :param event_log: :class:`EventLogSpec` (optional) Event log configuration for this pipeline :param expected_last_modified: int (optional) @@ -4226,6 +4267,8 @@ def update( body["development"] = development if edition is not None: body["edition"] = edition + if environment is not None: + body["environment"] = environment.as_dict() if event_log is not None: body["event_log"] = event_log.as_dict() if expected_last_modified is not None: @@ -4274,9 +4317,7 @@ def update( def update_permissions( self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None ) -> PipelinePermissions: - """Update pipeline permissions. - - Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. + """Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. :param pipeline_id: str The pipeline for which to get or manage permissions. diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index e56c0c382..12a5d61fa 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -2326,9 +2326,7 @@ def __init__(self, api_client): self._api = api_client def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential: - """Create credential configuration. - - Creates a Databricks credential configuration that represents cloud cross-account credentials for a + """Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks clusters. 
For your AWS IAM role, you need to trust the External ID (the Databricks Account API account ID) in the returned credential object, and configure the required access policy. @@ -2361,9 +2359,7 @@ def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCred return Credential.from_dict(res) def delete(self, credentials_id: str): - """Delete credential configuration. - - Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot + """Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. :param credentials_id: str @@ -2381,9 +2377,7 @@ def delete(self, credentials_id: str): ) def get(self, credentials_id: str) -> Credential: - """Get credential configuration. - - Gets a Databricks credential configuration object for an account, both specified by ID. + """Gets a Databricks credential configuration object for an account, both specified by ID. :param credentials_id: str Databricks Account API credential configuration ID @@ -2401,9 +2395,7 @@ def get(self, credentials_id: str) -> Credential: return Credential.from_dict(res) def list(self) -> Iterator[Credential]: - """Get all credential configurations. - - Gets all Databricks credential configurations associated with an account specified by ID. + """Gets all Databricks credential configurations associated with an account specified by ID. :returns: Iterator over :class:`Credential` """ @@ -2441,9 +2433,7 @@ def create( aws_key_info: Optional[CreateAwsKeyInfo] = None, gcp_key_info: Optional[CreateGcpKeyInfo] = None, ) -> CustomerManagedKey: - """Create encryption key configuration. - - Creates a customer-managed key configuration object for an account, specified by ID. This operation + """Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. 
If the key is assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is @@ -2482,9 +2472,7 @@ def create( return CustomerManagedKey.from_dict(res) def delete(self, customer_managed_key_id: str): - """Delete encryption key configuration. - - Deletes a customer-managed key configuration object for an account. You cannot delete a configuration + """Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. :param customer_managed_key_id: str @@ -2504,9 +2492,7 @@ def delete(self, customer_managed_key_id: str): ) def get(self, customer_managed_key_id: str) -> CustomerManagedKey: - """Get encryption key configuration. - - Gets a customer-managed key configuration object for an account, specified by ID. This operation + """Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is @@ -2537,9 +2523,7 @@ def get(self, customer_managed_key_id: str) -> CustomerManagedKey: return CustomerManagedKey.from_dict(res) def list(self) -> Iterator[CustomerManagedKey]: - """Get all encryption key configurations. - - Gets all customer-managed key configuration objects for an account. If the key is specified as a + """Gets all customer-managed key configuration objects for an account. 
If the key is specified as a workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the @@ -2578,9 +2562,7 @@ def create( vpc_endpoints: Optional[NetworkVpcEndpoints] = None, vpc_id: Optional[str] = None, ) -> Network: - """Create network configuration. - - Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be + """Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. :param network_name: str @@ -2627,9 +2609,7 @@ def create( return Network.from_dict(res) def delete(self, network_id: str): - """Delete a network configuration. - - Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot + """Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. This operation is available only if your account is on the E2 version of the platform. @@ -2647,9 +2627,7 @@ def delete(self, network_id: str): self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) def get(self, network_id: str) -> Network: - """Get a network configuration. - - Gets a Databricks network configuration, which represents a cloud VPC and its resources. + """Gets a Databricks network configuration, which represents a cloud VPC and its resources. :param network_id: str Databricks Account API network configuration ID. @@ -2665,9 +2643,7 @@ def get(self, network_id: str) -> Network: return Network.from_dict(res) def list(self) -> Iterator[Network]: - """Get all network configurations. 
- - Gets a list of all Databricks network configurations for an account, specified by ID. + """Gets a list of all Databricks network configurations for an account, specified by ID. This operation is available only if your account is on the E2 version of the platform. @@ -2697,9 +2673,7 @@ def create( private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None, ) -> PrivateAccessSettings: - """Create private access settings. - - Creates a private access settings object, which specifies how your workspace is accessed over [AWS + """Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. @@ -2764,9 +2738,7 @@ def create( return PrivateAccessSettings.from_dict(res) def delete(self, private_access_settings_id: str): - """Delete a private access settings object. - - Deletes a private access settings object, which determines how your workspace is accessed over [AWS + """Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. Before configuring PrivateLink, read the [Databricks article about PrivateLink].", @@ -2791,9 +2763,7 @@ def delete(self, private_access_settings_id: str): ) def get(self, private_access_settings_id: str) -> PrivateAccessSettings: - """Get a private access settings object. - - Gets a private access settings object, which specifies how your workspace is accessed over [AWS + """Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. 
Before configuring PrivateLink, read the [Databricks article about PrivateLink].", @@ -2819,9 +2789,7 @@ def get(self, private_access_settings_id: str) -> PrivateAccessSettings: return PrivateAccessSettings.from_dict(res) def list(self) -> Iterator[PrivateAccessSettings]: - """Get all private access settings objects. - - Gets a list of all private access settings objects for an account, specified by ID. + """Gets a list of all private access settings objects for an account, specified by ID. :returns: Iterator over :class:`PrivateAccessSettings` """ @@ -2843,9 +2811,7 @@ def replace( private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None, ): - """Replace private access settings. - - Updates an existing private access settings object, which specifies how your workspace is accessed + """Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. @@ -2929,9 +2895,7 @@ def __init__(self, api_client): self._api = api_client def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration: - """Create new storage configuration. - - Creates new storage configuration for an account, specified by ID. Uploads a storage configuration + """Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the required bucket policy. 
@@ -2964,9 +2928,7 @@ def create(self, storage_configuration_name: str, root_bucket_info: RootBucketIn return StorageConfiguration.from_dict(res) def delete(self, storage_configuration_id: str): - """Delete storage configuration. - - Deletes a Databricks storage configuration. You cannot delete a storage configuration that is + """Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. :param storage_configuration_id: str @@ -2986,9 +2948,7 @@ def delete(self, storage_configuration_id: str): ) def get(self, storage_configuration_id: str) -> StorageConfiguration: - """Get storage configuration. - - Gets a Databricks storage configuration for an account, both specified by ID. + """Gets a Databricks storage configuration for an account, both specified by ID. :param storage_configuration_id: str Databricks Account API storage configuration ID. @@ -3008,9 +2968,7 @@ def get(self, storage_configuration_id: str) -> StorageConfiguration: return StorageConfiguration.from_dict(res) def list(self) -> Iterator[StorageConfiguration]: - """Get all storage configurations. - - Gets a list of all Databricks storage configurations for your account, specified by ID. + """Gets a list of all Databricks storage configurations for your account, specified by ID. :returns: Iterator over :class:`StorageConfiguration` """ @@ -3037,9 +2995,7 @@ def create( gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, region: Optional[str] = None, ) -> VpcEndpoint: - """Create VPC endpoint configuration. - - Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to + """Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. 
After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically @@ -3083,9 +3039,7 @@ def create( return VpcEndpoint.from_dict(res) def delete(self, vpc_endpoint_id: str): - """Delete VPC endpoint configuration. - - Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate + """Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. Before configuring PrivateLink, read the [Databricks article about PrivateLink]. @@ -3109,9 +3063,7 @@ def delete(self, vpc_endpoint_id: str): ) def get(self, vpc_endpoint_id: str) -> VpcEndpoint: - """Get a VPC endpoint configuration. - - Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate + """Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink @@ -3133,9 +3085,7 @@ def get(self, vpc_endpoint_id: str) -> VpcEndpoint: return VpcEndpoint.from_dict(res) def list(self) -> Iterator[VpcEndpoint]: - """Get all VPC endpoint configurations. - - Gets a list of all VPC endpoints for an account, specified by ID. + """Gets a list of all VPC endpoints for an account, specified by ID. Before configuring PrivateLink, read the [Databricks article about PrivateLink]. @@ -3216,9 +3166,7 @@ def create( storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, ) -> Wait[Workspace]: - """Create a new workspace. - - Creates a new workspace. + """Creates a new workspace. **Important**: This operation is asynchronous. 
A response with HTTP status code 200 means the request has been accepted and is in progress, but does not mean that the workspace deployed successfully and @@ -3412,9 +3360,7 @@ def create_and_wait( ).result(timeout=timeout) def delete(self, workspace_id: int): - """Delete a workspace. - - Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. + """Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. However, it might take a few minutes for all workspaces resources to be deleted, depending on the size and number of workspace resources. @@ -3434,9 +3380,7 @@ def delete(self, workspace_id: int): self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers) def get(self, workspace_id: int) -> Workspace: - """Get a workspace. - - Gets information including status for a Databricks workspace, specified by ID. In the response, the + """Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. @@ -3465,9 +3409,7 @@ def get(self, workspace_id: int) -> Workspace: return Workspace.from_dict(res) def list(self) -> Iterator[Workspace]: - """Get all workspaces. - - Gets a list of all workspaces associated with an account, specified by ID. + """Gets a list of all workspaces associated with an account, specified by ID. This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. 
@@ -3496,9 +3438,7 @@ def update( storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, ) -> Wait[Workspace]: - """Update workspace configuration. - - Updates a workspace configuration for either a running workspace or a failed workspace. The elements + """Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated varies between these two use cases. ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index bf3ef953f..8daed836d 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -163,9 +163,7 @@ def __init__(self, api_client): self._api = api_client def create_quality_monitor(self, quality_monitor: QualityMonitor) -> QualityMonitor: - """Create a quality monitor. - - Create a quality monitor on UC object + """Create a quality monitor on UC object :param quality_monitor: :class:`QualityMonitor` @@ -181,9 +179,7 @@ def create_quality_monitor(self, quality_monitor: QualityMonitor) -> QualityMoni return QualityMonitor.from_dict(res) def delete_quality_monitor(self, object_type: str, object_id: str): - """Delete a quality monitor. - - Delete a quality monitor on UC object + """Delete a quality monitor on UC object :param object_type: str The type of the monitored object. Can be one of the following: schema. @@ -200,9 +196,7 @@ def delete_quality_monitor(self, object_type: str, object_id: str): self._api.do("DELETE", f"/api/2.0/quality-monitors/{object_type}/{object_id}", headers=headers) def get_quality_monitor(self, object_type: str, object_id: str) -> QualityMonitor: - """Read a quality monitor. - - Read a quality monitor on UC object + """Read a quality monitor on UC object :param object_type: str The type of the monitored object. 
Can be one of the following: schema. @@ -222,9 +216,7 @@ def get_quality_monitor(self, object_type: str, object_id: str) -> QualityMonito def list_quality_monitor( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[QualityMonitor]: - """List quality monitors. - - (Unimplemented) List quality monitors + """(Unimplemented) List quality monitors :param page_size: int (optional) :param page_token: str (optional) @@ -253,9 +245,7 @@ def list_quality_monitor( def update_quality_monitor( self, object_type: str, object_id: str, quality_monitor: QualityMonitor ) -> QualityMonitor: - """Update a quality monitor. - - (Unimplemented) Update a quality monitor on UC object + """(Unimplemented) Update a quality monitor on UC object :param object_type: str The type of the monitored object. Can be one of the following: schema. diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 8d8c09ff8..23d7db400 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -3799,6 +3799,9 @@ class ServingEndpoint: creator: Optional[str] = None """The email of the user who created the serving endpoint.""" + description: Optional[str] = None + """Description of the endpoint""" + id: Optional[str] = None """System-generated ID of the endpoint, included to be used by the Permissions API.""" @@ -3830,6 +3833,8 @@ def as_dict(self) -> dict: body["creation_timestamp"] = self.creation_timestamp if self.creator is not None: body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description if self.id is not None: body["id"] = self.id if self.last_updated_timestamp is not None: @@ -3857,6 +3862,8 @@ def as_shallow_dict(self) -> dict: body["creation_timestamp"] = self.creation_timestamp if self.creator is not None: body["creator"] = self.creator + if self.description is not None: + body["description"] = self.description if self.id is not None: body["id"] = self.id 
if self.last_updated_timestamp is not None: @@ -3880,6 +3887,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ServingEndpoint: config=_from_dict(d, "config", EndpointCoreConfigSummary), creation_timestamp=d.get("creation_timestamp", None), creator=d.get("creator", None), + description=d.get("description", None), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), name=d.get("name", None), @@ -4021,6 +4029,9 @@ class ServingEndpointDetailed: data_plane_info: Optional[ModelDataPlaneInfo] = None """Information required to query DataPlane APIs.""" + description: Optional[str] = None + """Description of the serving model""" + endpoint_url: Optional[str] = None """Endpoint invocation url if route optimization is enabled for endpoint""" @@ -4067,6 +4078,8 @@ def as_dict(self) -> dict: body["creator"] = self.creator if self.data_plane_info: body["data_plane_info"] = self.data_plane_info.as_dict() + if self.description is not None: + body["description"] = self.description if self.endpoint_url is not None: body["endpoint_url"] = self.endpoint_url if self.id is not None: @@ -4104,6 +4117,8 @@ def as_shallow_dict(self) -> dict: body["creator"] = self.creator if self.data_plane_info: body["data_plane_info"] = self.data_plane_info + if self.description is not None: + body["description"] = self.description if self.endpoint_url is not None: body["endpoint_url"] = self.endpoint_url if self.id is not None: @@ -4136,6 +4151,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointDetailed: creation_timestamp=d.get("creation_timestamp", None), creator=d.get("creator", None), data_plane_info=_from_dict(d, "data_plane_info", ModelDataPlaneInfo), + description=d.get("description", None), endpoint_url=d.get("endpoint_url", None), id=d.get("id", None), last_updated_timestamp=d.get("last_updated_timestamp", None), @@ -4489,9 +4505,7 @@ def wait_get_serving_endpoint_not_updating( raise TimeoutError(f"timed out after {timeout}: {status_message}") def 
build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse: - """Get build logs for a served model. - - Retrieves the build logs associated with the provided served model. + """Retrieves the build logs associated with the provided served model. :param name: str The name of the serving endpoint that the served model belongs to. This field is required. @@ -4672,9 +4686,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.0/serving-endpoints/{name}", headers=headers) def export_metrics(self, name: str) -> ExportMetricsResponse: - """Get metrics of a serving endpoint. - - Retrieves the metrics associated with the provided serving endpoint in either Prometheus or + """Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. :param name: str @@ -4691,9 +4703,7 @@ def export_metrics(self, name: str) -> ExportMetricsResponse: return ExportMetricsResponse.from_dict(res) def get(self, name: str) -> ServingEndpointDetailed: - """Get a single serving endpoint. - - Retrieves the details for a single serving endpoint. + """Retrieves the details for a single serving endpoint. :param name: str The name of the serving endpoint. This field is required. @@ -4709,9 +4719,7 @@ def get(self, name: str) -> ServingEndpointDetailed: return ServingEndpointDetailed.from_dict(res) def get_open_api(self, name: str) -> GetOpenApiResponse: - """Get the schema for a serving endpoint. - - Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for + """Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for the supported paths, input and output format and datatypes. 
:param name: str @@ -4728,9 +4736,7 @@ def get_open_api(self, name: str) -> GetOpenApiResponse: return GetOpenApiResponse.from_dict(res) def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse: - """Get serving endpoint permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. @@ -4748,9 +4754,7 @@ def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointP return GetServingEndpointPermissionLevelsResponse.from_dict(res) def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermissions: - """Get serving endpoint permissions. - - Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root + """Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root object. :param serving_endpoint_id: str @@ -4830,9 +4834,7 @@ def list(self) -> Iterator[ServingEndpoint]: return parsed if parsed is not None else [] def logs(self, name: str, served_model_name: str) -> ServerLogsResponse: - """Get the latest logs for a served model. - - Retrieves the service logs associated with the provided served model. + """Retrieves the service logs associated with the provided served model. :param name: str The name of the serving endpoint that the served model belongs to. This field is required. @@ -4854,9 +4856,7 @@ def logs(self, name: str, served_model_name: str) -> ServerLogsResponse: def patch( self, name: str, *, add_tags: Optional[List[EndpointTag]] = None, delete_tags: Optional[List[str]] = None ) -> EndpointTags: - """Update tags of a serving endpoint. - - Used to batch add and delete tags from a serving endpoint with a single API call. + """Used to batch add and delete tags from a serving endpoint with a single API call. 
:param name: str The name of the serving endpoint who's tags to patch. This field is required. @@ -4881,9 +4881,7 @@ def patch( return EndpointTags.from_dict(res) def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: - """Update rate limits of a serving endpoint. - - Deprecated: Please use AI Gateway to manage rate limits instead. + """Deprecated: Please use AI Gateway to manage rate limits instead. :param name: str The name of the serving endpoint whose rate limits are being updated. This field is required. @@ -4913,9 +4911,7 @@ def put_ai_gateway( rate_limits: Optional[List[AiGatewayRateLimit]] = None, usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None, ) -> PutAiGatewayResponse: - """Update AI Gateway of a serving endpoint. - - Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and + """Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. :param name: str @@ -5070,9 +5066,7 @@ def set_permissions( *, access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, ) -> ServingEndpointPermissions: - """Set serving endpoint permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param serving_endpoint_id: str @@ -5103,9 +5097,7 @@ def update_config( served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None, ) -> Wait[ServingEndpointDetailed]: - """Update config of a serving endpoint. 
- - Updates any combination of the serving endpoint's served entities, the compute configuration of those + """Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. @@ -5173,9 +5165,7 @@ def update_permissions( *, access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, ) -> ServingEndpointPermissions: - """Update serving endpoint permissions. - - Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their + """Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. :param serving_endpoint_id: str @@ -5200,9 +5190,7 @@ def update_permissions( def update_provisioned_throughput_endpoint_config( self, name: str, config: PtEndpointCoreConfig ) -> Wait[ServingEndpointDetailed]: - """Update config of a PT serving endpoint. - - Updates any combination of the pt endpoint's served entities, the compute configuration of those + """Updates any combination of the pt endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. 
Updates are instantaneous and endpoint should be updated instantly diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 3bdbffb31..6d1318a22 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -2544,7 +2544,6 @@ class EgressNetworkPolicyNetworkAccessPolicyStorageDestination: bucket_name: Optional[str] = None region: Optional[str] = None - """The region of the S3 bucket.""" storage_destination_type: Optional[ EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType @@ -6750,9 +6749,7 @@ def __init__(self, api_client): def create( self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None ) -> CreateIpAccessListResponse: - """Create access list. - - Creates an IP access list for the account. + """Creates an IP access list for the account. A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. @@ -6795,9 +6792,7 @@ def create( return CreateIpAccessListResponse.from_dict(res) def delete(self, ip_access_list_id: str): - """Delete access list. - - Deletes an IP access list, specified by its list ID. + """Deletes an IP access list, specified by its list ID. :param ip_access_list_id: str The ID for the corresponding IP access list @@ -6812,9 +6807,7 @@ def delete(self, ip_access_list_id: str): ) def get(self, ip_access_list_id: str) -> GetIpAccessListResponse: - """Get IP access list. - - Gets an IP access list, specified by its list ID. + """Gets an IP access list, specified by its list ID. :param ip_access_list_id: str The ID for the corresponding IP access list @@ -6832,9 +6825,7 @@ def get(self, ip_access_list_id: str) -> GetIpAccessListResponse: return GetIpAccessListResponse.from_dict(res) def list(self) -> Iterator[IpAccessListInfo]: - """Get access lists. - - Gets all IP access lists for the specified account. 
+ """Gets all IP access lists for the specified account. :returns: Iterator over :class:`IpAccessListInfo` """ @@ -6856,9 +6847,7 @@ def replace( *, ip_addresses: Optional[List[str]] = None, ): - """Replace access list. - - Replaces an IP access list, specified by its ID. + """Replaces an IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all @@ -6912,9 +6901,7 @@ def update( label: Optional[str] = None, list_type: Optional[ListType] = None, ): - """Update access list. - - Updates an existing IP access list, specified by its ID. + """Updates an existing IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. @@ -7022,9 +7009,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: - """Delete the AI/BI dashboard embedding access policy. - - Delete the AI/BI dashboard embedding access policy, reverting back to the default. + """Delete the AI/BI dashboard embedding access policy, reverting back to the default. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7052,9 +7037,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingA return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: - """Retrieve the AI/BI dashboard embedding access policy. - - Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, + """Retrieves the AI/BI dashboard embedding access policy. 
The default setting is ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. :param etag: str (optional) @@ -7082,9 +7065,7 @@ def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPoli def update( self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str ) -> AibiDashboardEmbeddingAccessPolicySetting: - """Update the AI/BI dashboard embedding access policy. - - Updates the AI/BI dashboard embedding access policy at the workspace level. + """Updates the AI/BI dashboard embedding access policy at the workspace level. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -7128,9 +7109,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: - """Delete AI/BI dashboard embedding approved domains. - - Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default + """Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default empty list. :param etag: str (optional) @@ -7159,9 +7138,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingA return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: - """Retrieve the list of domains approved to host embedded AI/BI dashboards. - - Retrieves the list of domains approved to host embedded AI/BI dashboards. + """Retrieves the list of domains approved to host embedded AI/BI dashboards. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for @@ -7191,9 +7168,7 @@ def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDo def update( self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str ) -> AibiDashboardEmbeddingApprovedDomainsSetting: - """Update the list of domains approved to host embedded AI/BI dashboards. - - Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the + """Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. :param allow_missing: bool @@ -7241,9 +7216,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: - """Get the automatic cluster update setting. - - Gets the automatic cluster update setting. + """Gets the automatic cluster update setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7270,9 +7243,7 @@ def get(self, *, etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: def update( self, allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str ) -> AutomaticClusterUpdateSetting: - """Update the automatic cluster update setting. - - Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in + """Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. 
@@ -7321,9 +7292,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: - """Get the compliance security profile setting. - - Gets the compliance security profile setting. + """Gets the compliance security profile setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7350,9 +7319,7 @@ def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting def update( self, allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str ) -> ComplianceSecurityProfileSetting: - """Update the compliance security profile setting. - - Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided + """Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. @@ -7401,9 +7368,7 @@ def __init__(self, api_client): def exchange_token( self, partition_id: PartitionId, token_type: List[TokenType], scopes: List[str] ) -> ExchangeTokenResponse: - """Exchange token. - - Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to + """Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to determine token permissions. :param partition_id: :class:`PartitionId` @@ -7443,9 +7408,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> CspEnablementAccountSetting: - """Get the compliance security profile setting for new workspaces. - - Gets the compliance security profile setting for new workspaces. 
+ """Gets the compliance security profile setting for new workspaces. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7475,9 +7438,7 @@ def get(self, *, etag: Optional[str] = None) -> CspEnablementAccountSetting: def update( self, allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str ) -> CspEnablementAccountSetting: - """Update the compliance security profile setting for new workspaces. - - Updates the value of the compliance security profile setting for new workspaces. + """Updates the value of the compliance security profile setting for new workspaces. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -7525,9 +7486,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteDashboardEmailSubscriptionsResponse: - """Delete the Dashboard Email Subscriptions setting. - - Reverts the Dashboard Email Subscriptions setting to its default value. + """Reverts the Dashboard Email Subscriptions setting to its default value. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7555,9 +7514,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteDashboardEmailSubscript return DeleteDashboardEmailSubscriptionsResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> DashboardEmailSubscriptions: - """Get the Dashboard Email Subscriptions setting. - - Gets the Dashboard Email Subscriptions setting. + """Gets the Dashboard Email Subscriptions setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for @@ -7584,9 +7541,7 @@ def get(self, *, etag: Optional[str] = None) -> DashboardEmailSubscriptions: def update( self, allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str ) -> DashboardEmailSubscriptions: - """Update the Dashboard Email Subscriptions setting. - - Updates the Dashboard Email Subscriptions setting. + """Updates the Dashboard Email Subscriptions setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -7638,9 +7593,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse: - """Delete the default namespace setting. - - Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` + """Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. @@ -7668,9 +7621,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultNamespaceSetting return DeleteDefaultNamespaceSettingResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> DefaultNamespaceSetting: - """Get the default namespace setting. - - Gets the default namespace setting. + """Gets the default namespace setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7695,9 +7646,7 @@ def get(self, *, etag: Optional[str] = None) -> DefaultNamespaceSetting: return DefaultNamespaceSetting.from_dict(res) def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting: - """Update the default namespace setting. 
- - Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` + """Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the etag is present in the error response, which should be set in the `PATCH` request. If the setting is @@ -7756,9 +7705,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse: - """Delete Legacy Access Disablement Status. - - Deletes legacy access disablement status. + """Deletes legacy access disablement status. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7783,9 +7730,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResp return DeleteDisableLegacyAccessResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess: - """Retrieve Legacy Access Disablement Status. - - Retrieves legacy access disablement Status. + """Retrieves legacy access disablement Status. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7810,9 +7755,7 @@ def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess: return DisableLegacyAccess.from_dict(res) def update(self, allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess: - """Update Legacy Access Disablement Status. - - Updates legacy access disablement status. + """Updates legacy access disablement status. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. 
@@ -7862,9 +7805,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse: - """Delete the disable legacy DBFS setting. - - Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. + """Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7889,9 +7830,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsRespon return DeleteDisableLegacyDbfsResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs: - """Get the disable legacy DBFS setting. - - Gets the disable legacy DBFS setting. + """Gets the disable legacy DBFS setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -7916,9 +7855,7 @@ def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs: return DisableLegacyDbfs.from_dict(res) def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs: - """Update the disable legacy DBFS setting. - - Updates the disable legacy DBFS setting for the workspace. + """Updates the disable legacy DBFS setting for the workspace. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -7965,9 +7902,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse: - """Delete the disable legacy features setting. - - Deletes the disable legacy features setting. + """Deletes the disable legacy features setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for @@ -7995,9 +7930,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesRe return DeleteDisableLegacyFeaturesResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures: - """Get the disable legacy features setting. - - Gets the value of the disable legacy features setting. + """Gets the value of the disable legacy features setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8025,9 +7958,7 @@ def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures: return DisableLegacyFeatures.from_dict(res) def update(self, allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures: - """Update the disable legacy features setting. - - Updates the value of the disable legacy features setting. + """Updates the value of the disable legacy features setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -8074,9 +8005,7 @@ def __init__(self, api_client): self._api = api_client def get_enable_export_notebook(self) -> EnableExportNotebook: - """Get the Notebook and File exporting setting. - - Gets the Notebook and File exporting setting. + """Gets the Notebook and File exporting setting. :returns: :class:`EnableExportNotebook` """ @@ -8091,9 +8020,7 @@ def get_enable_export_notebook(self) -> EnableExportNotebook: def patch_enable_export_notebook( self, allow_missing: bool, setting: EnableExportNotebook, field_mask: str ) -> EnableExportNotebook: - """Update the Notebook and File exporting setting. - - Updates the Notebook and File exporting setting. The model follows eventual consistency, which means + """Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. 
:param allow_missing: bool @@ -8138,9 +8065,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: - """Delete the account IP access toggle setting. - - Reverts the value of the account IP access toggle setting to default (ON) + """Reverts the value of the account IP access toggle setting to default (ON) :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8168,9 +8093,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableRe return DeleteAccountIpAccessEnableResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable: - """Get the account IP access toggle setting. - - Gets the value of the account IP access toggle setting. + """Gets the value of the account IP access toggle setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8198,9 +8121,7 @@ def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable: return AccountIpAccessEnable.from_dict(res) def update(self, allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable: - """Update the account IP access toggle setting. - - Updates the value of the account IP access toggle setting. + """Updates the value of the account IP access toggle setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -8247,9 +8168,7 @@ def __init__(self, api_client): self._api = api_client def get_enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboard: - """Get the Results Table Clipboard features setting. - - Gets the Results Table Clipboard features setting. + """Gets the Results Table Clipboard features setting. 
:returns: :class:`EnableNotebookTableClipboard` """ @@ -8266,9 +8185,7 @@ def get_enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboard: def patch_enable_notebook_table_clipboard( self, allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str ) -> EnableNotebookTableClipboard: - """Update the Results Table Clipboard features setting. - - Updates the Results Table Clipboard features setting. The model follows eventual consistency, which + """Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. :param allow_missing: bool @@ -8312,9 +8229,7 @@ def __init__(self, api_client): self._api = api_client def get_enable_results_downloading(self) -> EnableResultsDownloading: - """Get the Notebook results download setting. - - Gets the Notebook results download setting. + """Gets the Notebook results download setting. :returns: :class:`EnableResultsDownloading` """ @@ -8329,9 +8244,7 @@ def get_enable_results_downloading(self) -> EnableResultsDownloading: def patch_enable_results_downloading( self, allow_missing: bool, setting: EnableResultsDownloading, field_mask: str ) -> EnableResultsDownloading: - """Update the Notebook results download setting. - - Updates the Notebook results download setting. The model follows eventual consistency, which means the + """Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. :param allow_missing: bool @@ -8380,9 +8293,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: - """Get the enhanced security monitoring setting. - - Gets the enhanced security monitoring setting. + """Gets the enhanced security monitoring setting. 
:param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8409,9 +8320,7 @@ def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSettin def update( self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str ) -> EnhancedSecurityMonitoringSetting: - """Update the enhanced security monitoring setting. - - Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided + """Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. @@ -8459,9 +8368,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> EsmEnablementAccountSetting: - """Get the enhanced security monitoring setting for new workspaces. - - Gets the enhanced security monitoring setting for new workspaces. + """Gets the enhanced security monitoring setting for new workspaces. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8491,9 +8398,7 @@ def get(self, *, etag: Optional[str] = None) -> EsmEnablementAccountSetting: def update( self, allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str ) -> EsmEnablementAccountSetting: - """Update the enhanced security monitoring setting for new workspaces. - - Updates the value of the enhanced security monitoring setting for new workspaces. + """Updates the value of the enhanced security monitoring setting for new workspaces. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. 
@@ -8557,9 +8462,7 @@ def __init__(self, api_client): def create( self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None ) -> CreateIpAccessListResponse: - """Create access list. - - Creates an IP access list for this workspace. + """Creates an IP access list for this workspace. A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. @@ -8601,9 +8504,7 @@ def create( return CreateIpAccessListResponse.from_dict(res) def delete(self, ip_access_list_id: str): - """Delete access list. - - Deletes an IP access list, specified by its list ID. + """Deletes an IP access list, specified by its list ID. :param ip_access_list_id: str The ID for the corresponding IP access list @@ -8616,9 +8517,7 @@ def delete(self, ip_access_list_id: str): self._api.do("DELETE", f"/api/2.0/ip-access-lists/{ip_access_list_id}", headers=headers) def get(self, ip_access_list_id: str) -> FetchIpAccessListResponse: - """Get access list. - - Gets an IP access list, specified by its list ID. + """Gets an IP access list, specified by its list ID. :param ip_access_list_id: str The ID for the corresponding IP access list @@ -8634,9 +8533,7 @@ def get(self, ip_access_list_id: str) -> FetchIpAccessListResponse: return FetchIpAccessListResponse.from_dict(res) def list(self) -> Iterator[IpAccessListInfo]: - """Get access lists. - - Gets all IP access lists for the specified workspace. + """Gets all IP access lists for the specified workspace. :returns: Iterator over :class:`IpAccessListInfo` """ @@ -8658,9 +8555,7 @@ def replace( *, ip_addresses: Optional[List[str]] = None, ): - """Replace access list. - - Replaces an IP access list, specified by its ID. + """Replaces an IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. 
When replacing an IP access list: * For all @@ -8710,9 +8605,7 @@ def update( label: Optional[str] = None, list_type: Optional[ListType] = None, ): - """Update access list. - - Updates an existing IP access list, specified by its ID. + """Updates an existing IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. @@ -8765,9 +8658,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: - """Get the enable partner powered AI features account setting. - - Gets the enable partner powered AI features account setting. + """Gets the enable partner powered AI features account setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8797,9 +8688,7 @@ def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: def update( self, allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str ) -> LlmProxyPartnerPoweredAccount: - """Update the enable partner powered AI features account setting. - - Updates the enable partner powered AI features account setting. + """Updates the enable partner powered AI features account setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -8846,9 +8735,7 @@ def __init__(self, api_client): self._api = api_client def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: - """Get the enforcement status of partner powered AI features account setting. - - Gets the enforcement status of partner powered AI features account setting. + """Gets the enforcement status of partner powered AI features account setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for @@ -8878,9 +8765,7 @@ def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: def update( self, allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str ) -> LlmProxyPartnerPoweredEnforce: - """Update the enforcement status of partner powered AI features account setting. - - Updates the enable enforcement status of partner powered AI features account setting. + """Updates the enable enforcement status of partner powered AI features account setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -8926,9 +8811,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: - """Delete the enable partner powered AI features workspace setting. - - Reverts the enable partner powered AI features workspace setting to its default value. + """Reverts the enable partner powered AI features workspace setting to its default value. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -8953,9 +8836,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredW return DeleteLlmProxyPartnerPoweredWorkspaceResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: - """Get the enable partner powered AI features workspace setting. - - Gets the enable partner powered AI features workspace setting. + """Gets the enable partner powered AI features workspace setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for @@ -8982,9 +8863,7 @@ def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: def update( self, allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str ) -> LlmProxyPartnerPoweredWorkspace: - """Update the enable partner powered AI features workspace setting. - - Updates the enable partner powered AI features workspace setting. + """Updates the enable partner powered AI features workspace setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -9036,9 +8915,7 @@ def __init__(self, api_client): def create_network_connectivity_configuration( self, network_connectivity_config: CreateNetworkConnectivityConfiguration ) -> NetworkConnectivityConfiguration: - """Create a network connectivity configuration. - - Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when + """Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access your resources. @@ -9069,9 +8946,7 @@ def create_network_connectivity_configuration( def create_private_endpoint_rule( self, network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule ) -> NccPrivateEndpointRule: - """Create a private endpoint rule. - - Create a private endpoint rule for the specified network connectivity config object. Once the object + """Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. 
@@ -9104,9 +8979,7 @@ def create_private_endpoint_rule( return NccPrivateEndpointRule.from_dict(res) def delete_network_connectivity_configuration(self, network_connectivity_config_id: str): - """Delete a network connectivity configuration. - - Deletes a network connectivity configuration. + """Deletes a network connectivity configuration. :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. @@ -9127,9 +9000,7 @@ def delete_network_connectivity_configuration(self, network_connectivity_config_ def delete_private_endpoint_rule( self, network_connectivity_config_id: str, private_endpoint_rule_id: str ) -> NccPrivateEndpointRule: - """Delete a private endpoint rule. - - Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private + """Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is set to `true` and the private endpoint is not available to your serverless compute resources. @@ -9156,9 +9027,7 @@ def delete_private_endpoint_rule( def get_network_connectivity_configuration( self, network_connectivity_config_id: str ) -> NetworkConnectivityConfiguration: - """Get a network connectivity configuration. - - Gets a network connectivity configuration. + """Gets a network connectivity configuration. :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. @@ -9180,9 +9049,7 @@ def get_network_connectivity_configuration( def get_private_endpoint_rule( self, network_connectivity_config_id: str, private_endpoint_rule_id: str ) -> NccPrivateEndpointRule: - """Gets a private endpoint rule. - - Gets the private endpoint rule. + """Gets the private endpoint rule. 
:param network_connectivity_config_id: str Your Network Connectvity Configuration ID. @@ -9206,9 +9073,7 @@ def get_private_endpoint_rule( def list_network_connectivity_configurations( self, *, page_token: Optional[str] = None ) -> Iterator[NetworkConnectivityConfiguration]: - """List network connectivity configurations. - - Gets an array of network connectivity configurations. + """Gets an array of network connectivity configurations. :param page_token: str (optional) Pagination token to go to next page based on previous query. @@ -9240,9 +9105,7 @@ def list_network_connectivity_configurations( def list_private_endpoint_rules( self, network_connectivity_config_id: str, *, page_token: Optional[str] = None ) -> Iterator[NccPrivateEndpointRule]: - """List private endpoint rules. - - Gets an array of private endpoint rules. + """Gets an array of private endpoint rules. :param network_connectivity_config_id: str Your Network Connectvity Configuration ID. @@ -9280,9 +9143,7 @@ def update_private_endpoint_rule( private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str, ) -> NccPrivateEndpointRule: - """Update a private endpoint rule. - - Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources + """Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources is allowed to be updated. :param network_connectivity_config_id: str @@ -9333,9 +9194,7 @@ def __init__(self, api_client): self._api = api_client def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy: - """Create a network policy. - - Creates a new network policy to manage which network destinations can be accessed from the Databricks + """Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. 
:param network_policy: :class:`AccountNetworkPolicy` @@ -9354,9 +9213,7 @@ def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> Acc return AccountNetworkPolicy.from_dict(res) def delete_network_policy_rpc(self, network_policy_id: str): - """Delete a network policy. - - Deletes a network policy. Cannot be called on 'default-policy'. + """Deletes a network policy. Cannot be called on 'default-policy'. :param network_policy_id: str The unique identifier of the network policy to delete. @@ -9373,9 +9230,7 @@ def delete_network_policy_rpc(self, network_policy_id: str): ) def get_network_policy_rpc(self, network_policy_id: str) -> AccountNetworkPolicy: - """Get a network policy. - - Gets a network policy. + """Gets a network policy. :param network_policy_id: str The unique identifier of the network policy to retrieve. @@ -9393,9 +9248,7 @@ def get_network_policy_rpc(self, network_policy_id: str) -> AccountNetworkPolicy return AccountNetworkPolicy.from_dict(res) def list_network_policies_rpc(self, *, page_token: Optional[str] = None) -> Iterator[AccountNetworkPolicy]: - """List network policies. - - Gets an array of network policies. + """Gets an array of network policies. :param page_token: str (optional) Pagination token to go to next page based on previous query. @@ -9424,9 +9277,7 @@ def list_network_policies_rpc(self, *, page_token: Optional[str] = None) -> Iter def update_network_policy_rpc( self, network_policy_id: str, network_policy: AccountNetworkPolicy ) -> AccountNetworkPolicy: - """Update a network policy. - - Updates a network policy. This allows you to modify the configuration of a network policy. + """Updates a network policy. This allows you to modify the configuration of a network policy. :param network_policy_id: str The unique identifier for the network policy. 
@@ -9459,9 +9310,7 @@ def __init__(self, api_client): self._api = api_client def create(self, *, config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: - """Create a notification destination. - - Creates a notification destination. Requires workspace admin permissions. + """Creates a notification destination. Requires workspace admin permissions. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. @@ -9484,9 +9333,7 @@ def create(self, *, config: Optional[Config] = None, display_name: Optional[str] return NotificationDestination.from_dict(res) def delete(self, id: str): - """Delete a notification destination. - - Deletes a notification destination. Requires workspace admin permissions. + """Deletes a notification destination. Requires workspace admin permissions. :param id: str @@ -9500,9 +9347,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/notification-destinations/{id}", headers=headers) def get(self, id: str) -> NotificationDestination: - """Get a notification destination. - - Gets a notification destination. + """Gets a notification destination. :param id: str @@ -9519,9 +9364,7 @@ def get(self, id: str) -> NotificationDestination: def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ListNotificationDestinationsResult]: - """List notification destinations. - - Lists notification destinations. + """Lists notification destinations. :param page_size: int (optional) :param page_token: str (optional) @@ -9550,9 +9393,7 @@ def list( def update( self, id: str, *, config: Optional[Config] = None, display_name: Optional[str] = None ) -> NotificationDestination: - """Update a notification destination. - - Updates a notification destination. Requires workspace admin permissions. At least one field is + """Updates a notification destination. Requires workspace admin permissions. 
At least one field is required in the request body. :param id: str @@ -9591,9 +9432,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse: - """Delete Personal Compute setting. - - Reverts back the Personal Compute setting value to default (ON) + """Reverts back the Personal Compute setting value to default (ON) :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -9621,9 +9460,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeletePersonalComputeSettingR return DeletePersonalComputeSettingResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> PersonalComputeSetting: - """Get Personal Compute setting. - - Gets the value of the Personal Compute setting. + """Gets the value of the Personal Compute setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -9651,9 +9488,7 @@ def get(self, *, etag: Optional[str] = None) -> PersonalComputeSetting: return PersonalComputeSetting.from_dict(res) def update(self, allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting: - """Update Personal Compute setting. - - Updates the value of the Personal Compute setting. + """Updates the value of the Personal Compute setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -9707,9 +9542,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse: - """Delete the restrict workspace admins setting. - - Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be + """Reverts the restrict workspace admins setting status for the workspace. 
A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. @@ -9737,9 +9570,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdmins return DeleteRestrictWorkspaceAdminsSettingResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: - """Get the restrict workspace admins setting. - - Gets the restrict workspace admins setting. + """Gets the restrict workspace admins setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -9766,9 +9597,7 @@ def get(self, *, etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: def update( self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str ) -> RestrictWorkspaceAdminsSetting: - """Update the restrict workspace admins setting. - - Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in + """Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. @@ -9913,9 +9742,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, *, etag: Optional[str] = None) -> DeleteSqlResultsDownloadResponse: - """Delete the SQL Results Download setting. - - Reverts the SQL Results Download setting to its default value. + """Reverts the SQL Results Download setting to its default value. :param etag: str (optional) etag used for versioning. 
The response is at least as fresh as the eTag provided. This is used for @@ -9940,9 +9767,7 @@ def delete(self, *, etag: Optional[str] = None) -> DeleteSqlResultsDownloadRespo return DeleteSqlResultsDownloadResponse.from_dict(res) def get(self, *, etag: Optional[str] = None) -> SqlResultsDownload: - """Get the SQL Results Download setting. - - Gets the SQL Results Download setting. + """Gets the SQL Results Download setting. :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for @@ -9967,9 +9792,7 @@ def get(self, *, etag: Optional[str] = None) -> SqlResultsDownload: return SqlResultsDownload.from_dict(res) def update(self, allow_missing: bool, setting: SqlResultsDownload, field_mask: str) -> SqlResultsDownload: - """Update the SQL Results Download setting. - - Updates the SQL Results Download setting. + """Updates the SQL Results Download setting. :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. @@ -10015,9 +9838,7 @@ def __init__(self, api_client): def create_obo_token( self, application_id: str, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None ) -> CreateOboTokenResponse: - """Create on-behalf token. - - Creates a token on behalf of a service principal. + """Creates a token on behalf of a service principal. :param application_id: str Application ID of the service principal. @@ -10044,9 +9865,7 @@ def create_obo_token( return CreateOboTokenResponse.from_dict(res) def delete(self, token_id: str): - """Delete a token. - - Deletes a token, specified by its ID. + """Deletes a token, specified by its ID. :param token_id: str The ID of the token to revoke. @@ -10059,9 +9878,7 @@ def delete(self, token_id: str): self._api.do("DELETE", f"/api/2.0/token-management/tokens/{token_id}", headers=headers) def get(self, token_id: str) -> GetTokenResponse: - """Get token info. 
- - Gets information about a token, specified by its ID. + """Gets information about a token, specified by its ID. :param token_id: str The ID of the token to get. @@ -10077,9 +9894,7 @@ def get(self, token_id: str) -> GetTokenResponse: return GetTokenResponse.from_dict(res) def get_permission_levels(self) -> GetTokenPermissionLevelsResponse: - """Get token permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :returns: :class:`GetTokenPermissionLevelsResponse` """ @@ -10092,9 +9907,7 @@ def get_permission_levels(self) -> GetTokenPermissionLevelsResponse: return GetTokenPermissionLevelsResponse.from_dict(res) def get_permissions(self) -> TokenPermissions: - """Get token permissions. - - Gets the permissions of all tokens. Tokens can inherit permissions from their root object. + """Gets the permissions of all tokens. Tokens can inherit permissions from their root object. :returns: :class:`TokenPermissions` """ @@ -10109,9 +9922,7 @@ def get_permissions(self) -> TokenPermissions: def list( self, *, created_by_id: Optional[int] = None, created_by_username: Optional[str] = None ) -> Iterator[TokenInfo]: - """List all tokens. - - Lists all tokens associated with the specified workspace or user. + """Lists all tokens associated with the specified workspace or user. :param created_by_id: int (optional) User ID of the user that created the token. @@ -10137,9 +9948,7 @@ def list( def set_permissions( self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None ) -> TokenPermissions: - """Set token permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. 
:param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) @@ -10160,9 +9969,7 @@ def set_permissions( def update_permissions( self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None ) -> TokenPermissions: - """Update token permissions. - - Updates the permissions on all tokens. Tokens can inherit permissions from their root object. + """Updates the permissions on all tokens. Tokens can inherit permissions from their root object. :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) @@ -10188,9 +9995,7 @@ def __init__(self, api_client): self._api = api_client def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: - """Create a user token. - - Creates and returns a token for a user. If this call is made through token authentication, it creates + """Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. @@ -10217,9 +10022,7 @@ def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[in return CreateTokenResponse.from_dict(res) def delete(self, token_id: str): - """Revoke token. - - Revokes an access token. + """Revokes an access token. If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. @@ -10239,9 +10042,7 @@ def delete(self, token_id: str): self._api.do("POST", "/api/2.0/token/delete", body=body, headers=headers) def list(self) -> Iterator[PublicTokenInfo]: - """List tokens. - - Lists all the valid tokens for a user-workspace pair. + """Lists all the valid tokens for a user-workspace pair. 
:returns: Iterator over :class:`PublicTokenInfo` """ @@ -10262,9 +10063,7 @@ def __init__(self, api_client): self._api = api_client def get_status(self, keys: str) -> WorkspaceConf: - """Check configuration status. - - Gets the configuration status for a workspace. + """Gets the configuration status for a workspace. :param keys: str @@ -10282,13 +10081,7 @@ def get_status(self, keys: str) -> WorkspaceConf: return res def set_status(self, contents: Dict[str, str]): - """Enable/disable features. - - Sets the configuration status for a workspace, including enabling or disabling it. - - - - """ + """Sets the configuration status for a workspace, including enabling or disabling it.""" headers = { "Content-Type": "application/json", @@ -10308,9 +10101,7 @@ def __init__(self, api_client): self._api = api_client def get_workspace_network_option_rpc(self, workspace_id: int) -> WorkspaceNetworkOption: - """Get workspace network option. - - Gets the network option for a workspace. Every workspace has exactly one network policy binding, with + """Gets the network option for a workspace. Every workspace has exactly one network policy binding, with 'default-policy' used if no explicit assignment exists. :param workspace_id: int @@ -10331,9 +10122,7 @@ def get_workspace_network_option_rpc(self, workspace_id: int) -> WorkspaceNetwor def update_workspace_network_option_rpc( self, workspace_id: int, workspace_network_option: WorkspaceNetworkOption ) -> WorkspaceNetworkOption: - """Update workspace network option. - - Updates the network option for a workspace. This operation associates the workspace with the specified + """Updates the network option for a workspace. This operation associates the workspace with the specified network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. 
:param workspace_id: int diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index f72682702..ca52c3a99 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -2962,9 +2962,7 @@ def create( comment: Optional[str] = None, recipient_profile_str: Optional[str] = None, ) -> ProviderInfo: - """Create an auth provider. - - Creates a new authentication provider minimally based on a name and authentication type. The caller + """Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. :param name: str @@ -2997,9 +2995,7 @@ def create( return ProviderInfo.from_dict(res) def delete(self, name: str): - """Delete a provider. - - Deletes an authentication provider, if the caller is a metastore admin or is the owner of the + """Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. :param name: str @@ -3013,9 +3009,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/providers/{name}", headers=headers) def get(self, name: str) -> ProviderInfo: - """Get a provider. - - Gets a specific authentication provider. The caller must supply the name of the provider, and must + """Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. :param name: str @@ -3038,9 +3032,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[ProviderInfo]: - """List providers. - - Gets an array of available authentication providers. The caller must either be a metastore admin or + """Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific ordering of the elements in the array. 
@@ -3093,9 +3085,7 @@ def list_provider_share_assets( table_max_results: Optional[int] = None, volume_max_results: Optional[int] = None, ) -> ListProviderShareAssetsResponse: - """List assets by provider share. - - Get arrays of assets associated with a specified provider's share. The caller is the recipient of the + """Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. :param provider_name: str @@ -3135,9 +3125,7 @@ def list_provider_share_assets( def list_shares( self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ProviderShare]: - """List shares by Provider. - - Gets an array of a specified provider's shares within the metastore where: + """Gets an array of a specified provider's shares within the metastore where: * the caller is a metastore admin, or * the caller is the owner. @@ -3186,9 +3174,7 @@ def update( owner: Optional[str] = None, recipient_profile_str: Optional[str] = None, ) -> ProviderInfo: - """Update a provider. - - Updates the information for an authentication provider, if the caller is a metastore admin or is the + """Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. @@ -3237,9 +3223,7 @@ def __init__(self, api_client): self._api = api_client def get_activation_url_info(self, activation_url: str): - """Get a share activation URL. - - Gets an activation URL for a share. + """Gets an activation URL for a share. :param activation_url: str The one time activation url. It also accepts activation token. @@ -3256,9 +3240,7 @@ def get_activation_url_info(self, activation_url: str): ) def retrieve_token(self, activation_url: str) -> RetrieveTokenResponse: - """Get an access token. - - Retrieve access token with an activation url. 
This is a public API without any authentication. + """Retrieve access token with an activation url. This is a public API without any authentication. :param activation_url: str The one time activation url. It also accepts activation token. @@ -3301,9 +3283,7 @@ def __init__(self, api_client): self._api = api_client def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPolicy: - """Create recipient federation policy. - - Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to + """Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and is @@ -3343,9 +3323,7 @@ def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPol return FederationPolicy.from_dict(res) def delete(self, recipient_name: str, name: str): - """Delete recipient federation policy. - - Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner + """Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. :param recipient_name: str @@ -3365,9 +3343,7 @@ def delete(self, recipient_name: str, name: str): ) def get_federation_policy(self, recipient_name: str, name: str) -> FederationPolicy: - """Get recipient federation policy. - - Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks + """Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. 
:param recipient_name: str @@ -3390,9 +3366,7 @@ def get_federation_policy(self, recipient_name: str, name: str) -> FederationPol def list( self, recipient_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[FederationPolicy]: - """List recipient federation policies. - - Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to + """Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. :param recipient_name: str @@ -3429,9 +3403,7 @@ def list( def update( self, recipient_name: str, name: str, policy: FederationPolicy, *, update_mask: Optional[str] = None ) -> FederationPolicy: - """Update recipient federation policy. - - Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the + """Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. :param recipient_name: str @@ -3498,9 +3470,7 @@ def create( properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, sharing_code: Optional[str] = None, ) -> RecipientInfo: - """Create a share recipient. - - Creates a new recipient with the delta sharing authentication type in the metastore. The caller must + """Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. :param name: str @@ -3557,9 +3527,7 @@ def create( return RecipientInfo.from_dict(res) def delete(self, name: str): - """Delete a share recipient. - - Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. + """Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. :param name: str Name of the recipient. 
@@ -3572,9 +3540,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers) def get(self, name: str) -> RecipientInfo: - """Get a share recipient. - - Gets a share recipient from the metastore if: + """Gets a share recipient from the metastore if: * the caller is the owner of the share recipient, or: * is a metastore admin @@ -3598,9 +3564,7 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> Iterator[RecipientInfo]: - """List share recipients. - - Gets an array of all share recipients within the current metastore where: + """Gets an array of all share recipients within the current metastore where: * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific ordering of the elements in the array. @@ -3645,9 +3609,7 @@ def list( query["page_token"] = json["next_page_token"] def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: - """Rotate a token. - - Refreshes the specified recipient's delta sharing authentication token with the provided token info. + """Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. :param name: str @@ -3673,9 +3635,7 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci def share_permissions( self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> GetRecipientSharePermissionsResponse: - """Get recipient share permissions. - - Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the + """Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. 
:param name: str @@ -3719,9 +3679,7 @@ def update( owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, ) -> RecipientInfo: - """Update a share recipient. - - Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of + """Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, the user must be both a metastore admin and the owner of the recipient. @@ -3776,9 +3734,7 @@ def __init__(self, api_client): self._api = api_client def create(self, name: str, *, comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: - """Create a share. - - Creates a new share for data objects. Data objects can be added after creation with **update**. The + """Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. :param name: str @@ -3806,9 +3762,7 @@ def create(self, name: str, *, comment: Optional[str] = None, storage_root: Opti return ShareInfo.from_dict(res) def delete(self, name: str): - """Delete a share. - - Deletes a data object share from the metastore. The caller must be an owner of the share. + """Deletes a data object share from the metastore. The caller must be an owner of the share. :param name: str The name of the share. @@ -3821,9 +3775,7 @@ def delete(self, name: str): self._api.do("DELETE", f"/api/2.1/unity-catalog/shares/{name}", headers=headers) def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> ShareInfo: - """Get a share. - - Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the + """Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. 
:param name: str @@ -3845,9 +3797,7 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share return ShareInfo.from_dict(res) def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: - """List shares. - - Gets an array of data object shares from the metastore. The caller must be a metastore admin or the + """Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. :param max_results: int (optional) @@ -3887,9 +3837,7 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = def share_permissions( self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None ) -> GetSharePermissionsResponse: - """Get permissions. - - Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the + """Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. :param name: str @@ -3930,15 +3878,13 @@ def update( storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None, ) -> ShareInfo: - """Update a share. - - Updates the share with the changes and data objects in the request. The caller must be the owner of + """Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. When the caller is a metastore admin, only the __owner__ field can be updated. - In the case that the share name is changed, **updateShare** requires that the caller is both the share - owner and a metastore admin. + In the case the share name is changed, **updateShare** requires that the caller is the owner of the + share and has the CREATE_SHARE privilege. If there are notebook files in the share, the __storage_root__ field cannot be updated. 
@@ -3989,9 +3935,7 @@ def update_permissions( changes: Optional[List[PermissionsChange]] = None, omit_permissions_list: Optional[bool] = None, ) -> UpdateSharePermissionsResponse: - """Update permissions. - - Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an + """Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. For new recipient grants, the user must also be the recipient owner or metastore admin. recipient diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 0e23b7a47..9e7eb240a 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -5719,12 +5719,16 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryEditContent: @dataclass class QueryFilter: query_start_time_range: Optional[TimeRange] = None - """A range filter for query submitted time. The time range must be <= 30 days.""" + """A range filter for query submitted time. The time range must be less than or equal to 30 days.""" statement_ids: Optional[List[str]] = None """A list of statement IDs.""" statuses: Optional[List[QueryStatus]] = None + """A list of statuses (QUEUED, RUNNING, CANCELED, FAILED, FINISHED) to match query results. + Corresponds to the `status` field in the response. Filtering for multiple statuses is not + recommended. Instead, opt to filter by a single status multiple times and then combine the + results.""" user_ids: Optional[List[int]] = None """A list of user IDs who ran the queries.""" @@ -5785,7 +5789,9 @@ class QueryInfo: are expected to remain static over time, this cannot be guaranteed.""" duration: Optional[int] = None - """Total execution time of the statement ( excluding result fetch time ).""" + """Total time of the statement execution. 
This value does not include the time taken to retrieve + the results, which can result in a discrepancy between this value and the start-to-finish + wall-clock time.""" endpoint_id: Optional[str] = None """Alias for `warehouse_id`.""" @@ -8669,9 +8675,7 @@ def __init__(self, api_client): def create( self, *, alert: Optional[CreateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None ) -> Alert: - """Create an alert. - - Creates an alert. + """Creates an alert. :param alert: :class:`CreateAlertRequestAlert` (optional) :param auto_resolve_display_name: bool (optional) @@ -8694,9 +8698,7 @@ def create( return Alert.from_dict(res) def delete(self, id: str): - """Delete an alert. - - Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and + """Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. @@ -8712,9 +8714,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/sql/alerts/{id}", headers=headers) def get(self, id: str) -> Alert: - """Get an alert. - - Gets an alert. + """Gets an alert. :param id: str @@ -8731,9 +8731,7 @@ def get(self, id: str) -> Alert: def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ListAlertsResponseAlert]: - """List alerts. - - Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API + """Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. :param page_size: int (optional) @@ -8768,9 +8766,7 @@ def update( alert: Optional[UpdateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None, ) -> Alert: - """Update an alert. - - Updates an alert. 
+ """Updates an alert. :param id: str :param update_mask: str @@ -8829,9 +8825,7 @@ def create( parent: Optional[str] = None, rearm: Optional[int] = None, ) -> LegacyAlert: - """Create an alert. - - Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a + """Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create @@ -8873,9 +8867,7 @@ def create( return LegacyAlert.from_dict(res) def delete(self, alert_id: str): - """Delete an alert. - - Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike + """Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete @@ -8895,9 +8887,7 @@ def delete(self, alert_id: str): self._api.do("DELETE", f"/api/2.0/preview/sql/alerts/{alert_id}", headers=headers) def get(self, alert_id: str) -> LegacyAlert: - """Get an alert. - - Gets an alert. + """Gets an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get instead. [Learn more] @@ -8917,9 +8907,7 @@ def get(self, alert_id: str) -> LegacyAlert: return LegacyAlert.from_dict(res) def list(self) -> Iterator[LegacyAlert]: - """Get alerts. - - Gets a list of alerts. + """Gets a list of alerts. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list instead. 
[Learn more] @@ -8937,9 +8925,7 @@ def list(self) -> Iterator[LegacyAlert]: return [LegacyAlert.from_dict(v) for v in res] def update(self, alert_id: str, name: str, options: AlertOptions, query_id: str, *, rearm: Optional[int] = None): - """Update an alert. - - Updates an alert. + """Updates an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update instead. [Learn more] @@ -8983,9 +8969,7 @@ def __init__(self, api_client): self._api = api_client def create_alert(self, alert: AlertV2) -> AlertV2: - """Create an alert. - - Create Alert + """Create Alert :param alert: :class:`AlertV2` @@ -9001,9 +8985,7 @@ def create_alert(self, alert: AlertV2) -> AlertV2: return AlertV2.from_dict(res) def get_alert(self, id: str) -> AlertV2: - """Get an alert. - - Gets an alert. + """Gets an alert. :param id: str @@ -9018,9 +9000,7 @@ def get_alert(self, id: str) -> AlertV2: return AlertV2.from_dict(res) def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]: - """List alerts. - - Gets a list of alerts accessible to the user, ordered by creation time. + """Gets a list of alerts accessible to the user, ordered by creation time. :param page_size: int (optional) :param page_token: str (optional) @@ -9047,9 +9027,7 @@ def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[s query["page_token"] = json["next_page_token"] def trash_alert(self, id: str): - """Delete an alert. - - Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer + """Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. 
@@ -9065,9 +9043,7 @@ def trash_alert(self, id: str): self._api.do("DELETE", f"/api/2.0/alerts/{id}", headers=headers) def update_alert(self, id: str, alert: AlertV2, update_mask: str) -> AlertV2: - """Update an alert. - - Update alert + """Update alert :param id: str UUID identifying the alert. @@ -9114,7 +9090,7 @@ def create( text: Optional[str] = None, visualization_id: Optional[str] = None, ) -> Widget: - """Add widget to a dashboard. + """Add widget to a dashboard :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. @@ -9149,7 +9125,7 @@ def create( return Widget.from_dict(res) def delete(self, id: str): - """Remove widget. + """Remove widget :param id: str Widget ID returned by :method:dashboardwidgets/create @@ -9173,7 +9149,7 @@ def update( text: Optional[str] = None, visualization_id: Optional[str] = None, ) -> Widget: - """Update existing widget. + """Update existing widget :param id: str Widget ID returned by :method:dashboardwidgets/create @@ -9269,9 +9245,7 @@ def create( return Dashboard.from_dict(res) def delete(self, dashboard_id: str): - """Remove a dashboard. - - Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot + """Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. :param dashboard_id: str @@ -9286,9 +9260,7 @@ def delete(self, dashboard_id: str): self._api.do("DELETE", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", headers=headers) def get(self, dashboard_id: str) -> Dashboard: - """Retrieve a definition. - - Returns a JSON representation of a dashboard object, including its visualization and query objects. + """Returns a JSON representation of a dashboard object, including its visualization and query objects. :param dashboard_id: str @@ -9310,9 +9282,7 @@ def list( page_size: Optional[int] = None, q: Optional[str] = None, ) -> Iterator[Dashboard]: - """Get dashboard objects. 
- - Fetch a paginated list of dashboard objects. + """Fetch a paginated list of dashboard objects. **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. @@ -9359,9 +9329,7 @@ def list( query["page"] += 1 def restore(self, dashboard_id: str): - """Restore a dashboard. - - A restored dashboard appears in list views and searches and can be shared. + """A restored dashboard appears in list views and searches and can be shared. :param dashboard_id: str @@ -9382,9 +9350,7 @@ def update( run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None, ) -> Dashboard: - """Change a dashboard definition. - - Modify this dashboard definition. This operation only affects attributes of the dashboard object. It + """Modify this dashboard definition. This operation only affects attributes of the dashboard object. It does not add, modify, or remove widgets. **Note**: You cannot undo this operation. @@ -9432,9 +9398,7 @@ def __init__(self, api_client): self._api = api_client def list(self) -> Iterator[DataSource]: - """Get a list of SQL warehouses. - - Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this + """Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. @@ -9475,9 +9439,7 @@ def __init__(self, api_client): self._api = api_client def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: - """Get object ACL. - - Gets a JSON representation of the access control list (ACL) for a specified object. + """Gets a JSON representation of the access control list (ACL) for a specified object. **Note**: A new version of the Databricks SQL API is now available. Please use :method:workspace/getpermissions instead. 
[Learn more] @@ -9506,9 +9468,7 @@ def set( *, access_control_list: Optional[List[AccessControl]] = None, ) -> SetResponse: - """Set object ACL. - - Sets the access control list (ACL) for a specified object. This operation will complete rewrite the + """Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. **Note**: A new version of the Databricks SQL API is now available. Please use @@ -9540,9 +9500,7 @@ def set( def transfer_ownership( self, object_type: OwnableObjectType, object_id: TransferOwnershipObjectId, *, new_owner: Optional[str] = None ) -> Success: - """Transfer object ownership. - - Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. + """Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use :method:queries/update and :method:alerts/update respectively instead. [Learn more] @@ -9586,9 +9544,7 @@ def __init__(self, api_client): def create( self, *, auto_resolve_display_name: Optional[bool] = None, query: Optional[CreateQueryRequestQuery] = None ) -> Query: - """Create a query. - - Creates a query. + """Creates a query. :param auto_resolve_display_name: bool (optional) If true, automatically resolve query display name conflicts. Otherwise, fail the request if the @@ -9611,9 +9567,7 @@ def create( return Query.from_dict(res) def delete(self, id: str): - """Delete a query. - - Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + """Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is permanently deleted after 30 days. 
@@ -9629,9 +9583,7 @@ def delete(self, id: str): self._api.do("DELETE", f"/api/2.0/sql/queries/{id}", headers=headers) def get(self, id: str) -> Query: - """Get a query. - - Gets a query. + """Gets a query. :param id: str @@ -9648,9 +9600,7 @@ def get(self, id: str) -> Query: def list( self, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[ListQueryObjectsResponseQuery]: - """List queries. - - Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API + """Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. :param page_size: int (optional) @@ -9680,9 +9630,7 @@ def list( def list_visualizations( self, id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None ) -> Iterator[Visualization]: - """List visualizations on a query. - - Gets a list of visualizations on a query. + """Gets a list of visualizations on a query. :param id: str :param page_size: int (optional) @@ -9717,9 +9665,7 @@ def update( auto_resolve_display_name: Optional[bool] = None, query: Optional[UpdateQueryRequestQuery] = None, ) -> Query: - """Update a query. - - Updates a query. + """Updates a query. :param id: str :param update_mask: str @@ -9780,9 +9726,7 @@ def create( run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None, ) -> LegacyQuery: - """Create a new query definition. - - Creates a new query definition. Queries created with this endpoint belong to the authenticated user + """Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can @@ -9846,9 +9790,7 @@ def create( return LegacyQuery.from_dict(res) def delete(self, query_id: str): - """Delete a query. 
- - Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + """Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete @@ -9868,9 +9810,7 @@ def delete(self, query_id: str): self._api.do("DELETE", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers) def get(self, query_id: str) -> LegacyQuery: - """Get a query definition. - - Retrieve a query object definition along with contextual permissions information about the currently + """Retrieve a query object definition along with contextual permissions information about the currently authenticated user. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get @@ -9898,9 +9838,7 @@ def list( page_size: Optional[int] = None, q: Optional[str] = None, ) -> Iterator[LegacyQuery]: - """Get a list of queries. - - Gets a list of queries. Optionally, this list can be filtered by a search term. + """Gets a list of queries. Optionally, this list can be filtered by a search term. **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. @@ -9964,9 +9902,7 @@ def list( query["page"] += 1 def restore(self, query_id: str): - """Restore a query. - - Restore a query that has been moved to the trash. A restored query appears in list views and searches. + """Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. @@ -9997,9 +9933,7 @@ def update( run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None, ) -> LegacyQuery: - """Change a query definition. 
- - Modify this query definition. + """Modify this query definition. **Note**: You cannot undo this operation. @@ -10070,16 +10004,16 @@ def list( max_results: Optional[int] = None, page_token: Optional[str] = None, ) -> ListQueriesResponse: - """List Queries. - - List the history of queries through SQL warehouses, and serverless compute. + """List the history of queries through SQL warehouses, and serverless compute. You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used to list subsequent query statuses. :param filter_by: :class:`QueryFilter` (optional) - A filter to limit query history results. This field is optional. + An optional filter object to limit query history results. Accepts parameters such as user IDs, + endpoint IDs, and statuses to narrow the returned data. In a URL, the parameters of this filter are + specified with dot notation. For example: `filter_by.statement_ids`. :param include_metrics: bool (optional) Whether to include the query metrics with each query. Only use this for a small subset of queries (max_results). Defaults to false. @@ -10118,9 +10052,7 @@ def __init__(self, api_client): self._api = api_client def create(self, *, visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: - """Add a visualization to a query. - - Adds a visualization to a query. + """Adds a visualization to a query. :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) @@ -10138,9 +10070,7 @@ def create(self, *, visualization: Optional[CreateVisualizationRequestVisualizat return Visualization.from_dict(res) def delete(self, id: str): - """Remove a visualization. - - Removes a visualization. + """Removes a visualization. 
:param id: str @@ -10156,9 +10086,7 @@ def delete(self, id: str): def update( self, id: str, update_mask: str, *, visualization: Optional[UpdateVisualizationRequestVisualization] = None ) -> Visualization: - """Update a visualization. - - Updates a visualization. + """Updates a visualization. :param id: str :param update_mask: str @@ -10204,9 +10132,7 @@ def __init__(self, api_client): def create( self, query_id: str, type: str, options: Any, *, description: Optional[str] = None, name: Optional[str] = None ) -> LegacyVisualization: - """Add visualization to a query. - - Creates visualization in the query. + """Creates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/create instead. [Learn more] @@ -10247,9 +10173,7 @@ def create( return LegacyVisualization.from_dict(res) def delete(self, id: str): - """Remove visualization. - - Removes a visualization from the query. + """Removes a visualization from the query. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/delete instead. [Learn more] @@ -10280,9 +10204,7 @@ def update( type: Optional[str] = None, updated_at: Optional[str] = None, ) -> LegacyVisualization: - """Edit existing visualization. - - Updates visualization in the query. + """Updates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/update instead. [Learn more] @@ -10440,9 +10362,7 @@ def __init__(self, api_client): self._api = api_client def cancel_execution(self, statement_id: str): - """Cancel statement execution. - - Requests that an executing statement be canceled. Callers must poll for status to see the terminal + """Requests that an executing statement be canceled. Callers must poll for status to see the terminal state. 
:param statement_id: str @@ -10471,7 +10391,7 @@ def execute_statement( schema: Optional[str] = None, wait_timeout: Optional[str] = None, ) -> StatementResponse: - """Execute a SQL statement. + """Execute a SQL statement :param statement: str The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. @@ -10611,9 +10531,7 @@ def execute_statement( return StatementResponse.from_dict(res) def get_statement(self, statement_id: str) -> StatementResponse: - """Get status, manifest, and result first chunk. - - This request can be used to poll for the statement's status. When the `status.state` field is + """This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and @@ -10636,9 +10554,7 @@ def get_statement(self, statement_id: str) -> StatementResponse: return StatementResponse.from_dict(res) def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData: - """Get result chunk by index. - - After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. + """After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request can be used to fetch subsequent chunks. The response structure is identical to the nested `result` @@ -10748,9 +10664,7 @@ def create( tags: Optional[EndpointTags] = None, warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, ) -> Wait[GetWarehouseResponse]: - """Create a warehouse. 
- - Creates a new SQL warehouse. + """Creates a new SQL warehouse. :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it @@ -10886,9 +10800,7 @@ def create_and_wait( ).result(timeout=timeout) def delete(self, id: str): - """Delete a warehouse. - - Deletes a SQL warehouse. + """Deletes a SQL warehouse. :param id: str Required. Id of the SQL warehouse. @@ -10920,9 +10832,7 @@ def edit( tags: Optional[EndpointTags] = None, warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, ) -> Wait[GetWarehouseResponse]: - """Update a warehouse. - - Updates the configuration for a SQL warehouse. + """Updates the configuration for a SQL warehouse. :param id: str Required. Id of the warehouse to configure. @@ -11057,9 +10967,7 @@ def edit_and_wait( ).result(timeout=timeout) def get(self, id: str) -> GetWarehouseResponse: - """Get warehouse info. - - Gets the information for a single SQL warehouse. + """Gets the information for a single SQL warehouse. :param id: str Required. Id of the SQL warehouse. @@ -11075,9 +10983,7 @@ def get(self, id: str) -> GetWarehouseResponse: return GetWarehouseResponse.from_dict(res) def get_permission_levels(self, warehouse_id: str) -> GetWarehousePermissionLevelsResponse: - """Get SQL warehouse permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param warehouse_id: str The SQL warehouse for which to get or manage permissions. @@ -11093,9 +10999,7 @@ def get_permission_levels(self, warehouse_id: str) -> GetWarehousePermissionLeve return GetWarehousePermissionLevelsResponse.from_dict(res) def get_permissions(self, warehouse_id: str) -> WarehousePermissions: - """Get SQL warehouse permissions. - - Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root + """Gets the permissions of a SQL warehouse. 
SQL warehouses can inherit permissions from their root object. :param warehouse_id: str @@ -11112,9 +11016,7 @@ def get_permissions(self, warehouse_id: str) -> WarehousePermissions: return WarehousePermissions.from_dict(res) def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: - """Get the workspace configuration. - - Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. + """Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. :returns: :class:`GetWorkspaceWarehouseConfigResponse` """ @@ -11127,9 +11029,7 @@ def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: return GetWorkspaceWarehouseConfigResponse.from_dict(res) def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: - """List warehouses. - - Lists all SQL warehouses that a user has manager permissions on. + """Lists all SQL warehouses that a user has manager permissions on. :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. If not specified, the user @@ -11152,9 +11052,7 @@ def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo def set_permissions( self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None ) -> WarehousePermissions: - """Set SQL warehouse permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. 
:param warehouse_id: str @@ -11187,9 +11085,7 @@ def set_workspace_warehouse_config( security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None, sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None, ): - """Set the workspace configuration. - - Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. + """Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. :param channel: :class:`Channel` (optional) Optional: Channel selection details @@ -11243,9 +11139,7 @@ def set_workspace_warehouse_config( self._api.do("PUT", "/api/2.0/sql/config/warehouses", body=body, headers=headers) def start(self, id: str) -> Wait[GetWarehouseResponse]: - """Start a warehouse. - - Starts a SQL warehouse. + """Starts a SQL warehouse. :param id: str Required. Id of the SQL warehouse. @@ -11266,9 +11160,7 @@ def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouse return self.start(id=id).result(timeout=timeout) def stop(self, id: str) -> Wait[GetWarehouseResponse]: - """Stop a warehouse. - - Stops a SQL warehouse. + """Stops a SQL warehouse. :param id: str Required. Id of the SQL warehouse. @@ -11291,9 +11183,7 @@ def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseR def update_permissions( self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None ) -> WarehousePermissions: - """Update SQL warehouse permissions. - - Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root + """Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. 
:param warehouse_id: str diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 4a2a7100a..ea3af7760 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -1755,9 +1755,7 @@ def wait_get_endpoint_vector_search_endpoint_online( def create_endpoint( self, name: str, endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None ) -> Wait[EndpointInfo]: - """Create an endpoint. - - Create a new endpoint. + """Create a new endpoint. :param name: str Name of the vector search endpoint @@ -1802,9 +1800,7 @@ def create_endpoint_and_wait( ) def delete_endpoint(self, endpoint_name: str): - """Delete an endpoint. - - Delete a vector search endpoint. + """Delete a vector search endpoint. :param endpoint_name: str Name of the vector search endpoint @@ -1819,9 +1815,7 @@ def delete_endpoint(self, endpoint_name: str): self._api.do("DELETE", f"/api/2.0/vector-search/endpoints/{endpoint_name}", headers=headers) def get_endpoint(self, endpoint_name: str) -> EndpointInfo: - """Get an endpoint. - - Get details for a single vector search endpoint. + """Get details for a single vector search endpoint. :param endpoint_name: str Name of the endpoint @@ -1837,9 +1831,7 @@ def get_endpoint(self, endpoint_name: str) -> EndpointInfo: return EndpointInfo.from_dict(res) def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[EndpointInfo]: - """List all endpoints. - - List all vector search endpoints in the workspace. + """List all vector search endpoints in the workspace. :param page_token: str (optional) Token for pagination @@ -1866,9 +1858,7 @@ def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[Endpoi def update_endpoint_budget_policy( self, endpoint_name: str, budget_policy_id: str ) -> PatchEndpointBudgetPolicyResponse: - """Update the budget policy of an endpoint. 
- - Update the budget policy of an endpoint + """Update the budget policy of an endpoint :param endpoint_name: str Name of the vector search endpoint @@ -1938,9 +1928,7 @@ def create_index( delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None, direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None, ) -> VectorIndex: - """Create an index. - - Create a new index. + """Create a new index. :param name: str Name of the index @@ -1982,9 +1970,7 @@ def create_index( return VectorIndex.from_dict(res) def delete_data_vector_index(self, index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse: - """Delete data from index. - - Handles the deletion of data from a specified vector index. + """Handles the deletion of data from a specified vector index. :param index_name: str Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. @@ -2009,8 +1995,6 @@ def delete_data_vector_index(self, index_name: str, primary_keys: List[str]) -> def delete_index(self, index_name: str): """Delete an index. - Delete an index. - :param index_name: str Name of the index @@ -2026,8 +2010,6 @@ def delete_index(self, index_name: str): def get_index(self, index_name: str) -> VectorIndex: """Get an index. - Get an index. - :param index_name: str Name of the index @@ -2042,9 +2024,7 @@ def get_index(self, index_name: str) -> VectorIndex: return VectorIndex.from_dict(res) def list_indexes(self, endpoint_name: str, *, page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: - """List indexes. - - List all indexes in the given endpoint. + """List all indexes in the given endpoint. :param endpoint_name: str Name of the endpoint @@ -2085,9 +2065,7 @@ def query_index( query_vector: Optional[List[float]] = None, score_threshold: Optional[float] = None, ) -> QueryVectorIndexResponse: - """Query an index. - - Query the specified vector index. + """Query the specified vector index. 
:param index_name: str Name of the vector index to query. @@ -2145,9 +2123,7 @@ def query_index( def query_next_page( self, index_name: str, *, endpoint_name: Optional[str] = None, page_token: Optional[str] = None ) -> QueryVectorIndexResponse: - """Query next page. - - Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request + """Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. :param index_name: str @@ -2177,9 +2153,7 @@ def query_next_page( def scan_index( self, index_name: str, *, last_primary_key: Optional[str] = None, num_results: Optional[int] = None ) -> ScanVectorIndexResponse: - """Scan an index. - - Scan the specified vector index and return the first `num_results` entries after the exclusive + """Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. :param index_name: str @@ -2205,9 +2179,7 @@ def scan_index( return ScanVectorIndexResponse.from_dict(res) def sync_index(self, index_name: str): - """Synchronize an index. - - Triggers a synchronization process for a specified vector index. + """Triggers a synchronization process for a specified vector index. :param index_name: str Name of the vector index to synchronize. Must be a Delta Sync Index. @@ -2222,9 +2194,7 @@ def sync_index(self, index_name: str): self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers) def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse: - """Upsert data into an index. - - Handles the upserting of data into a specified vector index. + """Handles the upserting of data into a specified vector index. :param index_name: str Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. 
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 6753ad880..556bf1fdb 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -2372,9 +2372,7 @@ def __init__(self, api_client): def create( self, git_provider: str, *, git_username: Optional[str] = None, personal_access_token: Optional[str] = None ) -> CreateCredentialsResponse: - """Create a credential entry. - - Creates a Git credential entry for the user. Only one Git credential per user is supported, so any + """Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. @@ -2412,9 +2410,7 @@ def create( return CreateCredentialsResponse.from_dict(res) def delete(self, credential_id: int): - """Delete a credential. - - Deletes the specified Git credential. + """Deletes the specified Git credential. :param credential_id: int The ID for the corresponding credential to access. @@ -2429,9 +2425,7 @@ def delete(self, credential_id: int): self._api.do("DELETE", f"/api/2.0/git-credentials/{credential_id}", headers=headers) def get(self, credential_id: int) -> GetCredentialsResponse: - """Get a credential entry. - - Gets the Git credential with the specified credential ID. + """Gets the Git credential with the specified credential ID. :param credential_id: int The ID for the corresponding credential to access. @@ -2447,9 +2441,7 @@ def get(self, credential_id: int) -> GetCredentialsResponse: return GetCredentialsResponse.from_dict(res) def list(self) -> Iterator[CredentialInfo]: - """Get Git credentials. - - Lists the calling user's Git credentials. One credential per user is supported. + """Lists the calling user's Git credentials. One credential per user is supported. 
:returns: Iterator over :class:`CredentialInfo` """ @@ -2470,9 +2462,7 @@ def update( git_username: Optional[str] = None, personal_access_token: Optional[str] = None, ): - """Update a credential. - - Updates the specified Git credential. + """Updates the specified Git credential. :param credential_id: int The ID for the corresponding credential to access. @@ -2526,9 +2516,7 @@ def __init__(self, api_client): def create( self, url: str, provider: str, *, path: Optional[str] = None, sparse_checkout: Optional[SparseCheckout] = None ) -> CreateRepoResponse: - """Create a repo. - - Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created + """Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser. :param url: str @@ -2564,9 +2552,7 @@ def create( return CreateRepoResponse.from_dict(res) def delete(self, repo_id: int): - """Delete a repo. - - Deletes the specified repo. + """Deletes the specified repo. :param repo_id: int The ID for the corresponding repo to delete. @@ -2581,9 +2567,7 @@ def delete(self, repo_id: int): self._api.do("DELETE", f"/api/2.0/repos/{repo_id}", headers=headers) def get(self, repo_id: int) -> GetRepoResponse: - """Get a repo. - - Returns the repo with the given repo ID. + """Returns the repo with the given repo ID. :param repo_id: int ID of the Git folder (repo) object in the workspace. @@ -2599,9 +2583,7 @@ def get(self, repo_id: int) -> GetRepoResponse: return GetRepoResponse.from_dict(res) def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse: - """Get repo permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param repo_id: str The repo for which to get or manage permissions. 
@@ -2617,9 +2599,7 @@ def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse return GetRepoPermissionLevelsResponse.from_dict(res) def get_permissions(self, repo_id: str) -> RepoPermissions: - """Get repo permissions. - - Gets the permissions of a repo. Repos can inherit permissions from their root object. + """Gets the permissions of a repo. Repos can inherit permissions from their root object. :param repo_id: str The repo for which to get or manage permissions. @@ -2635,9 +2615,7 @@ def get_permissions(self, repo_id: str) -> RepoPermissions: return RepoPermissions.from_dict(res) def list(self, *, next_page_token: Optional[str] = None, path_prefix: Optional[str] = None) -> Iterator[RepoInfo]: - """Get repos. - - Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate + """Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. :param next_page_token: str (optional) @@ -2672,9 +2650,7 @@ def list(self, *, next_page_token: Optional[str] = None, path_prefix: Optional[s def set_permissions( self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None ) -> RepoPermissions: - """Set repo permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. :param repo_id: str @@ -2702,9 +2678,7 @@ def update( sparse_checkout: Optional[SparseCheckoutUpdate] = None, tag: Optional[str] = None, ): - """Update a repo. - - Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same + """Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. 
:param repo_id: int @@ -2738,9 +2712,7 @@ def update( def update_permissions( self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None ) -> RepoPermissions: - """Update repo permissions. - - Updates the permissions on a repo. Repos can inherit permissions from their root object. + """Updates the permissions on a repo. Repos can inherit permissions from their root object. :param repo_id: str The repo for which to get or manage permissions. @@ -2782,9 +2754,7 @@ def create_scope( initial_manage_principal: Optional[str] = None, scope_backend_type: Optional[ScopeBackendType] = None, ): - """Create a new secret scope. - - The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not + """The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. :param scope: str @@ -2815,9 +2785,7 @@ def create_scope( self._api.do("POST", "/api/2.0/secrets/scopes/create", body=body, headers=headers) def delete_acl(self, scope: str, principal: str): - """Delete an ACL. - - Deletes the given ACL on the given scope. + """Deletes the given ACL on the given scope. Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have @@ -2843,9 +2811,7 @@ def delete_acl(self, scope: str, principal: str): self._api.do("POST", "/api/2.0/secrets/acls/delete", body=body, headers=headers) def delete_scope(self, scope: str): - """Delete a secret scope. - - Deletes a secret scope. + """Deletes a secret scope. Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. 
@@ -2866,9 +2832,7 @@ def delete_scope(self, scope: str): self._api.do("POST", "/api/2.0/secrets/scopes/delete", body=body, headers=headers) def delete_secret(self, scope: str, key: str): - """Delete a secret. - - Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the + """Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` @@ -2894,9 +2858,7 @@ def delete_secret(self, scope: str, key: str): self._api.do("POST", "/api/2.0/secrets/delete", body=body, headers=headers) def get_acl(self, scope: str, principal: str) -> AclItem: - """Get secret ACL details. - - Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` + """Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the @@ -2923,9 +2885,7 @@ def get_acl(self, scope: str, principal: str) -> AclItem: return AclItem.from_dict(res) def get_secret(self, scope: str, key: str) -> GetSecretResponse: - """Get a secret. - - Gets the bytes representation of a secret value for the specified scope and key. + """Gets the bytes representation of a secret value for the specified scope and key. Users need the READ permission to make this call. @@ -2956,9 +2916,7 @@ def get_secret(self, scope: str, key: str) -> GetSecretResponse: return GetSecretResponse.from_dict(res) def list_acls(self, scope: str) -> Iterator[AclItem]: - """Lists ACLs. - - List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. + """List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. 
Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. @@ -2981,9 +2939,7 @@ def list_acls(self, scope: str) -> Iterator[AclItem]: return parsed if parsed is not None else [] def list_scopes(self) -> Iterator[SecretScope]: - """List all scopes. - - Lists all secret scopes available in the workspace. + """Lists all secret scopes available in the workspace. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. @@ -2999,9 +2955,7 @@ def list_scopes(self) -> Iterator[SecretScope]: return parsed if parsed is not None else [] def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: - """List secret keys. - - Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data + """Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if @@ -3026,9 +2980,7 @@ def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: return parsed if parsed is not None else [] def put_acl(self, scope: str, principal: str, permission: AclPermission): - """Create/update an ACL. - - Creates or overwrites the Access Control List (ACL) associated with the given principal (user or + """Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. In general, a user or group will use the most powerful permission available to them, and permissions @@ -3079,9 +3031,7 @@ def put_acl(self, scope: str, principal: str, permission: AclPermission): def put_secret( self, scope: str, key: str, *, bytes_value: Optional[str] = None, string_value: Optional[str] = None ): - """Add a secret. - - Inserts a secret under the provided scope with the given name. 
If a secret already exists with the + """Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. @@ -3135,9 +3085,7 @@ def __init__(self, api_client): self._api = api_client def delete(self, path: str, *, recursive: Optional[bool] = None): - """Delete a workspace object. - - Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * + """Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. @@ -3166,9 +3114,7 @@ def delete(self, path: str, *, recursive: Optional[bool] = None): self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers) def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse: - """Export a workspace object. - - Exports an object or the contents of an entire directory. + """Exports an object or the contents of an entire directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -3208,9 +3154,7 @@ def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportR def get_permission_levels( self, workspace_object_type: str, workspace_object_id: str ) -> GetWorkspaceObjectPermissionLevelsResponse: - """Get workspace object permission levels. - - Gets the permission levels that a user can have on an object. + """Gets the permission levels that a user can have on an object. :param workspace_object_type: str The workspace object type for which to get or manage permissions. 
@@ -3232,9 +3176,7 @@ def get_permission_levels( return GetWorkspaceObjectPermissionLevelsResponse.from_dict(res) def get_permissions(self, workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions: - """Get workspace object permissions. - - Gets the permissions of a workspace object. Workspace objects can inherit permissions from their + """Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. :param workspace_object_type: str @@ -3255,9 +3197,7 @@ def get_permissions(self, workspace_object_type: str, workspace_object_id: str) return WorkspaceObjectPermissions.from_dict(res) def get_status(self, path: str) -> ObjectInfo: - """Get status. - - Gets the status of an object or a directory. If `path` does not exist, this call returns an error + """Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. :param path: str @@ -3285,9 +3225,7 @@ def import_( language: Optional[Language] = None, overwrite: Optional[bool] = None, ): - """Import a workspace object. - - Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. + """Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the @@ -3339,9 +3277,7 @@ def import_( self._api.do("POST", "/api/2.0/workspace/import", body=body, headers=headers) def list(self, path: str, *, notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]: - """List contents. - - Lists the contents of a directory, or the object if it is not a directory. 
If the input path does not + """Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. :param path: str @@ -3366,9 +3302,7 @@ def list(self, path: str, *, notebooks_modified_after: Optional[int] = None) -> return parsed if parsed is not None else [] def mkdirs(self, path: str): - """Create a directory. - - Creates the specified directory (and necessary parent directories if they do not exist). If there is + """Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. @@ -3398,9 +3332,7 @@ def set_permissions( *, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, ) -> WorkspaceObjectPermissions: - """Set workspace object permissions. - - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct + """Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. @@ -3432,9 +3364,7 @@ def update_permissions( *, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, ) -> WorkspaceObjectPermissions: - """Update workspace object permissions. - - Updates the permissions on a workspace object. Workspace objects can inherit permissions from their + """Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object. 
:param workspace_object_type: str diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst index b3bda9c61..2851dec74 100644 --- a/docs/account/billing/billable_usage.rst +++ b/docs/account/billing/billable_usage.rst @@ -20,8 +20,6 @@ resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") - Return billable usage logs. - Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst index d77eaa4a2..ec3e8ffb2 100644 --- a/docs/account/billing/budget_policy.rst +++ b/docs/account/billing/budget_policy.rst @@ -8,8 +8,6 @@ .. py:method:: create( [, policy: Optional[BudgetPolicy], request_id: Optional[str]]) -> BudgetPolicy - Create a budget policy. - Creates a new policy. :param policy: :class:`BudgetPolicy` (optional) @@ -25,8 +23,6 @@ .. py:method:: delete(policy_id: str) - Delete a budget policy. - Deletes a policy :param policy_id: str @@ -37,8 +33,6 @@ .. py:method:: get(policy_id: str) -> BudgetPolicy - Get a budget policy. - Retrieves a policy by it's ID. :param policy_id: str @@ -49,8 +43,6 @@ .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy] - List policies. - Lists all policies. Policies are returned in the alphabetically ascending order of their names. :param filter_by: :class:`Filter` (optional) @@ -72,8 +64,6 @@ .. py:method:: update(policy_id: str, policy: BudgetPolicy [, limit_config: Optional[LimitConfig]]) -> BudgetPolicy - Update a budget policy. 
- Updates a policy :param policy_id: str diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index cf87d1424..acbfbfb68 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -56,8 +56,6 @@ # cleanup a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Create new budget. - Create a new budget configuration for an account. For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html. @@ -69,8 +67,6 @@ .. py:method:: delete(budget_id: str) - Delete budget. - Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. @@ -130,8 +126,6 @@ # cleanup a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Get budget. - Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str @@ -154,8 +148,6 @@ all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) - Get all budgets. - Gets all budgets associated with this account. :param page_token: str (optional) @@ -242,8 +234,6 @@ # cleanup a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Modify budget. - Updates a budget configuration for an account. Both account and budget configuration are specified by ID. diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst index 4f3baef60..1eefc8802 100644 --- a/docs/account/billing/log_delivery.rst +++ b/docs/account/billing/log_delivery.rst @@ -52,8 +52,6 @@ status=billing.LogDeliveryConfigStatus.DISABLED, ) - Create a new log delivery configuration. - Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. 
This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a @@ -128,8 +126,6 @@ status=billing.LogDeliveryConfigStatus.DISABLED, ) - Get log delivery configuration. - Gets a Databricks log delivery configuration object for an account, both specified by ID. :param log_delivery_configuration_id: str @@ -152,8 +148,6 @@ all = a.log_delivery.list(billing.ListLogDeliveryRequest()) - Get all log delivery configurations. - Gets all Databricks log delivery configurations associated with an account specified by ID. :param credentials_id: str (optional) @@ -171,8 +165,6 @@ .. py:method:: patch_status(log_delivery_configuration_id: str, status: LogDeliveryConfigStatus) - Enable or disable log delivery configuration. - Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst index a316bf232..4eef82411 100644 --- a/docs/account/billing/usage_dashboards.rst +++ b/docs/account/billing/usage_dashboards.rst @@ -10,8 +10,6 @@ .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse - Create new usage dashboard. - Create a usage dashboard specified by workspaceId, accountId, and dashboard type. :param dashboard_type: :class:`UsageDashboardType` (optional) @@ -25,8 +23,6 @@ .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse - Get usage dashboard. - Get a usage dashboard specified by workspaceId, accountId, and dashboard type. 
:param dashboard_type: :class:`UsageDashboardType` (optional) diff --git a/docs/account/catalog/metastore_assignments.rst b/docs/account/catalog/metastore_assignments.rst index 1bfeedca0..4463ef712 100644 --- a/docs/account/catalog/metastore_assignments.rst +++ b/docs/account/catalog/metastore_assignments.rst @@ -8,8 +8,6 @@ .. py:method:: create(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[CreateMetastoreAssignment]]) - Assigns a workspace to a metastore. - Creates an assignment to a metastore for a workspace :param workspace_id: int @@ -23,8 +21,6 @@ .. py:method:: delete(workspace_id: int, metastore_id: str) - Delete a metastore assignment. - Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. :param workspace_id: int @@ -37,8 +33,6 @@ .. py:method:: get(workspace_id: int) -> AccountsMetastoreAssignment - Gets the metastore assignment for a workspace. - Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 returned. @@ -64,8 +58,6 @@ ws = a.metastore_assignments.list(metastore_id=os.environ["TEST_METASTORE_ID"]) - Get all workspaces assigned to a metastore. - Gets a list of all Databricks workspace IDs that have been assigned to given metastore. :param metastore_id: str @@ -76,8 +68,6 @@ .. py:method:: update(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[UpdateMetastoreAssignment]]) - Updates a metastore assignment to a workspaces. - Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. diff --git a/docs/account/catalog/metastores.rst b/docs/account/catalog/metastores.rst index 36df616ea..3cae5dd31 100644 --- a/docs/account/catalog/metastores.rst +++ b/docs/account/catalog/metastores.rst @@ -9,8 +9,6 @@ .. 
py:method:: create( [, metastore_info: Optional[CreateMetastore]]) -> AccountsMetastoreInfo - Create metastore. - Creates a Unity Catalog metastore. :param metastore_info: :class:`CreateMetastore` (optional) @@ -20,8 +18,6 @@ .. py:method:: delete(metastore_id: str [, force: Optional[bool]]) - Delete a metastore. - Deletes a Unity Catalog metastore for an account, both specified by ID. :param metastore_id: str @@ -34,8 +30,6 @@ .. py:method:: get(metastore_id: str) -> AccountsMetastoreInfo - Get a metastore. - Gets a Unity Catalog metastore from an account, both specified by ID. :param metastore_id: str @@ -46,8 +40,6 @@ .. py:method:: list() -> Iterator[MetastoreInfo] - Get all metastores associated with an account. - Gets all Unity Catalog metastores associated with an account specified by ID. :returns: Iterator over :class:`MetastoreInfo` @@ -55,8 +47,6 @@ .. py:method:: update(metastore_id: str [, metastore_info: Optional[UpdateMetastore]]) -> AccountsMetastoreInfo - Update a metastore. - Updates an existing Unity Catalog metastore. :param metastore_id: str diff --git a/docs/account/catalog/storage_credentials.rst b/docs/account/catalog/storage_credentials.rst index 0b9948015..f62632e8f 100644 --- a/docs/account/catalog/storage_credentials.rst +++ b/docs/account/catalog/storage_credentials.rst @@ -8,8 +8,6 @@ .. py:method:: create(metastore_id: str [, credential_info: Optional[CreateStorageCredential]]) -> AccountsStorageCredentialInfo - Create a storage credential. - Creates a new storage credential. The request object is specific to the cloud: * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * @@ -27,8 +25,6 @@ .. py:method:: delete(metastore_id: str, storage_credential_name: str [, force: Optional[bool]]) - Delete a storage credential. - Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. @@ -44,8 +40,6 @@ .. 
py:method:: get(metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo - Gets the named storage credential. - Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on the storage credential. @@ -59,8 +53,6 @@ .. py:method:: list(metastore_id: str) -> Iterator[StorageCredentialInfo] - Get all storage credentials assigned to a metastore. - Gets a list of all storage credentials that have been assigned to given metastore. :param metastore_id: str @@ -71,8 +63,6 @@ .. py:method:: update(metastore_id: str, storage_credential_name: str [, credential_info: Optional[UpdateStorageCredential]]) -> AccountsStorageCredentialInfo - Updates a storage credential. - Updates a storage credential on the metastore. The caller must be the owner of the storage credential. If the caller is a metastore admin, only the __owner__ credential can be changed. diff --git a/docs/account/iam/access_control.rst b/docs/account/iam/access_control.rst index 475d28c07..0e271f62e 100644 --- a/docs/account/iam/access_control.rst +++ b/docs/account/iam/access_control.rst @@ -10,8 +10,6 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse - Get assignable roles for a resource. - Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. @@ -28,8 +26,6 @@ .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse - Get a rule set. - Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. @@ -58,8 +54,6 @@ .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse - Update a rule set. - Replace the rules of a rule set. 
First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst index d005f7930..47b5d6cbb 100644 --- a/docs/account/iam/groups.rst +++ b/docs/account/iam/groups.rst @@ -13,8 +13,6 @@ .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group - Create a new group. - Creates a group in the Databricks account with a unique name, using the supplied group details. :param display_name: str (optional) @@ -41,8 +39,6 @@ .. py:method:: delete(id: str) - Delete a group. - Deletes a group from the Databricks account. :param id: str @@ -53,8 +49,6 @@ .. py:method:: get(id: str) -> Group - Get group details. - Gets the information for a specific group in the Databricks account. :param id: str @@ -65,8 +59,6 @@ .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] - List group details. - Gets all details of the groups associated with the Databricks account. :param attributes: str (optional) @@ -94,8 +86,6 @@ .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]]) - Update group details. - Partially updates the details of a group. :param id: str @@ -109,8 +99,6 @@ .. 
py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) - Replace a group. - Updates the details of a group by replacing the entire group entity. :param id: str diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst index 302cf5f79..6ec4fb814 100644 --- a/docs/account/iam/service_principals.rst +++ b/docs/account/iam/service_principals.rst @@ -23,9 +23,10 @@ a = AccountClient() - spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}") - - Create a service principal. + sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}") + + # cleanup + a.service_principals.delete(id=sp_create.id) Creates a new service principal in the Databricks account. @@ -54,8 +55,6 @@ .. py:method:: delete(id: str) - Delete a service principal. - Delete a single service principal in the Databricks account. :param id: str @@ -84,8 +83,6 @@ # cleanup a.service_principals.delete(id=sp_create.id) - Get service principal details. - Gets the details for a single service principal define in the Databricks account. :param id: str @@ -116,8 +113,6 @@ # cleanup a.service_principals.delete(id=sp_create.id) - List service principals. - Gets the set of service principals associated with a Databricks account. :param attributes: str (optional) @@ -170,8 +165,6 @@ # cleanup a.service_principals.delete(id=sp_create.id) - Update service principal details. - Partially updates the details of a single service principal in the Databricks account. :param id: str @@ -205,8 +198,6 @@ # cleanup a.service_principals.delete(id=sp_create.id) - Replace service principal. - Updates the details of a single service principal. 
This action replaces the existing service principal with the same name. diff --git a/docs/account/iam/users.rst b/docs/account/iam/users.rst index 7e527ec45..4ddf58a71 100644 --- a/docs/account/iam/users.rst +++ b/docs/account/iam/users.rst @@ -35,8 +35,6 @@ # cleanup a.users.delete(id=user.id) - Create a new user. - Creates a new user in the Databricks account. This new user will also be added to the Databricks account. @@ -90,8 +88,6 @@ a.users.delete(id=user.id) - Delete a user. - Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. @@ -124,8 +120,6 @@ # cleanup a.users.delete(id=user.id) - Get user details. - Gets information for a specific user in Databricks account. :param id: str @@ -156,8 +150,6 @@ .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User] - List users. - Gets details for all the users associated with a Databricks account. :param attributes: str (optional) @@ -217,8 +209,6 @@ # cleanup a.users.delete(id=user.id) - Update user details. - Partially updates a user resource by applying the supplied operations on specific user attributes. :param id: str @@ -232,8 +222,6 @@ .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) - Replace a user. - Replaces a user's information with the data supplied in request. 
:param id: str diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 745bd75da..fa9c2ee3e 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -9,8 +9,6 @@ .. py:method:: delete(workspace_id: int, principal_id: int) - Delete permissions assignment. - Deletes the workspace permissions assignment in a given account and workspace for the specified principal. @@ -24,8 +22,6 @@ .. py:method:: get(workspace_id: int) -> WorkspacePermissions - List workspace permissions. - Get an array of workspace permissions for the specified account and workspace. :param workspace_id: int @@ -51,8 +47,6 @@ all = a.workspace_assignment.list(workspace_id=workspace_id) - Get permission assignments. - Get the permission assignments for the specified Databricks account and Databricks workspace. :param workspace_id: int @@ -80,16 +74,14 @@ spn_id = spn.id - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - a.workspace_assignment.update( + _ = a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], ) - Create or update permissions assignment. - Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 5110e70ad..09a4ce463 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -11,8 +11,6 @@ Create Custom OAuth App Integration. - Create Custom OAuth App Integration. - You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param confidential: bool (optional) @@ -35,8 +33,6 @@ .. py:method:: delete(integration_id: str) - Delete Custom OAuth App Integration. - Delete an existing Custom OAuth App Integration. 
You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. @@ -47,8 +43,6 @@ .. py:method:: get(integration_id: str) -> GetCustomAppIntegrationOutput - Get OAuth Custom App Integration. - Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str @@ -59,8 +53,6 @@ .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput] - Get custom oauth app integrations. - Get the list of custom OAuth app integrations for the specified Databricks account :param include_creator_username: bool (optional) @@ -72,8 +64,6 @@ .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) - Updates Custom OAuth App Integration. - Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst index 873d8a650..98479d720 100644 --- a/docs/account/oauth2/o_auth_published_apps.rst +++ b/docs/account/oauth2/o_auth_published_apps.rst @@ -10,8 +10,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PublishedAppOutput] - Get all the published OAuth apps. - Get all the available published OAuth apps in Databricks. :param page_size: int (optional) diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst index fd61c58fa..df635113b 100644 --- a/docs/account/oauth2/published_app_integration.rst +++ b/docs/account/oauth2/published_app_integration.rst @@ -11,8 +11,6 @@ Create Published OAuth App Integration. - Create Published OAuth App Integration. 
- You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param app_id: str (optional) @@ -25,8 +23,6 @@ .. py:method:: delete(integration_id: str) - Delete Published OAuth App Integration. - Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. @@ -37,8 +33,6 @@ .. py:method:: get(integration_id: str) -> GetPublishedAppIntegrationOutput - Get OAuth Published App Integration. - Gets the Published OAuth App Integration for the given integration id. :param integration_id: str @@ -48,8 +42,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput] - Get published oauth app integrations. - Get the list of published OAuth app integrations for the specified Databricks account :param page_size: int (optional) @@ -60,8 +52,6 @@ .. py:method:: update(integration_id: str [, token_access_policy: Optional[TokenAccessPolicy]]) - Updates Published OAuth App Integration. - Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst index f3335d87a..3f7f275ee 100644 --- a/docs/account/oauth2/service_principal_federation_policy.rst +++ b/docs/account/oauth2/service_principal_federation_policy.rst @@ -47,7 +47,7 @@ .. py:method:: create(service_principal_id: int, policy: FederationPolicy [, policy_id: Optional[str]]) -> FederationPolicy - Create service principal federation policy. + Create account federation policy. :param service_principal_id: int The service principal id for the federation policy. @@ -61,7 +61,7 @@ .. py:method:: delete(service_principal_id: int, policy_id: str) - Delete service principal federation policy. + Delete account federation policy. 
:param service_principal_id: int The service principal id for the federation policy. @@ -73,7 +73,7 @@ .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy - Get service principal federation policy. + Get account federation policy. :param service_principal_id: int The service principal id for the federation policy. @@ -85,7 +85,7 @@ .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] - List service principal federation policies. + List account federation policies. :param service_principal_id: int The service principal id for the federation policy. @@ -97,7 +97,7 @@ .. py:method:: update(service_principal_id: int, policy_id: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy - Update service principal federation policy. + Update account federation policy. :param service_principal_id: int The service principal id for the federation policy. diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst index 01965a19a..0317229c6 100644 --- a/docs/account/oauth2/service_principal_secrets.rst +++ b/docs/account/oauth2/service_principal_secrets.rst @@ -19,8 +19,6 @@ .. py:method:: create(service_principal_id: int [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse - Create service principal secret. - Create a secret for the given service principal. :param service_principal_id: int @@ -34,8 +32,6 @@ .. py:method:: delete(service_principal_id: int, secret_id: str) - Delete service principal secret. - Delete a secret from the given service principal. :param service_principal_id: int @@ -48,8 +44,6 @@ .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo] - List service principal secrets. - List all secrets associated with the given service principal. 
This operation only returns information about the secrets themselves and does not include the secret values. diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index e307588f1..faf3b52ba 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,17 +24,15 @@ a = AccountClient() - creds = a.credentials.create( + role = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=creds.credentials_id) - - Create credential configuration. + a.credentials.delete(credentials_id=role.credentials_id) Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks @@ -58,8 +56,6 @@ .. py:method:: delete(credentials_id: str) - Delete credential configuration. - Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. @@ -96,8 +92,6 @@ # cleanup a.credentials.delete(credentials_id=role.credentials_id) - Get credential configuration. - Gets a Databricks credential configuration object for an account, both specified by ID. :param credentials_id: str @@ -119,8 +113,6 @@ configs = a.credentials.list() - Get all credential configurations. - Gets all Databricks credential configurations associated with an account specified by ID. 
:returns: Iterator over :class:`Credential` diff --git a/docs/account/provisioning/encryption_keys.rst b/docs/account/provisioning/encryption_keys.rst index 1c00a2914..f13513446 100644 --- a/docs/account/provisioning/encryption_keys.rst +++ b/docs/account/provisioning/encryption_keys.rst @@ -43,8 +43,6 @@ # cleanup a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) - Create encryption key configuration. - Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks @@ -69,8 +67,6 @@ .. py:method:: delete(customer_managed_key_id: str) - Delete encryption key configuration. - Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. @@ -107,8 +103,6 @@ # cleanup a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) - Get encryption key configuration. - Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks @@ -141,8 +135,6 @@ all = a.encryption_keys.list() - Get all encryption key configurations. - Gets all customer-managed key configuration objects for an account. If the key is specified as a workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. 
diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst index 46bccd872..0b3b8f8c2 100644 --- a/docs/account/provisioning/networks.rst +++ b/docs/account/provisioning/networks.rst @@ -27,8 +27,6 @@ security_group_ids=[hex(time.time_ns())[2:]], ) - Create network configuration. - Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. @@ -57,8 +55,6 @@ .. py:method:: delete(network_id: str) - Delete a network configuration. - Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. @@ -92,8 +88,6 @@ by_id = a.networks.get(network_id=netw.network_id) - Get a network configuration. - Gets a Databricks network configuration, which represents a cloud VPC and its resources. :param network_id: str @@ -115,8 +109,6 @@ configs = a.networks.list() - Get all network configurations. - Gets a list of all Databricks network configurations for an account, specified by ID. This operation is available only if your account is on the E2 version of the platform. diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst index e30ed2585..ca422e8b1 100644 --- a/docs/account/provisioning/private_access.rst +++ b/docs/account/provisioning/private_access.rst @@ -28,8 +28,6 @@ # cleanup a.private_access.delete(private_access_settings_id=created.private_access_settings_id) - Create private access settings. - Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. @@ -76,8 +74,6 @@ .. py:method:: delete(private_access_settings_id: str) - Delete a private access settings object. 
- Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. @@ -116,8 +112,6 @@ # cleanup a.private_access.delete(private_access_settings_id=created.private_access_settings_id) - Get a private access settings object. - Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. @@ -145,8 +139,6 @@ all = a.private_access.list() - Get all private access settings objects. - Gets a list of all private access settings objects for an account, specified by ID. :returns: Iterator over :class:`PrivateAccessSettings` @@ -180,8 +172,6 @@ # cleanup a.private_access.delete(private_access_settings_id=created.private_access_settings_id) - Replace private access settings. - Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index 010795885..c89589282 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,6 +16,7 @@ .. code-block:: + import os import time from databricks.sdk import AccountClient @@ -23,15 +24,13 @@ a = AccountClient() - bucket = a.storage.create( + storage = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), ) # cleanup - a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) - - Create new storage configuration. + a.storage.delete(storage_configuration_id=storage.storage_configuration_id) Creates new storage configuration for an account, specified by ID. 
Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace @@ -53,8 +52,6 @@ .. py:method:: delete(storage_configuration_id: str) - Delete storage configuration. - Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. @@ -85,8 +82,6 @@ by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id) - Get storage configuration. - Gets a Databricks storage configuration for an account, both specified by ID. :param storage_configuration_id: str @@ -108,8 +103,6 @@ configs = a.storage.list() - Get all storage configurations. - Gets a list of all Databricks storage configurations for your account, specified by ID. :returns: Iterator over :class:`StorageConfiguration` diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst index fecfbec5d..032365197 100644 --- a/docs/account/provisioning/vpc_endpoints.rst +++ b/docs/account/provisioning/vpc_endpoints.rst @@ -29,8 +29,6 @@ # cleanup a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) - Create VPC endpoint configuration. - Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. @@ -58,8 +56,6 @@ .. py:method:: delete(vpc_endpoint_id: str) - Delete VPC endpoint configuration. - Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. @@ -100,8 +96,6 @@ # cleanup a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) - Get a VPC endpoint configuration. - Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. @@ -127,8 +121,6 @@ all = a.vpc_endpoints.list() - Get all VPC endpoint configurations. 
- Gets a list of all VPC endpoints for an account, specified by ID. Before configuring PrivateLink, read the [Databricks article about PrivateLink]. diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 26ec685e5..f49d58cc2 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -50,8 +50,6 @@ a.credentials.delete(credentials_id=role.credentials_id) a.workspaces.delete(workspace_id=waiter.workspace_id) - Create a new workspace. - Creates a new workspace. **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request @@ -160,8 +158,6 @@ .. py:method:: delete(workspace_id: int) - Delete a workspace. - Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. However, it might take a few minutes for all workspaces resources to be deleted, depending on the size and number of workspace resources. @@ -190,8 +186,6 @@ by_id = a.workspaces.get(workspace_id=created.workspace_id) - Get a workspace. - Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace @@ -224,8 +218,6 @@ all = a.workspaces.list() - Get all workspaces. - Gets a list of all workspaces associated with an account, specified by ID. This operation is available only if your account is on the E2 version of the platform or on a select @@ -266,8 +258,6 @@ # cleanup a.credentials.delete(credentials_id=update_role.credentials_id) - Update workspace configuration. - Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated varies between these two use cases. 
diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst index a2b8cb91a..815203fff 100644 --- a/docs/account/settings/csp_enablement_account.rst +++ b/docs/account/settings/csp_enablement_account.rst @@ -13,8 +13,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> CspEnablementAccountSetting - Get the compliance security profile setting for new workspaces. - Gets the compliance security profile setting for new workspaces. :param etag: str (optional) @@ -29,8 +27,6 @@ .. py:method:: update(allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str) -> CspEnablementAccountSetting - Update the compliance security profile setting for new workspaces. - Updates the value of the compliance security profile setting for new workspaces. :param allow_missing: bool diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst index 212e3f98e..5c3c2656d 100644 --- a/docs/account/settings/disable_legacy_features.rst +++ b/docs/account/settings/disable_legacy_features.rst @@ -12,8 +12,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse - Delete the disable legacy features setting. - Deletes the disable legacy features setting. :param etag: str (optional) @@ -28,8 +26,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures - Get the disable legacy features setting. - Gets the value of the disable legacy features setting. :param etag: str (optional) @@ -44,8 +40,6 @@ .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures - Update the disable legacy features setting. - Updates the value of the disable legacy features setting. 
:param allow_missing: bool diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst index b570b2e37..f23187dc7 100644 --- a/docs/account/settings/enable_ip_access_lists.rst +++ b/docs/account/settings/enable_ip_access_lists.rst @@ -9,8 +9,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse - Delete the account IP access toggle setting. - Reverts the value of the account IP access toggle setting to default (ON) :param etag: str (optional) @@ -25,8 +23,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable - Get the account IP access toggle setting. - Gets the value of the account IP access toggle setting. :param etag: str (optional) @@ -41,8 +37,6 @@ .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable - Update the account IP access toggle setting. - Updates the value of the account IP access toggle setting. :param allow_missing: bool diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst index e14d1a71f..1764b7a08 100644 --- a/docs/account/settings/esm_enablement_account.rst +++ b/docs/account/settings/esm_enablement_account.rst @@ -10,8 +10,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> EsmEnablementAccountSetting - Get the enhanced security monitoring setting for new workspaces. - Gets the enhanced security monitoring setting for new workspaces. :param etag: str (optional) @@ -26,8 +24,6 @@ .. py:method:: update(allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str) -> EsmEnablementAccountSetting - Update the enhanced security monitoring setting for new workspaces. - Updates the value of the enhanced security monitoring setting for new workspaces. 
:param allow_missing: bool diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst index 031354b15..0837cd0f7 100644 --- a/docs/account/settings/ip_access_lists.rst +++ b/docs/account/settings/ip_access_lists.rst @@ -25,8 +25,6 @@ .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse - Create access list. - Creates an IP access list for the account. A list can be an allow list or a block list. See the top of this file for a description of how the @@ -55,8 +53,6 @@ .. py:method:: delete(ip_access_list_id: str) - Delete access list. - Deletes an IP access list, specified by its list ID. :param ip_access_list_id: str @@ -67,8 +63,6 @@ .. py:method:: get(ip_access_list_id: str) -> GetIpAccessListResponse - Get IP access list. - Gets an IP access list, specified by its list ID. :param ip_access_list_id: str @@ -79,8 +73,6 @@ .. py:method:: list() -> Iterator[IpAccessListInfo] - Get access lists. - Gets all IP access lists for the specified account. :returns: Iterator over :class:`IpAccessListInfo` @@ -88,8 +80,6 @@ .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]]) - Replace access list. - Replaces an IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the @@ -118,8 +108,6 @@ .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) - Update access list. - Updates an existing IP access list, specified by its ID. A list can include allow lists and block lists. 
See the top of this file for a description of how the diff --git a/docs/account/settings/llm_proxy_partner_powered_account.rst b/docs/account/settings/llm_proxy_partner_powered_account.rst index fe5a55183..58db9d11b 100644 --- a/docs/account/settings/llm_proxy_partner_powered_account.rst +++ b/docs/account/settings/llm_proxy_partner_powered_account.rst @@ -8,8 +8,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredAccount - Get the enable partner powered AI features account setting. - Gets the enable partner powered AI features account setting. :param etag: str (optional) @@ -24,8 +22,6 @@ .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str) -> LlmProxyPartnerPoweredAccount - Update the enable partner powered AI features account setting. - Updates the enable partner powered AI features account setting. :param allow_missing: bool diff --git a/docs/account/settings/llm_proxy_partner_powered_enforce.rst b/docs/account/settings/llm_proxy_partner_powered_enforce.rst index 084b744e0..65dc1d42c 100644 --- a/docs/account/settings/llm_proxy_partner_powered_enforce.rst +++ b/docs/account/settings/llm_proxy_partner_powered_enforce.rst @@ -9,8 +9,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredEnforce - Get the enforcement status of partner powered AI features account setting. - Gets the enforcement status of partner powered AI features account setting. :param etag: str (optional) @@ -25,8 +23,6 @@ .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str) -> LlmProxyPartnerPoweredEnforce - Update the enforcement status of partner powered AI features account setting. - Updates the enable enforcement status of partner powered AI features account setting. 
:param allow_missing: bool diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst index 8b3a9d704..edfb87fb7 100644 --- a/docs/account/settings/network_connectivity.rst +++ b/docs/account/settings/network_connectivity.rst @@ -15,8 +15,6 @@ .. py:method:: create_network_connectivity_configuration(network_connectivity_config: CreateNetworkConnectivityConfiguration) -> NetworkConnectivityConfiguration - Create a network connectivity configuration. - Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access @@ -37,8 +35,6 @@ .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule) -> NccPrivateEndpointRule - Create a private endpoint rule. - Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. @@ -60,8 +56,6 @@ .. py:method:: delete_network_connectivity_configuration(network_connectivity_config_id: str) - Delete a network connectivity configuration. - Deletes a network connectivity configuration. :param network_connectivity_config_id: str @@ -72,8 +66,6 @@ .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule - Delete a private endpoint rule. - Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. 
When a private endpoint is deactivated, the `deactivated` field is @@ -89,8 +81,6 @@ .. py:method:: get_network_connectivity_configuration(network_connectivity_config_id: str) -> NetworkConnectivityConfiguration - Get a network connectivity configuration. - Gets a network connectivity configuration. :param network_connectivity_config_id: str @@ -101,8 +91,6 @@ .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule - Gets a private endpoint rule. - Gets the private endpoint rule. :param network_connectivity_config_id: str @@ -115,8 +103,6 @@ .. py:method:: list_network_connectivity_configurations( [, page_token: Optional[str]]) -> Iterator[NetworkConnectivityConfiguration] - List network connectivity configurations. - Gets an array of network connectivity configurations. :param page_token: str (optional) @@ -127,8 +113,6 @@ .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccPrivateEndpointRule] - List private endpoint rules. - Gets an array of private endpoint rules. :param network_connectivity_config_id: str @@ -141,8 +125,6 @@ .. py:method:: update_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str) -> NccPrivateEndpointRule - Update a private endpoint rule. - Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources is allowed to be updated. diff --git a/docs/account/settings/network_policies.rst b/docs/account/settings/network_policies.rst index 7eb489bb8..e6d9b5173 100644 --- a/docs/account/settings/network_policies.rst +++ b/docs/account/settings/network_policies.rst @@ -13,8 +13,6 @@ .. py:method:: create_network_policy_rpc(network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy - Create a network policy. 
- Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. @@ -25,8 +23,6 @@ .. py:method:: delete_network_policy_rpc(network_policy_id: str) - Delete a network policy. - Deletes a network policy. Cannot be called on 'default-policy'. :param network_policy_id: str @@ -37,8 +33,6 @@ .. py:method:: get_network_policy_rpc(network_policy_id: str) -> AccountNetworkPolicy - Get a network policy. - Gets a network policy. :param network_policy_id: str @@ -49,8 +43,6 @@ .. py:method:: list_network_policies_rpc( [, page_token: Optional[str]]) -> Iterator[AccountNetworkPolicy] - List network policies. - Gets an array of network policies. :param page_token: str (optional) @@ -61,8 +53,6 @@ .. py:method:: update_network_policy_rpc(network_policy_id: str, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy - Update a network policy. - Updates a network policy. This allows you to modify the configuration of a network policy. :param network_policy_id: str diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst index 58b35e7f7..c4a950330 100644 --- a/docs/account/settings/personal_compute.rst +++ b/docs/account/settings/personal_compute.rst @@ -14,8 +14,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeletePersonalComputeSettingResponse - Delete Personal Compute setting. - Reverts back the Personal Compute setting value to default (ON) :param etag: str (optional) @@ -30,8 +28,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> PersonalComputeSetting - Get Personal Compute setting. - Gets the value of the Personal Compute setting. :param etag: str (optional) @@ -46,8 +42,6 @@ .. py:method:: update(allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting - Update Personal Compute setting. - Updates the value of the Personal Compute setting. 
:param allow_missing: bool diff --git a/docs/account/settings/workspace_network_configuration.rst b/docs/account/settings/workspace_network_configuration.rst index 3ed40313f..8e91bc291 100644 --- a/docs/account/settings/workspace_network_configuration.rst +++ b/docs/account/settings/workspace_network_configuration.rst @@ -12,8 +12,6 @@ .. py:method:: get_workspace_network_option_rpc(workspace_id: int) -> WorkspaceNetworkOption - Get workspace network option. - Gets the network option for a workspace. Every workspace has exactly one network policy binding, with 'default-policy' used if no explicit assignment exists. @@ -25,8 +23,6 @@ .. py:method:: update_workspace_network_option_rpc(workspace_id: int, workspace_network_option: WorkspaceNetworkOption) -> WorkspaceNetworkOption - Update workspace network option. - Updates the network option for a workspace. This operation associates the workspace with the specified network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst index cb5400647..b04e12c38 100644 --- a/docs/dbdataclasses/aibuilder.rst +++ b/docs/dbdataclasses/aibuilder.rst @@ -8,7 +8,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelResponse +.. autoclass:: CancelOptimizeResponse + :members: + :undoc-members: + +.. autoclass:: CreateCustomLlmRequest :members: :undoc-members: @@ -20,6 +24,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteCustomLlmResponse + :members: + :undoc-members: + .. 
autoclass:: StartCustomLlmOptimizationRunRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 5fd115b65..9ebf9b05f 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -246,7 +246,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConnectionType - Next Id: 31 + Next Id: 33 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -1567,7 +1567,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ValidationResultOperation - The operation tested. + A enum represents the file operation performed on the external location with the storage credential .. py:attribute:: DELETE :value: "DELETE" @@ -1586,7 +1586,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ValidationResultResult - The results of the tested operation. + A enum represents the result of the file operation .. py:attribute:: FAIL :value: "FAIL" diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 4046dabe3..af4915a6b 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -204,9 +204,25 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: DbtCloudRunStatus +.. autoclass:: DbtCloudTask + :members: + :undoc-members: - Response enumeration from calling the dbt Cloud API, for inclusion in output +.. autoclass:: DbtCloudTaskOutput + :members: + :undoc-members: + +.. autoclass:: DbtOutput + :members: + :undoc-members: + +.. autoclass:: DbtPlatformJobRunStep + :members: + :undoc-members: + +.. py:class:: DbtPlatformRunStatus + + Response enumeration from calling the dbt platform API, for inclusion in output .. py:attribute:: CANCELLED :value: "CANCELLED" @@ -226,15 +242,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: DbtCloudTask - :members: - :undoc-members: - -.. autoclass:: DbtCloudTaskOutput +.. autoclass:: DbtPlatformTask :members: :undoc-members: -.. autoclass:: DbtOutput +.. autoclass:: DbtPlatformTaskOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 0891291c4..75a9798db 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -192,6 +192,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteOnlineStoreResponse + :members: + :undoc-members: + .. autoclass:: DeleteRun :members: :undoc-members: @@ -396,6 +400,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListOnlineStoresResponse + :members: + :undoc-members: + .. autoclass:: ListRegistryWebhooks :members: :undoc-members: @@ -542,6 +550,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: OnlineStore + :members: + :undoc-members: + +.. py:class:: OnlineStoreState + + .. py:attribute:: AVAILABLE + :value: "AVAILABLE" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: FAILING_OVER + :value: "FAILING_OVER" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: STOPPED + :value: "STOPPED" + + .. py:attribute:: UPDATING + :value: "UPDATING" + .. autoclass:: Param :members: :undoc-members: @@ -565,6 +597,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CAN_READ :value: "CAN_READ" +.. autoclass:: PublishSpec + :members: + :undoc-members: + +.. py:class:: PublishSpecPublishMode + + .. py:attribute:: CONTINUOUS + :value: "CONTINUOUS" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + +.. autoclass:: PublishTableRequest + :members: + :undoc-members: + +.. 
autoclass:: PublishTableResponse + :members: + :undoc-members: + .. autoclass:: RegisteredModelAccessControlRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index f4618951e..44679fc41 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -339,6 +339,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: PipelinesEnvironment + :members: + :undoc-members: + .. autoclass:: ReportSpec :members: :undoc-members: diff --git a/docs/workspace/aibuilder/ai_builder.rst b/docs/workspace/aibuilder/ai_builder.rst new file mode 100644 index 000000000..e20356914 --- /dev/null +++ b/docs/workspace/aibuilder/ai_builder.rst @@ -0,0 +1,92 @@ +``w.ai_builder``: AI Builder Service +==================================== +.. currentmodule:: databricks.sdk.service.aibuilder + +.. py:class:: AiBuilderAPI + + The Custom LLMs service manages state and powers the UI for the Custom LLM product. + + .. py:method:: cancel_optimize(id: str) + + Cancel a Custom LLM Optimization Run. + + :param id: str + + + + + .. py:method:: create_custom_llm(name: str, instructions: str [, agent_artifact_path: Optional[str], datasets: Optional[List[Dataset]], guidelines: Optional[List[str]]]) -> CustomLlm + + Create a Custom LLM. + + :param name: str + Name of the custom LLM. Only alphanumeric characters and dashes allowed. + :param instructions: str + Instructions for the custom LLM to follow + :param agent_artifact_path: str (optional) + Optional: UC path for agent artifacts. If you are using a dataset that you only have read + permissions, please provide a destination path where you have write permissions. Please provide this + in catalog.schema format. + :param datasets: List[:class:`Dataset`] (optional) + Datasets used for training and evaluating the model, not for inference. Currently, only 1 dataset is + accepted. 
+ :param guidelines: List[str] (optional) + Guidelines for the custom LLM to adhere to + + :returns: :class:`CustomLlm` + + + .. py:method:: delete_custom_llm(id: str) + + Delete a Custom LLM. + + :param id: str + The id of the custom llm + + + + + .. py:method:: get_custom_llm(id: str) -> CustomLlm + + Get a Custom LLM. + + :param id: str + The id of the custom llm + + :returns: :class:`CustomLlm` + + + .. py:method:: start_optimize(id: str) -> CustomLlm + + Start a Custom LLM Optimization Run. + + :param id: str + The Id of the tile. + + :returns: :class:`CustomLlm` + + + .. py:method:: update_custom_llm(id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm + + Update a Custom LLM. + + :param id: str + The id of the custom llm + :param custom_llm: :class:`CustomLlm` + The CustomLlm containing the fields which should be updated. + :param update_mask: str + The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) + present in `custom_llm`. + + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`CustomLlm` + \ No newline at end of file diff --git a/docs/workspace/aibuilder/index.rst b/docs/workspace/aibuilder/index.rst index ff3ba49e7..f9685354a 100644 --- a/docs/workspace/aibuilder/index.rst +++ b/docs/workspace/aibuilder/index.rst @@ -7,4 +7,4 @@ Create and manage AI Builder resources. .. 
toctree:: :maxdepth: 1 - custom_llms \ No newline at end of file + ai_builder \ No newline at end of file diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst index be094be30..06172aa93 100644 --- a/docs/workspace/apps/apps.rst +++ b/docs/workspace/apps/apps.rst @@ -9,8 +9,6 @@ .. py:method:: create(app: App [, no_compute: Optional[bool]]) -> Wait[App] - Create an app. - Creates a new app. :param app: :class:`App` @@ -27,8 +25,6 @@ .. py:method:: delete(name: str) -> App - Delete an app. - Deletes an app. :param name: str @@ -39,8 +35,6 @@ .. py:method:: deploy(app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment] - Create an app deployment. - Creates an app deployment for the app with the supplied name. :param app_name: str @@ -57,8 +51,6 @@ .. py:method:: get(name: str) -> App - Get an app. - Retrieves information for the app with the supplied name. :param name: str @@ -69,8 +61,6 @@ .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment - Get an app deployment. - Retrieves information for the app deployment with the supplied name and deployment id. :param app_name: str @@ -83,8 +73,6 @@ .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse - Get app permission levels. - Gets the permission levels that a user can have on an object. :param app_name: str @@ -95,8 +83,6 @@ .. py:method:: get_permissions(app_name: str) -> AppPermissions - Get app permissions. - Gets the permissions of an app. Apps can inherit permissions from their root object. :param app_name: str @@ -107,8 +93,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] - List apps. - Lists all apps in the workspace. :param page_size: int (optional) @@ -121,8 +105,6 @@ .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] - List app deployments. 
- Lists all app deployments for the app with the supplied name. :param app_name: str @@ -137,8 +119,6 @@ .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions - Set app permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -151,8 +131,6 @@ .. py:method:: start(name: str) -> Wait[App] - Start an app. - Start the last active deployment of the app in the workspace. :param name: str @@ -168,8 +146,6 @@ .. py:method:: stop(name: str) -> Wait[App] - Stop an app. - Stops the active deployment of the app in the workspace. :param name: str @@ -185,8 +161,6 @@ .. py:method:: update(name: str, app: App) -> App - Update an app. - Updates the app with the supplied name. :param name: str @@ -199,8 +173,6 @@ .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions - Update app permissions. - Updates the permissions on an app. Apps can inherit permissions from their root object. :param app_name: str diff --git a/docs/workspace/catalog/artifact_allowlists.rst b/docs/workspace/catalog/artifact_allowlists.rst index f153dee79..af5a0b28b 100644 --- a/docs/workspace/catalog/artifact_allowlists.rst +++ b/docs/workspace/catalog/artifact_allowlists.rst @@ -9,8 +9,6 @@ .. py:method:: get(artifact_type: ArtifactType) -> ArtifactAllowlistInfo - Get an artifact allowlist. - Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. @@ -22,8 +20,6 @@ .. py:method:: update(artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher] [, created_at: Optional[int], created_by: Optional[str], metastore_id: Optional[str]]) -> ArtifactAllowlistInfo - Set an artifact allowlist. 
- Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 2505551cd..9a18ede8a 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -24,12 +24,10 @@ w = WorkspaceClient() - created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created_catalog.name, force=True) - - Create a catalog. + w.catalogs.delete(name=created.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. @@ -58,8 +56,6 @@ .. py:method:: delete(name: str [, force: Optional[bool]]) - Delete a catalog. - Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. @@ -91,8 +87,6 @@ # cleanup w.catalogs.delete(name=created.name, force=True) - Get a catalog. - Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. @@ -119,8 +113,6 @@ all = w.catalogs.list(catalog.ListCatalogsRequest()) - List catalogs. - Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the @@ -163,8 +155,6 @@ # cleanup w.catalogs.delete(name=created.name, force=True) - Update a catalog. - Updates the catalog that matches the supplied name. 
The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index 50785cd23..ec7f39be2 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -41,8 +41,6 @@ # cleanup w.connections.delete(name=conn_create.name) - Create a connection. - Creates a new connection Creates a new connection to an external data source. It allows users to specify connection details and @@ -66,8 +64,6 @@ .. py:method:: delete(name: str) - Delete a connection. - Deletes the connection that matches the supplied name. :param name: str @@ -115,8 +111,6 @@ # cleanup w.connections.delete(name=conn_create.name) - Get a connection. - Gets a connection from it's name. :param name: str @@ -139,8 +133,6 @@ conn_list = w.connections.list(catalog.ListConnectionsRequest()) - List connections. - List all connections. :param max_results: int (optional) @@ -191,8 +183,6 @@ # cleanup w.connections.delete(name=conn_create.name) - Update a connection. - Updates the connection that matches the supplied name. :param name: str diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst index 661d955b0..9784e5787 100644 --- a/docs/workspace/catalog/credentials.rst +++ b/docs/workspace/catalog/credentials.rst @@ -14,8 +14,6 @@ .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo - Create a credential. - Creates a new credential. 
The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. @@ -26,15 +24,15 @@ The credential name. The name must be unique among storage and service credentials within the metastore. :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration + The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) - The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + The Azure service principal configuration. :param comment: str (optional) Comment associated with the credential. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. + The Databricks managed GCP service account configuration. :param purpose: :class:`CredentialPurpose` (optional) Indicates the purpose of the credential. :param read_only: bool (optional) @@ -48,8 +46,6 @@ .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]]) - Delete a credential. - Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. @@ -64,8 +60,6 @@ .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials - Generate a temporary service credential. - Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. @@ -81,8 +75,6 @@ .. py:method:: get_credential(name_arg: str) -> CredentialInfo - Get a credential. 
- Gets a service or storage credential from the metastore. The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. @@ -94,8 +86,6 @@ .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo] - List credentials. - Gets an array of credentials (as __CredentialInfo__ objects). The array is limited to only the credentials that the caller has permission to access. If the caller @@ -117,8 +107,6 @@ .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo - Update a credential. - Updates a service or storage credential on the metastore. The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. @@ -127,15 +115,15 @@ :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) - The AWS IAM role configuration + The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) - The Azure service principal configuration. Only applicable when purpose is **STORAGE**. + The Azure service principal configuration. :param comment: str (optional) Comment associated with the credential. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. 
+ The Databricks managed GCP service account configuration. :param force: bool (optional) Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). @@ -156,8 +144,6 @@ .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse - Validate a credential. - Validates a credential. For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 91d9af27f..c3b5217a1 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,22 +30,22 @@ w = WorkspaceClient() - credential = w.storage_credentials.create( + storage_credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + comment="created via SDK", ) - created = w.external_locations.create( + external_location = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + credential_name=storage_credential.name, + comment="created via SDK", + url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", ) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) - - Create an external location. 
+ w.storage_credentials.delete(name=storage_credential.name) + w.external_locations.delete(name=external_location.name) Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -79,8 +79,6 @@ .. py:method:: delete(name: str [, force: Optional[bool]]) - Delete an external location. - Deletes the specified external location from the metastore. The caller must be the owner of the external location. @@ -124,8 +122,6 @@ w.storage_credentials.delete(name=credential.name) w.external_locations.delete(name=created.name) - Get an external location. - Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. @@ -151,8 +147,6 @@ all = w.external_locations.list() - List external locations. - Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. 
@@ -188,26 +182,24 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) _ = w.external_locations.update( name=created.name, credential_name=credential.name, - url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), + url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', ) # cleanup - w.storage_credentials.delete(name=credential.name) - w.external_locations.delete(name=created.name) - - Update an external location. + w.storage_credentials.delete(delete=credential.name) + w.external_locations.delete(delete=created.name) Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst index 3c736e714..0297c0e22 100644 --- a/docs/workspace/catalog/functions.rst +++ b/docs/workspace/catalog/functions.rst @@ -12,8 +12,6 @@ .. py:method:: create(function_info: CreateFunction) -> FunctionInfo - Create a function. - **WARNING: This API is experimental and will change in future versions** Creates a new function @@ -30,8 +28,6 @@ .. py:method:: delete(name: str [, force: Optional[bool]]) - Delete a function. - Deletes the function that matches the supplied name. 
For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - @@ -49,8 +45,6 @@ .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> FunctionInfo - Get a function. - Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner @@ -70,8 +64,6 @@ .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FunctionInfo] - List functions. - List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only @@ -98,8 +90,6 @@ .. py:method:: update(name: str [, owner: Optional[str]]) -> FunctionInfo - Update a function. - Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst index 603a20584..64c1b3484 100644 --- a/docs/workspace/catalog/grants.rst +++ b/docs/workspace/catalog/grants.rst @@ -60,8 +60,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.tables.delete(full_name=table_full_name) - Get permissions. - Gets the permissions for a securable. 
Does not include inherited permissions. :param securable_type: str @@ -132,8 +130,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.tables.delete(full_name=table_full_name) - Get effective permissions. - Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. @@ -215,8 +211,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.tables.delete(full_name=table_full_name) - Update permissions. - Updates the permissions for a securable. :param securable_type: str diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst index cf35cc01b..35fdf0d41 100644 --- a/docs/workspace/catalog/metastores.rst +++ b/docs/workspace/catalog/metastores.rst @@ -42,8 +42,6 @@ # cleanup w.metastores.delete(id=created.metastore_id, force=True) - Create an assignment. - Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. @@ -81,8 +79,6 @@ # cleanup w.metastores.delete(id=created.metastore_id, force=True) - Create a metastore. - Creates a new metastore based on a provided name and optional storage root path. By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is @@ -111,8 +107,6 @@ current_metastore = w.metastores.current() - Get metastore assignment for workspace. - Gets the metastore assignment for the workspace being accessed. :returns: :class:`MetastoreAssignment` @@ -120,8 +114,6 @@ .. py:method:: delete(id: str [, force: Optional[bool]]) - Delete a metastore. - Deletes a metastore. The caller must be a metastore admin. :param id: str @@ -156,8 +148,6 @@ # cleanup w.metastores.delete(id=created.metastore_id, force=True) - Get a metastore. 
- Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. @@ -180,8 +170,6 @@ all = w.metastores.list() - List metastores. - Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array. @@ -212,8 +200,6 @@ summary = w.metastores.summary() - Get a metastore summary. - Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. @@ -246,8 +232,6 @@ # cleanup w.metastores.delete(id=created.metastore_id, force=True) - Delete an assignment. - Deletes a metastore assignment. The caller must be an account administrator. :param workspace_id: int @@ -282,8 +266,6 @@ # cleanup w.metastores.delete(id=created.metastore_id, force=True) - Update a metastore. - Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. @@ -310,8 +292,6 @@ .. py:method:: update_assignment(workspace_id: int [, default_catalog_name: Optional[str], metastore_id: Optional[str]]) - Update an assignment. - Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 99b62ae03..ec3ed4985 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -13,8 +13,6 @@ .. py:method:: delete(full_name: str, version: int) - Delete a Model Version. - Deletes a model version from the specified registered model. 
Any aliases assigned to the model version will also be deleted. @@ -32,8 +30,6 @@ .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo - Get a Model Version. - Get a model version. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent @@ -55,8 +51,6 @@ .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo - Get Model Version By Alias. - Get a model version by alias. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the @@ -75,8 +69,6 @@ .. py:method:: list(full_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ModelVersionInfo] - List Model Versions. - List model versions. You can list model versions under a particular schema, or list all model versions in the current metastore. @@ -108,8 +100,6 @@ .. py:method:: update(full_name: str, version: int [, comment: Optional[str]]) -> ModelVersionInfo - Update a Model Version. - Updates the specified model version. The caller must be a metastore admin or an owner of the parent registered model. For the latter case, diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst index 898d00eb3..23768bcb5 100644 --- a/docs/workspace/catalog/online_tables.rst +++ b/docs/workspace/catalog/online_tables.rst @@ -8,8 +8,6 @@ .. py:method:: create(table: OnlineTable) -> Wait[OnlineTable] - Create an Online Table. - Create a new Online Table. :param table: :class:`OnlineTable` @@ -25,8 +23,6 @@ .. py:method:: delete(name: str) - Delete an Online Table. - Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! @@ -39,8 +35,6 @@ .. 
py:method:: get(name: str) -> OnlineTable - Get an Online Table. - Get information about an existing online table and its status. :param name: str diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst index 255076aac..21066be5f 100644 --- a/docs/workspace/catalog/quality_monitors.rst +++ b/docs/workspace/catalog/quality_monitors.rst @@ -13,8 +13,6 @@ .. py:method:: cancel_refresh(table_name: str, refresh_id: str) - Cancel refresh. - Cancel an active monitor refresh for the given refresh ID. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -34,8 +32,6 @@ .. py:method:: create(table_name: str, assets_dir: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], skip_builtin_dashboard: Optional[bool], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries], warehouse_id: Optional[str]]) -> MonitorInfo - Create a table monitor. - Creates a new monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the @@ -85,8 +81,6 @@ .. py:method:: delete(table_name: str) - Delete a table monitor. - Deletes a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -107,8 +101,6 @@ .. py:method:: get(table_name: str) -> MonitorInfo - Get a table monitor. - Gets a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -128,8 +120,6 @@ .. py:method:: get_refresh(table_name: str, refresh_id: str) -> MonitorRefreshInfo - Get refresh. 
- Gets info about a specific monitor refresh using the given refresh ID. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -149,8 +139,6 @@ .. py:method:: list_refreshes(table_name: str) -> MonitorRefreshListResponse - List refreshes. - Gets an array containing the history of the most recent refreshes (up to 25) for this table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -168,8 +156,6 @@ .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse - Regenerate a monitoring dashboard. - Regenerates the monitoring dashboard for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the @@ -191,8 +177,6 @@ .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo - Queue a metric refresh for a monitor. - Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. @@ -211,8 +195,6 @@ .. py:method:: update(table_name: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], dashboard_id: Optional[str], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries]]) -> MonitorInfo - Update a table monitor. - Updates a monitor for the specified table. The caller must either: 1. be an owner of the table's parent catalog 2. 
have **USE_CATALOG** on the diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index 3f7ced621..4e6b0aa6b 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -31,8 +31,6 @@ .. py:method:: create(catalog_name: str, schema_name: str, name: str [, comment: Optional[str], storage_location: Optional[str]]) -> RegisteredModelInfo - Create a Registered Model. - Creates a new registered model in Unity Catalog. File storage for model versions in the registered model will be located in the default location which @@ -59,8 +57,6 @@ .. py:method:: delete(full_name: str) - Delete a Registered Model. - Deletes a registered model and all its model versions from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the registered model. For the latter case, the @@ -75,8 +71,6 @@ .. py:method:: delete_alias(full_name: str, alias: str) - Delete a Registered Model Alias. - Deletes a registered model alias. The caller must be a metastore admin or an owner of the registered model. For the latter case, the @@ -93,8 +87,6 @@ .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo - Get a Registered Model. - Get a registered model. The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the @@ -114,8 +106,6 @@ .. py:method:: list( [, catalog_name: Optional[str], include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name: Optional[str]]) -> Iterator[RegisteredModelInfo] - List Registered Models. - List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. @@ -158,8 +148,6 @@ .. py:method:: set_alias(full_name: str, alias: str, version_num: int) -> RegisteredModelAlias - Set a Registered Model Alias. 
- Set an alias on the specified registered model. The caller must be a metastore admin or an owner of the registered model. For the latter case, the @@ -178,8 +166,6 @@ .. py:method:: update(full_name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> RegisteredModelInfo - Update a Registered Model. - Updates the specified registered model. The caller must be a metastore admin or an owner of the registered model. For the latter case, the diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst index c1e14687c..dc7df22ac 100644 --- a/docs/workspace/catalog/resource_quotas.rst +++ b/docs/workspace/catalog/resource_quotas.rst @@ -14,8 +14,6 @@ .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse - Get information for a single resource quota. - The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. @@ -32,8 +30,6 @@ .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo] - List all resource quotas under a metastore. - ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index 7c4a84e53..60a4eb79d 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -30,8 +30,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.schemas.delete(full_name=created_schema.full_name) - Create a schema. - Creates a new schema for a catalog in the metastore. 
The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. @@ -51,8 +49,6 @@ .. py:method:: delete(full_name: str [, force: Optional[bool]]) - Delete a schema. - Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog. @@ -87,8 +83,6 @@ w.catalogs.delete(name=new_catalog.name, force=True) w.schemas.delete(full_name=created.full_name) - Get a schema. - Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. @@ -121,8 +115,6 @@ # cleanup w.catalogs.delete(name=new_catalog.name, force=True) - List schemas. - Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. @@ -167,8 +159,6 @@ w.catalogs.delete(name=new_catalog.name, force=True) w.schemas.delete(full_name=created.full_name) - Update a schema. - Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. 
If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 9a5ed0a46..194069200 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,20 +30,22 @@ w = WorkspaceClient() - credential = w.storage_credentials.create( + created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=credential.name) - - Create a storage credential. + w.storage_credentials.delete(name=created.name) Creates a new storage credential. + The caller must be a metastore admin or have the **CREATE_STORAGE_CREDENTIAL** privilege on the + metastore. + :param name: str - The credential name. The name must be unique within the metastore. + The credential name. The name must be unique among storage and service credentials within the + metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -57,7 +59,8 @@ :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional) The Databricks managed GCP service account configuration. :param read_only: bool (optional) - Whether the storage credential is only usable for read operations. + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. @@ -66,15 +69,14 @@ .. py:method:: delete(name: str [, force: Optional[bool]]) - Delete a credential. - Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. 
:param name: str Name of the storage credential. :param force: bool (optional) - Force deletion even if there are dependent external locations or external tables. + Force an update even if there are dependent external locations or external tables (when purpose is + **STORAGE**) or dependent services (when purpose is **SERVICE**). @@ -104,8 +106,6 @@ # cleanup w.storage_credentials.delete(delete=created.name) - Get a credential. - Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. @@ -123,13 +123,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) - - List credentials. + all = w.storage_credentials.list() Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore @@ -165,22 +162,23 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) _ = w.storage_credentials.update( name=created.name, comment=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(delete=created.name) - - Update a credential. + w.storage_credentials.delete(name=created.name) Updates a storage credential on the metastore. + The caller must be the owner of the storage credential or a metastore admin. If the caller is a + metastore admin, only the **owner** field can be changed. 
+ :param name: str Name of the storage credential. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -198,12 +196,14 @@ :param force: bool (optional) Force update even if there are dependent external locations or external tables. :param isolation_mode: :class:`IsolationMode` (optional) + Whether the current securable is accessible from all workspaces or a specific set of workspaces. :param new_name: str (optional) New name for the storage credential. :param owner: str (optional) Username of current owner of credential. :param read_only: bool (optional) - Whether the storage credential is only usable for read operations. + Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. @@ -212,8 +212,6 @@ .. py:method:: validate( [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], external_location_name: Optional[str], read_only: Optional[bool], storage_credential_name: Optional[str], url: Optional[str]]) -> ValidateStorageCredentialResponse - Validate a storage credential. - Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking @@ -239,7 +237,7 @@ :param read_only: bool (optional) Whether the storage credential is only usable for read operations. :param storage_credential_name: str (optional) - The name of the storage credential to validate. + Required. 
The name of an existing credential or long-lived cloud credential to validate. :param url: str (optional) The external location url to validate. diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index 545a3b2e2..6c94e5349 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -9,8 +9,6 @@ .. py:method:: disable(metastore_id: str, schema_name: str) - Disable a system schema. - Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. @@ -24,8 +22,6 @@ .. py:method:: enable(metastore_id: str, schema_name: str [, catalog_name: Optional[str]]) - Enable a system schema. - Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. @@ -41,8 +37,6 @@ .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo] - List system schemas. - Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst index 6b974c463..96243192c 100644 --- a/docs/workspace/catalog/table_constraints.rst +++ b/docs/workspace/catalog/table_constraints.rst @@ -17,8 +17,6 @@ .. py:method:: create(full_name_arg: str, constraint: TableConstraint) -> TableConstraint - Create a table constraint. - Creates a new table constraint. For the table constraint creation to succeed, the user must satisfy both of these conditions: - the @@ -39,8 +37,6 @@ .. py:method:: delete(full_name: str, constraint_name: str, cascade: bool) - Delete a table constraint. - Deletes a table constraint. 
For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 4bbd3faad..9632dc0d8 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -15,8 +15,6 @@ .. py:method:: delete(full_name: str) - Delete a table. - Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent @@ -30,8 +28,6 @@ .. py:method:: exists(full_name: str) -> TableExistsResponse - Get boolean reflecting if table exists. - Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the @@ -85,8 +81,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.tables.delete(full_name=table_full_name) - Get a table. - Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** @@ -123,14 +117,12 @@ created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name) - all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) + summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name) # cleanup w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - List tables. 
- Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent @@ -189,8 +181,6 @@ w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - List table summaries. - Gets an array of summaries for tables for a schema and catalog within the metastore. The table summaries returned are either: @@ -224,8 +214,6 @@ .. py:method:: update(full_name: str [, owner: Optional[str]]) - Update a table owner. - Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst index b6ebbe819..ee3cc8907 100644 --- a/docs/workspace/catalog/temporary_table_credentials.rst +++ b/docs/workspace/catalog/temporary_table_credentials.rst @@ -19,8 +19,6 @@ .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse - Generate a temporary table credential. - Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have external_access_enabled flag set to true (default false). 
The caller must have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst index 5b6662f48..a3472518a 100644 --- a/docs/workspace/catalog/volumes.rst +++ b/docs/workspace/catalog/volumes.rst @@ -58,8 +58,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.volumes.delete(name=created_volume.full_name) - Create a Volume. - Creates a new volume. The user could create either an external volume or a managed volume. An external volume will be @@ -98,8 +96,6 @@ .. py:method:: delete(name: str) - Delete a Volume. - Deletes a volume from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must @@ -135,8 +131,6 @@ w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) - List Volumes. - Gets an array of volumes for the current metastore under the parent catalog and schema. The returned volumes are filtered based on the privileges of the calling user. For example, the @@ -222,8 +216,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.volumes.delete(name=created_volume.full_name) - Get a Volume. - Gets a volume from the metastore for a specific catalog and schema. The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the @@ -290,8 +282,6 @@ w.catalogs.delete(name=created_catalog.name, force=True) w.volumes.delete(name=created_volume.full_name) - Update a Volume. - Updates the specified volume under the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. 
For the latter case, the caller must diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index c507d4c78..6287456d9 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -39,8 +39,6 @@ # cleanup w.catalogs.delete(name=created.name, force=True) - Get catalog workspace bindings. - Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. @@ -52,8 +50,6 @@ .. py:method:: get_bindings(securable_type: str, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding] - Get securable workspace bindings. - Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. @@ -96,8 +92,6 @@ # cleanup w.catalogs.delete(name=created.name, force=True) - Update catalog workspace bindings. - Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. @@ -113,8 +107,6 @@ .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> UpdateWorkspaceBindingsResponse - Update securable workspace bindings. - Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst index 1fabe51cb..86ab44e6d 100644 --- a/docs/workspace/cleanrooms/clean_room_assets.rst +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -9,8 +9,6 @@ .. py:method:: create(clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset - Create an asset. - Create a clean room asset —share an asset like a notebook or table into the clean room. 
For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to @@ -26,8 +24,6 @@ .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) - Delete an asset. - Delete a clean room asset - unshare/remove the asset from the clean room :param clean_room_name: str @@ -42,8 +38,6 @@ .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) -> CleanRoomAsset - Get an asset. - Get the details of a clean room asset by its type and full name. :param clean_room_name: str @@ -70,8 +64,6 @@ .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset) -> CleanRoomAsset - Update an asset. - Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst index b78bf2c2a..716008d83 100644 --- a/docs/workspace/cleanrooms/clean_room_task_runs.rst +++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst @@ -8,8 +8,6 @@ .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun] - List notebook task runs. - List all the historical notebook task runs in a clean room. :param clean_room_name: str diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst index 45981bd9c..6a987175c 100644 --- a/docs/workspace/cleanrooms/clean_rooms.rst +++ b/docs/workspace/cleanrooms/clean_rooms.rst @@ -10,8 +10,6 @@ .. py:method:: create(clean_room: CleanRoom) -> CleanRoom - Create a clean room. - Create a new clean room with the specified collaborators. 
This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING @@ -27,8 +25,6 @@ .. py:method:: create_output_catalog(clean_room_name: str, output_catalog: CleanRoomOutputCatalog) -> CreateCleanRoomOutputCatalogResponse - Create an output catalog. - Create the output catalog of the clean room. :param clean_room_name: str @@ -40,8 +36,6 @@ .. py:method:: delete(name: str) - Delete a clean room. - Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. @@ -54,8 +48,6 @@ .. py:method:: get(name: str) -> CleanRoom - Get a clean room. - Get the details of a clean room given its name. :param name: str @@ -65,8 +57,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom] - List clean rooms. - Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. @@ -80,8 +70,6 @@ .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom - Update a clean room. - Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** privilege, or be metastore admin. diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index 790315fd9..a224b2d2c 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -49,8 +49,6 @@ # cleanup w.cluster_policies.delete(policy_id=created.policy_id) - Create a new policy. - Creates a new policy with prescribed settings. :param definition: str (optional) @@ -88,8 +86,6 @@ .. 
py:method:: delete(policy_id: str) - Delete a cluster policy. - Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. :param policy_id: str @@ -139,8 +135,6 @@ # cleanup w.cluster_policies.delete(policy_id=created.policy_id) - Update a cluster policy. - Update an existing policy for cluster. This operation may make some clusters governed by the previous policy invalid. @@ -208,8 +202,6 @@ # cleanup w.cluster_policies.delete(policy_id=created.policy_id) - Get a cluster policy. - Get a cluster policy entity. Creation and editing is available to admins only. :param policy_id: str @@ -220,8 +212,6 @@ .. py:method:: get_permission_levels(cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse - Get cluster policy permission levels. - Gets the permission levels that a user can have on an object. :param cluster_policy_id: str @@ -232,8 +222,6 @@ .. py:method:: get_permissions(cluster_policy_id: str) -> ClusterPolicyPermissions - Get cluster policy permissions. - Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root object. @@ -257,8 +245,6 @@ all = w.cluster_policies.list(compute.ListClusterPoliciesRequest()) - List cluster policies. - Returns a list of policies accessible by the requesting user. :param sort_column: :class:`ListSortColumn` (optional) @@ -273,8 +259,6 @@ .. py:method:: set_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions - Set cluster policy permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -287,8 +271,6 @@ .. py:method:: update_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions - Update cluster policy permissions. 
- Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root object. diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 961b0472a..5e1abee9f 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -59,8 +59,6 @@ w.users.delete(id=other_owner.id) w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Change cluster owner. - Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. @@ -101,8 +99,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Create new cluster. - Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be @@ -320,8 +316,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Terminate cluster. - Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. @@ -375,8 +369,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Update cluster configuration. - Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. @@ -616,8 +608,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - List cluster activity events. - Retrieves a list of events about the activity of a cluster. This API is paginated. 
If there are more events to read, the response includes all the parameters necessary to request the next page of events. @@ -684,8 +674,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Get cluster info. - Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. @@ -697,8 +685,6 @@ .. py:method:: get_permission_levels(cluster_id: str) -> GetClusterPermissionLevelsResponse - Get cluster permission levels. - Gets the permission levels that a user can have on an object. :param cluster_id: str @@ -709,8 +695,6 @@ .. py:method:: get_permissions(cluster_id: str) -> ClusterPermissions - Get cluster permissions. - Gets the permissions of a cluster. Clusters can inherit permissions from their root object. :param cluster_id: str @@ -727,12 +711,11 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import compute w = WorkspaceClient() - nodes = w.clusters.list_node_types() - - List clusters. + all = w.clusters.list(compute.ListClustersRequest()) Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. @@ -764,8 +747,6 @@ nodes = w.clusters.list_node_types() - List node types. - Returns a list of supported Spark node types. These node types can be used to launch a cluster. :returns: :class:`ListNodeTypesResponse` @@ -773,8 +754,6 @@ .. py:method:: list_zones() -> ListAvailableZonesResponse - List availability zones. - Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These zones can be used to launch a cluster. @@ -783,8 +762,6 @@ .. py:method:: permanent_delete(cluster_id: str) - Permanently delete cluster. - Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. 
@@ -828,8 +805,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Pin cluster. - Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. @@ -869,8 +844,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Resize cluster. - Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a `RUNNING` state. @@ -928,8 +901,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Restart cluster. - Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, nothing will happen. @@ -1008,8 +979,6 @@ .. py:method:: set_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions - Set cluster permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -1022,8 +991,6 @@ .. py:method:: spark_versions() -> GetSparkVersionsResponse - List available Spark versions. - Returns the list of available Spark versions. These versions can be used to launch a cluster. :returns: :class:`GetSparkVersionsResponse` @@ -1060,8 +1027,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Start terminated cluster. - Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with @@ -1110,8 +1075,6 @@ # cleanup w.clusters.permanent_delete(cluster_id=clstr.cluster_id) - Unpin cluster. 
- Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. @@ -1123,8 +1086,6 @@ .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails] - Update cluster configuration (partial). - Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be @@ -1162,8 +1123,6 @@ .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions - Update cluster permissions. - Updates the permissions on a cluster. Clusters can inherit permissions from their root object. :param cluster_id: str diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst index c96d044a2..a5d8b2a79 100644 --- a/docs/workspace/compute/command_execution.rst +++ b/docs/workspace/compute/command_execution.rst @@ -9,8 +9,6 @@ .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse] - Cancel a command. - Cancels a currently running command within an execution context. The command ID is obtained from a prior successful call to __execute__. @@ -29,8 +27,6 @@ .. py:method:: command_status(cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse - Get command info. - Gets the status of and, if available, the results from a currently executing command. The command ID is obtained from a prior successful call to __execute__. @@ -44,8 +40,6 @@ .. py:method:: context_status(cluster_id: str, context_id: str) -> ContextStatusResponse - Get status. 
- Gets the status for an execution context. :param cluster_id: str @@ -75,8 +69,6 @@ # cleanup w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) - Create an execution context. - Creates an execution context for running cluster commands. If successful, this method returns the ID of the new execution context. @@ -95,8 +87,6 @@ .. py:method:: destroy(cluster_id: str, context_id: str) - Delete an execution context. - Deletes an execution context. :param cluster_id: str @@ -133,8 +123,6 @@ # cleanup w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) - Run a command. - Runs a cluster command in the given execution context, using the provided language. If successful, it returns an ID for tracking the status of the command's execution. diff --git a/docs/workspace/compute/global_init_scripts.rst b/docs/workspace/compute/global_init_scripts.rst index e2eba7604..78854102d 100644 --- a/docs/workspace/compute/global_init_scripts.rst +++ b/docs/workspace/compute/global_init_scripts.rst @@ -36,8 +36,6 @@ # cleanup w.global_init_scripts.delete(script_id=created.script_id) - Create init script. - Creates a new global init script in this workspace. :param name: str @@ -62,8 +60,6 @@ .. py:method:: delete(script_id: str) - Delete init script. - Deletes a global init script. :param script_id: str @@ -98,8 +94,6 @@ # cleanup w.global_init_scripts.delete(script_id=created.script_id) - Get an init script. - Gets all the details of a script, including its Base64-encoded contents. :param script_id: str @@ -121,8 +115,6 @@ all = w.global_init_scripts.list() - Get init scripts. - Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. @@ -160,8 +152,6 @@ # cleanup w.global_init_scripts.delete(script_id=created.script_id) - Update init script. 
- Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst index 0614f2101..e5c966895 100644 --- a/docs/workspace/compute/instance_pools.rst +++ b/docs/workspace/compute/instance_pools.rst @@ -39,8 +39,6 @@ # cleanup w.instance_pools.delete(instance_pool_id=created.instance_pool_id) - Create a new instance pool. - Creates a new instance pool using idle and ready-to-use cloud instances. :param instance_pool_name: str @@ -95,8 +93,6 @@ .. py:method:: delete(instance_pool_id: str) - Delete an instance pool. - Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. :param instance_pool_id: str @@ -131,8 +127,6 @@ # cleanup w.instance_pools.delete(instance_pool_id=created.instance_pool_id) - Edit an existing instance pool. - Modifies the configuration of an existing instance pool. :param instance_pool_id: str @@ -188,8 +182,6 @@ # cleanup w.instance_pools.delete(instance_pool_id=created.instance_pool_id) - Get instance pool information. - Retrieve the information for an instance pool based on its identifier. :param instance_pool_id: str @@ -200,8 +192,6 @@ .. py:method:: get_permission_levels(instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse - Get instance pool permission levels. - Gets the permission levels that a user can have on an object. :param instance_pool_id: str @@ -212,8 +202,6 @@ .. py:method:: get_permissions(instance_pool_id: str) -> InstancePoolPermissions - Get instance pool permissions. - Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. @@ -236,8 +224,6 @@ all = w.instance_pools.list() - List instance pool info. - Gets a list of instance pools with their statistics. :returns: Iterator over :class:`InstancePoolAndStats` @@ -245,8 +231,6 @@ .. 
py:method:: set_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions - Set instance pool permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -259,8 +243,6 @@ .. py:method:: update_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions - Update instance pool permissions. - Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. diff --git a/docs/workspace/compute/instance_profiles.rst b/docs/workspace/compute/instance_profiles.rst index 182e1aa79..2af2d4b68 100644 --- a/docs/workspace/compute/instance_profiles.rst +++ b/docs/workspace/compute/instance_profiles.rst @@ -30,8 +30,6 @@ iam_role_arn="arn:aws:iam::000000000000:role/bcd", ) - Register an instance profile. - Registers an instance profile in Databricks. In the UI, you can then give users the permission to use this instance profile when launching clusters. @@ -80,8 +78,6 @@ iam_role_arn="arn:aws:iam::000000000000:role/bcdf", ) - Edit an instance profile. - The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: @@ -127,8 +123,6 @@ all = w.instance_profiles.list() - List available instance profiles. - List the instance profiles that the calling user can use to launch a cluster. This API is available to all users. @@ -138,8 +132,6 @@ .. py:method:: remove(instance_profile_arn: str) - Remove the instance profile. - Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. 
diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst index 339f54de2..f11857623 100644 --- a/docs/workspace/compute/libraries.rst +++ b/docs/workspace/compute/libraries.rst @@ -20,8 +20,6 @@ .. py:method:: all_cluster_statuses() -> Iterator[ClusterLibraryStatuses] - Get all statuses. - Get the status of all libraries on all clusters. A status is returned for all libraries installed on this cluster via the API or the libraries UI. @@ -30,8 +28,6 @@ .. py:method:: cluster_status(cluster_id: str) -> Iterator[LibraryFullStatus] - Get status. - Get the status of libraries on a cluster. A status is returned for all libraries installed on this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries set to be installed on this cluster, in the order that the libraries were added to the cluster, are @@ -46,8 +42,6 @@ .. py:method:: install(cluster_id: str, libraries: List[Library]) - Add a library. - Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. @@ -61,8 +55,6 @@ .. py:method:: uninstall(cluster_id: str, libraries: List[Library]) - Uninstall libraries. - Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst index fea7a08f9..92c1fc4cb 100644 --- a/docs/workspace/compute/policy_compliance_for_clusters.rst +++ b/docs/workspace/compute/policy_compliance_for_clusters.rst @@ -15,8 +15,6 @@ .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse - Enforce cluster policy compliance. - Updates a cluster to be compliant with the current version of its policy. 
A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. @@ -40,8 +38,6 @@ .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse - Get cluster policy compliance. - Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy was updated after the cluster was last edited. @@ -53,8 +49,6 @@ .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance] - List cluster policy compliance. - Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst index 8bbcd039f..1e4b6a00e 100644 --- a/docs/workspace/compute/policy_families.rst +++ b/docs/workspace/compute/policy_families.rst @@ -30,8 +30,6 @@ first_family = w.policy_families.get(policy_family_id=all[0].policy_family_id) - Get policy family information. - Retrieve the information for a policy family based on its identifier and version :param policy_family_id: str @@ -56,8 +54,6 @@ all = w.policy_families.list(compute.ListPolicyFamiliesRequest()) - List policy families. - Returns the list of policy definition types available to use at their latest version. This API is paginated. diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index fde42d405..1f0221ed3 100644 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -11,8 +11,6 @@ .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage] - Create conversation message. - Create new message in a [conversation](:method:genie/startconversation). The AI response uses all previously created messages in the conversation to respond. @@ -33,8 +31,6 @@ ..
py:method:: execute_message_attachment_query(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse - Execute message attachment SQL query. - Execute the SQL for a message query attachment. Use this API when the query attachment has expired and needs to be re-executed. @@ -52,8 +48,6 @@ .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse - [Deprecated] Execute SQL query in a conversation message. - Execute the SQL query in the message. :param space_id: str @@ -68,8 +62,6 @@ .. py:method:: generate_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGenerateDownloadFullQueryResultResponse - Generate full query result download. - Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of the download. The query result is stored in an external link and can be retrieved using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks @@ -90,8 +82,6 @@ .. py:method:: get_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str) -> GenieGetDownloadFullQueryResultResponse - Get download full query result. - After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and successfully receiving a `download_id`, use this API to poll the download progress. When the download is complete, the API returns one or more external links to the query result files. Warning: Databricks @@ -117,8 +107,6 @@ .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage - Get conversation message. - Get message from conversation. :param space_id: str @@ -133,8 +121,6 @@ .. 
py:method:: get_message_attachment_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse - Get message attachment SQL query result. - Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. @@ -152,8 +138,6 @@ .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse - [Deprecated] Get conversation message SQL query result. - Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY`. @@ -169,8 +153,6 @@ .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse - [Deprecated] Get conversation message SQL query result. - Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. @@ -188,8 +170,6 @@ .. py:method:: get_space(space_id: str) -> GenieSpace - Get Genie Space. - Get details of a Genie Space. :param space_id: str @@ -200,8 +180,6 @@ .. py:method:: list_spaces( [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListSpacesResponse - List Genie spaces. - Get list of Genie Spaces. :param page_size: int (optional) @@ -214,8 +192,6 @@ .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage] - Start conversation. - Start a new conversation. :param space_id: str diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index 4becea5a7..cfa87a8f3 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -9,8 +9,6 @@ .. 
py:method:: create(dashboard: Dashboard) -> Dashboard - Create dashboard. - Create a draft dashboard. :param dashboard: :class:`Dashboard` @@ -76,8 +74,6 @@ .. py:method:: get(dashboard_id: str) -> Dashboard - Get dashboard. - Get a draft dashboard. :param dashboard_id: str @@ -88,8 +84,6 @@ .. py:method:: get_published(dashboard_id: str) -> PublishedDashboard - Get published dashboard. - Get the current published dashboard. :param dashboard_id: str @@ -176,8 +170,6 @@ .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard - Migrate dashboard. - Migrates a classic SQL dashboard to Lakeview. :param source_dashboard_id: str @@ -195,8 +187,6 @@ .. py:method:: publish(dashboard_id: str [, embed_credentials: Optional[bool], warehouse_id: Optional[str]]) -> PublishedDashboard - Publish dashboard. - Publish the current draft dashboard. :param dashboard_id: str @@ -212,8 +202,6 @@ .. py:method:: trash(dashboard_id: str) - Trash dashboard. - Trash a dashboard. :param dashboard_id: str @@ -224,8 +212,6 @@ .. py:method:: unpublish(dashboard_id: str) - Unpublish dashboard. - Unpublish the dashboard. :param dashboard_id: str @@ -236,8 +222,6 @@ .. py:method:: update(dashboard_id: str, dashboard: Dashboard) -> Dashboard - Update dashboard. - Update a draft dashboard. :param dashboard_id: str diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst index ce7cc9248..d1631bddb 100644 --- a/docs/workspace/dashboards/lakeview_embedded.rst +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -8,8 +8,6 @@ .. py:method:: get_published_dashboard_token_info(dashboard_id: str [, external_value: Optional[str], external_viewer_id: Optional[str]]) -> GetPublishedDashboardTokenInfoResponse - Read an information of a published dashboard to mint an OAuth token. 
- Get the required authorization details and scopes of a published dashboard to mint an OAuth token. The `authorization_details` can be enriched to apply additional restriction. diff --git a/docs/workspace/database/database.rst b/docs/workspace/database/database.rst index 46a9dccab..d26728ed8 100644 --- a/docs/workspace/database/database.rst +++ b/docs/workspace/database/database.rst @@ -64,10 +64,12 @@ By default, an instance cannot be deleted if it has descendant instances created via PITR. If this flag is specified as true, all descendant instances will be deleted as well. :param purge: bool (optional) - If false, the database instance is soft deleted. Soft deleted instances behave as if they are - deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by - calling the undelete API for a limited time. If true, the database instance is hard deleted and - cannot be undeleted. + Note purge=false is in development. If false, the database instance is soft deleted (implementation + pending). Soft deleted instances behave as if they are deleted, and cannot be used for CRUD + operations nor connected to. However they can be undeleted by calling the undelete API for a limited + time (implementation pending). If true, the database instance is hard deleted and cannot be + undeleted. For the time being, setting this value to true is required to delete an instance (soft + delete is not yet supported). diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst index 3f214908d..4a18c836f 100644 --- a/docs/workspace/files/dbfs.rst +++ b/docs/workspace/files/dbfs.rst @@ -9,8 +9,6 @@ .. py:method:: add_block(handle: int, data: str) - Append data block. - Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. @@ -26,8 +24,6 @@ .. py:method:: close(handle: int) - Close the stream.
- Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. @@ -43,8 +39,6 @@ .. py:method:: create(path: str [, overwrite: Optional[bool]]) -> CreateResponse - Open a stream. - Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. @@ -97,8 +91,6 @@ .. py:method:: get_status(path: str) -> FileInfo - Get the information of a file or directory. - Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. @@ -129,8 +121,6 @@ .. py:method:: move(source_path: str, destination_path: str) - Move a file. - Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source @@ -153,8 +143,6 @@ .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]]) - Upload a file. - Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. @@ -178,8 +166,6 @@ .. py:method:: read(path: str [, length: Optional[int], offset: Optional[int]]) -> ReadResponse - Get the contents of a file. - Returns the contents of a file. If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. 
If the read length exceeds diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst index f3e4ae304..6118d35e3 100644 --- a/docs/workspace/files/files.rst +++ b/docs/workspace/files/files.rst @@ -26,8 +26,6 @@ .. py:method:: create_directory(directory_path: str) - Create a directory. - Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). @@ -40,8 +38,6 @@ .. py:method:: delete(file_path: str) - Delete a file. - Deletes a file. If the request is successful, there is no response body. :param file_path: str @@ -52,8 +48,6 @@ .. py:method:: delete_directory(directory_path: str) - Delete a directory. - Deletes an empty directory. To delete a non-empty directory, first delete all of its contents. This can be done by listing the @@ -67,8 +61,6 @@ .. py:method:: download(file_path: str) -> DownloadResponse - Download a file. - Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. @@ -80,8 +72,6 @@ .. py:method:: get_directory_metadata(directory_path: str) - Get directory metadata. - Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response body. @@ -98,8 +88,6 @@ .. py:method:: get_metadata(file_path: str) -> GetMetadataResponse - Get file metadata. - Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. :param file_path: str @@ -110,8 +98,6 @@ .. py:method:: list_directory_contents(directory_path: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DirectoryEntry] - List directory contents. - Returns the contents of a directory. 
If there is no directory at the specified path, the API returns a HTTP 404 error. @@ -140,8 +126,6 @@ .. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool]]) - Upload a file. - Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there diff --git a/docs/workspace/iam/account_access_control_proxy.rst b/docs/workspace/iam/account_access_control_proxy.rst index 66c396be5..39b5a83ca 100644 --- a/docs/workspace/iam/account_access_control_proxy.rst +++ b/docs/workspace/iam/account_access_control_proxy.rst @@ -10,8 +10,6 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse - Get assignable roles for a resource. - Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. @@ -28,8 +26,6 @@ .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse - Get a rule set. - Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. @@ -58,8 +54,6 @@ .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse - Update a rule set. - Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst index bf739025c..3b361aaf1 100644 --- a/docs/workspace/iam/current_user.rst +++ b/docs/workspace/iam/current_user.rst @@ -19,8 +19,6 @@ me = w.current_user.me() - Get current user info. 
- Get details about the current method caller's identity. :returns: :class:`User` diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index fe0187cd6..737939095 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -29,8 +29,6 @@ # cleanup w.groups.delete(id=group.id) - Create a new group. - Creates a group in the Databricks workspace with a unique name, using the supplied group details. :param display_name: str (optional) @@ -71,11 +69,6 @@ group = w.groups.create(display_name=f"sdk-{time.time_ns()}") w.groups.delete(id=group.id) - - # cleanup - w.groups.delete(id=group.id) - - Delete a group. Deletes a group from the Databricks workspace. @@ -105,8 +98,6 @@ # cleanup w.groups.delete(id=group.id) - Get group details. - Gets the information for a specific group in the Databricks workspace. :param id: str @@ -117,8 +108,6 @@ .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] - List group details. - Gets all details of the groups associated with the Databricks workspace. :param attributes: str (optional) @@ -185,8 +174,6 @@ w.users.delete(id=user.id) w.groups.delete(id=group.id) - Update group details. - Partially updates the details of a group. :param id: str @@ -200,8 +187,6 @@ .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) - Replace a group. - Updates the details of a group by replacing the entire group entity. 
:param id: str diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 0c3ef26fc..4e55da96e 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -46,8 +46,6 @@ _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) - Get object permissions. - Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. @@ -80,8 +78,6 @@ levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) - Get object permission levels. - Gets the permission levels that a user can have on an object. :param request_object_type: str @@ -127,8 +123,6 @@ # cleanup w.groups.delete(id=group.id) - Set object permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. @@ -146,8 +140,6 @@ .. py:method:: update(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions - Update object permissions. - Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. 
diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst index 74a498b00..0d0d447b9 100644 --- a/docs/workspace/iam/service_principals.rst +++ b/docs/workspace/iam/service_principals.rst @@ -20,21 +20,13 @@ import time from databricks.sdk import WorkspaceClient - from databricks.sdk.service import iam w = WorkspaceClient() - groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest()) - - spn = w.service_principals.create( - display_name=f"sdk-{time.time_ns()}", - groups=[iam.ComplexValue(value=groups["admins"])], - ) + created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}") # cleanup - w.service_principals.delete(id=spn.id) - - Create a service principal. + w.service_principals.delete(id=created.id) Creates a new service principal in the Databricks workspace. @@ -63,8 +55,6 @@ .. py:method:: delete(id: str) - Delete a service principal. - Delete a single service principal in the Databricks workspace. :param id: str @@ -93,8 +83,6 @@ # cleanup w.service_principals.delete(id=created.id) - Get service principal details. - Gets the details for a single service principal define in the Databricks workspace. :param id: str @@ -117,8 +105,6 @@ all = w.service_principals.list(iam.ListServicePrincipalsRequest()) - List service principals. - Gets the set of service principals associated with a Databricks workspace. :param attributes: str (optional) @@ -171,8 +157,6 @@ # cleanup w.service_principals.delete(id=created.id) - Update service principal details. - Partially updates the details of a single service principal in the Databricks workspace. :param id: str @@ -209,8 +193,6 @@ # cleanup w.service_principals.delete(id=created.id) - Replace service principal. - Updates the details of a single service principal. This action replaces the existing service principal with the same name. 
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst index 76837ac54..b48b26b02 100644 --- a/docs/workspace/iam/users.rst +++ b/docs/workspace/iam/users.rst @@ -32,8 +32,6 @@ user_name=f"sdk-{time.time_ns()}@example.com", ) - Create a new user. - Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. @@ -80,11 +78,12 @@ w = WorkspaceClient() - other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com") + user = w.users.create( + display_name=f"sdk-{time.time_ns()}", + user_name=f"sdk-{time.time_ns()}@example.com", + ) - w.users.delete(id=other_owner.id) - - Delete a user. + w.users.delete(id=user.id) Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. @@ -115,8 +114,6 @@ fetch = w.users.get(id=user.id) - Get user details. - Gets information for a specific user in Databricks workspace. :param id: str @@ -147,8 +144,6 @@ .. py:method:: get_permission_levels() -> GetPasswordPermissionLevelsResponse - Get password permission levels. - Gets the permission levels that a user can have on an object. :returns: :class:`GetPasswordPermissionLevelsResponse` @@ -156,8 +151,6 @@ .. py:method:: get_permissions() -> PasswordPermissions - Get password permissions. - Gets the permissions of all passwords. Passwords can inherit permissions from their root object. :returns: :class:`PasswordPermissions` @@ -181,8 +174,6 @@ sort_order=iam.ListSortOrder.DESCENDING, ) - List users. - Gets details for all the users associated with a Databricks workspace. :param attributes: str (optional) @@ -234,8 +225,6 @@ schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP], ) - Update user details. - Partially updates a user resource by applying the supplied operations on specific user attributes. :param id: str @@ -249,8 +238,6 @@ .. 
py:method:: set_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions - Set password permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -279,8 +266,6 @@ w.users.update(id=user.id, user_name=user.user_name, active=True) - Replace a user. - Replaces a user's information with the data supplied in request. :param id: str @@ -315,8 +300,6 @@ .. py:method:: update_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions - Update password permissions. - Updates the permissions on all passwords. Passwords can inherit permissions from their root object. :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 89ea5e2ae..288ab0ad5 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -59,8 +59,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Cancel all runs of a job. - Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. @@ -114,8 +112,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Cancel a run. - Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. @@ -169,8 +165,6 @@ Create a new job. - Create a new job. - :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -271,8 +265,6 @@ .. py:method:: delete(job_id: int) - Delete a job. - Deletes a job. :param job_id: int @@ -283,8 +275,6 @@ .. py:method:: delete_run(run_id: int) - Delete a job run. - Deletes a non-active run. Returns an error if the run is active. 
:param run_id: int @@ -334,8 +324,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Export and retrieve a job run. - Export and retrieve the job run task. :param run_id: int @@ -367,23 +355,21 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - created_job = w.jobs.create( - name=f"sdk-{time.time_ns()}", + run = w.jobs.submit( + run_name=f"sdk-{time.time_ns()}", tasks=[ - jobs.Task( - description="test", + jobs.SubmitTask( existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key="test", - timeout_seconds=0, + task_key=f"sdk-{time.time_ns()}", ) ], - ) + ).result() - by_id = w.jobs.get(job_id=created_job.job_id) + output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) # cleanup - w.jobs.delete(job_id=created_job.job_id) + w.jobs.delete_run(run_id=run.run_id) Get a single job. @@ -401,8 +387,6 @@ .. py:method:: get_permission_levels(job_id: str) -> GetJobPermissionLevelsResponse - Get job permission levels. - Gets the permission levels that a user can have on an object. :param job_id: str @@ -413,8 +397,6 @@ .. py:method:: get_permissions(job_id: str) -> JobPermissions - Get job permissions. - Gets the permissions of a job. Jobs can inherit permissions from their root object. :param job_id: str @@ -514,8 +496,6 @@ # cleanup w.jobs.delete_run(run_id=run.run_id) - Get the output for a single run. - Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. To return a larger result, you can store @@ -716,8 +696,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Repair a job run. - Re-run one or more tasks. Tasks are re-run as part of the original job run. 
They use the current job and task settings, and can be viewed in the history for the original job run. @@ -864,8 +842,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Update all job settings (reset). - Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. @@ -919,8 +895,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Trigger a new job run. - Run a job and return the `run_id` of the triggered run. :param job_id: int @@ -1030,8 +1004,6 @@ .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions - Set job permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -1077,8 +1049,6 @@ # cleanup w.jobs.delete_run(run_id=run.run_id) - Create and trigger a one-time run. - Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. @@ -1184,8 +1154,6 @@ # cleanup w.jobs.delete(job_id=created_job.job_id) - Update job settings partially. - Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. @@ -1211,8 +1179,6 @@ .. py:method:: update_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions - Update job permissions. - Updates the permissions on a job. Jobs can inherit permissions from their root object. 
:param job_id: str diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst index b75a73eab..027471d77 100644 --- a/docs/workspace/jobs/policy_compliance_for_jobs.rst +++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst @@ -17,8 +17,6 @@ .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse - Enforce job policy compliance. - Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) are compliant with the current versions of their respective cluster policies. All-purpose clusters used in the job will not be updated. @@ -33,8 +31,6 @@ .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse - Get job policy compliance. - Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and some of its job clusters no longer comply with their updated policies. @@ -47,8 +43,6 @@ .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance] - List job policy compliance. - Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and its job clusters no longer comply with the updated policy. diff --git a/docs/workspace/marketplace/consumer_fulfillments.rst b/docs/workspace/marketplace/consumer_fulfillments.rst index 149ec6451..8977abe5c 100644 --- a/docs/workspace/marketplace/consumer_fulfillments.rst +++ b/docs/workspace/marketplace/consumer_fulfillments.rst @@ -8,8 +8,6 @@ .. py:method:: get(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SharedDataObject] - Get listing content metadata. - Get a high level preview of the metadata of listing installable content. 
:param listing_id: str @@ -21,8 +19,6 @@ .. py:method:: list(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListingFulfillment] - List all listing fulfillments. - Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation. Standard installations contain metadata about the attached share or git repo. Only one of these fields will be present. Personalized installations contain metadata about the attached share or git repo, as diff --git a/docs/workspace/marketplace/consumer_installations.rst b/docs/workspace/marketplace/consumer_installations.rst index a9539ad1f..8314bef65 100644 --- a/docs/workspace/marketplace/consumer_installations.rst +++ b/docs/workspace/marketplace/consumer_installations.rst @@ -8,8 +8,6 @@ .. py:method:: create(listing_id: str [, accepted_consumer_terms: Optional[ConsumerTerms], catalog_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType], repo_detail: Optional[RepoInstallation], share_name: Optional[str]]) -> Installation - Install from a listing. - Install payload associated with a Databricks Marketplace listing. :param listing_id: str @@ -25,8 +23,6 @@ .. py:method:: delete(listing_id: str, installation_id: str) - Uninstall from a listing. - Uninstall an installation associated with a Databricks Marketplace listing. :param listing_id: str @@ -37,8 +33,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] - List all installations. - List all installations across all listings. :param page_size: int (optional) @@ -49,8 +43,6 @@ .. py:method:: list_listing_installations(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] - List installations for a listing. - List all installations for a particular listing. :param listing_id: str @@ -62,8 +54,6 @@ .. 
py:method:: update(listing_id: str, installation_id: str, installation: InstallationDetail [, rotate_token: Optional[bool]]) -> UpdateInstallationResponse - Update an installation. - This is a update API that will update the part of the fields defined in the installation table as well as interact with external services according to the fields not included in the installation table 1. the token will be rotate if the rotateToken flag is true 2. the token will be forcibly rotate if the diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst index 15ec3790e..ab26396d5 100644 --- a/docs/workspace/marketplace/consumer_listings.rst +++ b/docs/workspace/marketplace/consumer_listings.rst @@ -9,8 +9,6 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetListingsResponse - Get one batch of listings. One may specify up to 50 IDs per request. - Batch get a published listing in the Databricks Marketplace that the consumer has access to. :param ids: List[str] (optional) @@ -20,8 +18,6 @@ .. py:method:: get(id: str) -> GetListingResponse - Get listing. - Get a published listing in the Databricks Marketplace that the consumer has access to. :param id: str @@ -31,8 +27,6 @@ .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] - List listings. - List all published listings in the Databricks Marketplace that the consumer has access to. :param assets: List[:class:`AssetType`] (optional) @@ -57,8 +51,6 @@ .. 
py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing] - Search listings. - Search published listings in the Databricks Marketplace that the consumer has access to. This query supports a variety of different search parameters and performs fuzzy matching. diff --git a/docs/workspace/marketplace/consumer_personalization_requests.rst b/docs/workspace/marketplace/consumer_personalization_requests.rst index 8624871ca..1f8c12932 100644 --- a/docs/workspace/marketplace/consumer_personalization_requests.rst +++ b/docs/workspace/marketplace/consumer_personalization_requests.rst @@ -8,8 +8,6 @@ .. py:method:: create(listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms [, comment: Optional[str], company: Optional[str], first_name: Optional[str], is_from_lighthouse: Optional[bool], last_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType]]) -> CreatePersonalizationRequestResponse - Create a personalization request. - Create a personalization request for a listing. :param listing_id: str @@ -27,8 +25,6 @@ .. py:method:: get(listing_id: str) -> GetPersonalizationRequestResponse - Get the personalization request for a listing. - Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. @@ -39,8 +35,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] - List all personalization requests. - List personalization requests for a consumer across all listings. 
:param page_size: int (optional) diff --git a/docs/workspace/marketplace/consumer_providers.rst b/docs/workspace/marketplace/consumer_providers.rst index 615bf0752..1c275b1a5 100644 --- a/docs/workspace/marketplace/consumer_providers.rst +++ b/docs/workspace/marketplace/consumer_providers.rst @@ -8,8 +8,6 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetProvidersResponse - Get one batch of providers. One may specify up to 50 IDs per request. - Batch get a provider in the Databricks Marketplace with at least one visible listing. :param ids: List[str] (optional) @@ -19,8 +17,6 @@ .. py:method:: get(id: str) -> GetProviderResponse - Get a provider. - Get a provider in the Databricks Marketplace with at least one visible listing. :param id: str @@ -30,8 +26,6 @@ .. py:method:: list( [, is_featured: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] - List providers. - List all providers in the Databricks Marketplace with at least one visible listing. :param is_featured: bool (optional) diff --git a/docs/workspace/marketplace/provider_exchange_filters.rst b/docs/workspace/marketplace/provider_exchange_filters.rst index 6c2254acd..c5d91f81a 100644 --- a/docs/workspace/marketplace/provider_exchange_filters.rst +++ b/docs/workspace/marketplace/provider_exchange_filters.rst @@ -8,8 +8,6 @@ .. py:method:: create(filter: ExchangeFilter) -> CreateExchangeFilterResponse - Create a new exchange filter. - Add an exchange filter. :param filter: :class:`ExchangeFilter` @@ -19,8 +17,6 @@ .. py:method:: delete(id: str) - Delete an exchange filter. - Delete an exchange filter :param id: str @@ -30,8 +26,6 @@ .. py:method:: list(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeFilter] - List exchange filters. - List exchange filter :param exchange_id: str @@ -43,8 +37,6 @@ .. 
py:method:: update(id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse - Update exchange filter. - Update an exchange filter. :param id: str diff --git a/docs/workspace/marketplace/provider_exchanges.rst b/docs/workspace/marketplace/provider_exchanges.rst index edaae76e1..7b8841b07 100644 --- a/docs/workspace/marketplace/provider_exchanges.rst +++ b/docs/workspace/marketplace/provider_exchanges.rst @@ -8,8 +8,6 @@ .. py:method:: add_listing_to_exchange(listing_id: str, exchange_id: str) -> AddExchangeForListingResponse - Add an exchange for listing. - Associate an exchange with a listing :param listing_id: str @@ -20,8 +18,6 @@ .. py:method:: create(exchange: Exchange) -> CreateExchangeResponse - Create an exchange. - Create an exchange :param exchange: :class:`Exchange` @@ -31,8 +27,6 @@ .. py:method:: delete(id: str) - Delete an exchange. - This removes a listing from marketplace. :param id: str @@ -42,8 +36,6 @@ .. py:method:: delete_listing_from_exchange(id: str) - Remove an exchange for listing. - Disassociate an exchange with a listing :param id: str @@ -55,8 +47,6 @@ Get an exchange. - Get an exchange. - :param id: str :returns: :class:`GetExchangeResponse` @@ -64,8 +54,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Exchange] - List exchanges. - List exchanges visible to provider :param page_size: int (optional) @@ -76,8 +64,6 @@ .. py:method:: list_exchanges_for_listing(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] - List exchanges for listing. - List exchanges associated with a listing :param listing_id: str @@ -89,8 +75,6 @@ .. py:method:: list_listings_for_exchange(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] - List listings for exchange. - List listings associated with an exchange :param exchange_id: str @@ -102,8 +86,6 @@ .. 
py:method:: update(id: str, exchange: Exchange) -> UpdateExchangeResponse - Update exchange. - Update an exchange :param id: str diff --git a/docs/workspace/marketplace/provider_files.rst b/docs/workspace/marketplace/provider_files.rst index 413936020..0120e52a7 100644 --- a/docs/workspace/marketplace/provider_files.rst +++ b/docs/workspace/marketplace/provider_files.rst @@ -8,8 +8,6 @@ .. py:method:: create(file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str [, display_name: Optional[str]]) -> CreateFileResponse - Create a file. - Create a file. Currently, only provider icons and attached notebooks are supported. :param file_parent: :class:`FileParent` @@ -22,8 +20,6 @@ .. py:method:: delete(file_id: str) - Delete a file. - Delete a file :param file_id: str @@ -33,8 +29,6 @@ .. py:method:: get(file_id: str) -> GetFileResponse - Get a file. - Get a file :param file_id: str @@ -44,8 +38,6 @@ .. py:method:: list(file_parent: FileParent [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FileInfo] - List files. - List files attached to a parent entity. :param file_parent: :class:`FileParent` diff --git a/docs/workspace/marketplace/provider_listings.rst b/docs/workspace/marketplace/provider_listings.rst index dcfd45dd8..253dc8679 100644 --- a/docs/workspace/marketplace/provider_listings.rst +++ b/docs/workspace/marketplace/provider_listings.rst @@ -9,8 +9,6 @@ .. py:method:: create(listing: Listing) -> CreateListingResponse - Create a listing. - Create a new listing :param listing: :class:`Listing` @@ -20,8 +18,6 @@ .. py:method:: delete(id: str) - Delete a listing. - Delete a listing :param id: str @@ -31,8 +27,6 @@ .. py:method:: get(id: str) -> GetListingResponse - Get a listing. - Get a listing :param id: str @@ -42,8 +36,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Listing] - List listings. 
- List listings owned by this provider :param page_size: int (optional) @@ -54,8 +46,6 @@ .. py:method:: update(id: str, listing: Listing) -> UpdateListingResponse - Update listing. - Update a listing :param id: str diff --git a/docs/workspace/marketplace/provider_personalization_requests.rst b/docs/workspace/marketplace/provider_personalization_requests.rst index b9b5a0174..43ce06ca8 100644 --- a/docs/workspace/marketplace/provider_personalization_requests.rst +++ b/docs/workspace/marketplace/provider_personalization_requests.rst @@ -9,8 +9,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] - All personalization requests across all listings. - List personalization requests to this provider. This will return all personalization requests, regardless of which listing they are for. @@ -22,8 +20,6 @@ .. py:method:: update(listing_id: str, request_id: str, status: PersonalizationRequestStatus [, reason: Optional[str], share: Optional[ShareInfo]]) -> UpdatePersonalizationRequestResponse - Update personalization request status. - Update personalization request. This method only permits updating the status of the request. :param listing_id: str diff --git a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst index f77b9d436..bd873c551 100644 --- a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst +++ b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst @@ -8,8 +8,6 @@ .. py:method:: create() -> ProviderAnalyticsDashboard - Create provider analytics dashboard. - Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. @@ -20,8 +18,6 @@ Get provider analytics dashboard. - Get provider analytics dashboard. 
- :returns: :class:`ListProviderAnalyticsDashboardResponse` @@ -29,8 +25,6 @@ Get latest version of provider analytics dashboard. - Get latest version of provider analytics dashboard. - :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` @@ -38,8 +32,6 @@ Update provider analytics dashboard. - Update provider analytics dashboard. - :param id: str id is immutable property and can't be updated. :param version: int (optional) diff --git a/docs/workspace/marketplace/provider_providers.rst b/docs/workspace/marketplace/provider_providers.rst index ac8a4fdc3..8d7a9cb34 100644 --- a/docs/workspace/marketplace/provider_providers.rst +++ b/docs/workspace/marketplace/provider_providers.rst @@ -8,8 +8,6 @@ .. py:method:: create(provider: ProviderInfo) -> CreateProviderResponse - Create a provider. - Create a provider :param provider: :class:`ProviderInfo` @@ -19,8 +17,6 @@ .. py:method:: delete(id: str) - Delete provider. - Delete provider :param id: str @@ -30,8 +26,6 @@ .. py:method:: get(id: str) -> GetProviderResponse - Get provider. - Get provider profile :param id: str @@ -41,8 +35,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] - List providers. - List provider profiles for account. :param page_size: int (optional) @@ -53,8 +45,6 @@ .. py:method:: update(id: str, provider: ProviderInfo) -> UpdateProviderResponse - Update provider. - Update provider profile :param id: str diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index 791931167..0a514b33c 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -29,8 +29,6 @@ # cleanup w.experiments.delete_experiment(experiment_id=experiment.experiment_id) - Create experiment. - Creates an experiment with a name. Returns the ID of the newly created experiment. 
Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. @@ -96,8 +94,6 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) w.experiments.delete_run(run_id=created.run.info.run_id) - Create a run. - Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. @@ -119,8 +115,6 @@ .. py:method:: delete_experiment(experiment_id: str) - Delete an experiment. - Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. @@ -154,8 +148,6 @@ .. py:method:: delete_run(run_id: str) - Delete a run. - Marks a run for deletion. :param run_id: str @@ -166,8 +158,6 @@ .. py:method:: delete_runs(experiment_id: str, max_timestamp_millis: int [, max_runs: Optional[int]]) -> DeleteRunsResponse - Delete runs by creation time. - Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on @@ -186,8 +176,6 @@ .. py:method:: delete_tag(run_id: str, key: str) - Delete a tag on a run. - Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. @@ -214,8 +202,6 @@ .. py:method:: get_by_name(experiment_name: str) -> GetExperimentByNameResponse - Get an experiment by name. - Gets metadata for an experiment. This endpoint will return deleted experiments, but prefers the active experiment if an active and @@ -250,8 +236,6 @@ # cleanup w.experiments.delete_experiment(experiment_id=experiment.experiment_id) - Get an experiment. - Gets metadata for an experiment. 
This method works on deleted experiments. :param experiment_id: str @@ -262,8 +246,6 @@ .. py:method:: get_history(metric_key: str [, max_results: Optional[int], page_token: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[Metric] - Get metric history for a run. - Gets a list of all values for the specified metric for a given run. :param metric_key: str @@ -294,8 +276,6 @@ .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse - Get experiment permission levels. - Gets the permission levels that a user can have on an object. :param experiment_id: str @@ -306,8 +286,6 @@ .. py:method:: get_permissions(experiment_id: str) -> ExperimentPermissions - Get experiment permissions. - Gets the permissions of an experiment. Experiments can inherit permissions from their root object. :param experiment_id: str @@ -318,8 +296,6 @@ .. py:method:: get_run(run_id: str [, run_uuid: Optional[str]]) -> GetRunResponse - Get a run. - Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. @@ -336,8 +312,6 @@ .. py:method:: list_artifacts( [, page_token: Optional[str], path: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[FileInfo] - List artifacts. - List artifacts for a run. Takes an optional `artifact_path` prefix which if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, @@ -375,8 +349,6 @@ all = w.experiments.list_experiments(ml.ListExperimentsRequest()) - List experiments. - Gets a list of all experiments. :param max_results: int (optional) @@ -393,8 +365,6 @@ .. 
py:method:: log_batch( [, metrics: Optional[List[Metric]], params: Optional[List[Param]], run_id: Optional[str], tags: Optional[List[RunTag]]]) - Log a batch of metrics/params/tags for a run. - Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). @@ -453,10 +423,6 @@ .. py:method:: log_inputs(run_id: str [, datasets: Optional[List[DatasetInput]], models: Optional[List[ModelInput]]]) - Log inputs to a run. - - **NOTE:** Experimental: This API may change or be removed in a future release without warning. - Logs inputs, such as datasets and models, to an MLflow Run. :param run_id: str @@ -471,8 +437,6 @@ .. py:method:: log_logged_model_params(model_id: str [, params: Optional[List[LoggedModelParameter]]]) - Log params for a logged model. - Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error @@ -487,8 +451,6 @@ .. py:method:: log_metric(key: str, value: float, timestamp: int [, dataset_digest: Optional[str], dataset_name: Optional[str], model_id: Optional[str], run_id: Optional[str], run_uuid: Optional[str], step: Optional[int]]) - Log a metric for a run. - Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. @@ -520,9 +482,10 @@ .. py:method:: log_model( [, model_json: Optional[str], run_id: Optional[str]]) - Log a model. + **Note:** the [Create a logged model](/api/workspace/experiments/createloggedmodel) API replaces this + endpoint. - **NOTE:** Experimental: This API may change or be removed in a future release without warning. + Log a model to an MLflow Run. 
:param model_json: str (optional) MLmodel file in json format. @@ -534,10 +497,6 @@ .. py:method:: log_outputs(run_id: str [, models: Optional[List[ModelOutput]]]) - Log outputs from a run. - - **NOTE**: Experimental: This API may change or be removed in a future release without warning. - Logs outputs, such as models, from an MLflow Run. :param run_id: str @@ -550,8 +509,6 @@ .. py:method:: log_param(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) - Log a param for a run. - Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. @@ -571,8 +528,6 @@ .. py:method:: restore_experiment(experiment_id: str) - Restore an experiment. - Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. @@ -587,8 +542,6 @@ .. py:method:: restore_run(run_id: str) - Restore a run. - Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. @@ -601,8 +554,6 @@ .. py:method:: restore_runs(experiment_id: str, min_timestamp_millis: int [, max_runs: Optional[int]]) -> RestoreRunsResponse - Restore runs by deletion time. - Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on @@ -621,8 +572,6 @@ .. py:method:: search_experiments( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], view_type: Optional[ViewType]]) -> Iterator[Experiment] - Search experiments. 
- Searches for experiments that satisfy specified search criteria. :param filter: str (optional) @@ -643,8 +592,6 @@ .. py:method:: search_logged_models( [, datasets: Optional[List[SearchLoggedModelsDataset]], experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[SearchLoggedModelsOrderBy]], page_token: Optional[str]]) -> SearchLoggedModelsResponse - Search logged models. - Search for Logged Models that satisfy specified search criteria. :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) @@ -672,8 +619,6 @@ .. py:method:: search_runs( [, experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], run_view_type: Optional[ViewType]]) -> Iterator[Run] - Search for runs. - Searches for runs that satisfy expressions. Search expressions can use `mlflowMetric` and `mlflowParam` keys. @@ -709,8 +654,6 @@ .. py:method:: set_experiment_tag(experiment_id: str, key: str, value: str) - Set a tag for an experiment. - Sets a tag on an experiment. Experiment tags are metadata that can be updated. :param experiment_id: str @@ -725,7 +668,7 @@ .. py:method:: set_logged_model_tags(model_id: str [, tags: Optional[List[LoggedModelTag]]]) - Set a tag for a logged model. + Set tags for a logged model. :param model_id: str The ID of the logged model to set the tags on. @@ -737,8 +680,6 @@ .. py:method:: set_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions - Set experiment permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -751,8 +692,6 @@ .. py:method:: set_tag(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) - Set a tag for a run. - Sets a tag on a run. 
Tags are run metadata that can be updated during a run and after a run completes. :param key: str @@ -788,8 +727,6 @@ # cleanup w.experiments.delete_experiment(experiment_id=experiment.experiment_id) - Update an experiment. - Updates experiment metadata. :param experiment_id: str @@ -802,8 +739,6 @@ .. py:method:: update_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions - Update experiment permissions. - Updates the permissions on an experiment. Experiments can inherit permissions from their root object. :param experiment_id: str @@ -840,8 +775,6 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) w.experiments.delete_run(run_id=created.run.info.run_id) - Update a run. - Updates run metadata. :param end_time: int (optional) diff --git a/docs/workspace/ml/feature_store.rst b/docs/workspace/ml/feature_store.rst new file mode 100644 index 000000000..6aa1f2398 --- /dev/null +++ b/docs/workspace/ml/feature_store.rst @@ -0,0 +1,80 @@ +``w.feature_store``: Feature Store +================================== +.. currentmodule:: databricks.sdk.service.ml + +.. py:class:: FeatureStoreAPI + + A feature store is a centralized repository that enables data scientists to find and share features. Using + a feature store also ensures that the code used to compute feature values is the same during model + training and when the model is used for inference. + + An online store is a low-latency database used for feature lookup during real-time model inference or + serve feature for real-time applications. + + .. py:method:: create_online_store(online_store: OnlineStore) -> OnlineStore + + Create an Online Feature Store. + + :param online_store: :class:`OnlineStore` + An OnlineStore is a logical database instance that stores and serves features online. + + :returns: :class:`OnlineStore` + + + .. py:method:: delete_online_store(name: str) + + Delete an Online Feature Store. 
+ + :param name: str + Name of the online store to delete. + + + + + .. py:method:: get_online_store(name: str) -> OnlineStore + + Get an Online Feature Store. + + :param name: str + Name of the online store to get. + + :returns: :class:`OnlineStore` + + + .. py:method:: list_online_stores( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[OnlineStore] + + List Online Feature Stores. + + :param page_size: int (optional) + The maximum number of results to return. Defaults to 100 if not specified. + :param page_token: str (optional) + Pagination token to go to the next page based on a previous query. + + :returns: Iterator over :class:`OnlineStore` + + + .. py:method:: publish_table(source_table_name: str, publish_spec: PublishSpec) -> PublishTableResponse + + Publish features. + + :param source_table_name: str + The full three-part (catalog, schema, table) name of the source table. + :param publish_spec: :class:`PublishSpec` + The specification for publishing the online table from the source table. + + :returns: :class:`PublishTableResponse` + + + .. py:method:: update_online_store(name: str, online_store: OnlineStore, update_mask: str) -> OnlineStore + + Update an Online Feature Store. + + :param name: str + The name of the online store. This is the unique identifier for the online store. + :param online_store: :class:`OnlineStore` + An OnlineStore is a logical database instance that stores and serves features online. + :param update_mask: str + The list of fields to update. + + :returns: :class:`OnlineStore` + \ No newline at end of file diff --git a/docs/workspace/ml/forecasting.rst b/docs/workspace/ml/forecasting.rst index 79fca0ffe..3a65c4242 100644 --- a/docs/workspace/ml/forecasting.rst +++ b/docs/workspace/ml/forecasting.rst @@ -8,8 +8,6 @@ .. 
py:method:: create_experiment(train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int [, custom_weights_column: Optional[str], experiment_path: Optional[str], future_feature_data_path: Optional[str], holiday_regions: Optional[List[str]], include_features: Optional[List[str]], max_runtime: Optional[int], prediction_data_path: Optional[str], primary_metric: Optional[str], register_to: Optional[str], split_column: Optional[str], timeseries_identifier_columns: Optional[List[str]], training_frameworks: Optional[List[str]]]) -> Wait[ForecastingExperiment] - Create a forecasting experiment. - Creates a serverless forecasting experiment. Returns the experiment ID. :param train_data_path: str @@ -71,8 +69,6 @@ .. py:method:: get_experiment(experiment_id: str) -> ForecastingExperiment - Get a forecasting experiment. - Public RPC to get forecasting experiment :param experiment_id: str diff --git a/docs/workspace/ml/index.rst b/docs/workspace/ml/index.rst index 9114a2f19..6e6338b70 100644 --- a/docs/workspace/ml/index.rst +++ b/docs/workspace/ml/index.rst @@ -8,5 +8,6 @@ Create and manage experiments, features, and other machine learning artifacts :maxdepth: 1 experiments + feature_store forecasting model_registry \ No newline at end of file diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 4c1b3d917..0146f7902 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -14,8 +14,6 @@ .. py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse - Approve transition request. - Approves a model version stage transition request. :param name: str @@ -66,8 +64,6 @@ # cleanup w.model_registry.delete_comment(id=created.comment.id) - Post a comment. - Posts a comment on a model version. 
A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. @@ -94,9 +90,9 @@ w = WorkspaceClient() - created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - - Create a model. + model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + + mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a new registered model with the name specified in the request body. @@ -127,9 +123,7 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") - - Create a model version. + mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a model version. @@ -153,8 +147,6 @@ .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse - Make a transition request. - Creates a model version stage transition request. :param name: str @@ -200,8 +192,6 @@ # cleanup w.model_registry.delete_webhook(id=created.webhook.id) - Create a webhook. - **NOTE**: This endpoint is in Public Preview. Creates a registry webhook. @@ -256,8 +246,6 @@ .. py:method:: delete_comment(id: str) - Delete a comment. - Deletes a comment on a model version. :param id: str @@ -268,8 +256,6 @@ .. py:method:: delete_model(name: str) - Delete a model. - Deletes a registered model. :param name: str @@ -280,8 +266,6 @@ .. py:method:: delete_model_tag(name: str, key: str) - Delete a model tag. - Deletes the tag for a registered model. :param name: str @@ -295,8 +279,6 @@ .. py:method:: delete_model_version(name: str, version: str) - Delete a model version. - Deletes a model version. :param name: str @@ -309,8 +291,6 @@ .. 
py:method:: delete_model_version_tag(name: str, version: str, key: str) - Delete a model version tag. - Deletes a model version tag. :param name: str @@ -326,8 +306,6 @@ .. py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]]) - Delete a transition request. - Cancels a model version stage transition request. :param name: str @@ -355,8 +333,6 @@ .. py:method:: delete_webhook( [, id: Optional[str]]) - Delete a webhook. - **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. @@ -369,8 +345,6 @@ .. py:method:: get_latest_versions(name: str [, stages: Optional[List[str]]]) -> Iterator[ModelVersion] - Get the latest version. - Gets the latest version of a registered model. :param name: str @@ -398,8 +372,6 @@ model = w.model_registry.get_model(name=created.registered_model.name) - Get model. - Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. @@ -416,8 +388,6 @@ Get a model version. - Get a model version. - :param name: str Name of the registered model :param version: str @@ -428,8 +398,6 @@ .. py:method:: get_model_version_download_uri(name: str, version: str) -> GetModelVersionDownloadUriResponse - Get a model version URI. - Gets a URI to download the model version. :param name: str @@ -442,8 +410,6 @@ .. py:method:: get_permission_levels(registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse - Get registered model permission levels. - Gets the permission levels that a user can have on an object. :param registered_model_id: str @@ -454,8 +420,6 @@ .. py:method:: get_permissions(registered_model_id: str) -> RegisteredModelPermissions - Get registered model permissions. - Gets the permissions of a registered model. Registered models can inherit permissions from their root object. 
@@ -479,8 +443,6 @@ all = w.model_registry.list_models(ml.ListModelsRequest()) - List models. - Lists all available registered models, up to the limit specified in __max_results__. :param max_results: int (optional) @@ -493,8 +455,6 @@ .. py:method:: list_transition_requests(name: str, version: str) -> Iterator[Activity] - List transition requests. - Gets a list of all open stage transition requests for the model version. :param name: str @@ -519,8 +479,6 @@ all = w.model_registry.list_webhooks(ml.ListWebhooksRequest()) - List registry webhooks. - **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. @@ -539,8 +497,6 @@ .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse - Reject a transition request. - Rejects a model version stage transition request. :param name: str @@ -565,8 +521,6 @@ .. py:method:: rename_model(name: str [, new_name: Optional[str]]) -> RenameModelResponse - Rename a model. - Renames a registered model. :param name: str @@ -579,8 +533,6 @@ .. py:method:: search_model_versions( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[ModelVersion] - Searches model versions. - Searches for specific model versions based on the supplied __filter__. :param filter: str (optional) @@ -600,8 +552,6 @@ .. py:method:: search_models( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[Model] - Search models. - Search for registered models based on the specified __filter__. :param filter: str (optional) @@ -622,8 +572,6 @@ .. py:method:: set_model_tag(name: str, key: str, value: str) - Set a tag. - Sets a tag on a registered model. :param name: str @@ -641,8 +589,6 @@ .. py:method:: set_model_version_tag(name: str, version: str, key: str, value: str) - Set a version tag. - Sets a model version tag. 
:param name: str @@ -662,8 +608,6 @@ .. py:method:: set_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions - Set registered model permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -676,8 +620,6 @@ .. py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse - Test a webhook. - **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. @@ -693,8 +635,6 @@ .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse - Transition a stage. - Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", @@ -750,8 +690,6 @@ # cleanup w.model_registry.delete_comment(id=created.comment.id) - Update a comment. - Post an edit to a comment on a model version. :param id: str @@ -784,8 +722,6 @@ description=f"sdk-{time.time_ns()}", ) - Update model. - Updates a registered model. :param name: str @@ -819,8 +755,6 @@ version=created.model_version.version, ) - Update model version. - Updates the model version. :param name: str @@ -835,8 +769,6 @@ .. py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions - Update registered model permissions. - Updates the permissions on a registered model. Registered models can inherit permissions from their root object. @@ -872,8 +804,6 @@ # cleanup w.model_registry.delete_webhook(id=created.webhook.id) - Update a webhook. - **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. 
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 92d8582db..5464eaa24 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -51,8 +51,6 @@ # cleanup w.pipelines.delete(pipeline_id=created.pipeline_id) - Create a pipeline. - Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. @@ -79,6 +77,8 @@ :param dry_run: bool (optional) :param edition: str (optional) Pipeline product edition. + :param environment: :class:`PipelinesEnvironment` (optional) + Environment specification for this pipeline used to install dependencies. :param event_log: :class:`EventLogSpec` (optional) Event log configuration for this pipeline :param filters: :class:`Filters` (optional) @@ -131,8 +131,6 @@ .. py:method:: delete(pipeline_id: str) - Delete a pipeline. - Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and its tables. You cannot undo this action. @@ -188,8 +186,6 @@ .. 
py:method:: get_permission_levels(pipeline_id: str) -> GetPipelinePermissionLevelsResponse - Get pipeline permission levels. - Gets the permission levels that a user can have on an object. :param pipeline_id: str @@ -200,8 +196,6 @@ .. py:method:: get_permissions(pipeline_id: str) -> PipelinePermissions - Get pipeline permissions. - Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. :param pipeline_id: str @@ -212,8 +206,6 @@ .. py:method:: get_update(pipeline_id: str, update_id: str) -> GetUpdateResponse - Get a pipeline update. - Gets an update from an active pipeline. :param pipeline_id: str @@ -262,8 +254,6 @@ # cleanup w.pipelines.delete(pipeline_id=created.pipeline_id) - List pipeline events. - Retrieves events for a pipeline. :param pipeline_id: str @@ -304,8 +294,6 @@ all = w.pipelines.list_pipelines(pipelines.ListPipelinesRequest()) - List pipelines. - Lists pipelines defined in the Delta Live Tables system. :param filter: str (optional) @@ -332,8 +320,6 @@ .. py:method:: list_updates(pipeline_id: str [, max_results: Optional[int], page_token: Optional[str], until_update_id: Optional[str]]) -> ListUpdatesResponse - List pipeline updates. - List updates for an active pipeline. :param pipeline_id: str @@ -350,8 +336,6 @@ .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions - Set pipeline permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -364,8 +348,6 @@ .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse - Start a pipeline. - Starts a new update for the pipeline. 
If there is already an active update for the pipeline, the request will fail and the active update will remain running. @@ -391,8 +373,6 @@ .. py:method:: stop(pipeline_id: str) -> Wait[GetPipelineResponse] - Stop a pipeline. - Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. @@ -406,7 +386,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], environment: Optional[PipelinesEnvironment], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -458,8 +438,6 @@ # cleanup w.pipelines.delete(pipeline_id=created.pipeline_id) - Edit a pipeline. - Updates a pipeline with the supplied configuration. :param pipeline_id: str @@ -486,6 +464,8 @@ Whether the pipeline is in Development mode. Defaults to false. :param edition: str (optional) Pipeline product edition. + :param environment: :class:`PipelinesEnvironment` (optional) + Environment specification for this pipeline used to install dependencies. :param event_log: :class:`EventLogSpec` (optional) Event log configuration for this pipeline :param expected_last_modified: int (optional) @@ -541,8 +521,6 @@ .. py:method:: update_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions - Update pipeline permissions. - Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. 
:param pipeline_id: str diff --git a/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst index 50f647795..006d137da 100644 --- a/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst +++ b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst @@ -8,8 +8,6 @@ .. py:method:: create_quality_monitor(quality_monitor: QualityMonitor) -> QualityMonitor - Create a quality monitor. - Create a quality monitor on UC object :param quality_monitor: :class:`QualityMonitor` @@ -19,8 +17,6 @@ .. py:method:: delete_quality_monitor(object_type: str, object_id: str) - Delete a quality monitor. - Delete a quality monitor on UC object :param object_type: str @@ -33,8 +29,6 @@ .. py:method:: get_quality_monitor(object_type: str, object_id: str) -> QualityMonitor - Read a quality monitor. - Read a quality monitor on UC object :param object_type: str @@ -47,8 +41,6 @@ .. py:method:: list_quality_monitor( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[QualityMonitor] - List quality monitors. - (Unimplemented) List quality monitors :param page_size: int (optional) @@ -59,8 +51,6 @@ .. py:method:: update_quality_monitor(object_type: str, object_id: str, quality_monitor: QualityMonitor) -> QualityMonitor - Update a quality monitor. - (Unimplemented) Update a quality monitor on UC object :param object_type: str diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index 1e2e32884..c6efae531 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -17,8 +17,6 @@ .. py:method:: build_logs(name: str, served_model_name: str) -> BuildLogsResponse - Get build logs for a served model. - Retrieves the build logs associated with the provided served model. :param name: str @@ -95,8 +93,6 @@ .. py:method:: export_metrics(name: str) -> ExportMetricsResponse - Get metrics of a serving endpoint. 
- Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. @@ -108,8 +104,6 @@ .. py:method:: get(name: str) -> ServingEndpointDetailed - Get a single serving endpoint. - Retrieves the details for a single serving endpoint. :param name: str @@ -126,8 +120,6 @@ .. py:method:: get_open_api(name: str) -> GetOpenApiResponse - Get the schema for a serving endpoint. - Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for the supported paths, input and output format and datatypes. @@ -139,8 +131,6 @@ .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse - Get serving endpoint permission levels. - Gets the permission levels that a user can have on an object. :param serving_endpoint_id: str @@ -151,8 +141,6 @@ .. py:method:: get_permissions(serving_endpoint_id: str) -> ServingEndpointPermissions - Get serving endpoint permissions. - Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root object. @@ -191,8 +179,6 @@ .. py:method:: logs(name: str, served_model_name: str) -> ServerLogsResponse - Get the latest logs for a served model. - Retrieves the service logs associated with the provided served model. :param name: str @@ -205,8 +191,6 @@ .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags - Update tags of a serving endpoint. - Used to batch add and delete tags from a serving endpoint with a single API call. :param name: str @@ -221,8 +205,6 @@ .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse - Update rate limits of a serving endpoint. - Deprecated: Please use AI Gateway to manage rate limits instead. :param name: str @@ -235,8 +217,6 @@ .. 
py:method:: put_ai_gateway(name: str [, fallback_config: Optional[FallbackConfig], guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse - Update AI Gateway of a serving endpoint. - Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. @@ -312,8 +292,6 @@ .. py:method:: set_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions - Set serving endpoint permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -326,8 +304,6 @@ .. py:method:: update_config(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig]]) -> Wait[ServingEndpointDetailed] - Update config of a serving endpoint. - Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. @@ -357,8 +333,6 @@ .. py:method:: update_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions - Update serving endpoint permissions. - Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. @@ -371,8 +345,6 @@ .. 
py:method:: update_provisioned_throughput_endpoint_config(name: str, config: PtEndpointCoreConfig) -> Wait[ServingEndpointDetailed] - Update config of a PT serving endpoint. - Updates any combination of the pt endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. Updates are instantaneous and endpoint should be updated instantly diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst index 1d5244f0a..e471a8257 100644 --- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst +++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst @@ -9,8 +9,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse - Delete the AI/BI dashboard embedding access policy. - Delete the AI/BI dashboard embedding access policy, reverting back to the default. :param etag: str (optional) @@ -25,8 +23,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting - Retrieve the AI/BI dashboard embedding access policy. - Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. @@ -42,8 +38,6 @@ .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting - Update the AI/BI dashboard embedding access policy. - Updates the AI/BI dashboard embedding access policy at the workspace level. 
:param allow_missing: bool diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst index 546d9ad7d..53812bee9 100644 --- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst +++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst @@ -9,8 +9,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse - Delete AI/BI dashboard embedding approved domains. - Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default empty list. @@ -26,8 +24,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting - Retrieve the list of domains approved to host embedded AI/BI dashboards. - Retrieves the list of domains approved to host embedded AI/BI dashboards. :param etag: str (optional) @@ -42,8 +38,6 @@ .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting - Update the list of domains approved to host embedded AI/BI dashboards. - Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst index 748cf428a..7ba756530 100644 --- a/docs/workspace/settings/automatic_cluster_update.rst +++ b/docs/workspace/settings/automatic_cluster_update.rst @@ -9,8 +9,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> AutomaticClusterUpdateSetting - Get the automatic cluster update setting. - Gets the automatic cluster update setting. :param etag: str (optional) @@ -25,8 +23,6 @@ .. 
py:method:: update(allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str) -> AutomaticClusterUpdateSetting - Update the automatic cluster update setting. - Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst index 807dcc1c6..de2894e86 100644 --- a/docs/workspace/settings/compliance_security_profile.rst +++ b/docs/workspace/settings/compliance_security_profile.rst @@ -11,8 +11,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting - Get the compliance security profile setting. - Gets the compliance security profile setting. :param etag: str (optional) @@ -27,8 +25,6 @@ .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting - Update the compliance security profile setting. - Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and diff --git a/docs/workspace/settings/credentials_manager.rst b/docs/workspace/settings/credentials_manager.rst index ea3162f6c..8063d42ad 100644 --- a/docs/workspace/settings/credentials_manager.rst +++ b/docs/workspace/settings/credentials_manager.rst @@ -9,8 +9,6 @@ .. py:method:: exchange_token(partition_id: PartitionId, token_type: List[TokenType], scopes: List[str]) -> ExchangeTokenResponse - Exchange token. - Exchange tokens with an Identity Provider to get a new access token. 
It allows specifying scopes to determine token permissions. diff --git a/docs/workspace/settings/dashboard_email_subscriptions.rst b/docs/workspace/settings/dashboard_email_subscriptions.rst index 22da502f6..ac8c3e834 100644 --- a/docs/workspace/settings/dashboard_email_subscriptions.rst +++ b/docs/workspace/settings/dashboard_email_subscriptions.rst @@ -10,8 +10,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDashboardEmailSubscriptionsResponse - Delete the Dashboard Email Subscriptions setting. - Reverts the Dashboard Email Subscriptions setting to its default value. :param etag: str (optional) @@ -26,8 +24,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> DashboardEmailSubscriptions - Get the Dashboard Email Subscriptions setting. - Gets the Dashboard Email Subscriptions setting. :param etag: str (optional) @@ -42,8 +38,6 @@ .. py:method:: update(allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str) -> DashboardEmailSubscriptions - Update the Dashboard Email Subscriptions setting. - Updates the Dashboard Email Subscriptions setting. :param allow_missing: bool diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst index a98d09b41..75f90464b 100644 --- a/docs/workspace/settings/default_namespace.rst +++ b/docs/workspace/settings/default_namespace.rst @@ -17,8 +17,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultNamespaceSettingResponse - Delete the default namespace setting. - Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the @@ -36,8 +34,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> DefaultNamespaceSetting - Get the default namespace setting. - Gets the default namespace setting. 
:param etag: str (optional) @@ -52,8 +48,6 @@ .. py:method:: update(allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting - Update the default namespace setting. - Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst index b72398c44..4f36218af 100644 --- a/docs/workspace/settings/disable_legacy_access.rst +++ b/docs/workspace/settings/disable_legacy_access.rst @@ -12,8 +12,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse - Delete Legacy Access Disablement Status. - Deletes legacy access disablement status. :param etag: str (optional) @@ -28,8 +26,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess - Retrieve Legacy Access Disablement Status. - Retrieves legacy access disablement Status. :param etag: str (optional) @@ -44,8 +40,6 @@ .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess - Update Legacy Access Disablement Status. - Updates legacy access disablement status. :param allow_missing: bool diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst index 8d56e058c..d54495db1 100644 --- a/docs/workspace/settings/disable_legacy_dbfs.rst +++ b/docs/workspace/settings/disable_legacy_dbfs.rst @@ -15,8 +15,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse - Delete the disable legacy DBFS setting. - Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. :param etag: str (optional) @@ -31,8 +29,6 @@ .. 
py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs - Get the disable legacy DBFS setting. - Gets the disable legacy DBFS setting. :param etag: str (optional) @@ -47,8 +43,6 @@ .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs - Update the disable legacy DBFS setting. - Updates the disable legacy DBFS setting for the workspace. :param allow_missing: bool diff --git a/docs/workspace/settings/enable_export_notebook.rst b/docs/workspace/settings/enable_export_notebook.rst index 30d17e46a..9e0bec346 100644 --- a/docs/workspace/settings/enable_export_notebook.rst +++ b/docs/workspace/settings/enable_export_notebook.rst @@ -9,8 +9,6 @@ .. py:method:: get_enable_export_notebook() -> EnableExportNotebook - Get the Notebook and File exporting setting. - Gets the Notebook and File exporting setting. :returns: :class:`EnableExportNotebook` @@ -18,8 +16,6 @@ .. py:method:: patch_enable_export_notebook(allow_missing: bool, setting: EnableExportNotebook, field_mask: str) -> EnableExportNotebook - Update the Notebook and File exporting setting. - Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. diff --git a/docs/workspace/settings/enable_notebook_table_clipboard.rst b/docs/workspace/settings/enable_notebook_table_clipboard.rst index 2a9c394a0..de65536da 100644 --- a/docs/workspace/settings/enable_notebook_table_clipboard.rst +++ b/docs/workspace/settings/enable_notebook_table_clipboard.rst @@ -9,8 +9,6 @@ .. py:method:: get_enable_notebook_table_clipboard() -> EnableNotebookTableClipboard - Get the Results Table Clipboard features setting. - Gets the Results Table Clipboard features setting. :returns: :class:`EnableNotebookTableClipboard` @@ -18,8 +16,6 @@ .. 
py:method:: patch_enable_notebook_table_clipboard(allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str) -> EnableNotebookTableClipboard - Update the Results Table Clipboard features setting. - Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. diff --git a/docs/workspace/settings/enable_results_downloading.rst b/docs/workspace/settings/enable_results_downloading.rst index 0769eca22..799f8f04d 100644 --- a/docs/workspace/settings/enable_results_downloading.rst +++ b/docs/workspace/settings/enable_results_downloading.rst @@ -8,8 +8,6 @@ .. py:method:: get_enable_results_downloading() -> EnableResultsDownloading - Get the Notebook results download setting. - Gets the Notebook results download setting. :returns: :class:`EnableResultsDownloading` @@ -17,8 +15,6 @@ .. py:method:: patch_enable_results_downloading(allow_missing: bool, setting: EnableResultsDownloading, field_mask: str) -> EnableResultsDownloading - Update the Notebook results download setting. - Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst index d0f9eee3d..73b6fb837 100644 --- a/docs/workspace/settings/enhanced_security_monitoring.rst +++ b/docs/workspace/settings/enhanced_security_monitoring.rst @@ -13,8 +13,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting - Get the enhanced security monitoring setting. - Gets the enhanced security monitoring setting. :param etag: str (optional) @@ -29,8 +27,6 @@ .. 
py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting - Update the enhanced security monitoring setting. - Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst index 03061165d..09fbc071f 100644 --- a/docs/workspace/settings/ip_access_lists.rst +++ b/docs/workspace/settings/ip_access_lists.rst @@ -45,8 +45,6 @@ # cleanup w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) - Create access list. - Creates an IP access list for this workspace. A list can be an allow list or a block list. See the top of this file for a description of how the @@ -76,8 +74,6 @@ .. py:method:: delete(ip_access_list_id: str) - Delete access list. - Deletes an IP access list, specified by its list ID. :param ip_access_list_id: str @@ -111,8 +107,6 @@ # cleanup w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) - Get access list. - Gets an IP access list, specified by its list ID. :param ip_access_list_id: str @@ -134,8 +128,6 @@ all = w.ip_access_lists.list() - Get access lists. - Gets all IP access lists for the specified workspace. :returns: Iterator over :class:`IpAccessListInfo` @@ -172,8 +164,6 @@ # cleanup w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) - Replace access list. - Replaces an IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the @@ -203,8 +193,6 @@ .. 
py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) - Update access list. - Updates an existing IP access list, specified by its ID. A list can include allow lists and block lists. See the top of this file for a description of how the diff --git a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst index 6f464addb..e7f6448e7 100644 --- a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst +++ b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst @@ -8,8 +8,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse - Delete the enable partner powered AI features workspace setting. - Reverts the enable partner powered AI features workspace setting to its default value. :param etag: str (optional) @@ -24,8 +22,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredWorkspace - Get the enable partner powered AI features workspace setting. - Gets the enable partner powered AI features workspace setting. :param etag: str (optional) @@ -40,8 +36,6 @@ .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str) -> LlmProxyPartnerPoweredWorkspace - Update the enable partner powered AI features workspace setting. - Updates the enable partner powered AI features workspace setting. :param allow_missing: bool diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst index 45c8abea1..2f99a3b3d 100644 --- a/docs/workspace/settings/notification_destinations.rst +++ b/docs/workspace/settings/notification_destinations.rst @@ -11,8 +11,6 @@ .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination - Create a notification destination. 
- Creates a notification destination. Requires workspace admin permissions. :param config: :class:`Config` (optional) @@ -25,8 +23,6 @@ .. py:method:: delete(id: str) - Delete a notification destination. - Deletes a notification destination. Requires workspace admin permissions. :param id: str @@ -36,8 +32,6 @@ .. py:method:: get(id: str) -> NotificationDestination - Get a notification destination. - Gets a notification destination. :param id: str @@ -47,8 +41,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult] - List notification destinations. - Lists notification destinations. :param page_size: int (optional) @@ -59,8 +51,6 @@ .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination - Update a notification destination. - Updates a notification destination. Requires workspace admin permissions. At least one field is required in the request body. diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst index c2853d133..7dcbab9cb 100644 --- a/docs/workspace/settings/restrict_workspace_admins.rst +++ b/docs/workspace/settings/restrict_workspace_admins.rst @@ -16,8 +16,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteRestrictWorkspaceAdminsSettingResponse - Delete the restrict workspace admins setting. - Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with @@ -35,8 +33,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> RestrictWorkspaceAdminsSetting - Get the restrict workspace admins setting. - Gets the restrict workspace admins setting. :param etag: str (optional) @@ -51,8 +47,6 @@ .. 
py:method:: update(allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str) -> RestrictWorkspaceAdminsSetting - Update the restrict workspace admins setting. - Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the diff --git a/docs/workspace/settings/sql_results_download.rst b/docs/workspace/settings/sql_results_download.rst index 8cf1cc13a..8afad7764 100644 --- a/docs/workspace/settings/sql_results_download.rst +++ b/docs/workspace/settings/sql_results_download.rst @@ -9,8 +9,6 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteSqlResultsDownloadResponse - Delete the SQL Results Download setting. - Reverts the SQL Results Download setting to its default value. :param etag: str (optional) @@ -25,8 +23,6 @@ .. py:method:: get( [, etag: Optional[str]]) -> SqlResultsDownload - Get the SQL Results Download setting. - Gets the SQL Results Download setting. :param etag: str (optional) @@ -41,8 +37,6 @@ .. py:method:: update(allow_missing: bool, setting: SqlResultsDownload, field_mask: str) -> SqlResultsDownload - Update the SQL Results Download setting. - Updates the SQL Results Download setting. :param allow_missing: bool diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst index ceaa64cc6..a5a22552c 100644 --- a/docs/workspace/settings/token_management.rst +++ b/docs/workspace/settings/token_management.rst @@ -34,8 +34,6 @@ w.service_principals.delete(id=spn.id) w.token_management.delete(token_id=obo.token_info.token_id) - Create on-behalf token. - Creates a token on behalf of a service principal. :param application_id: str @@ -50,8 +48,6 @@ .. py:method:: delete(token_id: str) - Delete a token. - Deletes a token, specified by its ID. 
:param token_id: str @@ -89,8 +85,6 @@ w.service_principals.delete(id=spn.id) w.token_management.delete(token_id=obo.token_info.token_id) - Get token info. - Gets information about a token, specified by its ID. :param token_id: str @@ -101,8 +95,6 @@ .. py:method:: get_permission_levels() -> GetTokenPermissionLevelsResponse - Get token permission levels. - Gets the permission levels that a user can have on an object. :returns: :class:`GetTokenPermissionLevelsResponse` @@ -110,8 +102,6 @@ .. py:method:: get_permissions() -> TokenPermissions - Get token permissions. - Gets the permissions of all tokens. Tokens can inherit permissions from their root object. :returns: :class:`TokenPermissions` @@ -131,8 +121,6 @@ all = w.token_management.list(settings.ListTokenManagementRequest()) - List all tokens. - Lists all tokens associated with the specified workspace or user. :param created_by_id: int (optional) @@ -145,8 +133,6 @@ .. py:method:: set_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions - Set token permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -157,8 +143,6 @@ .. py:method:: update_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions - Update token permissions. - Updates the permissions on all tokens. Tokens can inherit permissions from their root object. :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst index 200eb9c83..c1067d0f7 100644 --- a/docs/workspace/settings/tokens.rst +++ b/docs/workspace/settings/tokens.rst @@ -25,8 +25,6 @@ # cleanup w.tokens.delete(token_id=token.token_info.token_id) - Create a user token. - Creates and returns a token for a user. 
If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. @@ -43,8 +41,6 @@ .. py:method:: delete(token_id: str) - Revoke token. - Revokes an access token. If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. @@ -68,8 +64,6 @@ all = w.tokens.list() - List tokens. - Lists all the valid tokens for a user-workspace pair. :returns: Iterator over :class:`PublicTokenInfo` diff --git a/docs/workspace/settings/workspace_conf.rst b/docs/workspace/settings/workspace_conf.rst index d73b16180..52701f8a0 100644 --- a/docs/workspace/settings/workspace_conf.rst +++ b/docs/workspace/settings/workspace_conf.rst @@ -19,8 +19,6 @@ conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem") - Check configuration status. - Gets the configuration status for a workspace. :param keys: str @@ -30,10 +28,4 @@ .. py:method:: set_status(contents: Dict[str, str]) - Enable/disable features. - - Sets the configuration status for a workspace, including enabling or disabling it. - - - - \ No newline at end of file + Sets the configuration status for a workspace, including enabling or disabling it. \ No newline at end of file diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index d78dd62a0..19f791a2a 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -32,8 +32,6 @@ # cleanup w.providers.delete(name=created.name) - Create an auth provider. - Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. @@ -52,8 +50,6 @@ .. py:method:: delete(name: str) - Delete a provider. - Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. 
@@ -90,8 +86,6 @@ # cleanup w.providers.delete(name=created.name) - Get a provider. - Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. @@ -108,27 +102,12 @@ .. code-block:: - import time - from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sharing w = WorkspaceClient() - public_share_recipient = """{ - "shareCredentialsVersion":1, - "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz", - "endpoint":"https://sharing.delta.io/delta-sharing/" - } - """ - - created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient) - - shares = w.providers.list_shares(name=created.name) - - # cleanup - w.providers.delete(name=created.name) - - List providers. + all = w.providers.list(sharing.ListProvidersRequest()) Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There @@ -153,8 +132,6 @@ .. py:method:: list_provider_share_assets(provider_name: str, share_name: str [, function_max_results: Optional[int], notebook_max_results: Optional[int], table_max_results: Optional[int], volume_max_results: Optional[int]]) -> ListProviderShareAssetsResponse - List assets by provider share. - Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. @@ -201,8 +178,6 @@ # cleanup w.providers.delete(name=created.name) - List shares by Provider. - Gets an array of a specified provider's shares within the metastore where: * the caller is a metastore admin, or * the caller is the owner. @@ -250,8 +225,6 @@ # cleanup w.providers.delete(name=created.name) - Update a provider. - Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. 
If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. diff --git a/docs/workspace/sharing/recipient_activation.rst b/docs/workspace/sharing/recipient_activation.rst index bc8ac2715..16656b384 100644 --- a/docs/workspace/sharing/recipient_activation.rst +++ b/docs/workspace/sharing/recipient_activation.rst @@ -14,8 +14,6 @@ .. py:method:: get_activation_url_info(activation_url: str) - Get a share activation URL. - Gets an activation URL for a share. :param activation_url: str @@ -26,8 +24,6 @@ .. py:method:: retrieve_token(activation_url: str) -> RetrieveTokenResponse - Get an access token. - Retrieve access token with an activation url. This is a public API without any authentication. :param activation_url: str diff --git a/docs/workspace/sharing/recipient_federation_policies.rst b/docs/workspace/sharing/recipient_federation_policies.rst index bd63cb0a8..5b27b11eb 100644 --- a/docs/workspace/sharing/recipient_federation_policies.rst +++ b/docs/workspace/sharing/recipient_federation_policies.rst @@ -26,8 +26,6 @@ .. py:method:: create(recipient_name: str, policy: FederationPolicy) -> FederationPolicy - Create recipient federation policy. - Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate @@ -59,8 +57,6 @@ .. py:method:: delete(recipient_name: str, name: str) - Delete recipient federation policy. - Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. @@ -74,8 +70,6 @@ .. py:method:: get_federation_policy(recipient_name: str, name: str) -> FederationPolicy - Get recipient federation policy. 
- Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. @@ -89,8 +83,6 @@ .. py:method:: list(recipient_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] - List recipient federation policies. - Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. @@ -104,8 +96,6 @@ .. py:method:: update(recipient_name: str, name: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy - Update recipient federation policy. - Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index e640bf038..572f62cbf 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -36,8 +36,6 @@ # cleanup w.recipients.delete(name=created.name) - Create a share recipient. - Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. @@ -70,8 +68,6 @@ .. py:method:: delete(name: str) - Delete a share recipient. - Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. :param name: str @@ -100,8 +96,6 @@ # cleanup w.recipients.delete(name=created.name) - Get a share recipient. - Gets a share recipient from the metastore if: * the caller is the owner of the share recipient, or: * is a metastore admin @@ -126,8 +120,6 @@ all = w.recipients.list(sharing.ListRecipientsRequest()) - List share recipients. - Gets an array of all share recipients within the current metastore where: * the caller is a metastore admin, or * the caller is the owner. 
There is no guarantee of a specific @@ -170,8 +162,6 @@ # cleanup w.recipients.delete(name=created.name) - Rotate a token. - Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. @@ -205,8 +195,6 @@ # cleanup w.recipients.delete(name=created.name) - Get recipient share permissions. - Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. @@ -246,8 +234,6 @@ # cleanup w.recipients.delete(name=created.name) - Update a share recipient. - Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, the user must be both a metastore admin and the owner of the recipient. diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 05dea6902..1bf63fdf7 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -27,8 +27,6 @@ # cleanup w.shares.delete(name=created_share.name) - Create a share. - Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. @@ -44,8 +42,6 @@ .. py:method:: delete(name: str) - Delete a share. - Deletes a data object share from the metastore. The caller must be an owner of the share. :param name: str @@ -74,8 +70,6 @@ # cleanup w.shares.delete(name=created_share.name) - Get a share. - Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. @@ -101,8 +95,6 @@ all = w.shares.list(sharing.ListSharesRequest()) - List shares. - Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. @@ -122,8 +114,6 @@ .. 
py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetSharePermissionsResponse - Get permissions. - Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. @@ -195,15 +185,13 @@ w.tables.delete(full_name=table_full_name) w.shares.delete(name=created_share.name) - Update a share. - Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. When the caller is a metastore admin, only the __owner__ field can be updated. - In the case that the share name is changed, **updateShare** requires that the caller is both the share - owner and a metastore admin. + In the case the share name is changed, **updateShare** requires that the caller is the owner of the + share and has the CREATE_SHARE privilege. If there are notebook files in the share, the __storage_root__ field cannot be updated. @@ -231,8 +219,6 @@ .. py:method:: update_permissions(name: str [, changes: Optional[List[PermissionsChange]], omit_permissions_list: Optional[bool]]) -> UpdateSharePermissionsResponse - Update permissions. - Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index be09efec5..c3efdb347 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -50,8 +50,6 @@ w.queries.delete(id=query.id) w.alerts.delete(id=alert.id) - Create an alert. - Creates an alert. :param alert: :class:`CreateAlertRequestAlert` (optional) @@ -64,8 +62,6 @@ .. py:method:: delete(id: str) - Delete an alert. - Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. 
@@ -118,8 +114,6 @@ w.queries.delete(id=query.id) w.alerts.delete(id=alert.id) - Get an alert. - Gets an alert. :param id: str @@ -141,8 +135,6 @@ all = w.alerts.list(sql.ListAlertsRequest()) - List alerts. - Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. @@ -199,8 +191,6 @@ w.queries.delete(id=query.id) w.alerts.delete(id=alert.id) - Update an alert. - Updates an alert. :param id: str diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst index 5b048d2bf..d9662f456 100644 --- a/docs/workspace/sql/alerts_legacy.rst +++ b/docs/workspace/sql/alerts_legacy.rst @@ -16,8 +16,6 @@ .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert - Create an alert. - Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. @@ -43,8 +41,6 @@ .. py:method:: delete(alert_id: str) - Delete an alert. - Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. @@ -60,8 +56,6 @@ .. py:method:: get(alert_id: str) -> LegacyAlert - Get an alert. - Gets an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get @@ -76,8 +70,6 @@ .. py:method:: list() -> Iterator[LegacyAlert] - Get alerts. - Gets a list of alerts. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list @@ -90,8 +82,6 @@ .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) - Update an alert. - Updates an alert. **Note**: A new version of the Databricks SQL API is now available. 
Please use :method:alerts/update diff --git a/docs/workspace/sql/alerts_v2.rst b/docs/workspace/sql/alerts_v2.rst index 0c61c7d7c..96ee533fb 100644 --- a/docs/workspace/sql/alerts_v2.rst +++ b/docs/workspace/sql/alerts_v2.rst @@ -8,8 +8,6 @@ .. py:method:: create_alert(alert: AlertV2) -> AlertV2 - Create an alert. - Create Alert :param alert: :class:`AlertV2` @@ -19,8 +17,6 @@ .. py:method:: get_alert(id: str) -> AlertV2 - Get an alert. - Gets an alert. :param id: str @@ -30,8 +26,6 @@ .. py:method:: list_alerts( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AlertV2] - List alerts. - Gets a list of alerts accessible to the user, ordered by creation time. :param page_size: int (optional) @@ -42,8 +36,6 @@ .. py:method:: trash_alert(id: str) - Delete an alert. - Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. @@ -55,8 +47,6 @@ .. py:method:: update_alert(id: str, alert: AlertV2, update_mask: str) -> AlertV2 - Update an alert. - Update alert :param id: str diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst index 63e100640..d6ce2bdcf 100644 --- a/docs/workspace/sql/dashboard_widgets.rst +++ b/docs/workspace/sql/dashboard_widgets.rst @@ -9,7 +9,7 @@ .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget - Add widget to a dashboard. + Add widget to a dashboard :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. @@ -27,7 +27,7 @@ .. py:method:: delete(id: str) - Remove widget. + Remove widget :param id: str Widget ID returned by :method:dashboardwidgets/create @@ -37,7 +37,7 @@ .. 
py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget - Update existing widget. + Update existing widget :param id: str Widget ID returned by :method:dashboardwidgets/create diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index 3ed0c4b77..340b606d3 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -66,8 +66,6 @@ # cleanup w.dashboards.delete(dashboard_id=created.id) - Remove a dashboard. - Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. @@ -96,8 +94,6 @@ # cleanup w.dashboards.delete(dashboard_id=created.id) - Retrieve a definition. - Returns a JSON representation of a dashboard object, including its visualization and query objects. :param dashboard_id: str @@ -119,8 +115,6 @@ all = w.dashboards.list(sql.ListDashboardsRequest()) - Get dashboard objects. - Fetch a paginated list of dashboard objects. **Warning**: Calling this API concurrently 10 or more times could result in throttling, service @@ -158,8 +152,6 @@ # cleanup w.dashboards.delete(dashboard_id=created.id) - Restore a dashboard. - A restored dashboard appears in list views and searches and can be shared. :param dashboard_id: str @@ -169,8 +161,6 @@ .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard - Change a dashboard definition. - Modify this dashboard definition. This operation only affects attributes of the dashboard object. It does not add, modify, or remove widgets. diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst index 472bdfb0e..01ec16bdc 100644 --- a/docs/workspace/sql/data_sources.rst +++ b/docs/workspace/sql/data_sources.rst @@ -29,8 +29,6 @@ srcs = w.data_sources.list() - Get a list of SQL warehouses. 
- Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst index a5bd010f1..c05c82012 100644 --- a/docs/workspace/sql/dbsql_permissions.rst +++ b/docs/workspace/sql/dbsql_permissions.rst @@ -22,8 +22,6 @@ .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse - Get object ACL. - Gets a JSON representation of the access control list (ACL) for a specified object. **Note**: A new version of the Databricks SQL API is now available. Please use @@ -41,8 +39,6 @@ .. py:method:: set(object_type: ObjectTypePlural, object_id: str [, access_control_list: Optional[List[AccessControl]]]) -> SetResponse - Set object ACL. - Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. @@ -62,8 +58,6 @@ .. py:method:: transfer_ownership(object_type: OwnableObjectType, object_id: TransferOwnershipObjectId [, new_owner: Optional[str]]) -> Success - Transfer object ownership. - Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index f8553bead..0dfb63fbf 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -36,8 +36,6 @@ # cleanup w.queries.delete(id=query.id) - Create a query. - Creates a query. :param auto_resolve_display_name: bool (optional) @@ -50,8 +48,6 @@ .. py:method:: delete(id: str) - Delete a query. - Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and cannot be used for alerts. You can restore a trashed query through the UI. 
A trashed query is permanently deleted after 30 days. @@ -91,8 +87,6 @@ # cleanup w.queries.delete(id=query.id) - Get a query. - Gets a query. :param id: str @@ -102,8 +96,6 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery] - List queries. - Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. @@ -115,8 +107,6 @@ .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization] - List visualizations on a query. - Gets a list of visualizations on a query. :param id: str @@ -164,8 +154,6 @@ # cleanup w.queries.delete(id=query.id) - Update a query. - Updates a query. :param id: str diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst index c35ed9b69..61ff085a7 100644 --- a/docs/workspace/sql/queries_legacy.rst +++ b/docs/workspace/sql/queries_legacy.rst @@ -15,8 +15,6 @@ .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery - Create a new query definition. - Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. @@ -58,8 +56,6 @@ .. py:method:: delete(query_id: str) - Delete a query. - Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. @@ -75,8 +71,6 @@ .. py:method:: get(query_id: str) -> LegacyQuery - Get a query definition. - Retrieve a query object definition along with contextual permissions information about the currently authenticated user. @@ -92,8 +86,6 @@ .. 
py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery] - Get a list of queries. - Gets a list of queries. Optionally, this list can be filtered by a search term. **Warning**: Calling this API concurrently 10 or more times could result in throttling, service @@ -130,8 +122,6 @@ .. py:method:: restore(query_id: str) - Restore a query. - Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. @@ -147,8 +137,6 @@ .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery - Change a query definition. - Modify this query definition. **Note**: You cannot undo this operation. diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index 5acfb5127..14d006928 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -25,8 +25,6 @@ ) ) - List Queries. - List the history of queries through SQL warehouses, and serverless compute. You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are @@ -34,7 +32,9 @@ to list subsequent query statuses. :param filter_by: :class:`QueryFilter` (optional) - A filter to limit query history results. This field is optional. + An optional filter object to limit query history results. Accepts parameters such as user IDs, + endpoint IDs, and statuses to narrow the returned data. In a URL, the parameters of this filter are + specified with dot notation. For example: `filter_by.statement_ids`. :param include_metrics: bool (optional) Whether to include the query metrics with each query. Only use this for a small subset of queries (max_results). Defaults to false. 
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index f0865ae0a..578a7598a 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -9,8 +9,6 @@ .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization - Add a visualization to a query. - Adds a visualization to a query. :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) @@ -20,8 +18,6 @@ .. py:method:: delete(id: str) - Remove a visualization. - Removes a visualization. :param id: str @@ -31,8 +27,6 @@ .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization - Update a visualization. - Updates a visualization. :param id: str diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst index d91b97c8c..bf710ee89 100644 --- a/docs/workspace/sql/query_visualizations_legacy.rst +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -14,8 +14,6 @@ .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization - Add visualization to a query. - Creates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. Please use @@ -40,8 +38,6 @@ .. py:method:: delete(id: str) - Remove visualization. - Removes a visualization from the query. **Note**: A new version of the Databricks SQL API is now available. Please use @@ -57,8 +53,6 @@ .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization - Edit existing visualization. - Updates visualization in the query. 
**Note**: A new version of the Databricks SQL API is now available. Please use diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 5dabcc0d2..19bca1fdb 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -91,8 +91,6 @@ .. py:method:: cancel_execution(statement_id: str) - Cancel statement execution. - Requests that an executing statement be canceled. Callers must poll for status to see the terminal state. @@ -105,7 +103,7 @@ .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse - Execute a SQL statement. + Execute a SQL statement :param statement: str The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. @@ -216,8 +214,6 @@ .. py:method:: get_statement(statement_id: str) -> StatementResponse - Get status, manifest, and result first chunk. - This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the @@ -235,8 +231,6 @@ .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData - Get result chunk by index. - After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. 
Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 7695dbc8b..5a88cdbf3 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -34,8 +34,6 @@ # cleanup w.warehouses.delete(id=created.id) - Create a warehouse. - Creates a new SQL warehouse. :param auto_stop_mins: int (optional) @@ -104,8 +102,6 @@ .. py:method:: delete(id: str) - Delete a warehouse. - Deletes a SQL warehouse. :param id: str @@ -149,8 +145,6 @@ # cleanup w.warehouses.delete(id=created.id) - Update a warehouse. - Updates the configuration for a SQL warehouse. :param id: str @@ -247,8 +241,6 @@ # cleanup w.warehouses.delete(id=created.id) - Get warehouse info. - Gets the information for a single SQL warehouse. :param id: str @@ -259,8 +251,6 @@ .. py:method:: get_permission_levels(warehouse_id: str) -> GetWarehousePermissionLevelsResponse - Get SQL warehouse permission levels. - Gets the permission levels that a user can have on an object. :param warehouse_id: str @@ -271,8 +261,6 @@ .. py:method:: get_permissions(warehouse_id: str) -> WarehousePermissions - Get SQL warehouse permissions. - Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root object. @@ -284,8 +272,6 @@ .. py:method:: get_workspace_warehouse_config() -> GetWorkspaceWarehouseConfigResponse - Get the workspace configuration. - Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. :returns: :class:`GetWorkspaceWarehouseConfigResponse` @@ -305,8 +291,6 @@ all = w.warehouses.list(sql.ListWarehousesRequest()) - List warehouses. - Lists all SQL warehouses that a user has manager permissions on. :param run_as_user_id: int (optional) @@ -318,8 +302,6 @@ .. 
py:method:: set_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions - Set SQL warehouse permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -332,8 +314,6 @@ .. py:method:: set_workspace_warehouse_config( [, channel: Optional[Channel], config_param: Optional[RepeatedEndpointConfPairs], data_access_config: Optional[List[EndpointConfPair]], enabled_warehouse_types: Optional[List[WarehouseTypePair]], global_param: Optional[RepeatedEndpointConfPairs], google_service_account: Optional[str], instance_profile_arn: Optional[str], security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy], sql_configuration_parameters: Optional[RepeatedEndpointConfPairs]]) - Set the workspace configuration. - Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. :param channel: :class:`Channel` (optional) @@ -364,8 +344,6 @@ .. py:method:: start(id: str) -> Wait[GetWarehouseResponse] - Start a warehouse. - Starts a SQL warehouse. :param id: str @@ -381,8 +359,6 @@ .. py:method:: stop(id: str) -> Wait[GetWarehouseResponse] - Stop a warehouse. - Stops a SQL warehouse. :param id: str @@ -398,8 +374,6 @@ .. py:method:: update_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions - Update SQL warehouse permissions. - Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst index 50c335064..510bc6868 100644 --- a/docs/workspace/vectorsearch/vector_search_endpoints.rst +++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst @@ -8,8 +8,6 @@ .. 
py:method:: create_endpoint(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str]]) -> Wait[EndpointInfo] - Create an endpoint. - Create a new endpoint. :param name: str @@ -29,8 +27,6 @@ .. py:method:: delete_endpoint(endpoint_name: str) - Delete an endpoint. - Delete a vector search endpoint. :param endpoint_name: str @@ -41,8 +37,6 @@ .. py:method:: get_endpoint(endpoint_name: str) -> EndpointInfo - Get an endpoint. - Get details for a single vector search endpoint. :param endpoint_name: str @@ -53,8 +47,6 @@ .. py:method:: list_endpoints( [, page_token: Optional[str]]) -> Iterator[EndpointInfo] - List all endpoints. - List all vector search endpoints in the workspace. :param page_token: str (optional) @@ -65,8 +57,6 @@ .. py:method:: update_endpoint_budget_policy(endpoint_name: str, budget_policy_id: str) -> PatchEndpointBudgetPolicyResponse - Update the budget policy of an endpoint. - Update the budget policy of an endpoint :param endpoint_name: str diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst index 90762b275..398f86147 100644 --- a/docs/workspace/vectorsearch/vector_search_indexes.rst +++ b/docs/workspace/vectorsearch/vector_search_indexes.rst @@ -14,8 +14,6 @@ .. py:method:: create_index(name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType [, delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest], direct_access_index_spec: Optional[DirectAccessVectorIndexSpec]]) -> VectorIndex - Create an index. - Create a new index. :param name: str @@ -39,8 +37,6 @@ .. py:method:: delete_data_vector_index(index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse - Delete data from index. - Handles the deletion of data from a specified vector index. :param index_name: str @@ -55,8 +51,6 @@ Delete an index. - Delete an index. - :param index_name: str Name of the index @@ -67,8 +61,6 @@ Get an index. - Get an index. 
- :param index_name: str Name of the index @@ -77,8 +69,6 @@ .. py:method:: list_indexes(endpoint_name: str [, page_token: Optional[str]]) -> Iterator[MiniVectorIndex] - List indexes. - List all indexes in the given endpoint. :param endpoint_name: str @@ -91,8 +81,6 @@ .. py:method:: query_index(index_name: str, columns: List[str] [, columns_to_rerank: Optional[List[str]], filters_json: Optional[str], num_results: Optional[int], query_text: Optional[str], query_type: Optional[str], query_vector: Optional[List[float]], score_threshold: Optional[float]]) -> QueryVectorIndexResponse - Query an index. - Query the specified vector index. :param index_name: str @@ -126,8 +114,6 @@ .. py:method:: query_next_page(index_name: str [, endpoint_name: Optional[str], page_token: Optional[str]]) -> QueryVectorIndexResponse - Query next page. - Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. @@ -143,8 +129,6 @@ .. py:method:: scan_index(index_name: str [, last_primary_key: Optional[str], num_results: Optional[int]]) -> ScanVectorIndexResponse - Scan an index. - Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. @@ -160,8 +144,6 @@ .. py:method:: sync_index(index_name: str) - Synchronize an index. - Triggers a synchronization process for a specified vector index. :param index_name: str @@ -172,8 +154,6 @@ .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse - Upsert data into an index. - Handles the upserting of data into a specified vector index. 
:param index_name: str diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst index d5efd62eb..eda0831be 100644 --- a/docs/workspace/workspace/git_credentials.rst +++ b/docs/workspace/workspace/git_credentials.rst @@ -26,8 +26,6 @@ # cleanup w.git_credentials.delete(credential_id=cr.credential_id) - Create a credential entry. - Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. @@ -53,8 +51,6 @@ .. py:method:: delete(credential_id: int) - Delete a credential. - Deletes the specified Git credential. :param credential_id: int @@ -81,8 +77,6 @@ # cleanup w.git_credentials.delete(credential_id=cr.credential_id) - Get a credential entry. - Gets the Git credential with the specified credential ID. :param credential_id: int @@ -104,8 +98,6 @@ list = w.git_credentials.list() - Get Git credentials. - Lists the calling user's Git credentials. One credential per user is supported. :returns: Iterator over :class:`CredentialInfo` @@ -136,8 +128,6 @@ # cleanup w.git_credentials.delete(credential_id=cr.credential_id) - Update a credential. - Updates the specified Git credential. :param credential_id: int diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst index 7388ffe6b..ceb833465 100644 --- a/docs/workspace/workspace/repos.rst +++ b/docs/workspace/workspace/repos.rst @@ -38,8 +38,6 @@ # cleanup w.repos.delete(repo_id=ri.id) - Create a repo. - Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser. @@ -61,8 +59,6 @@ .. py:method:: delete(repo_id: int) - Delete a repo. - Deletes the specified repo. 
:param repo_id: int @@ -97,8 +93,6 @@ # cleanup w.repos.delete(repo_id=ri.id) - Get a repo. - Returns the repo with the given repo ID. :param repo_id: int @@ -109,8 +103,6 @@ .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse - Get repo permission levels. - Gets the permission levels that a user can have on an object. :param repo_id: str @@ -121,8 +113,6 @@ .. py:method:: get_permissions(repo_id: str) -> RepoPermissions - Get repo permissions. - Gets the permissions of a repo. Repos can inherit permissions from their root object. :param repo_id: str @@ -145,8 +135,6 @@ all = w.repos.list(workspace.ListReposRequest()) - Get repos. - Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. @@ -163,8 +151,6 @@ .. py:method:: set_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions - Set repo permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. @@ -201,8 +187,6 @@ # cleanup w.repos.delete(repo_id=ri.id) - Update a repo. - Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. @@ -223,8 +207,6 @@ .. py:method:: update_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions - Update repo permissions. - Updates the permissions on a repo. Repos can inherit permissions from their root object. :param repo_id: str diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst index 2dc261114..a92042354 100644 --- a/docs/workspace/workspace/secrets.rst +++ b/docs/workspace/workspace/secrets.rst @@ -37,8 +37,6 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - Create a new secret scope. 
- The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. @@ -56,8 +54,6 @@ .. py:method:: delete_acl(scope: str, principal: str) - Delete an ACL. - Deletes the given ACL on the given scope. Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no @@ -74,8 +70,6 @@ .. py:method:: delete_scope(scope: str) - Delete a secret scope. - Deletes a secret scope. Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user @@ -89,8 +83,6 @@ .. py:method:: delete_secret(scope: str, key: str) - Delete a secret. - Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. @@ -107,8 +99,6 @@ .. py:method:: get_acl(scope: str, principal: str) -> AclItem - Get secret ACL details. - Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. @@ -125,8 +115,6 @@ .. py:method:: get_secret(scope: str, key: str) -> GetSecretResponse - Get a secret. - Gets the bytes representation of a secret value for the specified scope and key. Users need the READ permission to make this call. @@ -170,8 +158,6 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - Lists ACLs. - List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the @@ -196,8 +182,6 @@ scopes = w.secrets.list_scopes() - List all scopes. - Lists all secret scopes available in the workspace. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. @@ -230,8 +214,6 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - List secret keys. - Lists the secret keys that are stored at this scope. 
This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. @@ -278,8 +260,6 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - Create/update an ACL. - Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. @@ -340,8 +320,6 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - Add a secret. - Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index abfc30860..2c369968e 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -11,8 +11,6 @@ .. py:method:: delete(path: str [, recursive: Optional[bool]]) - Delete a workspace object. - Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error @@ -83,8 +81,6 @@ export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) - Export a workspace object. - Exports an object or the contents of an entire directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -112,8 +108,6 @@ .. py:method:: get_permission_levels(workspace_object_type: str, workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse - Get workspace object permission levels. - Gets the permission levels that a user can have on an object. 
:param workspace_object_type: str @@ -126,8 +120,6 @@ .. py:method:: get_permissions(workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions - Get workspace object permissions. - Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. @@ -156,8 +148,6 @@ obj = w.workspace.get_status(path=notebook_path) - Get status. - Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -188,12 +178,10 @@ content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, language=workspace.Language.SQL, - overwrite=true_, + overwrite=True, path=notebook_path, ) - Import a workspace object. - Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` @@ -235,14 +223,16 @@ .. code-block:: + import os + import time + from databricks.sdk import WorkspaceClient w = WorkspaceClient() - names = [] - for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True): - names.append(i.path) - assert len(names) > 0 + notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + + objects = w.workspace.list(path=os.path.dirname(notebook)) List workspace objects @@ -254,8 +244,6 @@ .. py:method:: mkdirs(path: str) - Create a directory. - Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. @@ -272,8 +260,6 @@ .. 
py:method:: set_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions - Set workspace object permissions. - Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. @@ -289,8 +275,6 @@ .. py:method:: update_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions - Update workspace object permissions. - Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object.