diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py index 2d263362..899bf347 100755 --- a/databricks/sdk/service/aibuilder.py +++ b/databricks/sdk/service/aibuilder.py @@ -15,11 +15,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class CancelCustomLlmOptimizationRunRequest: - id: Optional[str] = None - - @dataclass class CancelOptimizeResponse: def as_dict(self) -> dict: @@ -38,68 +33,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CancelOptimizeResponse: return cls() -@dataclass -class CreateCustomLlmRequest: - name: str - """Name of the custom LLM. Only alphanumeric characters and dashes allowed.""" - - instructions: str - """Instructions for the custom LLM to follow""" - - agent_artifact_path: Optional[str] = None - """Optional: UC path for agent artifacts. If you are using a dataset that you only have read - permissions, please provide a destination path where you have write permissions. Please provide - this in catalog.schema format.""" - - datasets: Optional[List[Dataset]] = None - """Datasets used for training and evaluating the model, not for inference. 
Currently, only 1 - dataset is accepted.""" - - guidelines: Optional[List[str]] = None - """Guidelines for the custom LLM to adhere to""" - - def as_dict(self) -> dict: - """Serializes the CreateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.agent_artifact_path is not None: - body["agent_artifact_path"] = self.agent_artifact_path - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.guidelines: - body["guidelines"] = [v for v in self.guidelines] - if self.instructions is not None: - body["instructions"] = self.instructions - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.agent_artifact_path is not None: - body["agent_artifact_path"] = self.agent_artifact_path - if self.datasets: - body["datasets"] = self.datasets - if self.guidelines: - body["guidelines"] = self.guidelines - if self.instructions is not None: - body["instructions"] = self.instructions - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomLlmRequest: - """Deserializes the CreateCustomLlmRequest from a dictionary.""" - return cls( - agent_artifact_path=d.get("agent_artifact_path", None), - datasets=_repeated_dict(d, "datasets", Dataset), - guidelines=d.get("guidelines", None), - instructions=d.get("instructions", None), - name=d.get("name", None), - ) - - @dataclass class CustomLlm: name: str @@ -239,12 +172,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomLlmResponse: return cls() -@dataclass -class StartCustomLlmOptimizationRunRequest: - id: Optional[str] = None - """The Id of the tile.""" - - class State(Enum): """States of Custom LLM optimization lifecycle.""" @@ -299,60 +226,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Table: ) 
-@dataclass -class UpdateCustomLlmRequest: - custom_llm: CustomLlm - """The CustomLlm containing the fields which should be updated.""" - - update_mask: str - """The list of the CustomLlm fields to update. These should correspond to the values (or lack - thereof) present in `custom_llm`. - - The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - id: Optional[str] = None - """The id of the custom llm""" - - def as_dict(self) -> dict: - """Serializes the UpdateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_llm: - body["custom_llm"] = self.custom_llm.as_dict() - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.custom_llm: - body["custom_llm"] = self.custom_llm - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest: - """Deserializes the UpdateCustomLlmRequest from a dictionary.""" - return cls( - custom_llm=_from_dict(d, "custom_llm", CustomLlm), - id=d.get("id", None), - update_mask=d.get("update_mask", None), - ) - - class 
AiBuilderAPI: """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 9f9e38c4..330e0515 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -605,40 +605,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsDescription: ) -@dataclass -class AppPermissionsRequest: - access_control_list: Optional[List[AppAccessControlRequest]] = None - - app_name: Optional[str] = None - """The app for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.app_name is not None: - body["app_name"] = self.app_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.app_name is not None: - body["app_name"] = self.app_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsRequest: - """Deserializes the AppPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlRequest), - app_name=d.get("app_name", None), - ) - - @dataclass class AppResource: name: str @@ -1112,18 +1078,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) -@dataclass -class StartAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppRequest: - name: Optional[str] = None - """The name of the app.""" - - class AppsAPI: """Apps run directly on a 
customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 763cc050..03d13532 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -416,41 +416,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy: ) -@dataclass -class CreateBillingUsageDashboardRequest: - dashboard_type: Optional[UsageDashboardType] = None - """Workspace level usage dashboard shows usage data for the specified workspace ID. Global level - usage dashboard shows usage data for all workspaces in the account.""" - - workspace_id: Optional[int] = None - """The workspace ID of the workspace in which the usage dashboard is created.""" - - def as_dict(self) -> dict: - """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type.value - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardRequest: - """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" - return cls( - dashboard_type=_enum(d, "dashboard_type", UsageDashboardType), workspace_id=d.get("workspace_id", None) - ) - - @dataclass class CreateBillingUsageDashboardResponse: dashboard_id: Optional[str] = None @@ -628,31 +593,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
CreateBudgetConfigurationBudgetAlertCon ) -@dataclass -class CreateBudgetConfigurationRequest: - budget: CreateBudgetConfigurationBudget - """Properties of the new budget configuration.""" - - def as_dict(self) -> dict: - """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget: - body["budget"] = self.budget.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget: - body["budget"] = self.budget - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationRequest: - """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", CreateBudgetConfigurationBudget)) - - @dataclass class CreateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -678,43 +618,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationResponse: return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) -@dataclass -class CreateBudgetPolicyRequest: - """A request to create a BudgetPolicy.""" - - policy: Optional[BudgetPolicy] = None - """The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must - be provided, custom_tags may need to be provided depending on the cloud provider. All other - fields are optional.""" - - request_id: Optional[str] = None - """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is - recommended. 
This request is only idempotent if a `request_id` is provided.""" - - def as_dict(self) -> dict: - """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.policy: - body["policy"] = self.policy.as_dict() - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.policy: - body["policy"] = self.policy - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetPolicyRequest: - """Deserializes the CreateBudgetPolicyRequest from a dictionary.""" - return cls(policy=_from_dict(d, "policy", BudgetPolicy), request_id=d.get("request_id", None)) - - @dataclass class CreateLogDeliveryConfigurationParams: """* Log Delivery Configuration""" @@ -1518,38 +1421,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationBudget: ) -@dataclass -class UpdateBudgetConfigurationRequest: - budget: UpdateBudgetConfigurationBudget - """The updated budget. 
This will overwrite the budget specified by the budget ID.""" - - budget_id: Optional[str] = None - """The Databricks budget configuration ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget: - body["budget"] = self.budget.as_dict() - if self.budget_id is not None: - body["budget_id"] = self.budget_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget: - body["budget"] = self.budget - if self.budget_id is not None: - body["budget_id"] = self.budget_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationRequest: - """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", UpdateBudgetConfigurationBudget), budget_id=d.get("budget_id", None)) - - @dataclass class UpdateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -1575,81 +1446,12 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationResponse: return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) -@dataclass -class UpdateLogDeliveryConfigurationStatusRequest: - """* Update Log Delivery Configuration""" - - status: LogDeliveryConfigStatus - """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). - Defaults to `ENABLED`. You can [enable or disable the - configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration - is not supported, so disable a log delivery configuration that is no longer needed.""" - - log_delivery_configuration_id: Optional[str] = None - """The log delivery configuration id of customer""" - - def as_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.log_delivery_configuration_id is not None: - body["log_delivery_configuration_id"] = self.log_delivery_configuration_id - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.log_delivery_configuration_id is not None: - body["log_delivery_configuration_id"] = self.log_delivery_configuration_id - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLogDeliveryConfigurationStatusRequest: - """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" - return cls( - log_delivery_configuration_id=d.get("log_delivery_configuration_id", None), - status=_enum(d, "status", LogDeliveryConfigStatus), - ) - - class UsageDashboardType(Enum): USAGE_DASHBOARD_TYPE_GLOBAL = "USAGE_DASHBOARD_TYPE_GLOBAL" USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" -@dataclass -class WrappedCreateLogDeliveryConfiguration: - """* Properties of the new log delivery configuration.""" - - log_delivery_configuration: CreateLogDeliveryConfigurationParams - """* Log Delivery Configuration""" - - def as_dict(self) -> dict: - """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() - 
return body - - def as_shallow_dict(self) -> dict: - """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WrappedCreateLogDeliveryConfiguration: - """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary.""" - return cls( - log_delivery_configuration=_from_dict(d, "log_delivery_configuration", CreateLogDeliveryConfigurationParams) - ) - - @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 3cd01d05..f9d1221b 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -19,106 +19,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AccountsCreateMetastore: - metastore_info: Optional[CreateMetastore] = None - - def as_dict(self) -> dict: - """Serializes the AccountsCreateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastore: - """Deserializes the AccountsCreateMetastore from a dictionary.""" - return cls(metastore_info=_from_dict(d, "metastore_info", CreateMetastore)) - - -@dataclass -class AccountsCreateMetastoreAssignment: - metastore_assignment: Optional[CreateMetastoreAssignment] = None - - metastore_id: Optional[str] = None - """Unity 
Catalog metastore ID""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the AccountsCreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignment: - """Deserializes the AccountsCreateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", CreateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class AccountsCreateStorageCredential: - credential_info: Optional[CreateStorageCredential] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - def as_dict(self) -> dict: - """Serializes the AccountsCreateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - 
if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredential: - """Deserializes the AccountsCreateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", CreateStorageCredential), - metastore_id=d.get("metastore_id", None), - ) - - @dataclass class AccountsMetastoreAssignment: metastore_assignment: Optional[MetastoreAssignment] = None @@ -191,123 +91,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) -@dataclass -class AccountsUpdateMetastore: - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - metastore_info: Optional[UpdateMetastore] = None - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastore: - """Deserializes the AccountsUpdateMetastore from a dictionary.""" - return cls( - metastore_id=d.get("metastore_id", None), metastore_info=_from_dict(d, "metastore_info", UpdateMetastore) - ) - - -@dataclass -class AccountsUpdateMetastoreAssignment: - metastore_assignment: Optional[UpdateMetastoreAssignment] = None - - metastore_id: 
Optional[str] = None - """Unity Catalog metastore ID""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignment: - """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", UpdateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class AccountsUpdateStorageCredential: - credential_info: Optional[UpdateStorageCredential] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - storage_credential_name: Optional[str] = None - """Name of the storage credential.""" - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.storage_credential_name is not None: - 
body["storage_credential_name"] = self.storage_credential_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredential: - """Deserializes the AccountsUpdateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", UpdateStorageCredential), - metastore_id=d.get("metastore_id", None), - storage_credential_name=d.get("storage_credential_name", None), - ) - - @dataclass class ArtifactAllowlistInfo: artifact_matchers: Optional[List[ArtifactMatcher]] = None @@ -1624,190 +1407,358 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: @dataclass -class CreateCatalog: +class CreateFunction: name: str - """Name of catalog.""" + """Name of function, relative to parent schema.""" + + catalog_name: str + """Name of parent catalog.""" + + schema_name: str + """Name of parent schema relative to its parent catalog.""" + + input_params: FunctionParameterInfos + + data_type: ColumnTypeName + """Scalar function return data type.""" + + full_data_type: str + """Pretty printed function data type.""" + + routine_body: CreateFunctionRoutineBody + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" + + routine_definition: str + """Function body.""" + + parameter_style: CreateFunctionParameterStyle + """Function parameter style. **S** is the value for SQL.""" + + is_deterministic: bool + """Whether the function is deterministic.""" + + sql_data_access: CreateFunctionSqlDataAccess + """Function SQL data access.""" + + is_null_call: bool + """Function null call.""" + + security_type: CreateFunctionSecurityType + """Function security type.""" + + specific_name: str + """Specific name of the function; Reserved for future use.""" comment: Optional[str] = None """User-provided free-form text description.""" - connection_name: Optional[str] = None - """The name of the connection to an external data source.""" + external_language: Optional[str] = None + """External function language.""" - options: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + external_name: Optional[str] = None + """External function name.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + properties: Optional[str] = None + """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - provider_name: Optional[str] = None - """The name of delta sharing provider. 
- - A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.""" + return_params: Optional[FunctionParameterInfos] = None + """Table function return parameters.""" - share_name: Optional[str] = None - """The name of the share under the share provider.""" + routine_dependencies: Optional[DependencyList] = None + """Function dependencies.""" - storage_root: Optional[str] = None - """Storage root URL for managed tables within catalog.""" + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" def as_dict(self) -> dict: - """Serializes the CreateCatalog into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body.""" body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name if self.comment is not None: body["comment"] = self.comment - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.provider_name is not None: - body["provider_name"] = self.provider_name - if self.share_name is not None: - body["share_name"] = self.share_name - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body + if self.data_type is not None: + body["data_type"] = self.data_type.value + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.input_params: + body["input_params"] = self.input_params.as_dict() + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not 
None: + body["is_null_call"] = self.is_null_call + if self.name is not None: + body["name"] = self.name + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style.value + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params.as_dict() + if self.routine_body is not None: + body["routine_body"] = self.routine_body.value + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type.value + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access.value + if self.sql_path is not None: + body["sql_path"] = self.sql_path + return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes.""" + """Serializes the CreateFunction into a shallow dictionary of its immediate attributes.""" body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name if self.comment is not None: body["comment"] = self.comment - if self.connection_name is not None: - body["connection_name"] = self.connection_name + if self.data_type is not None: + body["data_type"] = self.data_type + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.input_params: + body["input_params"] = self.input_params + if self.is_deterministic is not None: + body["is_deterministic"] = 
self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: body["properties"] = self.properties - if self.provider_name is not None: - body["provider_name"] = self.provider_name - if self.share_name is not None: - body["share_name"] = self.share_name - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCatalog: - """Deserializes the CreateCatalog from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: + """Deserializes the CreateFunction from a dictionary.""" return cls( + catalog_name=d.get("catalog_name", None), comment=d.get("comment", None), - connection_name=d.get("connection_name", None), + data_type=_enum(d, "data_type", ColumnTypeName), + external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + input_params=_from_dict(d, "input_params", 
FunctionParameterInfos), + is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), name=d.get("name", None), - options=d.get("options", None), + parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle), properties=d.get("properties", None), - provider_name=d.get("provider_name", None), - share_name=d.get("share_name", None), - storage_root=d.get("storage_root", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", CreateFunctionSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess), + sql_path=d.get("sql_path", None), ) -@dataclass -class CreateConnection: - name: str - """Name of the connection.""" +class CreateFunctionParameterStyle(Enum): + """Function parameter style. **S** is the value for SQL.""" - connection_type: ConnectionType - """The type of connection.""" + S = "S" - options: Dict[str, str] - """A map of key-value properties attached to the securable.""" - comment: Optional[str] = None - """User-provided free-form text description.""" +class CreateFunctionRoutineBody(Enum): + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + EXTERNAL = "EXTERNAL" + SQL = "SQL" - read_only: Optional[bool] = None - """If the connection is read only.""" + +class CreateFunctionSecurityType(Enum): + """The security type of the function.""" + + DEFINER = "DEFINER" + + +class CreateFunctionSqlDataAccess(Enum): + """Function SQL data access.""" + + CONTAINS_SQL = "CONTAINS_SQL" + NO_SQL = "NO_SQL" + READS_SQL_DATA = "READS_SQL_DATA" + + +@dataclass +class CreateMetastore: + name: str + """The user-specified name of the metastore.""" + + region: Optional[str] = None + """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" + + storage_root: Optional[str] = None + """The storage root URL for metastore""" def as_dict(self) -> dict: - """Serializes the CreateConnection into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.connection_type is not None: - body["connection_type"] = self.connection_type.value if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.read_only is not None: - body["read_only"] = self.read_only + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body def as_shallow_dict(self) -> dict: - """Serializes the CreateConnection into a shallow dictionary of its immediate attributes.""" + 
"""Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.connection_type is not None: - body["connection_type"] = self.connection_type if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.read_only is not None: - body["read_only"] = self.read_only + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateConnection: - """Deserializes the CreateConnection from a dictionary.""" - return cls( - comment=d.get("comment", None), - connection_type=_enum(d, "connection_type", ConnectionType), - name=d.get("name", None), - options=d.get("options", None), - properties=d.get("properties", None), - read_only=d.get("read_only", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: + """Deserializes the CreateMetastore from a dictionary.""" + return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) @dataclass -class CreateCredentialRequest: - name: str - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - - aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration.""" +class CreateMetastoreAssignment: + metastore_id: str + """The unique ID of the metastore.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" + default_catalog_name: str + """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" + workspace_id: Optional[int] = None + """A workspace ID.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + def as_dict(self) -> dict: + """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """The Databricks managed GCP service account configuration.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body - purpose: Optional[CredentialPurpose] = None - """Indicates the purpose of the credential.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment: + """Deserializes the CreateMetastoreAssignment from a dictionary.""" + return cls( + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. Only applicable when purpose is - **STORAGE**.""" - skip_validation: Optional[bool] = None - """Optional. 
Supplying true to this argument skips validation of the created set of credentials.""" +@dataclass +class CreateResponse: + def as_dict(self) -> dict: + """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: + """Deserializes the CreateResponse from a dictionary.""" + return cls() + + +@dataclass +class CreateStorageCredential: + name: str + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + + skip_validation: Optional[bool] = None + """Supplying true to this argument skips validation of the created credential.""" def as_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role.as_dict() @@ -1815,14 +1766,14 @@ def as_dict(self) -> dict: body["azure_managed_identity"] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() if self.comment is not None: body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() if self.name is not None: body["name"] = self.name - if self.purpose is not None: - body["purpose"] = self.purpose.value if self.read_only is not None: body["read_only"] = self.read_only if self.skip_validation is not None: @@ -1830,7 +1781,7 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role @@ -1838,14 +1789,14 @@ def as_shallow_dict(self) -> dict: body["azure_managed_identity"] = self.azure_managed_identity if self.azure_service_principal: body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token if self.comment is not None: body["comment"] = self.comment if 
self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account if self.name is not None: body["name"] = self.name - if self.purpose is not None: - body["purpose"] = self.purpose if self.read_only is not None: body["read_only"] = self.read_only if self.skip_validation is not None: @@ -1853,1001 +1804,840 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: - """Deserializes the CreateCredentialRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: + """Deserializes the CreateStorageCredential from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), name=d.get("name", None), - purpose=_enum(d, "purpose", CredentialPurpose), read_only=d.get("read_only", None), skip_validation=d.get("skip_validation", None), ) @dataclass -class CreateExternalLocation: - name: str - """Name of the external location.""" +class CredentialInfo: + aws_iam_role: Optional[AwsIamRole] = None + """The AWS IAM role configuration.""" - url: str - """Path URL of the external location.""" + azure_managed_identity: Optional[AzureManagedIdentity] = None + """The Azure managed identity configuration.""" - credential_name: str 
- """Name of the storage credential used with this location.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" comment: Optional[str] = None - """User-provided free-form text description.""" + """Comment associated with the credential.""" - enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" + created_at: Optional[int] = None + """Time at which this credential was created, in epoch milliseconds.""" - encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" + created_by: Optional[str] = None + """Username of credential creator.""" - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + """The Databricks managed GCP service account configuration.""" - file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" + full_name: Optional[str] = None + """The full name of the credential.""" + + id: Optional[str] = None + """The unique identifier of the credential.""" + + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + metastore_id: Optional[str] = None + """Unique identifier of the parent metastore.""" + + name: Optional[str] = None + """The credential name. 
The name must be unique among storage and service credentials within the + metastore.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" + + purpose: Optional[CredentialPurpose] = None + """Indicates the purpose of the credential.""" read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" - skip_validation: Optional[bool] = None - """Skips validation of the storage credential associated with the external location.""" + updated_at: Optional[int] = None + """Time at which this credential was last modified, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified the credential.""" + + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. Only applicable when + purpose is **STORAGE**.""" def as_dict(self) -> dict: - """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body.""" + """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() if self.comment is not None: body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = 
self.file_event_queue.as_dict() + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.purpose is not None: + body["purpose"] = self.purpose.value if self.read_only is not None: body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: - """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes.""" + """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal if self.comment is not None: body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = 
self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.purpose is not None: + body["purpose"] = self.purpose if self.read_only is not None: body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExternalLocation: - """Deserializes the CreateExternalLocation from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: + """Deserializes the CredentialInfo from a dictionary.""" return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), comment=d.get("comment", 
None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + full_name=d.get("full_name", None), + id=d.get("id", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), name=d.get("name", None), + owner=d.get("owner", None), + purpose=_enum(d, "purpose", CredentialPurpose), read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - url=d.get("url", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + used_for_managed_storage=d.get("used_for_managed_storage", None), ) -@dataclass -class CreateFunction: - name: str - """Name of function, relative to parent schema.""" +class CredentialPurpose(Enum): - catalog_name: str - """Name of parent catalog.""" + SERVICE = "SERVICE" + STORAGE = "STORAGE" - schema_name: str - """Name of parent schema relative to its parent catalog.""" - - input_params: FunctionParameterInfos - data_type: ColumnTypeName - """Scalar function return data type.""" - - full_data_type: str - """Pretty printed function data type.""" +class CredentialType(Enum): + """Next Id: 12""" - routine_body: CreateFunctionRoutineBody - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + BEARER_TOKEN = "BEARER_TOKEN" + OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN" + OAUTH_M2M = "OAUTH_M2M" + OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN" + OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD" + OAUTH_U2M = "OAUTH_U2M" + OAUTH_U2M_MAPPING = "OAUTH_U2M_MAPPING" + OIDC_TOKEN = "OIDC_TOKEN" + PEM_PRIVATE_KEY = "PEM_PRIVATE_KEY" + SERVICE_CREDENTIAL = "SERVICE_CREDENTIAL" + UNKNOWN_CREDENTIAL_TYPE = "UNKNOWN_CREDENTIAL_TYPE" + USERNAME_PASSWORD = "USERNAME_PASSWORD" - routine_definition: str - """Function body.""" - parameter_style: CreateFunctionParameterStyle - """Function parameter style. **S** is the value for SQL.""" +@dataclass +class CredentialValidationResult: + message: Optional[str] = None + """Error message would exist when the result does not equal to **PASS**.""" - is_deterministic: bool - """Whether the function is deterministic.""" + result: Optional[ValidateCredentialResult] = None + """The results of the tested operation.""" - sql_data_access: CreateFunctionSqlDataAccess - """Function SQL data access.""" + def as_dict(self) -> dict: + """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.result is not None: + body["result"] = self.result.value + return body - is_null_call: bool - """Function null call.""" + def as_shallow_dict(self) -> dict: + """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.result is not None: + body["result"] = self.result + return body - security_type: 
CreateFunctionSecurityType - """Function security type.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult: + """Deserializes the CredentialValidationResult from a dictionary.""" + return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult)) - specific_name: str - """Specific name of the function; Reserved for future use.""" - comment: Optional[str] = None - """User-provided free-form text description.""" +class DataSourceFormat(Enum): + """Data source format""" - external_language: Optional[str] = None - """External function language.""" + AVRO = "AVRO" + BIGQUERY_FORMAT = "BIGQUERY_FORMAT" + CSV = "CSV" + DATABRICKS_FORMAT = "DATABRICKS_FORMAT" + DELTA = "DELTA" + DELTASHARING = "DELTASHARING" + HIVE_CUSTOM = "HIVE_CUSTOM" + HIVE_SERDE = "HIVE_SERDE" + JSON = "JSON" + MYSQL_FORMAT = "MYSQL_FORMAT" + NETSUITE_FORMAT = "NETSUITE_FORMAT" + ORC = "ORC" + PARQUET = "PARQUET" + POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT" + REDSHIFT_FORMAT = "REDSHIFT_FORMAT" + SALESFORCE_FORMAT = "SALESFORCE_FORMAT" + SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT" + SQLDW_FORMAT = "SQLDW_FORMAT" + SQLSERVER_FORMAT = "SQLSERVER_FORMAT" + TEXT = "TEXT" + UNITY_CATALOG = "UNITY_CATALOG" + VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT" + WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT" - external_name: Optional[str] = None - """External function name.""" - properties: Optional[str] = None - """JSON-serialized key-value pair map, encoded (escaped) as a string.""" +@dataclass +class DatabricksGcpServiceAccount: + """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" - return_params: Optional[FunctionParameterInfos] = None - """Table function return parameters.""" + credential_id: Optional[str] = None + """The Databricks internal ID that represents this managed identity.""" - routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + email: Optional[str] = None + """The email of the service account.""" - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" + private_key_id: Optional[str] = None + """The ID that represents the private key for this Service Account""" def as_dict(self) -> dict: - """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body.""" + """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.name is not None: - body["name"] = self.name - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style.value - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params.as_dict() - if self.routine_body is not None: - body["routine_body"] = 
self.routine_body.value - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type.value - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access.value - if self.sql_path is not None: - body["sql_path"] = self.sql_path + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + if self.private_key_id is not None: + body["private_key_id"] = self.private_key_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateFunction into a shallow dictionary of its immediate attributes.""" + """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.input_params: - body["input_params"] = self.input_params - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.name is not None: - body["name"] = self.name - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style - if self.properties is not None: - 
body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params - if self.routine_body is not None: - body["routine_body"] = self.routine_body - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access - if self.sql_path is not None: - body["sql_path"] = self.sql_path + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + if self.private_key_id is not None: + body["private_key_id"] = self.private_key_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: - """Deserializes the CreateFunction from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount: + """Deserializes the DatabricksGcpServiceAccount from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - data_type=_enum(d, "data_type", ColumnTypeName), - external_language=d.get("external_language", None), - external_name=d.get("external_name", None), - full_data_type=d.get("full_data_type", None), - input_params=_from_dict(d, "input_params", FunctionParameterInfos), - is_deterministic=d.get("is_deterministic", None), - is_null_call=d.get("is_null_call", None), - name=d.get("name", None), - parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle), - properties=d.get("properties", None), - return_params=_from_dict(d, "return_params", FunctionParameterInfos), - routine_body=_enum(d, "routine_body", 
CreateFunctionRoutineBody), - routine_definition=d.get("routine_definition", None), - routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), - schema_name=d.get("schema_name", None), - security_type=_enum(d, "security_type", CreateFunctionSecurityType), - specific_name=d.get("specific_name", None), - sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess), - sql_path=d.get("sql_path", None), + credential_id=d.get("credential_id", None), + email=d.get("email", None), + private_key_id=d.get("private_key_id", None), ) -class CreateFunctionParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" - - S = "S" - - @dataclass -class CreateFunctionRequest: - function_info: CreateFunction - """Partial __FunctionInfo__ specifying the function to be created.""" +class DatabricksGcpServiceAccountRequest: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" def as_dict(self) -> dict: - """Serializes the CreateFunctionRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_info: - body["function_info"] = self.function_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_info: - body["function_info"] = self.function_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFunctionRequest: - """Deserializes the CreateFunctionRequest from a dictionary.""" - return cls(function_info=_from_dict(d, "function_info", CreateFunction)) - - -class CreateFunctionRoutineBody(Enum): - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest: + """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary.""" + return cls() - EXTERNAL = "EXTERNAL" - SQL = "SQL" +@dataclass +class DatabricksGcpServiceAccountResponse: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" -class CreateFunctionSecurityType(Enum): - """The security type of the function.""" + credential_id: Optional[str] = None + """The Databricks internal ID that represents this managed identity.""" - DEFINER = "DEFINER" + email: Optional[str] = None + """The email of the service account.""" + def as_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + return body -class CreateFunctionSqlDataAccess(Enum): - """Function SQL data access.""" + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + return body - CONTAINS_SQL = "CONTAINS_SQL" - NO_SQL = "NO_SQL" - READS_SQL_DATA = "READS_SQL_DATA" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: + """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary.""" + return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) @dataclass 
-class CreateMetastore: - name: str - """The user-specified name of the metastore.""" - - region: Optional[str] = None - """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - - storage_root: Optional[str] = None - """The storage root URL for metastore""" - +class DeleteAliasResponse: def as_dict(self) -> dict: - """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root return body def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: - """Deserializes the CreateMetastore from a dictionary.""" - return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) + def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: + """Deserializes the DeleteAliasResponse from a dictionary.""" + return cls() @dataclass -class CreateMetastoreAssignment: - metastore_id: str - """The unique ID of the metastore.""" +class DeleteCredentialResponse: + def as_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - default_catalog_name: str - """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default - Namespace API" to configure the default catalog for a Databricks workspace.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: + """Deserializes the DeleteCredentialResponse from a dictionary.""" + return cls() - workspace_id: Optional[int] = None - """A workspace ID.""" +@dataclass +class DeleteResponse: def as_dict(self) -> dict: - """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment: - """Deserializes the CreateMetastoreAssignment from a dictionary.""" - return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" 
+ return cls() @dataclass -class CreateMonitor: - assets_dir: str - """The directory to store monitoring assets (e.g. dashboard, metric tables).""" - - output_schema_name: str - """Schema where output metric tables are created.""" - - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. Columns in the monitored - table should also be present in the baseline table.""" +class DeltaRuntimePropertiesKvPairs: + """Properties pertaining to the current state of the delta table as given by the commit server. + This does not contain **delta.*** (input) properties in __TableInfo.properties__.""" - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" + delta_runtime_properties: Dict[str, str] + """A map of key-value properties attached to the securable.""" - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" + def as_dict(self) -> dict: + """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.delta_runtime_properties: + body["delta_runtime_properties"] = self.delta_runtime_properties + return body - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" + body = {} + if self.delta_runtime_properties: + body["delta_runtime_properties"] = self.delta_runtime_properties + return body - notifications: Optional[MonitorNotifications] = None - """The notification settings for the monitor.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> 
DeltaRuntimePropertiesKvPairs: + """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" + return cls(delta_runtime_properties=d.get("delta_runtime_properties", None)) - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" - skip_builtin_dashboard: Optional[bool] = None - """Whether to skip creating a default dashboard summarizing data quality metrics.""" +class DeltaSharingScopeEnum(Enum): - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. - For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + INTERNAL = "INTERNAL" + INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" - table_name: Optional[str] = None - """Full name of the table.""" +@dataclass +class Dependency: + """A dependency of a SQL object. Either the __table__ field or the __function__ field must be + defined.""" - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" + function: Optional[FunctionDependency] = None + """A function that is dependent on a SQL object.""" - warehouse_id: Optional[str] = None - """Optional argument to specify the warehouse for dashboard creation. 
If not specified, the first - running warehouse will be used.""" + table: Optional[TableDependency] = None + """A table that is dependent on a SQL object.""" def as_dict(self) -> dict: - """Serializes the CreateMonitor into a dictionary suitable for use as a JSON request body.""" + """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.skip_builtin_dashboard is not None: - body["skip_builtin_dashboard"] = self.skip_builtin_dashboard - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.function: + body["function"] = self.function.as_dict() + if self.table: + body["table"] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes.""" + """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets_dir is not None: - body["assets_dir"] = 
self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = self.custom_metrics - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config - if self.inference_log: - body["inference_log"] = self.inference_log - if self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule - if self.skip_builtin_dashboard is not None: - body["skip_builtin_dashboard"] = self.skip_builtin_dashboard - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.function: + body["function"] = self.function + if self.table: + body["table"] = self.table return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMonitor: - """Deserializes the CreateMonitor from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> Dependency: + """Deserializes the Dependency from a dictionary.""" return cls( - assets_dir=d.get("assets_dir", None), - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - skip_builtin_dashboard=d.get("skip_builtin_dashboard", None), - 
slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - warehouse_id=d.get("warehouse_id", None), + function=_from_dict(d, "function", FunctionDependency), table=_from_dict(d, "table", TableDependency) ) @dataclass -class CreateRegisteredModelRequest: - catalog_name: str - """The name of the catalog where the schema and the registered model reside""" - - schema_name: str - """The name of the schema where the registered model resides""" - - name: str - """The name of the registered model""" - - comment: Optional[str] = None - """The comment attached to the registered model""" +class DependencyList: + """A list of dependencies.""" - storage_location: Optional[str] = None - """The storage location on the cloud under which model version data files are stored""" + dependencies: Optional[List[Dependency]] = None + """Array of dependencies.""" def as_dict(self) -> dict: - """Serializes the CreateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the DependencyList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location + if self.dependencies: + body["dependencies"] = [v.as_dict() for v in self.dependencies] return body def as_shallow_dict(self) -> dict: - """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - 
body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location + if self.dependencies: + body["dependencies"] = self.dependencies return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRegisteredModelRequest: - """Deserializes the CreateRegisteredModelRequest from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DependencyList: + """Deserializes the DependencyList from a dictionary.""" + return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) @dataclass -class CreateResponse: +class DisableResponse: def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DisableResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: + """Deserializes the DisableResponse from a dictionary.""" return cls() @dataclass -class CreateSchema: - name: str - """Name of schema, relative to parent catalog.""" - - catalog_name: str - """Name of parent catalog.""" - - comment: Optional[str] = None - """User-provided free-form 
text description.""" - - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" +class EffectivePermissionsList: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - storage_root: Optional[str] = None - """Storage root URL for managed tables within schema.""" + privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None + """The privileges conveyed to each principal (either directly or via inheritance)""" def as_dict(self) -> dict: - """Serializes the CreateSchema into a dictionary suitable for use as a JSON request body.""" + """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: - """Serializes the CreateSchema into a shallow dictionary of its immediate attributes.""" + """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - 
body["storage_root"] = self.storage_root + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateSchema: - """Deserializes the CreateSchema from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList: + """Deserializes the EffectivePermissionsList from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - properties=d.get("properties", None), - storage_root=d.get("storage_root", None), + next_page_token=d.get("next_page_token", None), + privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment), ) @dataclass -class CreateStorageCredential: - name: str - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" +class EffectivePredictiveOptimizationFlag: + value: EnablePredictiveOptimization + """Whether predictive optimization should be enabled for this object and objects under it.""" - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" + inherited_from_name: Optional[str] = None + """The name of the object from which the flag was inherited. If there was no inheritance, this + field is left blank.""" - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the created credential.""" + inherited_from_type: Optional[EffectivePredictiveOptimizationFlagInheritedFromType] = None + """The type of the object from which the flag was inherited. If there was no inheritance, this + field is left blank.""" def as_dict(self) -> dict: - """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" + """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.value is not None: + body["value"] = self.value.value return body def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageCredential into a shallow 
dictionary of its immediate attributes.""" + """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.value is not None: + body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: - """Deserializes the CreateStorageCredential from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: + """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - 
), - name=d.get("name", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", EffectivePredictiveOptimizationFlagInheritedFromType), + value=_enum(d, "value", EnablePredictiveOptimization), ) -@dataclass -class CreateTableConstraint: - full_name_arg: str - """The full name of the table referenced by the constraint.""" +class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): + """The type of the object from which the flag was inherited. If there was no inheritance, this + field is left blank.""" - constraint: TableConstraint - """A table constraint, as defined by *one* of the following fields being set: - __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" + CATALOG = "CATALOG" + SCHEMA = "SCHEMA" - def as_dict(self) -> dict: - """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.constraint: - body["constraint"] = self.constraint.as_dict() - if self.full_name_arg is not None: - body["full_name_arg"] = self.full_name_arg - return body - def as_shallow_dict(self) -> dict: - """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.constraint: - body["constraint"] = self.constraint - if self.full_name_arg is not None: - body["full_name_arg"] = self.full_name_arg - return body +@dataclass +class EffectivePrivilege: + inherited_from_name: Optional[str] = None + """The full name of the object that conveys this privilege via inheritance. 
This field is omitted + when privilege is not inherited (it's assigned to the securable itself).""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTableConstraint: - """Deserializes the CreateTableConstraint from a dictionary.""" - return cls(constraint=_from_dict(d, "constraint", TableConstraint), full_name_arg=d.get("full_name_arg", None)) + inherited_from_type: Optional[SecurableType] = None + """The type of the object that conveys this privilege via inheritance. This field is omitted when + privilege is not inherited (it's assigned to the securable itself).""" + + privilege: Optional[Privilege] = None + """The privilege assigned to the principal.""" + + def as_dict(self) -> dict: + """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.privilege is not None: + body["privilege"] = self.privilege.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.privilege is not None: + body["privilege"] = self.privilege + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: + """Deserializes the EffectivePrivilege from a dictionary.""" + return cls( + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", SecurableType), + privilege=_enum(d, "privilege", Privilege), + ) @dataclass -class CreateVolumeRequestContent: - catalog_name: str - """The name of the catalog where the schema 
and the volume are""" +class EffectivePrivilegeAssignment: + principal: Optional[str] = None + """The principal (user email address or group name).""" - schema_name: str - """The name of the schema where the volume is""" + privileges: Optional[List[EffectivePrivilege]] = None + """The privileges conveyed to the principal (either directly or via inheritance).""" - name: str - """The name of the volume""" + def as_dict(self) -> dict: + """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.as_dict() for v in self.privileges] + return body - volume_type: VolumeType - """The type of the volume. An external volume is located in the specified external location. A - managed volume is located in the default location which is specified by the parent schema, or - the parent catalog, or the Metastore. [Learn more] - - [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges + return body - comment: Optional[str] = None - """The comment attached to the volume""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilegeAssignment: + """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" + return cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) - storage_location: Optional[str] = None - """The storage location on the cloud""" +class EnablePredictiveOptimization(Enum): + + DISABLE = "DISABLE" + ENABLE = "ENABLE" + INHERIT = "INHERIT" + + +@dataclass +class EnableResponse: def as_dict(self) -> dict: - 
"""Serializes the CreateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.volume_type is not None: - body["volume_type"] = self.volume_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" + """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.volume_type is not None: - body["volume_type"] = self.volume_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent: - """Deserializes the CreateVolumeRequestContent from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - volume_type=_enum(d, "volume_type", VolumeType), - ) + def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: + """Deserializes the EnableResponse from a dictionary.""" + return cls() @dataclass -class CredentialInfo: - aws_iam_role: Optional[AwsIamRole] = 
None - """The AWS IAM role configuration.""" +class EncryptionDetails: + """Encryption options that apply to clients connecting to cloud storage.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" + sse_encryption_details: Optional[SseEncryptionDetails] = None + """Server-Side Encryption properties for clients communicating with AWS s3.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" + def as_dict(self) -> dict: + """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: + """Deserializes the EncryptionDetails from a dictionary.""" + return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + + +@dataclass +class ExternalLocationInfo: + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" comment: Optional[str] = None - """Comment associated with the credential.""" + """User-provided free-form text description.""" created_at: Optional[int] = None - """Time at which this credential was created, in epoch milliseconds.""" + """Time at which this external location was created, in epoch milliseconds.""" created_by: Optional[str] = None - """Username of credential creator.""" + """Username of external location creator.""" - 
databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """The Databricks managed GCP service account configuration.""" + credential_id: Optional[str] = None + """Unique ID of the location's storage credential.""" - full_name: Optional[str] = None - """The full name of the credential.""" + credential_name: Optional[str] = None + """Name of the storage credential used with this location.""" - id: Optional[str] = None - """The unique identifier of the credential.""" + enable_file_events: Optional[bool] = None + """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" + + encryption_details: Optional[EncryptionDetails] = None + """Encryption options that apply to clients connecting to cloud storage.""" + + fallback: Optional[bool] = None + """Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient.""" + + file_event_queue: Optional[FileEventQueue] = None + """[Create:OPT Update:OPT] File event queue settings.""" isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" metastore_id: Optional[str] = None - """Unique identifier of the parent metastore.""" + """Unique identifier of metastore hosting the external location.""" name: Optional[str] = None - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" + """Name of the external location.""" owner: Optional[str] = None - """Username of current owner of credential.""" - - purpose: Optional[CredentialPurpose] = None - """Indicates the purpose of the credential.""" + """The owner of the external location.""" read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" + """Indicates whether the external location is read-only.""" updated_at: Optional[int] = None - """Time at which this credential was last modified, in epoch milliseconds.""" + """Time at which external location this was last modified, in epoch milliseconds.""" updated_by: Optional[str] = None - """Username of user who last modified the credential.""" + """Username of user who last modified the external location.""" - used_for_managed_storage: Optional[bool] = None - """Whether this credential is the current metastore's root storage credential. Only applicable when - purpose is **STORAGE**.""" + url: Optional[str] = None + """Path URL of the external location.""" def as_dict(self) -> dict: - """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.browse_only is not None: + body["browse_only"] = self.browse_only if self.comment is not None: body["comment"] = self.comment if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + 
body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue.as_dict() if self.isolation_mode is not None: body["isolation_mode"] = self.isolation_mode.value if self.metastore_id is not None: @@ -2856,39 +2646,39 @@ def as_dict(self) -> dict: body["name"] = self.name if self.owner is not None: body["owner"] = self.owner - if self.purpose is not None: - body["purpose"] = self.purpose.value if self.read_only is not None: body["read_only"] = self.read_only if self.updated_at is not None: body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal + if self.browse_only is not None: + body["browse_only"] = self.browse_only if self.comment is not None: body["comment"] = self.comment if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: 
- body["id"] = self.id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue if self.isolation_mode is not None: body["isolation_mode"] = self.isolation_mode if self.metastore_id is not None: @@ -2897,1752 +2687,1788 @@ def as_shallow_dict(self) -> dict: body["name"] = self.name if self.owner is not None: body["owner"] = self.owner - if self.purpose is not None: - body["purpose"] = self.purpose if self.read_only is not None: body["read_only"] = self.read_only if self.updated_at is not None: body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage + if self.url is not None: + body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: - """Deserializes the CredentialInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: + """Deserializes the ExternalLocationInfo from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + browse_only=d.get("browse_only", None), comment=d.get("comment", None), created_at=d.get("created_at", None), created_by=d.get("created_by", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), - 
full_name=d.get("full_name", None), - id=d.get("id", None), + credential_id=d.get("credential_id", None), + credential_name=d.get("credential_name", None), + enable_file_events=d.get("enable_file_events", None), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + fallback=d.get("fallback", None), + file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), isolation_mode=_enum(d, "isolation_mode", IsolationMode), metastore_id=d.get("metastore_id", None), name=d.get("name", None), owner=d.get("owner", None), - purpose=_enum(d, "purpose", CredentialPurpose), read_only=d.get("read_only", None), updated_at=d.get("updated_at", None), updated_by=d.get("updated_by", None), - used_for_managed_storage=d.get("used_for_managed_storage", None), + url=d.get("url", None), ) -class CredentialPurpose(Enum): - - SERVICE = "SERVICE" - STORAGE = "STORAGE" - - -class CredentialType(Enum): - """Next Id: 12""" - - BEARER_TOKEN = "BEARER_TOKEN" - OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN" - OAUTH_M2M = "OAUTH_M2M" - OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN" - OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD" - OAUTH_U2M = "OAUTH_U2M" - OAUTH_U2M_MAPPING = "OAUTH_U2M_MAPPING" - OIDC_TOKEN = "OIDC_TOKEN" - PEM_PRIVATE_KEY = "PEM_PRIVATE_KEY" - SERVICE_CREDENTIAL = "SERVICE_CREDENTIAL" - UNKNOWN_CREDENTIAL_TYPE = "UNKNOWN_CREDENTIAL_TYPE" - USERNAME_PASSWORD = "USERNAME_PASSWORD" - - @dataclass -class CredentialValidationResult: - message: Optional[str] = None - """Error message would exist when the result does not equal to **PASS**.""" +class FailedStatus: + """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the + ONLINE_PIPELINE_FAILED state.""" - result: Optional[ValidateCredentialResult] = None - """The results of the tested operation.""" + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the online table. 
Note that this Delta + version may only be partially synced to the online table. Only populated if the table is still + online and available for serving.""" + + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the online + table. Only populated if the table is still online and available for serving.""" def as_dict(self) -> dict: - """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body.""" + """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.result is not None: - body["result"] = self.result.value + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: - """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes.""" + """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.result is not None: - body["result"] = self.result + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult: - """Deserializes the CredentialValidationResult from a dictionary.""" - return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult)) - + def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: + """Deserializes the FailedStatus from a dictionary.""" + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + 
timestamp=d.get("timestamp", None), + ) -class DataSourceFormat(Enum): - """Data source format""" - AVRO = "AVRO" - BIGQUERY_FORMAT = "BIGQUERY_FORMAT" - CSV = "CSV" - DATABRICKS_FORMAT = "DATABRICKS_FORMAT" - DELTA = "DELTA" - DELTASHARING = "DELTASHARING" - HIVE_CUSTOM = "HIVE_CUSTOM" - HIVE_SERDE = "HIVE_SERDE" - JSON = "JSON" - MYSQL_FORMAT = "MYSQL_FORMAT" - NETSUITE_FORMAT = "NETSUITE_FORMAT" - ORC = "ORC" - PARQUET = "PARQUET" - POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT" - REDSHIFT_FORMAT = "REDSHIFT_FORMAT" - SALESFORCE_FORMAT = "SALESFORCE_FORMAT" - SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT" - SQLDW_FORMAT = "SQLDW_FORMAT" - SQLSERVER_FORMAT = "SQLSERVER_FORMAT" - TEXT = "TEXT" - UNITY_CATALOG = "UNITY_CATALOG" - VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT" - WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT" +@dataclass +class FileEventQueue: + managed_aqs: Optional[AzureQueueStorage] = None + managed_pubsub: Optional[GcpPubsub] = None -@dataclass -class DatabricksGcpServiceAccount: - """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" + managed_sqs: Optional[AwsSqsQueue] = None - credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity.""" + provided_aqs: Optional[AzureQueueStorage] = None - email: Optional[str] = None - """The email of the service account.""" + provided_pubsub: Optional[GcpPubsub] = None - private_key_id: Optional[str] = None - """The ID that represents the private key for this Service Account""" + provided_sqs: Optional[AwsSqsQueue] = None def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body.""" + """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email - if self.private_key_id is not None: - body["private_key_id"] = self.private_key_id + if self.managed_aqs: + body["managed_aqs"] = self.managed_aqs.as_dict() + if self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub.as_dict() + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs.as_dict() + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs.as_dict() + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub.as_dict() + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes.""" + """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email - if self.private_key_id is not None: - body["private_key_id"] = self.private_key_id + if self.managed_aqs: + 
body["managed_aqs"] = self.managed_aqs + if self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount: - """Deserializes the DatabricksGcpServiceAccount from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue: + """Deserializes the FileEventQueue from a dictionary.""" return cls( - credential_id=d.get("credential_id", None), - email=d.get("email", None), - private_key_id=d.get("private_key_id", None), + managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), + managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), + managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), + provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), + provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), + provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), ) @dataclass -class DatabricksGcpServiceAccountRequest: - """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" +class ForeignKeyConstraint: + name: str + """The name of the constraint.""" + + child_columns: List[str] + """Column names for this constraint.""" + + parent_table: str + """The full name of the parent constraint.""" + + parent_columns: List[str] + """Column names for this constraint.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.parent_columns: + body["parent_columns"] = [v for v in self.parent_columns] + if self.parent_table is not None: + body["parent_table"] = self.parent_table return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.parent_columns: + body["parent_columns"] = self.parent_columns + if self.parent_table is not None: + body["parent_table"] = self.parent_table return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest: - """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: + """Deserializes the ForeignKeyConstraint from a dictionary.""" + return cls( + child_columns=d.get("child_columns", None), + name=d.get("name", None), + parent_columns=d.get("parent_columns", None), + parent_table=d.get("parent_table", None), + ) @dataclass -class 
DatabricksGcpServiceAccountResponse: - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" +class FunctionDependency: + """A function that is dependent on a SQL object.""" - credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity.""" - - email: Optional[str] = None - """The email of the service account.""" + function_full_name: str + """Full name of the dependent function, in the form of + __catalog_name__.__schema_name__.__function_name__.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: - """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary.""" - return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) + def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: + """Deserializes the FunctionDependency from a dictionary.""" + return cls(function_full_name=d.get("function_full_name", None)) 
@dataclass -class DeleteAliasResponse: - def as_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body +class FunctionInfo: + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + catalog_name: Optional[str] = None + """Name of parent catalog.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: - """Deserializes the DeleteAliasResponse from a dictionary.""" - return cls() + comment: Optional[str] = None + """User-provided free-form text description.""" + created_at: Optional[int] = None + """Time at which this function was created, in epoch milliseconds.""" -@dataclass -class DeleteCredentialResponse: - def as_dict(self) -> dict: - """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body + created_by: Optional[str] = None + """Username of function creator.""" - def as_shallow_dict(self) -> dict: - """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + data_type: Optional[ColumnTypeName] = None + """Scalar function return data type.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: - """Deserializes the DeleteCredentialResponse from a dictionary.""" - return cls() + external_language: Optional[str] = None + """External function language.""" + external_name: Optional[str] = None + """External function name.""" -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a 
JSON request body.""" - body = {} - return body + full_data_type: Optional[str] = None + """Pretty printed function data type.""" - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + full_name: Optional[str] = None + """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() + function_id: Optional[str] = None + """Id of Function, relative to parent schema.""" + input_params: Optional[FunctionParameterInfos] = None -@dataclass -class DeltaRuntimePropertiesKvPairs: - """Properties pertaining to the current state of the delta table as given by the commit server. - This does not contain **delta.*** (input) properties in __TableInfo.properties__.""" + is_deterministic: Optional[bool] = None + """Whether the function is deterministic.""" - delta_runtime_properties: Dict[str, str] - """A map of key-value properties attached to the securable.""" + is_null_call: Optional[bool] = None + """Function null call.""" - def as_dict(self) -> dict: - """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties - return body + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" - def as_shallow_dict(self) -> dict: - """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties - return body + name: Optional[str] = None + """Name of function, relative to parent schema.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
DeltaRuntimePropertiesKvPairs: - """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" - return cls(delta_runtime_properties=d.get("delta_runtime_properties", None)) + owner: Optional[str] = None + """Username of current owner of function.""" + parameter_style: Optional[FunctionInfoParameterStyle] = None + """Function parameter style. **S** is the value for SQL.""" -class DeltaSharingScopeEnum(Enum): + properties: Optional[str] = None + """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - INTERNAL = "INTERNAL" - INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" + return_params: Optional[FunctionParameterInfos] = None + """Table function return parameters.""" + routine_body: Optional[FunctionInfoRoutineBody] = None + """Function language. When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" -@dataclass -class Dependency: - """A dependency of a SQL object. 
Either the __table__ field or the __function__ field must be - defined.""" + routine_definition: Optional[str] = None + """Function body.""" - function: Optional[FunctionDependency] = None - """A function that is dependent on a SQL object.""" + routine_dependencies: Optional[DependencyList] = None + """Function dependencies.""" - table: Optional[TableDependency] = None - """A table that is dependent on a SQL object.""" + schema_name: Optional[str] = None + """Name of parent schema relative to its parent catalog.""" - def as_dict(self) -> dict: - """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function: - body["function"] = self.function.as_dict() - if self.table: - body["table"] = self.table.as_dict() - return body + security_type: Optional[FunctionInfoSecurityType] = None + """Function security type.""" - def as_shallow_dict(self) -> dict: - """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" - body = {} - if self.function: - body["function"] = self.function - if self.table: - body["table"] = self.table - return body + specific_name: Optional[str] = None + """Specific name of the function; Reserved for future use.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Dependency: - """Deserializes the Dependency from a dictionary.""" - return cls( - function=_from_dict(d, "function", FunctionDependency), table=_from_dict(d, "table", TableDependency) - ) + sql_data_access: Optional[FunctionInfoSqlDataAccess] = None + """Function SQL data access.""" + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" -@dataclass -class DependencyList: - """A list of dependencies.""" + updated_at: Optional[int] = None + """Time at which this function was created, in epoch milliseconds.""" - dependencies: Optional[List[Dependency]] = None - """Array of dependencies.""" + updated_by: Optional[str] = None + """Username of 
user who last modified function.""" def as_dict(self) -> dict: - """Serializes the DependencyList into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dependencies: - body["dependencies"] = [v.as_dict() for v in self.dependencies] + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type.value + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params.as_dict() + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style.value + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params.as_dict() + if self.routine_body is not None: + body["routine_body"] = self.routine_body.value + if 
self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type.value + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access.value + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.dependencies: - body["dependencies"] = self.dependencies + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params + if self.is_deterministic is not 
None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DependencyList: - """Deserializes the DependencyList from a dictionary.""" - return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) + def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: + """Deserializes the FunctionInfo from a dictionary.""" + return cls( + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + data_type=_enum(d, "data_type", ColumnTypeName), + 
external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + full_name=d.get("full_name", None), + function_id=d.get("function_id", None), + input_params=_from_dict(d, "input_params", FunctionParameterInfos), + is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle), + properties=d.get("properties", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", FunctionInfoSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), + sql_path=d.get("sql_path", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) -@dataclass -class DisableResponse: - def as_dict(self) -> dict: - """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body +class FunctionInfoParameterStyle(Enum): + """Function parameter style. **S** is the value for SQL.""" - def as_shallow_dict(self) -> dict: - """Serializes the DisableResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + S = "S" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: - """Deserializes the DisableResponse from a dictionary.""" - return cls() +class FunctionInfoRoutineBody(Enum): + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" -@dataclass -class EffectivePermissionsList: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + EXTERNAL = "EXTERNAL" + SQL = "SQL" - privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None - """The privileges conveyed to each principal (either directly or via inheritance)""" - def as_dict(self) -> dict: - """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - return body +class FunctionInfoSecurityType(Enum): + """The security type of the function.""" - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - return body + DEFINER = "DEFINER" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList: - """Deserializes the EffectivePermissionsList from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment), - ) +class FunctionInfoSqlDataAccess(Enum): + """Function SQL data access.""" -@dataclass -class 
EffectivePredictiveOptimizationFlag: - value: EnablePredictiveOptimization - """Whether predictive optimization should be enabled for this object and objects under it.""" + CONTAINS_SQL = "CONTAINS_SQL" + NO_SQL = "NO_SQL" + READS_SQL_DATA = "READS_SQL_DATA" - inherited_from_name: Optional[str] = None - """The name of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" - inherited_from_type: Optional[EffectivePredictiveOptimizationFlagInheritedFromType] = None - """The type of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" +@dataclass +class FunctionParameterInfo: + name: str + """Name of parameter.""" - def as_dict(self) -> dict: - """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.value is not None: - body["value"] = self.value.value - return body + type_text: str + """Full data type spec, SQL/catalogString text.""" - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if self.value is not None: - body["value"] = self.value - return body + type_name: ColumnTypeName - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: - """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" - return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", 
EffectivePredictiveOptimizationFlagInheritedFromType), - value=_enum(d, "value", EnablePredictiveOptimization), - ) + position: int + """Ordinal position of column (starting at position 0).""" + comment: Optional[str] = None + """User-provided free-form text description.""" -class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): - """The type of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" + parameter_default: Optional[str] = None + """Default value of the parameter.""" - CATALOG = "CATALOG" - SCHEMA = "SCHEMA" + parameter_mode: Optional[FunctionParameterMode] = None + """The mode of the function parameter.""" + parameter_type: Optional[FunctionParameterType] = None + """The type of function parameter.""" -@dataclass -class EffectivePrivilege: - inherited_from_name: Optional[str] = None - """The full name of the object that conveys this privilege via inheritance. This field is omitted - when privilege is not inherited (it's assigned to the securable itself).""" + type_interval_type: Optional[str] = None + """Format of IntervalType.""" - inherited_from_type: Optional[SecurableType] = None - """The type of the object that conveys this privilege via inheritance. 
This field is omitted when - privilege is not inherited (it's assigned to the securable itself).""" + type_json: Optional[str] = None + """Full data type spec, JSON-serialized.""" - privilege: Optional[Privilege] = None - """The privilege assigned to the principal.""" + type_precision: Optional[int] = None + """Digits of precision; required on Create for DecimalTypes.""" - def as_dict(self) -> dict: - """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.privilege is not None: - body["privilege"] = self.privilege.value - return body + type_scale: Optional[int] = None + """Digits to right of decimal; Required on Create for DecimalTypes.""" - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" + def as_dict(self) -> dict: + """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if self.privilege is not None: - body["privilege"] = self.privilege + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode.value + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type.value + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] 
= self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name.value + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: - """Deserializes the EffectivePrivilege from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: + """Deserializes the FunctionParameterInfo from a dictionary.""" return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", SecurableType), - privilege=_enum(d, "privilege", Privilege), + comment=d.get("comment", None), + 
name=d.get("name", None), + parameter_default=d.get("parameter_default", None), + parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), + parameter_type=_enum(d, "parameter_type", FunctionParameterType), + position=d.get("position", None), + type_interval_type=d.get("type_interval_type", None), + type_json=d.get("type_json", None), + type_name=_enum(d, "type_name", ColumnTypeName), + type_precision=d.get("type_precision", None), + type_scale=d.get("type_scale", None), + type_text=d.get("type_text", None), ) @dataclass -class EffectivePrivilegeAssignment: - principal: Optional[str] = None - """The principal (user email address or group name).""" - - privileges: Optional[List[EffectivePrivilege]] = None - """The privileges conveyed to the principal (either directly or via inheritance).""" +class FunctionParameterInfos: + parameters: Optional[List[FunctionParameterInfo]] = None + """The array of __FunctionParameterInfo__ definitions of the function's parameters.""" def as_dict(self) -> dict: - """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.as_dict() for v in self.privileges] + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] return body def as_shallow_dict(self) -> dict: - """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges + if self.parameters: + body["parameters"] = self.parameters return body @classmethod - def from_dict(cls, d: Dict[str, 
Any]) -> EffectivePrivilegeAssignment: - """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) + def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: + """Deserializes the FunctionParameterInfos from a dictionary.""" + return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo)) -class EnablePredictiveOptimization(Enum): +class FunctionParameterMode(Enum): + """The mode of the function parameter.""" - DISABLE = "DISABLE" - ENABLE = "ENABLE" - INHERIT = "INHERIT" + IN = "IN" -@dataclass -class EnableRequest: - catalog_name: Optional[str] = None - """the catalog for which the system schema is to enabled in""" +class FunctionParameterType(Enum): + """The type of function parameter.""" - metastore_id: Optional[str] = None - """The metastore ID under which the system schema lives.""" + COLUMN = "COLUMN" + PARAM = "PARAM" - schema_name: Optional[str] = None - """Full name of the system schema.""" + +@dataclass +class GcpOauthToken: + """GCP temporary credentials for API authentication. 
Read more at + https://developers.google.com/identity/protocols/oauth2/service-account""" + + oauth_token: Optional[str] = None def as_dict(self) -> dict: - """Serializes the EnableRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token return body def as_shallow_dict(self) -> dict: - """Serializes the EnableRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: - """Deserializes the EnableRequest from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - metastore_id=d.get("metastore_id", None), - schema_name=d.get("schema_name", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken: + """Deserializes the GcpOauthToken from a dictionary.""" + return cls(oauth_token=d.get("oauth_token", None)) @dataclass -class EnableResponse: +class GcpPubsub: + managed_resource_id: Optional[str] = None + """Unique identifier included in the name of file events managed cloud resources.""" + + subscription_name: Optional[str] = None + """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription 
name} + REQUIRED for provided_pubsub.""" + def as_dict(self) -> dict: - """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" body = {} + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.subscription_name is not None: + body["subscription_name"] = self.subscription_name return body def as_shallow_dict(self) -> dict: - """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes.""" body = {} + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.subscription_name is not None: + body["subscription_name"] = self.subscription_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: - """Deserializes the EnableResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: + """Deserializes the GcpPubsub from a dictionary.""" + return cls( + managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None) + ) @dataclass -class EncryptionDetails: - """Encryption options that apply to clients connecting to cloud storage.""" +class GenerateTemporaryServiceCredentialAzureOptions: + """The Azure cloud options to customize the requested temporary credential""" - sse_encryption_details: Optional[SseEncryptionDetails] = None - """Server-Side Encryption properties for clients communicating with AWS s3.""" + resources: Optional[List[str]] = None + """The resources to which the temporary Azure credential should apply. 
These resources are the + scopes that are passed to the token provider (see + https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" def as_dict(self) -> dict: - """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + if self.resources: + body["resources"] = [v for v in self.resources] return body def as_shallow_dict(self) -> dict: - """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details + if self.resources: + body["resources"] = self.resources return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: - """Deserializes the EncryptionDetails from a dictionary.""" - return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions: + """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" + return cls(resources=d.get("resources", None)) @dataclass -class ExternalLocationInfo: - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" +class GenerateTemporaryServiceCredentialGcpOptions: + """The GCP cloud options to customize the requested temporary credential""" - comment: Optional[str] = None - """User-provided free-form text 
description.""" + scopes: Optional[List[str]] = None + """The scopes to which the temporary GCP credential should apply. These resources are the scopes + that are passed to the token provider (see + https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" - created_at: Optional[int] = None - """Time at which this external location was created, in epoch milliseconds.""" + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.scopes: + body["scopes"] = [v for v in self.scopes] + return body - created_by: Optional[str] = None - """Username of external location creator.""" + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scopes: + body["scopes"] = self.scopes + return body - credential_id: Optional[str] = None - """Unique ID of the location's storage credential.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions: + """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" + return cls(scopes=d.get("scopes", None)) - credential_name: Optional[str] = None - """Name of the storage credential used with this location.""" - enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" +@dataclass +class GenerateTemporaryTableCredentialResponse: + aws_temp_credentials: Optional[AwsCredentials] = None + """AWS temporary credentials for API authentication. 
Read more at + https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" - - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" - - file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" - - isolation_mode: Optional[IsolationMode] = None - - metastore_id: Optional[str] = None - """Unique identifier of metastore hosting the external location.""" - - name: Optional[str] = None - """Name of the external location.""" + azure_aad: Optional[AzureActiveDirectoryToken] = None + """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + Identity. Read more at + https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - owner: Optional[str] = None - """The owner of the external location.""" + azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None + """Azure temporary credentials for API authentication. Read more at + https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas""" - read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. The API client is advised to + cache the credential given this expiration time.""" - updated_at: Optional[int] = None - """Time at which external location this was last modified, in epoch milliseconds.""" + gcp_oauth_token: Optional[GcpOauthToken] = None + """GCP temporary credentials for API authentication. 
Read more at + https://developers.google.com/identity/protocols/oauth2/service-account""" - updated_by: Optional[str] = None - """Username of user who last modified the external location.""" + r2_temp_credentials: Optional[R2Credentials] = None + """R2 temporary credentials for API authentication. Read more at + https://developers.cloudflare.com/r2/api/s3/tokens/.""" url: Optional[str] = None - """Path URL of the external location.""" + """The URL of the storage path accessible by the temporary credential.""" def as_dict(self) -> dict: - """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = 
self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() if self.url is not None: body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not 
None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials if self.url is not None: body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: - """Deserializes the ExternalLocationInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse: + """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary.""" return cls( - browse_only=d.get("browse_only", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - credential_id=d.get("credential_id", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), + aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), + 
azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), + r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), url=d.get("url", None), ) @dataclass -class FailedStatus: - """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the - ONLINE_PIPELINE_FAILED state.""" - - last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the online table. Note that this Delta - version may only be partially synced to the online table. Only populated if the table is still - online and available for serving.""" - - timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the online - table. Only populated if the table is still online and available for serving.""" +class GetCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" def as_dict(self) -> dict: - """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] return body def as_shallow_dict(self) -> dict: - """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" + """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is 
not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.workspaces: + body["workspaces"] = self.workspaces return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: - """Deserializes the FailedStatus from a dictionary.""" - return cls( - last_processed_commit_version=d.get("last_processed_commit_version", None), - timestamp=d.get("timestamp", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse: + """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) @dataclass -class FileEventQueue: - managed_aqs: Optional[AzureQueueStorage] = None +class GetMetastoreSummaryResponse: + cloud: Optional[str] = None + """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" - managed_pubsub: Optional[GcpPubsub] = None + created_at: Optional[int] = None + """Time at which this metastore was created, in epoch milliseconds.""" - managed_sqs: Optional[AwsSqsQueue] = None + created_by: Optional[str] = None + """Username of metastore creator.""" - provided_aqs: Optional[AzureQueueStorage] = None + default_data_access_config_id: Optional[str] = None + """Unique identifier of the metastore's (Default) Data Access Configuration.""" - provided_pubsub: Optional[GcpPubsub] = None + delta_sharing_organization_name: Optional[str] = None + """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta + Sharing as the official name.""" - provided_sqs: Optional[AwsSqsQueue] = None + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None + """The lifetime of delta sharing recipient token in seconds.""" - def as_dict(self) -> dict: - """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.managed_aqs: - body["managed_aqs"] = 
self.managed_aqs.as_dict() - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub.as_dict() - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs.as_dict() - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs.as_dict() - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub.as_dict() - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs.as_dict() - return body + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None + """The scope of Delta Sharing enabled for the metastore.""" - def as_shallow_dict(self) -> dict: - """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" - body = {} - if self.managed_aqs: - body["managed_aqs"] = self.managed_aqs - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs - return body + external_access_enabled: Optional[bool] = None + """Whether to allow non-DBR clients to directly access entities under the metastore.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue: - """Deserializes the FileEventQueue from a dictionary.""" - return cls( - managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), - managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), - managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), - provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), - provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), - provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), - ) + global_metastore_id: Optional[str] = None + """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`.""" + metastore_id: Optional[str] = None + """Unique identifier of 
metastore.""" -@dataclass -class ForeignKeyConstraint: - name: str - """The name of the constraint.""" + name: Optional[str] = None + """The user-specified name of the metastore.""" - child_columns: List[str] - """Column names for this constraint.""" + owner: Optional[str] = None + """The owner of the metastore.""" - parent_table: str - """The full name of the parent constraint.""" + privilege_model_version: Optional[str] = None + """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - parent_columns: List[str] - """Column names for this constraint.""" + region: Optional[str] = None + """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" + + storage_root: Optional[str] = None + """The storage root URL for metastore""" + + storage_root_credential_id: Optional[str] = None + """UUID of storage credential to access the metastore storage_root.""" + + storage_root_credential_name: Optional[str] = None + """Name of the storage credential to access the metastore storage_root.""" + + updated_at: Optional[int] = None + """Time at which the metastore was last modified, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified the metastore.""" def as_dict(self) -> dict: - """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + 
body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope.value + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = [v for v in self.parent_columns] - if self.parent_table is not None: - body["parent_table"] = self.parent_table + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" + """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: - body["child_columns"] = self.child_columns + if self.cloud is not None: + body["cloud"] = self.cloud + if 
self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = self.parent_columns - if self.parent_table is not None: - body["parent_table"] = self.parent_table + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: - 
"""Deserializes the ForeignKeyConstraint from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: + """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" return cls( - child_columns=d.get("child_columns", None), + cloud=d.get("cloud", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + default_data_access_config_id=d.get("default_data_access_config_id", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + ), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + external_access_enabled=d.get("external_access_enabled", None), + global_metastore_id=d.get("global_metastore_id", None), + metastore_id=d.get("metastore_id", None), name=d.get("name", None), - parent_columns=d.get("parent_columns", None), - parent_table=d.get("parent_table", None), + owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + region=d.get("region", None), + storage_root=d.get("storage_root", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), + storage_root_credential_name=d.get("storage_root_credential_name", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), ) @dataclass -class FunctionDependency: - """A function that is dependent on a SQL object.""" +class GetPermissionsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" - function_full_name: str - """Full name of the dependent function, in the form of - __catalog_name__.__schema_name__.__function_name__.""" + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + """The privileges assigned to each principal""" def as_dict(self) -> dict: - """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" + """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: - """Deserializes the FunctionDependency from a dictionary.""" - return cls(function_full_name=d.get("function_full_name", None)) + def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse: + """Deserializes the GetPermissionsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), + ) @dataclass -class FunctionInfo: - browse_only: 
Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """Name of parent catalog.""" +class GetQuotaResponse: + quota_info: Optional[QuotaInfo] = None + """The returned QuotaInfo.""" - comment: Optional[str] = None - """User-provided free-form text description.""" + def as_dict(self) -> dict: + """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.quota_info: + body["quota_info"] = self.quota_info.as_dict() + return body - created_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" + def as_shallow_dict(self) -> dict: + """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quota_info: + body["quota_info"] = self.quota_info + return body - created_by: Optional[str] = None - """Username of function creator.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse: + """Deserializes the GetQuotaResponse from a dictionary.""" + return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo)) - data_type: Optional[ColumnTypeName] = None - """Scalar function return data type.""" - external_language: Optional[str] = None - """External function language.""" +@dataclass +class GetWorkspaceBindingsResponse: + bindings: Optional[List[WorkspaceBinding]] = None + """List of workspace bindings""" - external_name: Optional[str] = None - """External function name.""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" - full_data_type: Optional[str] = None - """Pretty printed function data type.""" - - full_name: Optional[str] = None - """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" - - function_id: Optional[str] = None - """Id of Function, relative to parent schema.""" + def as_dict(self) -> dict: + """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bindings: + body["bindings"] = [v.as_dict() for v in self.bindings] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - input_params: Optional[FunctionParameterInfos] = None + def as_shallow_dict(self) -> dict: + """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bindings: + body["bindings"] = self.bindings + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - is_deterministic: Optional[bool] = None - """Whether the function is deterministic.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse: + """Deserializes the GetWorkspaceBindingsResponse from a dictionary.""" + return cls( + bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None) + ) - is_null_call: Optional[bool] = None - """Function null call.""" - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" +class IsolationMode(Enum): - name: Optional[str] = None - """Name of function, relative to parent schema.""" + ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED" + ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN" - owner: Optional[str] = None - """Username of current owner of function.""" - parameter_style: Optional[FunctionInfoParameterStyle] = None - """Function 
parameter style. **S** is the value for SQL.""" +@dataclass +class ListAccountMetastoreAssignmentsResponse: + """The list of workspaces to which the given metastore is assigned.""" - properties: Optional[str] = None - """JSON-serialized key-value pair map, encoded (escaped) as a string.""" + workspace_ids: Optional[List[int]] = None - return_params: Optional[FunctionParameterInfos] = None - """Table function return parameters.""" + def as_dict(self) -> dict: + """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.workspace_ids: + body["workspace_ids"] = [v for v in self.workspace_ids] + return body - routine_body: Optional[FunctionInfoRoutineBody] = None - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspace_ids: + body["workspace_ids"] = self.workspace_ids + return body - routine_definition: Optional[str] = None - """Function body.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse: + """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary.""" + return cls(workspace_ids=d.get("workspace_ids", None)) - routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" - schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" +@dataclass +class ListAccountStorageCredentialsResponse: + storage_credentials: Optional[List[StorageCredentialInfo]] = None + """An array of metastore storage credentials.""" - security_type: 
Optional[FunctionInfoSecurityType] = None - """Function security type.""" + def as_dict(self) -> dict: + """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.storage_credentials: + body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + return body - specific_name: Optional[str] = None - """Specific name of the function; Reserved for future use.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.storage_credentials: + body["storage_credentials"] = self.storage_credentials + return body - sql_data_access: Optional[FunctionInfoSqlDataAccess] = None - """Function SQL data access.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse: + """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" + return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo)) - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" - updated_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" +@dataclass +class ListCatalogsResponse: + catalogs: Optional[List[CatalogInfo]] = None + """An array of catalog information objects.""" - updated_by: Optional[str] = None - """Username of user who last modified function.""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style.value - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params.as_dict() - if self.routine_body is not None: - body["routine_body"] = self.routine_body.value - if self.routine_definition is not None: - 
body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type.value - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access.value - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.catalogs: + body["catalogs"] = [v.as_dict() for v in self.catalogs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - 
body["input_params"] = self.input_params - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params - if self.routine_body is not None: - body["routine_body"] = self.routine_body - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.catalogs: + body["catalogs"] = self.catalogs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: - """Deserializes the FunctionInfo from a dictionary.""" - return cls( - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - data_type=_enum(d, "data_type", ColumnTypeName), - 
external_language=d.get("external_language", None), - external_name=d.get("external_name", None), - full_data_type=d.get("full_data_type", None), - full_name=d.get("full_name", None), - function_id=d.get("function_id", None), - input_params=_from_dict(d, "input_params", FunctionParameterInfos), - is_deterministic=d.get("is_deterministic", None), - is_null_call=d.get("is_null_call", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle), - properties=d.get("properties", None), - return_params=_from_dict(d, "return_params", FunctionParameterInfos), - routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), - routine_definition=d.get("routine_definition", None), - routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), - schema_name=d.get("schema_name", None), - security_type=_enum(d, "security_type", FunctionInfoSecurityType), - specific_name=d.get("specific_name", None), - sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), - sql_path=d.get("sql_path", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) - - -class FunctionInfoParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" - - S = "S" - - -class FunctionInfoRoutineBody(Enum): - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" - - EXTERNAL = "EXTERNAL" - SQL = "SQL" - - -class FunctionInfoSecurityType(Enum): - """The security type of the function.""" - - DEFINER = "DEFINER" - - -class FunctionInfoSqlDataAccess(Enum): - """Function SQL data access.""" - - CONTAINS_SQL = "CONTAINS_SQL" - NO_SQL = "NO_SQL" - READS_SQL_DATA = "READS_SQL_DATA" + def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse: + """Deserializes the ListCatalogsResponse from a dictionary.""" + return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None)) @dataclass -class FunctionParameterInfo: - name: str - """Name of parameter.""" - - type_text: str - """Full data type spec, SQL/catalogString text.""" - - type_name: ColumnTypeName - - position: int - """Ordinal position of column (starting at position 0).""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - parameter_default: Optional[str] = None - """Default value of the parameter.""" - - parameter_mode: Optional[FunctionParameterMode] = None - """The mode of the function parameter.""" - - parameter_type: Optional[FunctionParameterType] = None - """The type of function parameter.""" - - type_interval_type: Optional[str] = None - """Format of IntervalType.""" - - type_json: Optional[str] = None - """Full data type spec, JSON-serialized.""" - - type_precision: Optional[int] = None - """Digits of precision; required on Create for DecimalTypes.""" +class ListConnectionsResponse: + connections: Optional[List[ConnectionInfo]] = None + """An array of connection information objects.""" - type_scale: Optional[int] = None - """Digits to right of decimal; Required on Create for DecimalTypes.""" + 
next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode.value - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type.value - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name.value - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.connections: + body["connections"] = [v.as_dict() for v in self.connections] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not 
None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.connections: + body["connections"] = self.connections + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: - """Deserializes the FunctionParameterInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse: + """Deserializes the ListConnectionsResponse from a dictionary.""" return cls( - comment=d.get("comment", None), - name=d.get("name", None), - parameter_default=d.get("parameter_default", None), - parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), - parameter_type=_enum(d, "parameter_type", FunctionParameterType), - position=d.get("position", None), - type_interval_type=d.get("type_interval_type", None), - type_json=d.get("type_json", None), - type_name=_enum(d, "type_name", ColumnTypeName), - type_precision=d.get("type_precision", None), - type_scale=d.get("type_scale", None), - type_text=d.get("type_text", None), + connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None) ) @dataclass -class FunctionParameterInfos: - parameters: Optional[List[FunctionParameterInfo]] = None - """The 
array of __FunctionParameterInfo__ definitions of the function's parameters.""" +class ListCredentialsResponse: + credentials: Optional[List[CredentialInfo]] = None + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] + if self.credentials: + body["credentials"] = [v.as_dict() for v in self.credentials] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" + """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters + if self.credentials: + body["credentials"] = self.credentials + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: - """Deserializes the FunctionParameterInfos from a dictionary.""" - return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo)) - - -class FunctionParameterMode(Enum): - """The mode of the function parameter.""" - - IN = "IN" - - -class FunctionParameterType(Enum): - """The type of function parameter.""" - - COLUMN = "COLUMN" - PARAM = "PARAM" + def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: + """Deserializes the ListCredentialsResponse from a dictionary.""" + return cls( + 
credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None) + ) @dataclass -class GcpOauthToken: - """GCP temporary credentials for API authentication. Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" +class ListExternalLocationsResponse: + external_locations: Optional[List[ExternalLocationInfo]] = None + """An array of external locations.""" - oauth_token: Optional[str] = None + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.external_locations: + body["external_locations"] = [v.as_dict() for v in self.external_locations] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" + """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.external_locations: + body["external_locations"] = self.external_locations + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken: - """Deserializes the GcpOauthToken from a dictionary.""" - return cls(oauth_token=d.get("oauth_token", None)) - - -@dataclass -class GcpPubsub: - managed_resource_id: Optional[str] = None - 
"""Unique identifier included in the name of file events managed cloud resources.""" + def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: + """Deserializes the ListExternalLocationsResponse from a dictionary.""" + return cls( + external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo), + next_page_token=d.get("next_page_token", None), + ) - subscription_name: Optional[str] = None - """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name} - REQUIRED for provided_pubsub.""" + +@dataclass +class ListFunctionsResponse: + functions: Optional[List[FunctionInfo]] = None + """An array of function information objects.""" + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.functions: + body["functions"] = [v.as_dict() for v in self.functions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes.""" + """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.functions: + 
body["functions"] = self.functions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: - """Deserializes the GcpPubsub from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse: + """Deserializes the ListFunctionsResponse from a dictionary.""" return cls( - managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None) + functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None) ) @dataclass -class GenerateTemporaryServiceCredentialAzureOptions: - """The Azure cloud options to customize the requested temporary credential""" +class ListMetastoresResponse: + metastores: Optional[List[MetastoreInfo]] = None + """An array of metastore information objects.""" - resources: Optional[List[str]] = None - """The resources to which the temporary Azure credential should apply. These resources are the - scopes that are passed to the token provider (see - https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resources: - body["resources"] = [v for v in self.resources] + if self.metastores: + body["metastores"] = [v.as_dict() for v in self.metastores] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" + """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.resources: - body["resources"] = self.resources + if self.metastores: + body["metastores"] = self.metastores + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions: - """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" - return cls(resources=d.get("resources", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse: + """Deserializes the ListMetastoresResponse from a dictionary.""" + return cls( + metastores=_repeated_dict(d, "metastores", MetastoreInfo), next_page_token=d.get("next_page_token", None) + ) @dataclass -class GenerateTemporaryServiceCredentialGcpOptions: - """The GCP cloud options to customize the requested temporary credential""" +class ListModelVersionsResponse: + model_versions: Optional[List[ModelVersionInfo]] = None - scopes: Optional[List[str]] = None - """The scopes to which the temporary GCP credential should apply. 
These resources are the scopes - that are passed to the token provider (see - https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scopes: - body["scopes"] = [v for v in self.scopes] + if self.model_versions: + body["model_versions"] = [v.as_dict() for v in self.model_versions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" + """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.scopes: - body["scopes"] = self.scopes + if self.model_versions: + body["model_versions"] = self.model_versions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions: - """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" - return cls(scopes=d.get("scopes", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: + """Deserializes the ListModelVersionsResponse from a dictionary.""" + return cls( + model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo), + next_page_token=d.get("next_page_token", None), + ) @dataclass -class 
GenerateTemporaryServiceCredentialRequest: - credential_name: str - """The name of the service credential used to generate a temporary credential""" - - azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None - """The Azure cloud options to customize the requested temporary credential""" +class ListQuotasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request.""" - gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None - """The GCP cloud options to customize the requested temporary credential""" + quotas: Optional[List[QuotaInfo]] = None + """An array of returned QuotaInfos.""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.quotas: + body["quotas"] = [v.as_dict() for v in self.quotas] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options + if self.next_page_token is not None: + 
body["next_page_token"] = self.next_page_token + if self.quotas: + body["quotas"] = self.quotas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialRequest: - """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary.""" - return cls( - azure_options=_from_dict(d, "azure_options", GenerateTemporaryServiceCredentialAzureOptions), - credential_name=d.get("credential_name", None), - gcp_options=_from_dict(d, "gcp_options", GenerateTemporaryServiceCredentialGcpOptions), - ) + def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse: + """Deserializes the ListQuotasResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo)) @dataclass -class GenerateTemporaryTableCredentialRequest: - operation: Optional[TableOperation] = None - """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is - specified, the credentials returned will have write permissions, otherwise, it will be read - only.""" +class ListRegisteredModelsResponse: + next_page_token: Optional[str] = None + """Opaque token for pagination. Omitted if there are no more results. 
page_token should be set to + this value for fetching the next page.""" - table_id: Optional[str] = None - """UUID of the table to read or write.""" + registered_models: Optional[List[RegisteredModelInfo]] = None def as_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.operation is not None: - body["operation"] = self.operation.value - if self.table_id is not None: - body["table_id"] = self.table_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.operation is not None: - body["operation"] = self.operation - if self.table_id is not None: - body["table_id"] = self.table_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = self.registered_models return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRequest: - """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary.""" - return cls(operation=_enum(d, "operation", TableOperation), table_id=d.get("table_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse: + """Deserializes the ListRegisteredModelsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo), + ) 
@dataclass -class GenerateTemporaryTableCredentialResponse: - aws_temp_credentials: Optional[AwsCredentials] = None - """AWS temporary credentials for API authentication. Read more at - https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" +class ListSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - azure_aad: Optional[AzureActiveDirectoryToken] = None - """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed - Identity. Read more at - https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" + schemas: Optional[List[SchemaInfo]] = None + """An array of schema information objects.""" - azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None - """Azure temporary credentials for API authentication. Read more at - https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas""" + def as_dict(self) -> dict: + """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = [v.as_dict() for v in self.schemas] + return body - expiration_time: Optional[int] = None - """Server time when the credential will expire, in epoch milliseconds. 
The API client is advised to - cache the credential given this expiration time.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = self.schemas + return body - gcp_oauth_token: Optional[GcpOauthToken] = None - """GCP temporary credentials for API authentication. Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse: + """Deserializes the ListSchemasResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo)) - r2_temp_credentials: Optional[R2Credentials] = None - """R2 temporary credentials for API authentication. Read more at - https://developers.cloudflare.com/r2/api/s3/tokens/.""" - url: Optional[str] = None - """The URL of the storage path accessible by the temporary credential.""" +@dataclass +class ListStorageCredentialsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + storage_credentials: Optional[List[StorageCredentialInfo]] = None def as_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() - if self.azure_aad: - body["azure_aad"] = self.azure_aad.as_dict() - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() - if self.r2_temp_credentials: - body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() - if self.url is not None: - body["url"] = self.url + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.storage_credentials: + body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials - if self.azure_aad: - body["azure_aad"] = self.azure_aad - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token - if self.r2_temp_credentials: - 
body["r2_temp_credentials"] = self.r2_temp_credentials - if self.url is not None: - body["url"] = self.url + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.storage_credentials: + body["storage_credentials"] = self.storage_credentials return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse: - """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse: + """Deserializes the ListStorageCredentialsResponse from a dictionary.""" return cls( - aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), - azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), - azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), - expiration_time=d.get("expiration_time", None), - gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), - r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), - url=d.get("url", None), + next_page_token=d.get("next_page_token", None), + storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo), ) @dataclass -class GetCatalogWorkspaceBindingsResponse: - workspaces: Optional[List[int]] = None - """A list of workspace IDs""" +class ListSystemSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + schemas: Optional[List[SystemSchemaInfo]] = None + """An array of system schema information objects.""" def as_dict(self) -> dict: - """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspaces: - body["workspaces"] = self.workspaces + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse: - """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse: + """Deserializes the ListSystemSchemasResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo) + ) @dataclass -class GetMetastoreSummaryResponse: +class ListTableSummariesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + tables: Optional[List[TableSummary]] = None + """List of table summaries.""" + + def as_dict(self) -> dict: + """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = [v.as_dict() for v in self.tables] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = self.tables + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse: + """Deserializes the ListTableSummariesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary)) + + +@dataclass +class ListTablesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + tables: Optional[List[TableInfo]] = None + """An array of table information objects.""" + + def as_dict(self) -> dict: + """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = [v.as_dict() for v in self.tables] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = self.tables + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse: + """Deserializes the ListTablesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo)) + + +@dataclass +class ListVolumesResponseContent: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request to retrieve the next page of + results.""" + + volumes: Optional[List[VolumeInfo]] = None + + def as_dict(self) -> dict: + """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.volumes: + body["volumes"] = [v.as_dict() for v in self.volumes] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.volumes: + body["volumes"] = self.volumes + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: + """Deserializes the ListVolumesResponseContent from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) + + +class MatchType(Enum): + """The artifact pattern matching type""" + + PREFIX_MATCH = "PREFIX_MATCH" + + +@dataclass +class MetastoreAssignment: + workspace_id: int + """The unique ID of the Databricks workspace.""" + + metastore_id: str + """The unique ID of the metastore.""" + + default_catalog_name: Optional[str] = None + """The name of the default catalog in the metastore.""" + + def as_dict(self) -> dict: + """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the MetastoreAssignment into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> MetastoreAssignment: + """Deserializes the MetastoreAssignment from a dictionary.""" + return cls( + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) + + +@dataclass +class MetastoreInfo: cloud: Optional[str] = None """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" @@ -4702,7 +4528,7 @@ class GetMetastoreSummaryResponse: """Username of user who last modified the metastore.""" def as_dict(self) -> dict: - """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.cloud is not None: body["cloud"] = self.cloud @@ -4747,7 +4573,7 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} if self.cloud is not None: body["cloud"] = self.cloud @@ -4792,8 +4618,8 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: - """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MetastoreInfo: + """Deserializes the MetastoreInfo from a dictionary.""" return cls( cloud=d.get("cloud", None), created_at=d.get("created_at", None), @@ -4820,2609 +4646,2488 @@ def from_dict(cls, d: 
Dict[str, Any]) -> GetMetastoreSummaryResponse: @dataclass -class GetPermissionsResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class ModelVersionInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the model version""" - privilege_assignments: Optional[List[PrivilegeAssignment]] = None - """The privileges assigned to each principal""" + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - def as_dict(self) -> dict: - """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - return body + catalog_name: Optional[str] = None + """The name of the catalog containing the model version""" - def as_shallow_dict(self) -> dict: - """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - return body + comment: Optional[str] = None + """The comment attached to the model version""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse: - """Deserializes the GetPermissionsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), - ) + 
created_at: Optional[int] = None + created_by: Optional[str] = None + """The identifier of the user who created the model version""" -@dataclass -class GetQuotaResponse: - quota_info: Optional[QuotaInfo] = None - """The returned QuotaInfo.""" + id: Optional[str] = None + """The unique identifier of the model version""" - def as_dict(self) -> dict: - """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.quota_info: - body["quota_info"] = self.quota_info.as_dict() - return body + metastore_id: Optional[str] = None + """The unique identifier of the metastore containing the model version""" - def as_shallow_dict(self) -> dict: - """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.quota_info: - body["quota_info"] = self.quota_info - return body + model_name: Optional[str] = None + """The name of the parent registered model of the model version, relative to parent schema""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse: - """Deserializes the GetQuotaResponse from a dictionary.""" - return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo)) + model_version_dependencies: Optional[DependencyList] = None + """Model version dependencies, for feature-store packaged models""" + run_id: Optional[str] = None + """MLflow run ID used when creating the model version, if ``source`` was generated by an experiment + run stored in an MLflow tracking server""" -@dataclass -class GetWorkspaceBindingsResponse: - bindings: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings""" + run_workspace_id: Optional[int] = None + """ID of the Databricks workspace containing the MLflow run that generated this model version, if + applicable""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
- __page_token__ should be set to this value for the next request (for the next page of results).""" + schema_name: Optional[str] = None + """The name of the schema containing the model version, relative to parent catalog""" + + source: Optional[str] = None + """URI indicating the location of the source artifacts (files) for the model version""" + + status: Optional[ModelVersionInfoStatus] = None + """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model + version is finalized. Only model versions in READY status can be loaded for inference or served.""" + + storage_location: Optional[str] = None + """The storage location on the cloud under which model version data files are stored""" + + updated_at: Optional[int] = None + + updated_by: Optional[str] = None + """The identifier of the user who updated the model version last time""" + + version: Optional[int] = None + """Integer model version number, used to reference the model version in API requests.""" def as_dict(self) -> dict: - """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bindings: - body["bindings"] = [v.as_dict() for v in self.bindings] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = 
self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + body["model_version_dependencies"] = self.model_version_dependencies.as_dict() + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status.value + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: - """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: - body["bindings"] = self.bindings - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + 
body["model_version_dependencies"] = self.model_version_dependencies + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse: - """Deserializes the GetWorkspaceBindingsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: + """Deserializes the ModelVersionInfo from a dictionary.""" return cls( - bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None) + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + id=d.get("id", None), + metastore_id=d.get("metastore_id", None), + model_name=d.get("model_name", None), + model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList), + run_id=d.get("run_id", None), + run_workspace_id=d.get("run_workspace_id", None), + schema_name=d.get("schema_name", None), + source=d.get("source", None), + status=_enum(d, "status", ModelVersionInfoStatus), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + version=d.get("version", None), ) -class IsolationMode(Enum): 
+class ModelVersionInfoStatus(Enum): + """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model + version is finalized. Only model versions in READY status can be loaded for inference or served.""" - ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED" - ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN" + FAILED_REGISTRATION = "FAILED_REGISTRATION" + PENDING_REGISTRATION = "PENDING_REGISTRATION" + READY = "READY" @dataclass -class ListAccountMetastoreAssignmentsResponse: - """The list of workspaces to which the given metastore is assigned.""" +class MonitorCronSchedule: + quartz_cron_expression: str + """The expression that determines when to run the monitor. See [examples]. + + [examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - workspace_ids: Optional[List[int]] = None + timezone_id: str + """The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression.""" + + pause_status: Optional[MonitorCronSchedulePauseStatus] = None + """Read only field that indicates whether a schedule is paused or not.""" def as_dict(self) -> dict: - """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspace_ids: - body["workspace_ids"] = [v for v in self.workspace_ids] + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + 
"""Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspace_ids: - body["workspace_ids"] = self.workspace_ids + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse: - """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary.""" - return cls(workspace_ids=d.get("workspace_ids", None)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorCronSchedule: + """Deserializes the MonitorCronSchedule from a dictionary.""" + return cls( + pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus), + quartz_cron_expression=d.get("quartz_cron_expression", None), + timezone_id=d.get("timezone_id", None), + ) + + +class MonitorCronSchedulePauseStatus(Enum): + """Read only field that indicates whether a schedule is paused or not.""" + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" @dataclass -class ListAccountStorageCredentialsResponse: - storage_credentials: Optional[List[StorageCredentialInfo]] = None - """An array of metastore storage credentials.""" +class MonitorDataClassificationConfig: + enabled: Optional[bool] = None + """Whether data classification is enabled.""" def as_dict(self) -> dict: - """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.storage_credentials: - body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: - """Serializes the 
ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.storage_credentials: - body["storage_credentials"] = self.storage_credentials + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse: - """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" - return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorDataClassificationConfig: + """Deserializes the MonitorDataClassificationConfig from a dictionary.""" + return cls(enabled=d.get("enabled", None)) @dataclass -class ListCatalogsResponse: - catalogs: Optional[List[CatalogInfo]] = None - """An array of catalog information objects.""" - - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorDestination: + email_addresses: Optional[List[str]] = None + """The list of email addresses to send the notification to. 
A maximum of 5 email addresses is + supported.""" def as_dict(self) -> dict: - """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalogs: - body["catalogs"] = [v.as_dict() for v in self.catalogs] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.email_addresses: + body["email_addresses"] = [v for v in self.email_addresses] return body def as_shallow_dict(self) -> dict: - """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalogs: - body["catalogs"] = self.catalogs - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.email_addresses: + body["email_addresses"] = self.email_addresses return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse: - """Deserializes the ListCatalogsResponse from a dictionary.""" - return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorDestination: + """Deserializes the MonitorDestination from a dictionary.""" + return cls(email_addresses=d.get("email_addresses", None)) @dataclass -class ListConnectionsResponse: - connections: Optional[List[ConnectionInfo]] = None - """An array of connection information objects.""" - - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorInferenceLog: + timestamp_col: str + """Column that contains the timestamps of requests. 
The column must be one of the following: - A + ``TimestampType`` column - A column whose values can be converted to timestamps through the + pyspark ``to_timestamp`` [function]. + + [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - def as_dict(self) -> dict: - """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connections: - body["connections"] = [v.as_dict() for v in self.connections] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + granularities: List[str] + """Granularities for aggregating data into time windows based on their timestamp. Currently the + following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" - def as_shallow_dict(self) -> dict: - """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.connections: - body["connections"] = self.connections - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + model_id_col: str + """Column that contains the id of the model generating the predictions. Metrics will be computed + per model id by default, and also across all model ids.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse: - """Deserializes the ListConnectionsResponse from a dictionary.""" - return cls( - connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None) - ) + problem_type: MonitorInferenceLogProblemType + """Problem type the model aims to solve. 
Determines the type of model-quality metrics that will be + computed.""" + prediction_col: str + """Column that contains the output/prediction from the model.""" -@dataclass -class ListCredentialsResponse: - credentials: Optional[List[CredentialInfo]] = None + label_col: Optional[str] = None + """Optional column that contains the ground truth for the prediction.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + prediction_proba_col: Optional[str] = None + """Optional column that contains the prediction probabilities for each class in a classification + problem type. The values in this column should be a map, mapping each class label to the + prediction probability for a given sample. The map should be of PySpark MapType().""" def as_dict(self) -> dict: - """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credentials: - body["credentials"] = [v.as_dict() for v in self.credentials] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type.value + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: - """Serializes the 
ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" body = {} - if self.credentials: - body["credentials"] = self.credentials - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.granularities: + body["granularities"] = self.granularities + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: - """Deserializes the ListCredentialsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: + """Deserializes the MonitorInferenceLog from a dictionary.""" return cls( - credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None) + granularities=d.get("granularities", None), + label_col=d.get("label_col", None), + model_id_col=d.get("model_id_col", None), + prediction_col=d.get("prediction_col", None), + prediction_proba_col=d.get("prediction_proba_col", None), + problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), + timestamp_col=d.get("timestamp_col", None), ) -@dataclass -class ListExternalLocationsResponse: - external_locations: Optional[List[ExternalLocationInfo]] = None - """An array of external locations.""" +class MonitorInferenceLogProblemType(Enum): + """Problem type the model aims to solve. 
Determines the type of model-quality metrics that will be + computed.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION" + PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION" - def as_dict(self) -> dict: - """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.external_locations: - body["external_locations"] = [v.as_dict() for v in self.external_locations] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - def as_shallow_dict(self) -> dict: - """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.external_locations: - body["external_locations"] = self.external_locations - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body +@dataclass +class MonitorInfo: + table_name: str + """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: - """Deserializes the ListExternalLocationsResponse from a dictionary.""" - return cls( - external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo), - next_page_token=d.get("next_page_token", None), - ) + status: MonitorInfoStatus + """The status of the monitor.""" + monitor_version: str + """The version of the monitor config (e.g. 1,2,3). 
If negative, the monitor may be corrupted.""" -@dataclass -class ListFunctionsResponse: - functions: Optional[List[FunctionInfo]] = None - """An array of function information objects.""" + profile_metrics_table_name: str + """The full name of the profile metrics table. Format: + __catalog_name__.__schema_name__.__table_name__.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + drift_metrics_table_name: str + """The full name of the drift metrics table. Format: + __catalog_name__.__schema_name__.__table_name__.""" - def as_dict(self) -> dict: - """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.functions: - body["functions"] = [v.as_dict() for v in self.functions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + assets_dir: Optional[str] = None + """The directory to store monitoring assets (e.g. dashboard, metric tables).""" - def as_shallow_dict(self) -> dict: - """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.functions: - body["functions"] = self.functions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + baseline_table_name: Optional[str] = None + """Name of the baseline table from which drift metrics are computed from. 
Columns in the monitored + table should also be present in the baseline table.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse: - """Deserializes the ListFunctionsResponse from a dictionary.""" - return cls( - functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None) - ) + custom_metrics: Optional[List[MonitorMetric]] = None + """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived + metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across + time windows).""" + dashboard_id: Optional[str] = None + """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in + PENDING state.""" -@dataclass -class ListMetastoresResponse: - metastores: Optional[List[MetastoreInfo]] = None - """An array of metastore information objects.""" + data_classification_config: Optional[MonitorDataClassificationConfig] = None + """The data classification config for the monitor.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + inference_log: Optional[MonitorInferenceLog] = None + """Configuration for monitoring inference logs.""" + + latest_monitor_failure_msg: Optional[str] = None + """The latest failure message of the monitor (if any).""" + + notifications: Optional[MonitorNotifications] = None + """The notification settings for the monitor.""" + + output_schema_name: Optional[str] = None + """Schema where output metric tables are created.""" + + schedule: Optional[MonitorCronSchedule] = None + """The schedule for automatically updating and refreshing metric tables.""" + + slicing_exprs: Optional[List[str]] = None + """List of column expressions to slice data with for targeted analysis. 
The data is grouped by each + expression independently, resulting in a separate slice for each predicate and its complements. + For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + + snapshot: Optional[MonitorSnapshot] = None + """Configuration for monitoring snapshot tables.""" + + time_series: Optional[MonitorTimeSeries] = None + """Configuration for monitoring time series tables.""" def as_dict(self) -> dict: - """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastores: - body["metastores"] = [v.as_dict() for v in self.metastores] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config.as_dict() + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications.as_dict() + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = 
self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastores: - body["metastores"] = self.metastores - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + 
body["schedule"] = self.schedule + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.status is not None: + body["status"] = self.status + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse: - """Deserializes the ListMetastoresResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo: + """Deserializes the MonitorInfo from a dictionary.""" return cls( - metastores=_repeated_dict(d, "metastores", MetastoreInfo), next_page_token=d.get("next_page_token", None) + assets_dir=d.get("assets_dir", None), + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), + dashboard_id=d.get("dashboard_id", None), + data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), + drift_metrics_table_name=d.get("drift_metrics_table_name", None), + inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), + latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None), + monitor_version=d.get("monitor_version", None), + notifications=_from_dict(d, "notifications", MonitorNotifications), + output_schema_name=d.get("output_schema_name", None), + profile_metrics_table_name=d.get("profile_metrics_table_name", None), + schedule=_from_dict(d, "schedule", MonitorCronSchedule), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", MonitorSnapshot), + status=_enum(d, "status", MonitorInfoStatus), + table_name=d.get("table_name", None), + time_series=_from_dict(d, "time_series", MonitorTimeSeries), ) +class MonitorInfoStatus(Enum): + """The status of the monitor.""" + + MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE" + MONITOR_STATUS_DELETE_PENDING = 
"MONITOR_STATUS_DELETE_PENDING" + MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR" + MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED" + MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING" + + @dataclass -class ListModelVersionsResponse: - model_versions: Optional[List[ModelVersionInfo]] = None +class MonitorMetric: + name: str + """Name of the metric in the output tables.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + definition: str + """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric + definition]. + + [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" + + input_columns: List[str] + """A list of column names in the input table the metric should be computed for. Can use + ``":table"`` to indicate that the metric needs information from multiple columns.""" + + output_data_type: str + """The output type of the custom metric.""" + + type: MonitorMetricType + """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the + two consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing + columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate + metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" def as_dict(self) -> dict: - """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: - body["model_versions"] = [v.as_dict() for v in self.model_versions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = [v for v in self.input_columns] + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: - body["model_versions"] = self.model_versions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = self.input_columns + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: - """Deserializes the ListModelVersionsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, 
Any]) -> MonitorMetric: + """Deserializes the MonitorMetric from a dictionary.""" return cls( - model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo), - next_page_token=d.get("next_page_token", None), + definition=d.get("definition", None), + input_columns=d.get("input_columns", None), + name=d.get("name", None), + output_data_type=d.get("output_data_type", None), + type=_enum(d, "type", MonitorMetricType), ) +class MonitorMetricType(Enum): + """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the + two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing + columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate + metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" + + CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE" + CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED" + CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT" + + @dataclass -class ListQuotasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
- __page_token__ should be set to this value for the next request.""" +class MonitorNotifications: + on_failure: Optional[MonitorDestination] = None + """Who to send notifications to on monitor failure.""" - quotas: Optional[List[QuotaInfo]] = None - """An array of returned QuotaInfos.""" + on_new_classification_tag_detected: Optional[MonitorDestination] = None + """Who to send notifications to when new data classification tags are detected.""" def as_dict(self) -> dict: - """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = [v.as_dict() for v in self.quotas] + if self.on_failure: + body["on_failure"] = self.on_failure.as_dict() + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = self.quotas - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse: - """Deserializes the ListQuotasResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo)) + if self.on_failure: + body["on_failure"] = self.on_failure + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> 
MonitorNotifications: + """Deserializes the MonitorNotifications from a dictionary.""" + return cls( + on_failure=_from_dict(d, "on_failure", MonitorDestination), + on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination), + ) @dataclass -class ListRegisteredModelsResponse: - next_page_token: Optional[str] = None - """Opaque token for pagination. Omitted if there are no more results. page_token should be set to - this value for fetching the next page.""" +class MonitorRefreshInfo: + refresh_id: int + """Unique id of the refresh operation.""" - registered_models: Optional[List[RegisteredModelInfo]] = None + state: MonitorRefreshInfoState + """The current state of the refresh.""" + + start_time_ms: int + """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC).""" + + end_time_ms: Optional[int] = None + """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC).""" + + message: Optional[str] = None + """An optional message to give insight into the current state of the job (e.g. 
FAILURE messages).""" + + trigger: Optional[MonitorRefreshInfoTrigger] = None + """The method by which the refresh was triggered.""" def as_dict(self) -> dict: - """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = [v.as_dict() for v in self.registered_models] + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state.value + if self.trigger is not None: + body["trigger"] = self.trigger.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = self.registered_models + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state + if self.trigger is not None: + body["trigger"] = self.trigger return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse: - """Deserializes the 
ListRegisteredModelsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshInfo: + """Deserializes the MonitorRefreshInfo from a dictionary.""" return cls( - next_page_token=d.get("next_page_token", None), - registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo), + end_time_ms=d.get("end_time_ms", None), + message=d.get("message", None), + refresh_id=d.get("refresh_id", None), + start_time_ms=d.get("start_time_ms", None), + state=_enum(d, "state", MonitorRefreshInfoState), + trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger), ) -@dataclass -class ListSchemasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorRefreshInfoState(Enum): + """The current state of the refresh.""" - schemas: Optional[List[SchemaInfo]] = None - """An array of schema information objects.""" + CANCELED = "CANCELED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCESS = "SUCCESS" - def as_dict(self) -> dict: - """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] - return body - def as_shallow_dict(self) -> dict: - """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas - return body +class MonitorRefreshInfoTrigger(Enum): + """The method by which the refresh was triggered.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse: - """Deserializes the ListSchemasResponse from a 
dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo)) + MANUAL = "MANUAL" + SCHEDULE = "SCHEDULE" @dataclass -class ListStorageCredentialsResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - storage_credentials: Optional[List[StorageCredentialInfo]] = None +class MonitorRefreshListResponse: + refreshes: Optional[List[MonitorRefreshInfo]] = None + """List of refreshes.""" def as_dict(self) -> dict: - """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + if self.refreshes: + body["refreshes"] = [v.as_dict() for v in self.refreshes] return body def as_shallow_dict(self) -> dict: - """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = self.storage_credentials + if self.refreshes: + body["refreshes"] = self.refreshes return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse: - """Deserializes the ListStorageCredentialsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - storage_credentials=_repeated_dict(d, "storage_credentials", 
StorageCredentialInfo), - ) + def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse: + """Deserializes the MonitorRefreshListResponse from a dictionary.""" + return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo)) @dataclass -class ListSystemSchemasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - schemas: Optional[List[SystemSchemaInfo]] = None - """An array of system schema information objects.""" - +class MonitorSnapshot: def as_dict(self) -> dict: - """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorSnapshot into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse: - """Deserializes the ListSystemSchemasResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo) - ) + def from_dict(cls, d: Dict[str, Any]) -> MonitorSnapshot: + """Deserializes the MonitorSnapshot from a dictionary.""" + return cls() @dataclass -class ListTableSummariesResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the 
next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorTimeSeries: + timestamp_col: str + """Column that contains the timestamps of requests. The column must be one of the following: - A + ``TimestampType`` column - A column whose values can be converted to timestamps through the + pyspark ``to_timestamp`` [function]. + + [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - tables: Optional[List[TableSummary]] = None - """List of table summaries.""" + granularities: List[str] + """Granularities for aggregating data into time windows based on their timestamp. Currently the + following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" def as_dict(self) -> dict: - """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: - """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.granularities: + body["granularities"] = self.granularities + if self.timestamp_col is 
not None: + body["timestamp_col"] = self.timestamp_col return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse: - """Deserializes the ListTableSummariesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries: + """Deserializes the MonitorTimeSeries from a dictionary.""" + return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None)) @dataclass -class ListTablesResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - tables: Optional[List[TableInfo]] = None - """An array of table information objects.""" +class NamedTableConstraint: + name: str + """The name of the constraint.""" def as_dict(self) -> dict: - """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: - """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.name is not None: + body["name"] = self.name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse: - 
"""Deserializes the ListTablesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo)) + def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: + """Deserializes the NamedTableConstraint from a dictionary.""" + return cls(name=d.get("name", None)) @dataclass -class ListVolumesResponseContent: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request to retrieve the next page of - results.""" +class OnlineTable: + """Online Table information.""" - volumes: Optional[List[VolumeInfo]] = None + name: Optional[str] = None + """Full three-part (catalog, schema, table) name of the table.""" + + spec: Optional[OnlineTableSpec] = None + """Specification of the online table.""" + + status: Optional[OnlineTableStatus] = None + """Online Table data synchronization status""" + + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None + """The provisioning state of the online table entity in Unity Catalog. This is distinct from the + state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline + may be in "PROVISIONING" as it runs asynchronously).""" def as_dict(self) -> dict: - """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = [v.as_dict() for v in self.volumes] + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" + """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = self.volumes + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec + if self.status: + body["status"] = self.status + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: - """Deserializes the ListVolumesResponseContent from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) + def from_dict(cls, d: Dict[str, 
Any]) -> OnlineTable: + """Deserializes the OnlineTable from a dictionary.""" + return cls( + name=d.get("name", None), + spec=_from_dict(d, "spec", OnlineTableSpec), + status=_from_dict(d, "status", OnlineTableStatus), + table_serving_url=d.get("table_serving_url", None), + unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), + ) -class MatchType(Enum): - """The artifact pattern matching type""" +@dataclass +class OnlineTableSpec: + """Specification of an online table.""" - PREFIX_MATCH = "PREFIX_MATCH" + perform_full_copy: Optional[bool] = None + """Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of the + source table upon initialization and does not process any change data feeds (CDFs) afterwards. + The pipeline can still be manually triggered afterwards, but it always perform a full copy of + the source table and there are no incremental updates. This mode is useful for syncing views or + tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered" + scheduling policy.""" + pipeline_id: Optional[str] = None + """ID of the associated pipeline. 
Generated by the server - cannot be set by the caller.""" -@dataclass -class MetastoreAssignment: - workspace_id: int - """The unique ID of the Databricks workspace.""" + primary_key_columns: Optional[List[str]] = None + """Primary Key columns to be used for data insert/update in the destination.""" - metastore_id: str - """The unique ID of the metastore.""" + run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None + """Pipeline runs continuously after generating the initial data.""" - default_catalog_name: Optional[str] = None - """The name of the default catalog in the metastore.""" + run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None + """Pipeline stops after generating the initial data and can be triggered later (manually, through a + cron job or through data triggers)""" + + source_table_full_name: Optional[str] = None + """Three-part (catalog, schema, table) name of the source Delta table.""" + + timeseries_key: Optional[str] = None + """Time series key to deduplicate (tie-break) rows with the same primary key.""" def as_dict(self) -> dict: - """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.perform_full_copy is not None: + body["perform_full_copy"] = self.perform_full_copy + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = [v for v in self.primary_key_columns] + if self.run_continuously: + body["run_continuously"] = self.run_continuously.as_dict() + if self.run_triggered: + body["run_triggered"] = 
self.run_triggered.as_dict() + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key return body def as_shallow_dict(self) -> dict: - """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes.""" + """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.perform_full_copy is not None: + body["perform_full_copy"] = self.perform_full_copy + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = self.primary_key_columns + if self.run_continuously: + body["run_continuously"] = self.run_continuously + if self.run_triggered: + body["run_triggered"] = self.run_triggered + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MetastoreAssignment: - """Deserializes the MetastoreAssignment from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec: + """Deserializes the OnlineTableSpec from a dictionary.""" return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), + perform_full_copy=d.get("perform_full_copy", None), + pipeline_id=d.get("pipeline_id", None), + primary_key_columns=d.get("primary_key_columns", None), + run_continuously=_from_dict(d, "run_continuously", 
OnlineTableSpecContinuousSchedulingPolicy), + run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy), + source_table_full_name=d.get("source_table_full_name", None), + timeseries_key=d.get("timeseries_key", None), ) @dataclass -class MetastoreInfo: - cloud: Optional[str] = None - """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" +class OnlineTableSpecContinuousSchedulingPolicy: + def as_dict(self) -> dict: + """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - created_at: Optional[int] = None - """Time at which this metastore was created, in epoch milliseconds.""" + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes.""" + body = {} + return body - created_by: Optional[str] = None - """Username of metastore creator.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy: + """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary.""" + return cls() - default_data_access_config_id: Optional[str] = None - """Unique identifier of the metastore's (Default) Data Access Configuration.""" - delta_sharing_organization_name: Optional[str] = None - """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta - Sharing as the official name.""" +@dataclass +class OnlineTableSpecTriggeredSchedulingPolicy: + def as_dict(self) -> dict: + """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None - """The lifetime of delta sharing recipient token in seconds.""" + def as_shallow_dict(self) -> dict: + """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a 
shallow dictionary of its immediate attributes.""" + body = {} + return body - delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None - """The scope of Delta Sharing enabled for the metastore.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy: + """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary.""" + return cls() - external_access_enabled: Optional[bool] = None - """Whether to allow non-DBR clients to directly access entities under the metastore.""" - global_metastore_id: Optional[str] = None - """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`.""" +class OnlineTableState(Enum): + """The state of an online table.""" - metastore_id: Optional[str] = None - """Unique identifier of metastore.""" + OFFLINE = "OFFLINE" + OFFLINE_FAILED = "OFFLINE_FAILED" + ONLINE = "ONLINE" + ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE" + ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE" + ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED" + ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE" + ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES" + PROVISIONING = "PROVISIONING" + PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT" + PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES" - name: Optional[str] = None - """The user-specified name of the metastore.""" - owner: Optional[str] = None - """The owner of the metastore.""" +@dataclass +class OnlineTableStatus: + """Status of an online table.""" - privilege_model_version: Optional[str] = None - """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" + continuous_update_status: Optional[ContinuousUpdateStatus] = None + """Detailed status of an online table. 
Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE + or the ONLINE_UPDATING_PIPELINE_RESOURCES state.""" - region: Optional[str] = None - """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" + detailed_state: Optional[OnlineTableState] = None + """The state of the online table.""" - storage_root: Optional[str] = None - """The storage root URL for metastore""" + failed_status: Optional[FailedStatus] = None + """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the + ONLINE_PIPELINE_FAILED state.""" - storage_root_credential_id: Optional[str] = None - """UUID of storage credential to access the metastore storage_root.""" + message: Optional[str] = None + """A text description of the current state of the online table.""" - storage_root_credential_name: Optional[str] = None - """Name of the storage credential to access the metastore storage_root.""" - - updated_at: Optional[int] = None - """Time at which the metastore was last modified, in epoch milliseconds.""" + provisioning_status: Optional[ProvisioningStatus] = None + """Detailed status of an online table. Shown if the online table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - updated_by: Optional[str] = None - """Username of user who last modified the metastore.""" + triggered_update_status: Optional[TriggeredUpdateStatus] = None + """Detailed status of an online table. 
Shown if the online table is in the ONLINE_TRIGGERED_UPDATE + or the ONLINE_NO_PENDING_UPDATE state.""" def as_dict(self) -> dict: - """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = 
self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status.as_dict() + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state.value + if self.failed_status: + body["failed_status"] = self.failed_status.as_dict() + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status.as_dict() + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - 
body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state + if self.failed_status: + body["failed_status"] = self.failed_status + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MetastoreInfo: - """Deserializes the MetastoreInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: + """Deserializes the OnlineTableStatus from a dictionary.""" return cls( - cloud=d.get("cloud", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - default_data_access_config_id=d.get("default_data_access_config_id", None), - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - 
delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), - external_access_enabled=d.get("external_access_enabled", None), - global_metastore_id=d.get("global_metastore_id", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - region=d.get("region", None), - storage_root=d.get("storage_root", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), - storage_root_credential_name=d.get("storage_root_credential_name", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), + continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), + detailed_state=_enum(d, "detailed_state", OnlineTableState), + failed_status=_from_dict(d, "failed_status", FailedStatus), + message=d.get("message", None), + provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), + triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), ) @dataclass -class ModelVersionInfo: - aliases: Optional[List[RegisteredModelAlias]] = None - """List of aliases associated with the model version""" - - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """The name of the catalog containing the model version""" - - comment: Optional[str] = None - """The comment attached to the model version""" - - created_at: Optional[int] = None - - created_by: Optional[str] = None - """The identifier of the user who created the model version""" - - id: Optional[str] = None - """The unique identifier of the model version""" +class PermissionsChange: + add: Optional[List[Privilege]] = None + """The set of privileges to add.""" - metastore_id: 
Optional[str] = None - """The unique identifier of the metastore containing the model version""" + principal: Optional[str] = None + """The principal whose privileges we are changing.""" - model_name: Optional[str] = None - """The name of the parent registered model of the model version, relative to parent schema""" + remove: Optional[List[Privilege]] = None + """The set of privileges to remove.""" - model_version_dependencies: Optional[DependencyList] = None - """Model version dependencies, for feature-store packaged models""" + def as_dict(self) -> dict: + """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.add: + body["add"] = [v.value for v in self.add] + if self.principal is not None: + body["principal"] = self.principal + if self.remove: + body["remove"] = [v.value for v in self.remove] + return body - run_id: Optional[str] = None - """MLflow run ID used when creating the model version, if ``source`` was generated by an experiment - run stored in an MLflow tracking server""" + def as_shallow_dict(self) -> dict: + """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.add: + body["add"] = self.add + if self.principal is not None: + body["principal"] = self.principal + if self.remove: + body["remove"] = self.remove + return body - run_workspace_id: Optional[int] = None - """ID of the Databricks workspace containing the MLflow run that generated this model version, if - applicable""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: + """Deserializes the PermissionsChange from a dictionary.""" + return cls( + add=_repeated_enum(d, "add", Privilege), + principal=d.get("principal", None), + remove=_repeated_enum(d, "remove", Privilege), + ) - schema_name: Optional[str] = None - """The name of the schema containing the model version, relative to parent catalog""" - source: Optional[str] = None - """URI 
indicating the location of the source artifacts (files) for the model version""" +@dataclass +class PipelineProgress: + """Progress information of the Online Table data synchronization pipeline.""" - status: Optional[ModelVersionInfoStatus] = None - """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - status, then move to READY status once the model version files are uploaded and the model - version is finalized. Only model versions in READY status can be loaded for inference or served.""" + estimated_completion_time_seconds: Optional[float] = None + """The estimated time remaining to complete this update in seconds.""" - storage_location: Optional[str] = None - """The storage location on the cloud under which model version data files are stored""" + latest_version_currently_processing: Optional[int] = None + """The source table Delta version that was last processed by the pipeline. The pipeline may not + have completely processed this version yet.""" - updated_at: Optional[int] = None + sync_progress_completion: Optional[float] = None + """The completion ratio of this update. This is a number between 0 and 1.""" - updated_by: Optional[str] = None - """The identifier of the user who updated the model version last time""" + synced_row_count: Optional[int] = None + """The number of rows that have been synced in this update.""" - version: Optional[int] = None - """Integer model version number, used to reference the model version in API requests.""" + total_row_count: Optional[int] = None + """The total number of rows that need to be synced in this update. 
This number may be an estimate.""" def as_dict(self) -> dict: - """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: - body["aliases"] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.id is not None: - body["id"] = self.id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version_dependencies: - body["model_version_dependencies"] = self.model_version_dependencies.as_dict() - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_workspace_id is not None: - body["run_workspace_id"] = self.run_workspace_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status.value - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.version is not None: - body["version"] = self.version + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if 
self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body def as_shallow_dict(self) -> dict: - """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.id is not None: - body["id"] = self.id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version_dependencies: - body["model_version_dependencies"] = self.model_version_dependencies - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_workspace_id is not None: - body["run_workspace_id"] = self.run_workspace_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.version is not None: - body["version"] = self.version + if self.estimated_completion_time_seconds is not None: + 
body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: - """Deserializes the ModelVersionInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: + """Deserializes the PipelineProgress from a dictionary.""" return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - id=d.get("id", None), - metastore_id=d.get("metastore_id", None), - model_name=d.get("model_name", None), - model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList), - run_id=d.get("run_id", None), - run_workspace_id=d.get("run_workspace_id", None), - schema_name=d.get("schema_name", None), - source=d.get("source", None), - status=_enum(d, "status", ModelVersionInfoStatus), - storage_location=d.get("storage_location", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - version=d.get("version", None), + estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), + latest_version_currently_processing=d.get("latest_version_currently_processing", None), + sync_progress_completion=d.get("sync_progress_completion", None), + synced_row_count=d.get("synced_row_count", None), + total_row_count=d.get("total_row_count", None), ) -class 
ModelVersionInfoStatus(Enum): - """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION - status, then move to READY status once the model version files are uploaded and the model - version is finalized. Only model versions in READY status can be loaded for inference or served.""" - - FAILED_REGISTRATION = "FAILED_REGISTRATION" - PENDING_REGISTRATION = "PENDING_REGISTRATION" - READY = "READY" - - @dataclass -class MonitorCronSchedule: - quartz_cron_expression: str - """The expression that determines when to run the monitor. See [examples]. - - [examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" +class PrimaryKeyConstraint: + name: str + """The name of the constraint.""" - timezone_id: str - """The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression.""" + child_columns: List[str] + """Column names for this constraint.""" - pause_status: Optional[MonitorCronSchedulePauseStatus] = None - """Read only field that indicates whether a schedule is paused or not.""" + timeseries_columns: Optional[List[str]] = None + """Column names that represent a timeseries.""" def as_dict(self) -> dict: - """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body.""" + """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.timeseries_columns: + body["timeseries_columns"] = [v for v in self.timeseries_columns] return body def as_shallow_dict(self) -> dict: - """Serializes the 
MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" + """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.timeseries_columns: + body["timeseries_columns"] = self.timeseries_columns return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorCronSchedule: - """Deserializes the MonitorCronSchedule from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: + """Deserializes the PrimaryKeyConstraint from a dictionary.""" return cls( - pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus), - quartz_cron_expression=d.get("quartz_cron_expression", None), - timezone_id=d.get("timezone_id", None), + child_columns=d.get("child_columns", None), + name=d.get("name", None), + timeseries_columns=d.get("timeseries_columns", None), ) -class MonitorCronSchedulePauseStatus(Enum): - """Read only field that indicates whether a schedule is paused or not.""" +class Privilege(Enum): - PAUSED = "PAUSED" - UNPAUSED = "UNPAUSED" + ACCESS = "ACCESS" + ALL_PRIVILEGES = "ALL_PRIVILEGES" + APPLY_TAG = "APPLY_TAG" + BROWSE = "BROWSE" + CREATE = "CREATE" + CREATE_CATALOG = "CREATE_CATALOG" + CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM" + CREATE_CONNECTION = "CREATE_CONNECTION" + CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" + CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" + CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" + CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" + CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE" + CREATE_FUNCTION = 
"CREATE_FUNCTION" + CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" + CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" + CREATE_MODEL = "CREATE_MODEL" + CREATE_PROVIDER = "CREATE_PROVIDER" + CREATE_RECIPIENT = "CREATE_RECIPIENT" + CREATE_SCHEMA = "CREATE_SCHEMA" + CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL" + CREATE_SHARE = "CREATE_SHARE" + CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" + CREATE_TABLE = "CREATE_TABLE" + CREATE_VIEW = "CREATE_VIEW" + CREATE_VOLUME = "CREATE_VOLUME" + EXECUTE = "EXECUTE" + EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" + MANAGE = "MANAGE" + MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" + MODIFY = "MODIFY" + MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" + READ_FILES = "READ_FILES" + READ_PRIVATE_FILES = "READ_PRIVATE_FILES" + READ_VOLUME = "READ_VOLUME" + REFRESH = "REFRESH" + SELECT = "SELECT" + SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" + USAGE = "USAGE" + USE_CATALOG = "USE_CATALOG" + USE_CONNECTION = "USE_CONNECTION" + USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS" + USE_PROVIDER = "USE_PROVIDER" + USE_RECIPIENT = "USE_RECIPIENT" + USE_SCHEMA = "USE_SCHEMA" + USE_SHARE = "USE_SHARE" + WRITE_FILES = "WRITE_FILES" + WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" + WRITE_VOLUME = "WRITE_VOLUME" @dataclass -class MonitorDataClassificationConfig: - enabled: Optional[bool] = None - """Whether data classification is enabled.""" +class PrivilegeAssignment: + principal: Optional[str] = None + """The principal (user email address or group name).""" + + privileges: Optional[List[Privilege]] = None + """The privileges assigned to the principal.""" def as_dict(self) -> dict: - """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body.""" + """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.principal is not None: + body["principal"] = 
self.principal + if self.privileges: + body["privileges"] = [v.value for v in self.privileges] return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" + """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorDataClassificationConfig: - """Deserializes the MonitorDataClassificationConfig from a dictionary.""" - return cls(enabled=d.get("enabled", None)) + def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: + """Deserializes the PrivilegeAssignment from a dictionary.""" + return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) @dataclass -class MonitorDestination: - email_addresses: Optional[List[str]] = None - """The list of email addresses to send the notification to. 
A maximum of 5 email addresses is - supported.""" +class ProvisioningInfo: + """Status of an asynchronously provisioned resource.""" + + state: Optional[ProvisioningInfoState] = None + """The provisioning state of the resource.""" def as_dict(self) -> dict: - """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body.""" + """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email_addresses: - body["email_addresses"] = [v for v in self.email_addresses] + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes.""" + """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.email_addresses: - body["email_addresses"] = self.email_addresses + if self.state is not None: + body["state"] = self.state return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorDestination: - """Deserializes the MonitorDestination from a dictionary.""" - return cls(email_addresses=d.get("email_addresses", None)) - - -@dataclass -class MonitorInferenceLog: - timestamp_col: str - """Column that contains the timestamps of requests. The column must be one of the following: - A - ``TimestampType`` column - A column whose values can be converted to timestamps through the - pyspark ``to_timestamp`` [function]. - - [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" + def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo: + """Deserializes the ProvisioningInfo from a dictionary.""" + return cls(state=_enum(d, "state", ProvisioningInfoState)) - granularities: List[str] - """Granularities for aggregating data into time windows based on their timestamp. 
Currently the - following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, - ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" - model_id_col: str - """Column that contains the id of the model generating the predictions. Metrics will be computed - per model id by default, and also across all model ids.""" +class ProvisioningInfoState(Enum): - problem_type: MonitorInferenceLogProblemType - """Problem type the model aims to solve. Determines the type of model-quality metrics that will be - computed.""" + ACTIVE = "ACTIVE" + DEGRADED = "DEGRADED" + DELETING = "DELETING" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + UPDATING = "UPDATING" - prediction_col: str - """Column that contains the output/prediction from the model.""" - label_col: Optional[str] = None - """Optional column that contains the ground truth for the prediction.""" +@dataclass +class ProvisioningStatus: + """Detailed status of an online table. Shown if the online table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - prediction_proba_col: Optional[str] = None - """Optional column that contains the prediction probabilities for each class in a classification - problem type. The values in this column should be a map, mapping each class label to the - prediction probability for a given sample. The map should be of PySpark MapType().""" + initial_pipeline_sync_progress: Optional[PipelineProgress] = None + """Details about initial data synchronization. 
Only populated when in the + PROVISIONING_INITIAL_SNAPSHOT state.""" def as_dict(self) -> dict: - """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" + """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.granularities: - body["granularities"] = [v for v in self.granularities] - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type.value - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" + """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.granularities: - body["granularities"] = self.granularities - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress return body @classmethod - def 
from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: - """Deserializes the MonitorInferenceLog from a dictionary.""" - return cls( - granularities=d.get("granularities", None), - label_col=d.get("label_col", None), - model_id_col=d.get("model_id_col", None), - prediction_col=d.get("prediction_col", None), - prediction_proba_col=d.get("prediction_proba_col", None), - problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), - timestamp_col=d.get("timestamp_col", None), - ) - - -class MonitorInferenceLogProblemType(Enum): - """Problem type the model aims to solve. Determines the type of model-quality metrics that will be - computed.""" - - PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION" - PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION" + def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus: + """Deserializes the ProvisioningStatus from a dictionary.""" + return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress)) @dataclass -class MonitorInfo: - table_name: str - """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" - - status: MonitorInfoStatus - """The status of the monitor.""" - - monitor_version: str - """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted.""" - - profile_metrics_table_name: str - """The full name of the profile metrics table. Format: - __catalog_name__.__schema_name__.__table_name__.""" - - drift_metrics_table_name: str - """The full name of the drift metrics table. Format: - __catalog_name__.__schema_name__.__table_name__.""" +class QuotaInfo: + last_refreshed_at: Optional[int] = None + """The timestamp that indicates when the quota count was last updated.""" - assets_dir: Optional[str] = None - """The directory to store monitoring assets (e.g. dashboard, metric tables).""" + parent_full_name: Optional[str] = None + """Name of the parent resource. 
Returns metastore ID if the parent is a metastore.""" - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. Columns in the monitored - table should also be present in the baseline table.""" + parent_securable_type: Optional[SecurableType] = None + """The quota parent securable type.""" - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" + quota_count: Optional[int] = None + """The current usage of the resource quota.""" - dashboard_id: Optional[str] = None - """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in - PENDING state.""" + quota_limit: Optional[int] = None + """The current limit of the resource quota.""" - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" + quota_name: Optional[str] = None + """The name of the quota.""" - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" + def as_dict(self) -> dict: + """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type.value + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name + return body - latest_monitor_failure_msg: Optional[str] = None - """The latest failure 
message of the monitor (if any).""" + def as_shallow_dict(self) -> dict: + """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name + return body - notifications: Optional[MonitorNotifications] = None - """The notification settings for the monitor.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo: + """Deserializes the QuotaInfo from a dictionary.""" + return cls( + last_refreshed_at=d.get("last_refreshed_at", None), + parent_full_name=d.get("parent_full_name", None), + parent_securable_type=_enum(d, "parent_securable_type", SecurableType), + quota_count=d.get("quota_count", None), + quota_limit=d.get("quota_limit", None), + quota_name=d.get("quota_name", None), + ) - output_schema_name: Optional[str] = None - """Schema where output metric tables are created.""" - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" +@dataclass +class R2Credentials: + """R2 temporary credentials for API authentication. Read more at + https://developers.cloudflare.com/r2/api/s3/tokens/.""" - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. 
- For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + access_key_id: Optional[str] = None + """The access key ID that identifies the temporary credentials.""" - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" + secret_access_key: Optional[str] = None + """The secret access key associated with the access key.""" - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" + session_token: Optional[str] = None + """The generated JWT that users must pass to use the temporary credentials.""" def as_dict(self) -> dict: - """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.drift_metrics_table_name is not None: - body["drift_metrics_table_name"] = self.drift_metrics_table_name - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.latest_monitor_failure_msg is not None: - body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg - if self.monitor_version is not None: - body["monitor_version"] = self.monitor_version - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.profile_metrics_table_name is not None: - body["profile_metrics_table_name"] 
= self.profile_metrics_table_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.status is not None: - body["status"] = self.status.value - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = self.custom_metrics - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config - if self.drift_metrics_table_name is not None: - body["drift_metrics_table_name"] = self.drift_metrics_table_name - if self.inference_log: - body["inference_log"] = self.inference_log - if self.latest_monitor_failure_msg is not None: - body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg - if self.monitor_version is not None: - body["monitor_version"] = self.monitor_version - if self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.profile_metrics_table_name is not 
None: - body["profile_metrics_table_name"] = self.profile_metrics_table_name - if self.schedule: - body["schedule"] = self.schedule - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.status is not None: - body["status"] = self.status - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo: - """Deserializes the MonitorInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> R2Credentials: + """Deserializes the R2Credentials from a dictionary.""" return cls( - assets_dir=d.get("assets_dir", None), - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - dashboard_id=d.get("dashboard_id", None), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - drift_metrics_table_name=d.get("drift_metrics_table_name", None), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None), - monitor_version=d.get("monitor_version", None), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - profile_metrics_table_name=d.get("profile_metrics_table_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - status=_enum(d, "status", MonitorInfoStatus), - table_name=d.get("table_name", 
None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), + access_key_id=d.get("access_key_id", None), + secret_access_key=d.get("secret_access_key", None), + session_token=d.get("session_token", None), ) -class MonitorInfoStatus(Enum): - """The status of the monitor.""" - - MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE" - MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING" - MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR" - MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED" - MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING" - - @dataclass -class MonitorMetric: - name: str - """Name of the metric in the output tables.""" - - definition: str - """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric - definition]. - - [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" - - input_columns: List[str] - """A list of column names in the input table the metric should be computed for. Can use - ``":table"`` to indicate that the metric needs information from multiple columns.""" - - output_data_type: str - """The output type of the custom metric.""" +class RegenerateDashboardResponse: + dashboard_id: Optional[str] = None + """Id of the regenerated monitoring dashboard.""" - type: MonitorMetricType - """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or - ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and - ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the - ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the - two consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing - columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate - metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" + parent_folder: Optional[str] = None + """The directory where the regenerated dashboard is stored.""" def as_dict(self) -> dict: - """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.input_columns: - body["input_columns"] = [v for v in self.input_columns] - if self.name is not None: - body["name"] = self.name - if self.output_data_type is not None: - body["output_data_type"] = self.output_data_type - if self.type is not None: - body["type"] = self.type.value + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" + """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.input_columns: - body["input_columns"] = self.input_columns - if self.name is not None: - body["name"] = self.name - if self.output_data_type is not None: - body["output_data_type"] = self.output_data_type - if self.type is not None: - body["type"] = self.type + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorMetric: - """Deserializes the 
MonitorMetric from a dictionary.""" - return cls( - definition=d.get("definition", None), - input_columns=d.get("input_columns", None), - name=d.get("name", None), - output_data_type=d.get("output_data_type", None), - type=_enum(d, "type", MonitorMetricType), - ) - - -class MonitorMetricType(Enum): - """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or - ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and - ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the - ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the - two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing - columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate - metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" - - CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE" - CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED" - CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT" + def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: + """Deserializes the RegenerateDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None)) @dataclass -class MonitorNotifications: - on_failure: Optional[MonitorDestination] = None - """Who to send notifications to on monitor failure.""" +class RegisteredModelAlias: + """Registered model alias.""" - on_new_classification_tag_detected: Optional[MonitorDestination] = None - """Who to send notifications to when new data classification tags are detected.""" + alias_name: Optional[str] = None + """Name of the alias, e.g. 
'champion' or 'latest_stable'""" + + version_num: Optional[int] = None + """Integer version number of the model version to which this alias points.""" def as_dict(self) -> dict: - """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" body = {} - if self.on_failure: - body["on_failure"] = self.on_failure.as_dict() - if self.on_new_classification_tag_detected: - body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict() + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" + """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" body = {} - if self.on_failure: - body["on_failure"] = self.on_failure - if self.on_new_classification_tag_detected: - body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorNotifications: - """Deserializes the MonitorNotifications from a dictionary.""" - return cls( - on_failure=_from_dict(d, "on_failure", MonitorDestination), - on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination), - ) + def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: + """Deserializes the RegisteredModelAlias from a dictionary.""" + return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) @dataclass -class MonitorRefreshInfo: - refresh_id: int - """Unique id of the 
refresh operation.""" +class RegisteredModelInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the registered model""" - state: MonitorRefreshInfoState - """The current state of the refresh.""" + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - start_time_ms: int - """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC).""" + catalog_name: Optional[str] = None + """The name of the catalog where the schema and the registered model reside""" - end_time_ms: Optional[int] = None - """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC).""" + comment: Optional[str] = None + """The comment attached to the registered model""" - message: Optional[str] = None - """An optional message to give insight into the current state of the job (e.g. 
FAILURE messages).""" + created_at: Optional[int] = None + """Creation timestamp of the registered model in milliseconds since the Unix epoch""" - trigger: Optional[MonitorRefreshInfoTrigger] = None - """The method by which the refresh was triggered.""" + created_by: Optional[str] = None + """The identifier of the user who created the registered model""" + + full_name: Optional[str] = None + """The three-level (fully qualified) name of the registered model""" + + metastore_id: Optional[str] = None + """The unique identifier of the metastore""" + + name: Optional[str] = None + """The name of the registered model""" + + owner: Optional[str] = None + """The identifier of the user who owns the registered model""" + + schema_name: Optional[str] = None + """The name of the schema where the registered model resides""" + + storage_location: Optional[str] = None + """The storage location on the cloud under which model version data files are stored""" + + updated_at: Optional[int] = None + """Last-update timestamp of the registered model in milliseconds since the Unix epoch""" + + updated_by: Optional[str] = None + """The identifier of the user who updated the registered model last time""" def as_dict(self) -> dict: - """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.message is not None: - body["message"] = self.message - if self.refresh_id is not None: - body["refresh_id"] = self.refresh_id - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms - if self.state is not None: - body["state"] = self.state.value - if self.trigger is not None: - body["trigger"] = self.trigger.value + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + body["browse_only"] 
= self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.message is not None: - body["message"] = self.message - if self.refresh_id is not None: - body["refresh_id"] = self.refresh_id - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms - if self.state is not None: - body["state"] = self.state - if self.trigger is not None: - body["trigger"] = self.trigger + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not 
None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshInfo: - """Deserializes the MonitorRefreshInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: + """Deserializes the RegisteredModelInfo from a dictionary.""" return cls( - end_time_ms=d.get("end_time_ms", None), - message=d.get("message", None), - refresh_id=d.get("refresh_id", None), - start_time_ms=d.get("start_time_ms", None), - state=_enum(d, "state", MonitorRefreshInfoState), - trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger), + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + schema_name=d.get("schema_name", None), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), ) -class MonitorRefreshInfoState(Enum): - """The current state of the refresh.""" +@dataclass +class SchemaInfo: + """Next ID: 40""" - CANCELED = "CANCELED" - FAILED = "FAILED" - PENDING = "PENDING" - RUNNING = "RUNNING" - SUCCESS = "SUCCESS" + browse_only: Optional[bool] = None + """Indicates whether the 
principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" + catalog_name: Optional[str] = None + """Name of parent catalog.""" -class MonitorRefreshInfoTrigger(Enum): - """The method by which the refresh was triggered.""" + catalog_type: Optional[CatalogType] = None + """The type of the parent catalog.""" - MANUAL = "MANUAL" - SCHEDULE = "SCHEDULE" + comment: Optional[str] = None + """User-provided free-form text description.""" + created_at: Optional[int] = None + """Time at which this schema was created, in epoch milliseconds.""" -@dataclass -class MonitorRefreshListResponse: - refreshes: Optional[List[MonitorRefreshInfo]] = None - """List of refreshes.""" + created_by: Optional[str] = None + """Username of schema creator.""" - def as_dict(self) -> dict: - """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.refreshes: - body["refreshes"] = [v.as_dict() for v in self.refreshes] - return body + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - def as_shallow_dict(self) -> dict: - """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.refreshes: - body["refreshes"] = self.refreshes - return body + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None + """Whether predictive optimization should be enabled for this object and objects under it.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse: - """Deserializes the MonitorRefreshListResponse from a dictionary.""" - return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo)) + full_name: Optional[str] = None + """Full name of schema, in form of __catalog_name__.__schema_name__.""" + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" 
-@dataclass -class MonitorSnapshot: - def as_dict(self) -> dict: - """Serializes the MonitorSnapshot into a dictionary suitable for use as a JSON request body.""" - body = {} - return body + name: Optional[str] = None + """Name of schema, relative to parent catalog.""" - def as_shallow_dict(self) -> dict: - """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes.""" - body = {} - return body + owner: Optional[str] = None + """Username of current owner of schema.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorSnapshot: - """Deserializes the MonitorSnapshot from a dictionary.""" - return cls() + properties: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the securable.""" + schema_id: Optional[str] = None + """The unique identifier of the schema.""" -@dataclass -class MonitorTimeSeries: - timestamp_col: str - """Column that contains the timestamps of requests. The column must be one of the following: - A - ``TimestampType`` column - A column whose values can be converted to timestamps through the - pyspark ``to_timestamp`` [function]. - - [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" + storage_location: Optional[str] = None + """Storage location for managed tables within schema.""" - granularities: List[str] - """Granularities for aggregating data into time windows based on their timestamp. 
Currently the - following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, - ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" + storage_root: Optional[str] = None + """Storage root URL for managed tables within schema.""" + + updated_at: Optional[int] = None + """Time at which this schema was created, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified schema.""" def as_dict(self) -> dict: - """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.granularities: - body["granularities"] = [v for v in self.granularities] - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.granularities: - body["granularities"] = self.granularities - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries: - """Deserializes the MonitorTimeSeries from a dictionary.""" - return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None)) - - -@dataclass -class NamedTableConstraint: - name: str - """The name of the constraint.""" - - def as_dict(self) -> dict: - """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body.""" + """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type.value + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is 
not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization.value + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] = self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" + """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is 
not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] = self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: - """Deserializes the NamedTableConstraint from a dictionary.""" - return cls(name=d.get("name", None)) + def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: + """Deserializes the SchemaInfo from a dictionary.""" + return cls( + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + catalog_type=_enum(d, "catalog_type", CatalogType), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + effective_predictive_optimization_flag=_from_dict( + d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag + ), + enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + properties=d.get("properties", None), + schema_id=d.get("schema_id", None), + storage_location=d.get("storage_location", None), + storage_root=d.get("storage_root", None), + updated_at=d.get("updated_at", None), + 
updated_by=d.get("updated_by", None), + ) -@dataclass -class OnlineTable: - """Online Table information.""" +class SecurableType(Enum): + """The type of Unity Catalog securable.""" - name: Optional[str] = None - """Full three-part (catalog, schema, table) name of the table.""" + CATALOG = "CATALOG" + CLEAN_ROOM = "CLEAN_ROOM" + CONNECTION = "CONNECTION" + CREDENTIAL = "CREDENTIAL" + EXTERNAL_LOCATION = "EXTERNAL_LOCATION" + EXTERNAL_METADATA = "EXTERNAL_METADATA" + FUNCTION = "FUNCTION" + METASTORE = "METASTORE" + PIPELINE = "PIPELINE" + PROVIDER = "PROVIDER" + RECIPIENT = "RECIPIENT" + SCHEMA = "SCHEMA" + SHARE = "SHARE" + STAGING_TABLE = "STAGING_TABLE" + STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL" + TABLE = "TABLE" + UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE" + VOLUME = "VOLUME" - spec: Optional[OnlineTableSpec] = None - """Specification of the online table.""" - status: Optional[OnlineTableStatus] = None - """Online Table data synchronization status""" +@dataclass +class SseEncryptionDetails: + """Server-Side Encryption properties for clients communicating with AWS s3.""" - table_serving_url: Optional[str] = None - """Data serving REST API URL for this table""" + algorithm: Optional[SseEncryptionDetailsAlgorithm] = None + """Sets the value of the 'x-amz-server-side-encryption' header in S3 request.""" - unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None - """The provisioning state of the online table entity in Unity Catalog. This is distinct from the - state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline - may be in "PROVISIONING" as it runs asynchronously).""" + aws_kms_key_arn: Optional[str] = None + """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". 
Sets + the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header.""" def as_dict(self) -> dict: - """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body.""" + """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec.as_dict() - if self.status: - body["status"] = self.status.as_dict() - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value + if self.algorithm is not None: + body["algorithm"] = self.algorithm.value + if self.aws_kms_key_arn is not None: + body["aws_kms_key_arn"] = self.aws_kms_key_arn return body def as_shallow_dict(self) -> dict: - """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" + """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec - if self.status: - body["status"] = self.status - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state + if self.algorithm is not None: + body["algorithm"] = self.algorithm + if self.aws_kms_key_arn is not None: + body["aws_kms_key_arn"] = self.aws_kms_key_arn return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTable: - """Deserializes the OnlineTable from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails: + """Deserializes the SseEncryptionDetails from a dictionary.""" return cls( - name=d.get("name", None), - spec=_from_dict(d, 
"spec", OnlineTableSpec), - status=_from_dict(d, "status", OnlineTableStatus), - table_serving_url=d.get("table_serving_url", None), - unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), + algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm), + aws_kms_key_arn=d.get("aws_kms_key_arn", None), ) +class SseEncryptionDetailsAlgorithm(Enum): + + AWS_SSE_KMS = "AWS_SSE_KMS" + AWS_SSE_S3 = "AWS_SSE_S3" + + @dataclass -class OnlineTableSpec: - """Specification of an online table.""" +class StorageCredentialInfo: + aws_iam_role: Optional[AwsIamRoleResponse] = None + """The AWS IAM role configuration.""" - perform_full_copy: Optional[bool] = None - """Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of the - source table upon initialization and does not process any change data feeds (CDFs) afterwards. - The pipeline can still be manually triggered afterwards, but it always perform a full copy of - the source table and there are no incremental updates. This mode is useful for syncing views or - tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered" - scheduling policy.""" + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None + """The Azure managed identity configuration.""" - pipeline_id: Optional[str] = None - """ID of the associated pipeline. 
Generated by the server - cannot be set by the caller.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" - primary_key_columns: Optional[List[str]] = None - """Primary Key columns to be used for data insert/update in the destination.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" - run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None - """Pipeline runs continuously after generating the initial data.""" + comment: Optional[str] = None + """Comment associated with the credential.""" - run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None - """Pipeline stops after generating the initial data and can be triggered later (manually, through a - cron job or through data triggers)""" + created_at: Optional[int] = None + """Time at which this credential was created, in epoch milliseconds.""" - source_table_full_name: Optional[str] = None - """Three-part (catalog, schema, table) name of the source Delta table.""" + created_by: Optional[str] = None + """Username of credential creator.""" - timeseries_key: Optional[str] = None - """Time series key to deduplicate (tie-break) rows with the same primary key.""" + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None + """The Databricks managed GCP service account configuration.""" - def as_dict(self) -> dict: - """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body.""" + full_name: Optional[str] = None + """The full name of the credential.""" + + id: Optional[str] = None + """The unique identifier of the credential.""" + + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + metastore_id: Optional[str] = None + """Unique identifier of the parent metastore.""" + + name: Optional[str] = None 
+ """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" + + updated_at: Optional[int] = None + """Time at which this credential was last modified, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified the credential.""" + + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. Only applicable when + purpose is **STORAGE**.""" + + def as_dict(self) -> dict: + """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.perform_full_copy is not None: - body["perform_full_copy"] = self.perform_full_copy - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = [v for v in self.primary_key_columns] - if self.run_continuously: - body["run_continuously"] = self.run_continuously.as_dict() - if self.run_triggered: - body["run_triggered"] = self.run_triggered.as_dict() - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if 
self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes.""" + """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.perform_full_copy is not None: - body["perform_full_copy"] = self.perform_full_copy - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = self.primary_key_columns - if self.run_continuously: - body["run_continuously"] = self.run_continuously - if self.run_triggered: - body["run_triggered"] = self.run_triggered - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = 
self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec: - """Deserializes the OnlineTableSpec from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo: + """Deserializes the StorageCredentialInfo from a dictionary.""" return cls( - perform_full_copy=d.get("perform_full_copy", None), - pipeline_id=d.get("pipeline_id", None), - primary_key_columns=d.get("primary_key_columns", None), - run_continuously=_from_dict(d, "run_continuously", OnlineTableSpecContinuousSchedulingPolicy), - run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy), - source_table_full_name=d.get("source_table_full_name", None), - 
timeseries_key=d.get("timeseries_key", None), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse + ), + full_name=d.get("full_name", None), + id=d.get("id", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + used_for_managed_storage=d.get("used_for_managed_storage", None), ) @dataclass -class OnlineTableSpecContinuousSchedulingPolicy: +class SystemSchemaInfo: + schema: str + """Name of the system schema.""" + + state: str + """The current state of enablement for the system schema. An empty string means the system schema + is available and ready for opt-in. 
Possible values: AVAILABLE | ENABLE_INITIALIZED | + ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" + def as_dict(self) -> dict: - """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" + """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.schema is not None: + body["schema"] = self.schema + if self.state is not None: + body["state"] = self.state return body def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes.""" + """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.schema is not None: + body["schema"] = self.schema + if self.state is not None: + body["state"] = self.state return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy: - """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo: + """Deserializes the SystemSchemaInfo from a dictionary.""" + return cls(schema=d.get("schema", None), state=d.get("state", None)) @dataclass -class OnlineTableSpecTriggeredSchedulingPolicy: +class TableConstraint: + """A table constraint, as defined by *one* of the following fields being set: + __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" + + foreign_key_constraint: Optional[ForeignKeyConstraint] = None + + named_table_constraint: Optional[NamedTableConstraint] = None + + primary_key_constraint: Optional[PrimaryKeyConstraint] = None + def as_dict(self) -> dict: - """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} + 
if self.foreign_key_constraint: + body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict() + if self.named_table_constraint: + body["named_table_constraint"] = self.named_table_constraint.as_dict() + if self.primary_key_constraint: + body["primary_key_constraint"] = self.primary_key_constraint.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes.""" + """Serializes the TableConstraint into a shallow dictionary of its immediate attributes.""" body = {} + if self.foreign_key_constraint: + body["foreign_key_constraint"] = self.foreign_key_constraint + if self.named_table_constraint: + body["named_table_constraint"] = self.named_table_constraint + if self.primary_key_constraint: + body["primary_key_constraint"] = self.primary_key_constraint return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy: - """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary.""" - return cls() - - -class OnlineTableState(Enum): - """The state of an online table.""" - - OFFLINE = "OFFLINE" - OFFLINE_FAILED = "OFFLINE_FAILED" - ONLINE = "ONLINE" - ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE" - ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE" - ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED" - ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE" - ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES" - PROVISIONING = "PROVISIONING" - PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT" - PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES" + def from_dict(cls, d: Dict[str, Any]) -> TableConstraint: + """Deserializes the TableConstraint from a dictionary.""" + return cls( + foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint), + named_table_constraint=_from_dict(d, "named_table_constraint", 
NamedTableConstraint), + primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint), + ) @dataclass -class OnlineTableStatus: - """Status of an online table.""" +class TableDependency: + """A table that is dependent on a SQL object.""" - continuous_update_status: Optional[ContinuousUpdateStatus] = None - """Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE - or the ONLINE_UPDATING_PIPELINE_RESOURCES state.""" - - detailed_state: Optional[OnlineTableState] = None - """The state of the online table.""" - - failed_status: Optional[FailedStatus] = None - """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the - ONLINE_PIPELINE_FAILED state.""" - - message: Optional[str] = None - """A text description of the current state of the online table.""" - - provisioning_status: Optional[ProvisioningStatus] = None - """Detailed status of an online table. Shown if the online table is in the - PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - - triggered_update_status: Optional[TriggeredUpdateStatus] = None - """Detailed status of an online table. 
Shown if the online table is in the ONLINE_TRIGGERED_UPDATE - or the ONLINE_NO_PENDING_UPDATE state.""" + table_full_name: str + """Full name of the dependent table, in the form of + __catalog_name__.__schema_name__.__table_name__.""" def as_dict(self) -> dict: - """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.continuous_update_status: - body["continuous_update_status"] = self.continuous_update_status.as_dict() - if self.detailed_state is not None: - body["detailed_state"] = self.detailed_state.value - if self.failed_status: - body["failed_status"] = self.failed_status.as_dict() - if self.message is not None: - body["message"] = self.message - if self.provisioning_status: - body["provisioning_status"] = self.provisioning_status.as_dict() - if self.triggered_update_status: - body["triggered_update_status"] = self.triggered_update_status.as_dict() + if self.table_full_name is not None: + body["table_full_name"] = self.table_full_name return body def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes.""" + """Serializes the TableDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.continuous_update_status: - body["continuous_update_status"] = self.continuous_update_status - if self.detailed_state is not None: - body["detailed_state"] = self.detailed_state - if self.failed_status: - body["failed_status"] = self.failed_status - if self.message is not None: - body["message"] = self.message - if self.provisioning_status: - body["provisioning_status"] = self.provisioning_status - if self.triggered_update_status: - body["triggered_update_status"] = self.triggered_update_status + if self.table_full_name is not None: + body["table_full_name"] = self.table_full_name return body @classmethod - def 
from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: - """Deserializes the OnlineTableStatus from a dictionary.""" - return cls( - continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), - detailed_state=_enum(d, "detailed_state", OnlineTableState), - failed_status=_from_dict(d, "failed_status", FailedStatus), - message=d.get("message", None), - provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), - triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), - ) + def from_dict(cls, d: Dict[str, Any]) -> TableDependency: + """Deserializes the TableDependency from a dictionary.""" + return cls(table_full_name=d.get("table_full_name", None)) @dataclass -class PermissionsChange: - add: Optional[List[Privilege]] = None - """The set of privileges to add.""" - - principal: Optional[str] = None - """The principal whose privileges we are changing.""" - - remove: Optional[List[Privilege]] = None - """The set of privileges to remove.""" +class TableExistsResponse: + table_exists: Optional[bool] = None + """Whether the table exists or not.""" def as_dict(self) -> dict: - """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add: - body["add"] = [v.value for v in self.add] - if self.principal is not None: - body["principal"] = self.principal - if self.remove: - body["remove"] = [v.value for v in self.remove] + if self.table_exists is not None: + body["table_exists"] = self.table_exists return body def as_shallow_dict(self) -> dict: - """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes.""" + """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: - body["add"] = self.add - if self.principal is not None: - body["principal"] 
= self.principal - if self.remove: - body["remove"] = self.remove + if self.table_exists is not None: + body["table_exists"] = self.table_exists return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: - """Deserializes the PermissionsChange from a dictionary.""" - return cls( - add=_repeated_enum(d, "add", Privilege), - principal=d.get("principal", None), - remove=_repeated_enum(d, "remove", Privilege), - ) + def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse: + """Deserializes the TableExistsResponse from a dictionary.""" + return cls(table_exists=d.get("table_exists", None)) @dataclass -class PipelineProgress: - """Progress information of the Online Table data synchronization pipeline.""" +class TableInfo: + access_point: Optional[str] = None + """The AWS access point to use when accessing s3 for this external location.""" - estimated_completion_time_seconds: Optional[float] = None - """The estimated time remaining to complete this update in seconds.""" + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - latest_version_currently_processing: Optional[int] = None - """The source table Delta version that was last processed by the pipeline. The pipeline may not - have completely processed this version yet.""" + catalog_name: Optional[str] = None + """Name of parent catalog.""" - sync_progress_completion: Optional[float] = None - """The completion ratio of this update.
This is a number between 0 and 1.""" + columns: Optional[List[ColumnInfo]] = None + """The array of __ColumnInfo__ definitions of the table's columns.""" - synced_row_count: Optional[int] = None - """The number of rows that have been synced in this update.""" + comment: Optional[str] = None + """User-provided free-form text description.""" - total_row_count: Optional[int] = None - """The total number of rows that need to be synced in this update. This number may be an estimate.""" + created_at: Optional[int] = None + """Time at which this table was created, in epoch milliseconds.""" - def as_dict(self) -> dict: - """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.estimated_completion_time_seconds is not None: - body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: - body["latest_version_currently_processing"] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: - body["sync_progress_completion"] = self.sync_progress_completion - if self.synced_row_count is not None: - body["synced_row_count"] = self.synced_row_count - if self.total_row_count is not None: - body["total_row_count"] = self.total_row_count - return body + created_by: Optional[str] = None + """Username of table creator.""" - def as_shallow_dict(self) -> dict: - """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes.""" - body = {} - if self.estimated_completion_time_seconds is not None: - body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: - body["latest_version_currently_processing"] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: - body["sync_progress_completion"] = self.sync_progress_completion - if self.synced_row_count is not None: - 
body["synced_row_count"] = self.synced_row_count - if self.total_row_count is not None: - body["total_row_count"] = self.total_row_count - return body + data_access_configuration_id: Optional[str] = None + """Unique ID of the Data Access Configuration to use with the table data.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: - """Deserializes the PipelineProgress from a dictionary.""" - return cls( - estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), - latest_version_currently_processing=d.get("latest_version_currently_processing", None), - sync_progress_completion=d.get("sync_progress_completion", None), - synced_row_count=d.get("synced_row_count", None), - total_row_count=d.get("total_row_count", None), - ) + data_source_format: Optional[DataSourceFormat] = None + """Data source format""" + deleted_at: Optional[int] = None + """Time at which this table was deleted, in epoch milliseconds. Field is omitted if table is not + deleted.""" -@dataclass -class PrimaryKeyConstraint: - name: str - """The name of the constraint.""" + delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None + """Information pertaining to current state of the delta table.""" - child_columns: List[str] - """Column names for this constraint.""" + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - timeseries_columns: Optional[List[str]] = None - """Column names that represent a timeseries.""" + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - def as_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] - if self.name is not None: - body["name"] = self.name - if self.timeseries_columns: - body["timeseries_columns"] = [v for v in self.timeseries_columns] - return body + 
encryption_details: Optional[EncryptionDetails] = None + """Encryption options that apply to clients connecting to cloud storage.""" - def as_shallow_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.child_columns: - body["child_columns"] = self.child_columns - if self.name is not None: - body["name"] = self.name - if self.timeseries_columns: - body["timeseries_columns"] = self.timeseries_columns - return body + full_name: Optional[str] = None + """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: - """Deserializes the PrimaryKeyConstraint from a dictionary.""" - return cls( - child_columns=d.get("child_columns", None), - name=d.get("name", None), - timeseries_columns=d.get("timeseries_columns", None), - ) + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" + name: Optional[str] = None + """Name of table, relative to parent schema.""" -class Privilege(Enum): + owner: Optional[str] = None + """Username of current owner of table.""" - ACCESS = "ACCESS" - ALL_PRIVILEGES = "ALL_PRIVILEGES" - APPLY_TAG = "APPLY_TAG" - BROWSE = "BROWSE" - CREATE = "CREATE" - CREATE_CATALOG = "CREATE_CATALOG" - CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM" - CREATE_CONNECTION = "CREATE_CONNECTION" - CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" - CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" - CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" - CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" - CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE" - CREATE_FUNCTION = "CREATE_FUNCTION" - CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" - CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" - CREATE_MODEL = "CREATE_MODEL" - CREATE_PROVIDER = "CREATE_PROVIDER" - CREATE_RECIPIENT = "CREATE_RECIPIENT" - CREATE_SCHEMA = "CREATE_SCHEMA" - 
CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL" - CREATE_SHARE = "CREATE_SHARE" - CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" - CREATE_TABLE = "CREATE_TABLE" - CREATE_VIEW = "CREATE_VIEW" - CREATE_VOLUME = "CREATE_VOLUME" - EXECUTE = "EXECUTE" - EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" - MANAGE = "MANAGE" - MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" - MODIFY = "MODIFY" - MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" - READ_FILES = "READ_FILES" - READ_PRIVATE_FILES = "READ_PRIVATE_FILES" - READ_VOLUME = "READ_VOLUME" - REFRESH = "REFRESH" - SELECT = "SELECT" - SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" - USAGE = "USAGE" - USE_CATALOG = "USE_CATALOG" - USE_CONNECTION = "USE_CONNECTION" - USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS" - USE_PROVIDER = "USE_PROVIDER" - USE_RECIPIENT = "USE_RECIPIENT" - USE_SCHEMA = "USE_SCHEMA" - USE_SHARE = "USE_SHARE" - WRITE_FILES = "WRITE_FILES" - WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" - WRITE_VOLUME = "WRITE_VOLUME" - - -@dataclass -class PrivilegeAssignment: - principal: Optional[str] = None - """The principal (user email address or group name).""" - - privileges: Optional[List[Privilege]] = None - """The privileges assigned to the principal.""" - - def as_dict(self) -> dict: - """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.value for v in self.privileges] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: - """Deserializes the PrivilegeAssignment from a dictionary.""" - return 
cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) - - -@dataclass -class ProvisioningInfo: - """Status of an asynchronously provisioned resource.""" - - state: Optional[ProvisioningInfoState] = None - """The provisioning state of the resource.""" - - def as_dict(self) -> dict: - """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.state is not None: - body["state"] = self.state.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.state is not None: - body["state"] = self.state - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo: - """Deserializes the ProvisioningInfo from a dictionary.""" - return cls(state=_enum(d, "state", ProvisioningInfoState)) - - -class ProvisioningInfoState(Enum): - - ACTIVE = "ACTIVE" - DEGRADED = "DEGRADED" - DELETING = "DELETING" - FAILED = "FAILED" - PROVISIONING = "PROVISIONING" - UPDATING = "UPDATING" + pipeline_id: Optional[str] = None + """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, + Streaming Table, etc.).""" + properties: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the securable.""" -@dataclass -class ProvisioningStatus: - """Detailed status of an online table. Shown if the online table is in the - PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" + row_filter: Optional[TableRowFilter] = None - initial_pipeline_sync_progress: Optional[PipelineProgress] = None - """Details about initial data synchronization. 
Only populated when in the - PROVISIONING_INITIAL_SNAPSHOT state.""" + schema_name: Optional[str] = None + """Name of parent schema relative to its parent catalog.""" - def as_dict(self) -> dict: - """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.initial_pipeline_sync_progress: - body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() - return body + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" - def as_shallow_dict(self) -> dict: - """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.initial_pipeline_sync_progress: - body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress - return body + storage_credential_name: Optional[str] = None + """Name of the storage credential, when a storage credential is configured for use with this table.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus: - """Deserializes the ProvisioningStatus from a dictionary.""" - return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress)) + storage_location: Optional[str] = None + """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables)""" + table_constraints: Optional[List[TableConstraint]] = None + """List of table constraints. Note: this field is not set in the output of the __listTables__ API.""" -@dataclass -class QuotaInfo: - last_refreshed_at: Optional[int] = None - """The timestamp that indicates when the quota count was last updated.""" + table_id: Optional[str] = None + """The unique identifier of the table.""" - parent_full_name: Optional[str] = None - """Name of the parent resource. 
Returns metastore ID if the parent is a metastore.""" + table_type: Optional[TableType] = None - parent_securable_type: Optional[SecurableType] = None - """The quota parent securable type.""" + updated_at: Optional[int] = None + """Time at which this table was last modified, in epoch milliseconds.""" - quota_count: Optional[int] = None - """The current usage of the resource quota.""" + updated_by: Optional[str] = None + """Username of user who last modified the table.""" - quota_limit: Optional[int] = None - """The current limit of the resource quota.""" + view_definition: Optional[str] = None + """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or + **STREAMING_TABLE**)""" - quota_name: Optional[str] = None - """The name of the quota.""" + view_dependencies: Optional[DependencyList] = None + """View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - + when DependencyList is None, the dependency is not provided; - when DependencyList is an empty + list, the dependency is provided but is empty; - when DependencyList is not an empty list, + dependencies are provided and recorded.""" def as_dict(self) -> dict: - """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_refreshed_at is not None: - body["last_refreshed_at"] = self.last_refreshed_at - if self.parent_full_name is not None: - body["parent_full_name"] = self.parent_full_name - if self.parent_securable_type is not None: - body["parent_securable_type"] = self.parent_securable_type.value - if self.quota_count is not None: - body["quota_count"] = self.quota_count - if self.quota_limit is not None: - body["quota_limit"] = self.quota_limit - if self.quota_name is not None: - body["quota_name"] = self.quota_name + if self.access_point is not None: + body["access_point"] = self.access_point + if 
self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_access_configuration_id is not None: + body["data_access_configuration_id"] = self.data_access_configuration_id + if self.data_source_format is not None: + body["data_source_format"] = self.data_source_format.value + if self.deleted_at is not None: + body["deleted_at"] = self.deleted_at + if self.delta_runtime_properties_kvpairs: + body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs.as_dict() + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization.value + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.properties: + body["properties"] = self.properties + if self.row_filter: + body["row_filter"] = self.row_filter.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name + if 
self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.table_constraints: + body["table_constraints"] = [v.as_dict() for v in self.table_constraints] + if self.table_id is not None: + body["table_id"] = self.table_id + if self.table_type is not None: + body["table_type"] = self.table_type.value + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.view_definition is not None: + body["view_definition"] = self.view_definition + if self.view_dependencies: + body["view_dependencies"] = self.view_dependencies.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the TableInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_refreshed_at is not None: - body["last_refreshed_at"] = self.last_refreshed_at - if self.parent_full_name is not None: - body["parent_full_name"] = self.parent_full_name - if self.parent_securable_type is not None: - body["parent_securable_type"] = self.parent_securable_type - if self.quota_count is not None: - body["quota_count"] = self.quota_count - if self.quota_limit is not None: - body["quota_limit"] = self.quota_limit - if self.quota_name is not None: - body["quota_name"] = self.quota_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo: - """Deserializes the QuotaInfo from a dictionary.""" - return cls( - last_refreshed_at=d.get("last_refreshed_at", None), - parent_full_name=d.get("parent_full_name", None), - parent_securable_type=_enum(d, "parent_securable_type", SecurableType), - quota_count=d.get("quota_count", None), - quota_limit=d.get("quota_limit", None), - quota_name=d.get("quota_name", None), - ) - - -@dataclass -class R2Credentials: - """R2 temporary credentials for API authentication. 
Read more at - https://developers.cloudflare.com/r2/api/s3/tokens/.""" - - access_key_id: Optional[str] = None - """The access key ID that identifies the temporary credentials.""" - - secret_access_key: Optional[str] = None - """The secret access key associated with the access key.""" - - session_token: Optional[str] = None - """The generated JWT that users must pass to use the temporary credentials.""" - - def as_dict(self) -> dict: - """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> R2Credentials: - """Deserializes the R2Credentials from a dictionary.""" - return cls( - access_key_id=d.get("access_key_id", None), - secret_access_key=d.get("secret_access_key", None), - session_token=d.get("session_token", None), - ) - - -@dataclass -class RegenerateDashboardRequest: - table_name: Optional[str] = None - """Full name of the table.""" - - warehouse_id: Optional[str] = None - """Optional argument to specify the warehouse for dashboard regeneration. 
If not specified, the - first running warehouse will be used.""" - - def as_dict(self) -> dict: - """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.table_name is not None: - body["table_name"] = self.table_name - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.table_name is not None: - body["table_name"] = self.table_name - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardRequest: - """Deserializes the RegenerateDashboardRequest from a dictionary.""" - return cls(table_name=d.get("table_name", None), warehouse_id=d.get("warehouse_id", None)) - - -@dataclass -class RegenerateDashboardResponse: - dashboard_id: Optional[str] = None - """Id of the regenerated monitoring dashboard.""" - - parent_folder: Optional[str] = None - """The directory where the regenerated dashboard is stored.""" - - def as_dict(self) -> dict: - """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.parent_folder is not None: - body["parent_folder"] = self.parent_folder - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.parent_folder is not None: - body["parent_folder"] = self.parent_folder - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: - """Deserializes the 
RegenerateDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None)) - - -@dataclass -class RegisteredModelAlias: - """Registered model alias.""" - - alias_name: Optional[str] = None - """Name of the alias, e.g. 'champion' or 'latest_stable'""" - - version_num: Optional[int] = None - """Integer version number of the model version to which this alias points.""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: - """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) - - -@dataclass -class RegisteredModelInfo: - aliases: Optional[List[RegisteredModelAlias]] = None - """List of aliases associated with the registered model""" - - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """The name of the catalog where the schema and the registered model reside""" - - comment: Optional[str] = None - """The comment attached to the registered model""" - - created_at: Optional[int] = None - """Creation timestamp of the registered model in milliseconds since the 
Unix epoch""" - - created_by: Optional[str] = None - """The identifier of the user who created the registered model""" - - full_name: Optional[str] = None - """The three-level (fully qualified) name of the registered model""" - - metastore_id: Optional[str] = None - """The unique identifier of the metastore""" - - name: Optional[str] = None - """The name of the registered model""" - - owner: Optional[str] = None - """The identifier of the user who owns the registered model""" - - schema_name: Optional[str] = None - """The name of the schema where the registered model resides""" - - storage_location: Optional[str] = None - """The storage location on the cloud under which model version data files are stored""" - - updated_at: Optional[int] = None - """Last-update timestamp of the registered model in milliseconds since the Unix epoch""" - - updated_by: Optional[str] = None - """The identifier of the user who updated the registered model last time""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aliases: - body["aliases"] = [v.as_dict() for v in self.aliases] + if self.access_point is not None: + body["access_point"] = self.access_point if self.browse_only is not None: body["browse_only"] = self.browse_only if self.catalog_name is not None: body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = self.columns if self.comment is not None: body["comment"] = self.comment if self.created_at is not None: body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by + if self.data_access_configuration_id is not None: + body["data_access_configuration_id"] = self.data_access_configuration_id + if self.data_source_format is not None: + body["data_source_format"] = self.data_source_format + if self.deleted_at is not None: + body["deleted_at"] = self.deleted_at + if self.delta_runtime_properties_kvpairs: + 
body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization + if self.encryption_details: + body["encryption_details"] = self.encryption_details if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -7431,2596 +7136,688 @@ def as_dict(self) -> dict: body["name"] = self.name if self.owner is not None: body["owner"] = self.owner + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.properties: + body["properties"] = self.properties + if self.row_filter: + body["row_filter"] = self.row_filter if self.schema_name is not None: body["schema_name"] = self.schema_name + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name if self.storage_location is not None: body["storage_location"] = self.storage_location + if self.table_constraints: + body["table_constraints"] = self.table_constraints + if self.table_id is not None: + body["table_id"] = self.table_id + if self.table_type is not None: + body["table_type"] = self.table_type if self.updated_at is not None: body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = 
self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: - """Deserializes the RegisteredModelInfo from a dictionary.""" - return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) - - -@dataclass -class SchemaInfo: - """Next ID: 40""" - - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """Name of parent catalog.""" - - catalog_type: Optional[CatalogType] = None - """The type of the parent catalog.""" - - comment: Optional[str] = None - """User-provided free-form text 
description.""" - - created_at: Optional[int] = None - """Time at which this schema was created, in epoch milliseconds.""" - - created_by: Optional[str] = None - """Username of schema creator.""" - - effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - """Whether predictive optimization should be enabled for this object and objects under it.""" - - full_name: Optional[str] = None - """Full name of schema, in form of __catalog_name__.__schema_name__.""" - - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" - - name: Optional[str] = None - """Name of schema, relative to parent catalog.""" - - owner: Optional[str] = None - """Username of current owner of schema.""" - - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - - schema_id: Optional[str] = None - """The unique identifier of the schema.""" - - storage_location: Optional[str] = None - """Storage location for managed tables within schema.""" - - storage_root: Optional[str] = None - """Storage root URL for managed tables within schema.""" - - updated_at: Optional[int] = None - """Time at which this schema was created, in epoch milliseconds.""" - - updated_by: Optional[str] = None - """Username of user who last modified schema.""" - - def as_dict(self) -> dict: - """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.catalog_type is not None: - body["catalog_type"] = self.catalog_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = 
self.created_by - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.schema_id is not None: - body["schema_id"] = self.schema_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.catalog_type is not None: - body["catalog_type"] = self.catalog_type - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not 
None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.schema_id is not None: - body["schema_id"] = self.schema_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.view_definition is not None: + body["view_definition"] = self.view_definition + if self.view_dependencies: + body["view_dependencies"] = self.view_dependencies return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: - """Deserializes the SchemaInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TableInfo: + """Deserializes the TableInfo from a dictionary.""" return cls( + access_point=d.get("access_point", None), browse_only=d.get("browse_only", None), catalog_name=d.get("catalog_name", None), - catalog_type=_enum(d, "catalog_type", CatalogType), + columns=_repeated_dict(d, "columns", ColumnInfo), comment=d.get("comment", None), created_at=d.get("created_at", None), created_by=d.get("created_by", None), + data_access_configuration_id=d.get("data_access_configuration_id", None), + data_source_format=_enum(d, "data_source_format", DataSourceFormat), + deleted_at=d.get("deleted_at", None), + delta_runtime_properties_kvpairs=_from_dict( + d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs + ), effective_predictive_optimization_flag=_from_dict( d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag ), enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), + encryption_details=_from_dict(d, "encryption_details", 
EncryptionDetails), full_name=d.get("full_name", None), metastore_id=d.get("metastore_id", None), name=d.get("name", None), owner=d.get("owner", None), + pipeline_id=d.get("pipeline_id", None), properties=d.get("properties", None), - schema_id=d.get("schema_id", None), + row_filter=_from_dict(d, "row_filter", TableRowFilter), + schema_name=d.get("schema_name", None), + sql_path=d.get("sql_path", None), + storage_credential_name=d.get("storage_credential_name", None), storage_location=d.get("storage_location", None), - storage_root=d.get("storage_root", None), + table_constraints=_repeated_dict(d, "table_constraints", TableConstraint), + table_id=d.get("table_id", None), + table_type=_enum(d, "table_type", TableType), updated_at=d.get("updated_at", None), updated_by=d.get("updated_by", None), - ) - - -class SecurableType(Enum): - """The type of Unity Catalog securable.""" - - CATALOG = "CATALOG" - CLEAN_ROOM = "CLEAN_ROOM" - CONNECTION = "CONNECTION" - CREDENTIAL = "CREDENTIAL" - EXTERNAL_LOCATION = "EXTERNAL_LOCATION" - EXTERNAL_METADATA = "EXTERNAL_METADATA" - FUNCTION = "FUNCTION" - METASTORE = "METASTORE" - PIPELINE = "PIPELINE" - PROVIDER = "PROVIDER" - RECIPIENT = "RECIPIENT" - SCHEMA = "SCHEMA" - SHARE = "SHARE" - STAGING_TABLE = "STAGING_TABLE" - STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL" - TABLE = "TABLE" - UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE" - VOLUME = "VOLUME" - - -@dataclass -class SetArtifactAllowlist: - artifact_matchers: List[ArtifactMatcher] - """A list of allowed artifact match patterns.""" - - artifact_type: Optional[ArtifactType] = None - """The artifact type of the allowlist.""" - - created_at: Optional[int] = None - """Time at which this artifact allowlist was set, in epoch milliseconds.""" - - created_by: Optional[str] = None - """Username of the user who set the artifact allowlist.""" - - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" - - def as_dict(self) -> dict: - """Serializes the 
SetArtifactAllowlist into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.artifact_matchers: - body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers] - if self.artifact_type is not None: - body["artifact_type"] = self.artifact_type.value - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes.""" - body = {} - if self.artifact_matchers: - body["artifact_matchers"] = self.artifact_matchers - if self.artifact_type is not None: - body["artifact_type"] = self.artifact_type - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetArtifactAllowlist: - """Deserializes the SetArtifactAllowlist from a dictionary.""" - return cls( - artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher), - artifact_type=_enum(d, "artifact_type", ArtifactType), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - metastore_id=d.get("metastore_id", None), - ) - - -@dataclass -class SetRegisteredModelAliasRequest: - full_name: str - """Full name of the registered model""" - - alias: str - """The name of the alias""" - - version_num: int - """The version number of the model version to which the alias points""" - - def as_dict(self) -> dict: - """Serializes the SetRegisteredModelAliasRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.full_name is not 
None: - body["full_name"] = self.full_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetRegisteredModelAliasRequest: - """Deserializes the SetRegisteredModelAliasRequest from a dictionary.""" - return cls( - alias=d.get("alias", None), full_name=d.get("full_name", None), version_num=d.get("version_num", None) - ) - - -@dataclass -class SseEncryptionDetails: - """Server-Side Encryption properties for clients communicating with AWS s3.""" - - algorithm: Optional[SseEncryptionDetailsAlgorithm] = None - """Sets the value of the 'x-amz-server-side-encryption' header in S3 request.""" - - aws_kms_key_arn: Optional[str] = None - """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". 
Sets - the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header.""" - - def as_dict(self) -> dict: - """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.algorithm is not None: - body["algorithm"] = self.algorithm.value - if self.aws_kms_key_arn is not None: - body["aws_kms_key_arn"] = self.aws_kms_key_arn - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes.""" - body = {} - if self.algorithm is not None: - body["algorithm"] = self.algorithm - if self.aws_kms_key_arn is not None: - body["aws_kms_key_arn"] = self.aws_kms_key_arn - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails: - """Deserializes the SseEncryptionDetails from a dictionary.""" - return cls( - algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm), - aws_kms_key_arn=d.get("aws_kms_key_arn", None), - ) - - -class SseEncryptionDetailsAlgorithm(Enum): - - AWS_SSE_KMS = "AWS_SSE_KMS" - AWS_SSE_S3 = "AWS_SSE_S3" - - -@dataclass -class StorageCredentialInfo: - aws_iam_role: Optional[AwsIamRoleResponse] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - created_at: Optional[int] = None - """Time at which this credential was created, in epoch milliseconds.""" - - created_by: Optional[str] = None - """Username of credential creator.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None - """The Databricks managed GCP 
service account configuration.""" - - full_name: Optional[str] = None - """The full name of the credential.""" - - id: Optional[str] = None - """The unique identifier of the credential.""" - - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - metastore_id: Optional[str] = None - """Unique identifier of the parent metastore.""" - - name: Optional[str] = None - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - - owner: Optional[str] = None - """Username of current owner of credential.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. Only applicable when purpose is - **STORAGE**.""" - - updated_at: Optional[int] = None - """Time at which this credential was last modified, in epoch milliseconds.""" - - updated_by: Optional[str] = None - """Username of user who last modified the credential.""" - - used_for_managed_storage: Optional[bool] = None - """Whether this credential is the current metastore's root storage credential. 
Only applicable when - purpose is **STORAGE**.""" - - def as_dict(self) -> dict: - """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage - return body - - def as_shallow_dict(self) -> dict: - """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if 
self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo: - """Deserializes the StorageCredentialInfo from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse - ), - 
full_name=d.get("full_name", None), - id=d.get("id", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - used_for_managed_storage=d.get("used_for_managed_storage", None), - ) - - -@dataclass -class SystemSchemaInfo: - schema: str - """Name of the system schema.""" - - state: str - """The current state of enablement for the system schema. An empty string means the system schema - is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | - ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" - - def as_dict(self) -> dict: - """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.schema is not None: - body["schema"] = self.schema - if self.state is not None: - body["state"] = self.state - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.schema is not None: - body["schema"] = self.schema - if self.state is not None: - body["state"] = self.state - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo: - """Deserializes the SystemSchemaInfo from a dictionary.""" - return cls(schema=d.get("schema", None), state=d.get("state", None)) - - -@dataclass -class TableConstraint: - """A table constraint, as defined by *one* of the following fields being set: - __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" - - foreign_key_constraint: Optional[ForeignKeyConstraint] = None - - named_table_constraint: Optional[NamedTableConstraint] = None - - primary_key_constraint: Optional[PrimaryKeyConstraint] = None - - def as_dict(self) -> dict: - """Serializes the TableConstraint 
into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.foreign_key_constraint: - body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict() - if self.named_table_constraint: - body["named_table_constraint"] = self.named_table_constraint.as_dict() - if self.primary_key_constraint: - body["primary_key_constraint"] = self.primary_key_constraint.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.foreign_key_constraint: - body["foreign_key_constraint"] = self.foreign_key_constraint - if self.named_table_constraint: - body["named_table_constraint"] = self.named_table_constraint - if self.primary_key_constraint: - body["primary_key_constraint"] = self.primary_key_constraint - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableConstraint: - """Deserializes the TableConstraint from a dictionary.""" - return cls( - foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint), - named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint), - primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint), - ) - - -@dataclass -class TableDependency: - """A table that is dependent on a SQL object.""" - - table_full_name: str - """Full name of the dependent table, in the form of - __catalog_name__.__schema_name__.__table_name__.""" - - def as_dict(self) -> dict: - """Serializes the TableDependency into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.table_full_name is not None: - body["table_full_name"] = self.table_full_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableDependency into a shallow dictionary of its immediate attributes.""" - body = {} - if self.table_full_name is not None: - body["table_full_name"] = self.table_full_name - return body - - 
@classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableDependency: - """Deserializes the TableDependency from a dictionary.""" - return cls(table_full_name=d.get("table_full_name", None)) - - -@dataclass -class TableExistsResponse: - table_exists: Optional[bool] = None - """Whether the table exists or not.""" - - def as_dict(self) -> dict: - """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.table_exists is not None: - body["table_exists"] = self.table_exists - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.table_exists is not None: - body["table_exists"] = self.table_exists - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse: - """Deserializes the TableExistsResponse from a dictionary.""" - return cls(table_exists=d.get("table_exists", None)) - - -@dataclass -class TableInfo: - access_point: Optional[str] = None - """The AWS access point to use when accesing s3 for this external location.""" - - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """Name of parent catalog.""" - - columns: Optional[List[ColumnInfo]] = None - """The array of __ColumnInfo__ definitions of the table's columns.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - created_at: Optional[int] = None - """Time at which this table was created, in epoch milliseconds.""" - - created_by: Optional[str] = None - """Username of table creator.""" - - data_access_configuration_id: Optional[str] = None - """Unique ID of the Data Access Configuration to use with the table data.""" - - data_source_format: 
Optional[DataSourceFormat] = None - """Data source format""" - - deleted_at: Optional[int] = None - """Time at which this table was deleted, in epoch milliseconds. Field is omitted if table is not - deleted.""" - - delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None - """Information pertaining to current state of the delta table.""" - - effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - - encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" - - full_name: Optional[str] = None - """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" - - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" - - name: Optional[str] = None - """Name of table, relative to parent schema.""" - - owner: Optional[str] = None - """Username of current owner of table.""" - - pipeline_id: Optional[str] = None - """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, - Streaming Table, etc.).""" - - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - - row_filter: Optional[TableRowFilter] = None - - schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" - - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" - - storage_credential_name: Optional[str] = None - """Name of the storage credential, when a storage credential is configured for use with this table.""" - - storage_location: Optional[str] = None - """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables)""" - - table_constraints: Optional[List[TableConstraint]] = None - """List of table constraints. 
Note: this field is not set in the output of the __listTables__ API.""" - - table_id: Optional[str] = None - """The unique identifier of the table.""" - - table_type: Optional[TableType] = None - - updated_at: Optional[int] = None - """Time at which this table was last modified, in epoch milliseconds.""" - - updated_by: Optional[str] = None - """Username of user who last modified the table.""" - - view_definition: Optional[str] = None - """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or - **STREAMING_TABLE**)""" - - view_dependencies: Optional[DependencyList] = None - """View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - - when DependencyList is None, the dependency is not provided; - when DependencyList is an empty - list, the dependency is provided but is empty; - when DependencyList is not an empty list, - dependencies are provided and recorded.""" - - def as_dict(self) -> dict: - """Serializes the TableInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_access_configuration_id is not None: - body["data_access_configuration_id"] = self.data_access_configuration_id - if self.data_source_format is not None: - body["data_source_format"] = self.data_source_format.value - if self.deleted_at is not None: - body["deleted_at"] = self.deleted_at - if self.delta_runtime_properties_kvpairs: - body["delta_runtime_properties_kvpairs"] = 
self.delta_runtime_properties_kvpairs.as_dict() - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.properties: - body["properties"] = self.properties - if self.row_filter: - body["row_filter"] = self.row_filter.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.table_constraints: - body["table_constraints"] = [v.as_dict() for v in self.table_constraints] - if self.table_id is not None: - body["table_id"] = self.table_id - if self.table_type is not None: - body["table_type"] = self.table_type.value - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.view_definition is not None: - body["view_definition"] = self.view_definition - if self.view_dependencies: - body["view_dependencies"] = self.view_dependencies.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.columns: - body["columns"] = self.columns - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_access_configuration_id is not None: - body["data_access_configuration_id"] = self.data_access_configuration_id - if self.data_source_format is not None: - body["data_source_format"] = self.data_source_format - if self.deleted_at is not None: - body["deleted_at"] = self.deleted_at - if self.delta_runtime_properties_kvpairs: - body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.properties: - body["properties"] = self.properties - if self.row_filter: - body["row_filter"] = self.row_filter - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if 
self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.table_constraints: - body["table_constraints"] = self.table_constraints - if self.table_id is not None: - body["table_id"] = self.table_id - if self.table_type is not None: - body["table_type"] = self.table_type - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.view_definition is not None: - body["view_definition"] = self.view_definition - if self.view_dependencies: - body["view_dependencies"] = self.view_dependencies - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableInfo: - """Deserializes the TableInfo from a dictionary.""" - return cls( - access_point=d.get("access_point", None), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - columns=_repeated_dict(d, "columns", ColumnInfo), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - data_access_configuration_id=d.get("data_access_configuration_id", None), - data_source_format=_enum(d, "data_source_format", DataSourceFormat), - deleted_at=d.get("deleted_at", None), - delta_runtime_properties_kvpairs=_from_dict( - d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs - ), - effective_predictive_optimization_flag=_from_dict( - d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag - ), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - pipeline_id=d.get("pipeline_id", None), - properties=d.get("properties", None), - row_filter=_from_dict(d, "row_filter", TableRowFilter), - 
schema_name=d.get("schema_name", None), - sql_path=d.get("sql_path", None), - storage_credential_name=d.get("storage_credential_name", None), - storage_location=d.get("storage_location", None), - table_constraints=_repeated_dict(d, "table_constraints", TableConstraint), - table_id=d.get("table_id", None), - table_type=_enum(d, "table_type", TableType), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - view_definition=d.get("view_definition", None), - view_dependencies=_from_dict(d, "view_dependencies", DependencyList), - ) - - -class TableOperation(Enum): - - READ = "READ" - READ_WRITE = "READ_WRITE" - - -@dataclass -class TableRowFilter: - function_name: str - """The full name of the row filter SQL UDF.""" - - input_column_names: List[str] - """The list of table columns to be passed as input to the row filter function. The column types - should match the types of the filter function arguments.""" - - def as_dict(self) -> dict: - """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.input_column_names: - body["input_column_names"] = [v for v in self.input_column_names] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" - body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.input_column_names: - body["input_column_names"] = self.input_column_names - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter: - """Deserializes the TableRowFilter from a dictionary.""" - return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None)) - - -@dataclass -class TableSummary: - full_name: Optional[str] = None - """The full name of the table.""" - - table_type: Optional[TableType] = None - 
- def as_dict(self) -> dict: - """Serializes the TableSummary into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.table_type is not None: - body["table_type"] = self.table_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" - body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.table_type is not None: - body["table_type"] = self.table_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableSummary: - """Deserializes the TableSummary from a dictionary.""" - return cls(full_name=d.get("full_name", None), table_type=_enum(d, "table_type", TableType)) - - -class TableType(Enum): - - EXTERNAL = "EXTERNAL" - EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE" - FOREIGN = "FOREIGN" - MANAGED = "MANAGED" - MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" - MATERIALIZED_VIEW = "MATERIALIZED_VIEW" - STREAMING_TABLE = "STREAMING_TABLE" - VIEW = "VIEW" - - -@dataclass -class TagKeyValue: - key: Optional[str] = None - """name of the tag""" - - value: Optional[str] = None - """value of the tag associated with the key, could be optional""" - - def as_dict(self) -> dict: - """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: - """Deserializes the TagKeyValue from a dictionary.""" - return cls(key=d.get("key", None), 
value=d.get("value", None)) - - -@dataclass -class TemporaryCredentials: - aws_temp_credentials: Optional[AwsCredentials] = None - """AWS temporary credentials for API authentication. Read more at - https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - - azure_aad: Optional[AzureActiveDirectoryToken] = None - """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed - Identity. Read more at - https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - - expiration_time: Optional[int] = None - """Server time when the credential will expire, in epoch milliseconds. The API client is advised to - cache the credential given this expiration time.""" - - gcp_oauth_token: Optional[GcpOauthToken] = None - """GCP temporary credentials for API authentication. Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" - - def as_dict(self) -> dict: - """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() - if self.azure_aad: - body["azure_aad"] = self.azure_aad.as_dict() - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials - if self.azure_aad: - body["azure_aad"] = self.azure_aad - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
TemporaryCredentials: - """Deserializes the TemporaryCredentials from a dictionary.""" - return cls( - aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), - azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), - expiration_time=d.get("expiration_time", None), - gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), - ) - - -@dataclass -class TriggeredUpdateStatus: - """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE - or the ONLINE_NO_PENDING_UPDATE state.""" - - last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the online table. Note that this Delta - version may not be completely synced to the online table yet.""" - - timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the online - table.""" - - triggered_update_progress: Optional[PipelineProgress] = None - """Progress of the active data synchronization pipeline.""" - - def as_dict(self) -> dict: - """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.triggered_update_progress: - body["triggered_update_progress"] = self.triggered_update_progress.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.triggered_update_progress: - body["triggered_update_progress"] = self.triggered_update_progress - return 
body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus: - """Deserializes the TriggeredUpdateStatus from a dictionary.""" - return cls( - last_processed_commit_version=d.get("last_processed_commit_version", None), - timestamp=d.get("timestamp", None), - triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress), - ) - - -@dataclass -class UnassignResponse: - def as_dict(self) -> dict: - """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: - """Deserializes the UnassignResponse from a dictionary.""" - return cls() - - -@dataclass -class UpdateAssignmentResponse: - def as_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: - """Deserializes the UpdateAssignmentResponse from a dictionary.""" - return cls() - - -@dataclass -class UpdateCatalog: - comment: Optional[str] = None - """User-provided free-form text description.""" - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - """Whether predictive optimization should be enabled for this object and objects under it.""" - - isolation_mode: Optional[CatalogIsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - name: Optional[str] = None - """The name of the catalog.""" - - new_name: Optional[str] = None - 
"""New name for the catalog.""" - - options: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - - owner: Optional[str] = None - """Username of current owner of catalog.""" - - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCatalog into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalog: - """Deserializes the UpdateCatalog from a dictionary.""" - return cls( - comment=d.get("comment", None), - 
enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - options=d.get("options", None), - owner=d.get("owner", None), - properties=d.get("properties", None), - ) - - -@dataclass -class UpdateCatalogWorkspaceBindingsResponse: - workspaces: Optional[List[int]] = None - """A list of workspace IDs""" - - def as_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.workspaces: - body["workspaces"] = self.workspaces - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: - """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) - - -@dataclass -class UpdateConnection: - options: Dict[str, str] - """A map of key-value properties attached to the securable.""" - - name: Optional[str] = None - """Name of the connection.""" - - new_name: Optional[str] = None - """New name for the connection.""" - - owner: Optional[str] = None - """Username of current owner of the connection.""" - - def as_dict(self) -> dict: - """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner - return body - - def as_shallow_dict(self) -> dict: - 
"""Serializes the UpdateConnection into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateConnection: - """Deserializes the UpdateConnection from a dictionary.""" - return cls( - name=d.get("name", None), - new_name=d.get("new_name", None), - options=d.get("options", None), - owner=d.get("owner", None), - ) - - -@dataclass -class UpdateCredentialRequest: - aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - comment: Optional[str] = None - """Comment associated with the credential.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """The Databricks managed GCP service account configuration.""" - - force: Optional[bool] = None - """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent - external locations and external tables (when purpose is **STORAGE**).""" - - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - name_arg: Optional[str] = None - """Name of the credential.""" - - new_name: Optional[str] = None - """New name of credential.""" - - owner: Optional[str] = None - """Username of current owner of credential.""" - - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" - - skip_validation: Optional[bool] = None - """Supply true to this argument to skip validation of the updated credential.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name_arg is not None: - body["name_arg"] = self.name_arg - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if 
self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name_arg is not None: - body["name_arg"] = self.name_arg - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialRequest: - """Deserializes the UpdateCredentialRequest from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name_arg=d.get("name_arg", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) - - -@dataclass -class UpdateExternalLocation: - comment: Optional[str] = None - """User-provided free-form text description.""" - - credential_name: Optional[str] = None - """Name of the storage credential used with this location.""" - - enable_file_events: Optional[bool] = None - """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" - - encryption_details: Optional[EncryptionDetails] = None - """Encryption options that apply to clients connecting to cloud storage.""" - - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. 
When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" - - file_event_queue: Optional[FileEventQueue] = None - """[Create:OPT Update:OPT] File event queue settings.""" - - force: Optional[bool] = None - """Force update even if changing url invalidates dependent external tables or mounts.""" - - isolation_mode: Optional[IsolationMode] = None - - name: Optional[str] = None - """Name of the external location.""" - - new_name: Optional[str] = None - """New name for the external location.""" - - owner: Optional[str] = None - """The owner of the external location.""" - - read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" - - skip_validation: Optional[bool] = None - """Skips validation of the storage credential associated with the external location.""" - - url: Optional[str] = None - """Path URL of the external location.""" - - def as_dict(self) -> dict: - """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = 
self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExternalLocation: - """Deserializes the UpdateExternalLocation from a dictionary.""" - return cls( - comment=d.get("comment", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - 
owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - url=d.get("url", None), - ) - - -@dataclass -class UpdateFunction: - name: Optional[str] = None - """The fully-qualified name of the function (of the form - __catalog_name__.__schema_name__.__function__name__).""" - - owner: Optional[str] = None - """Username of current owner of function.""" - - def as_dict(self) -> dict: - """Serializes the UpdateFunction into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateFunction: - """Deserializes the UpdateFunction from a dictionary.""" - return cls(name=d.get("name", None), owner=d.get("owner", None)) - - -@dataclass -class UpdateMetastore: - delta_sharing_organization_name: Optional[str] = None - """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta - Sharing as the official name.""" - - delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None - """The lifetime of delta sharing recipient token in seconds.""" - - delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None - """The scope of Delta Sharing enabled for the metastore.""" - - id: Optional[str] = None - """Unique ID of the metastore.""" - - new_name: Optional[str] = None - """New name for the metastore.""" - - owner: Optional[str] = None - """The owner of the metastore.""" - - privilege_model_version: Optional[str] = None - """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - - 
storage_root_credential_id: Optional[str] = None - """UUID of storage credential to access the metastore storage_root.""" - - def as_dict(self) -> dict: - """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if 
self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: - """Deserializes the UpdateMetastore from a dictionary.""" - return cls( - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), - id=d.get("id", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), - ) - - -@dataclass -class UpdateMetastoreAssignment: - default_catalog_name: Optional[str] = None - """The name of the default catalog in the metastore. This field is deprecated. Please use "Default - Namespace API" to configure the default catalog for a Databricks workspace.""" - - metastore_id: Optional[str] = None - """The unique ID of the metastore.""" - - workspace_id: Optional[int] = None - """A workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if 
self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment: - """Deserializes the UpdateMetastoreAssignment from a dictionary.""" - return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class UpdateModelVersionRequest: - comment: Optional[str] = None - """The comment attached to the model version""" - - full_name: Optional[str] = None - """The three-level (fully qualified) name of the model version""" - - version: Optional[int] = None - """The integer version number of the model version""" - - def as_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: - """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(comment=d.get("comment", None), full_name=d.get("full_name", None), version=d.get("version", None)) - - -@dataclass -class UpdateMonitor: - output_schema_name: str - """Schema where output metric tables are created.""" - - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. 
Columns in the monitored - table should also be present in the baseline table.""" - - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" - - dashboard_id: Optional[str] = None - """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in - PENDING state.""" - - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" - - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" - - notifications: Optional[MonitorNotifications] = None - """The notification settings for the monitor.""" - - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" - - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. 
- For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" - - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" - - table_name: Optional[str] = None - """Full name of the table.""" - - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" - - def as_dict(self) -> dict: - """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes.""" - body = {} - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = self.custom_metrics - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - 
body["data_classification_config"] = self.data_classification_config - if self.inference_log: - body["inference_log"] = self.inference_log - if self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMonitor: - """Deserializes the UpdateMonitor from a dictionary.""" - return cls( - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - dashboard_id=d.get("dashboard_id", None), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - ) - - -@dataclass -class UpdatePermissions: - changes: Optional[List[PermissionsChange]] = None - """Array of permissions change objects.""" - - full_name: Optional[str] = None - """Full name of securable.""" - - securable_type: Optional[str] = None - """Type of securable.""" - - def as_dict(self) -> dict: - """Serializes the UpdatePermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.changes: - body["changes"] = [v.as_dict() for v 
in self.changes] - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - return body + view_definition=d.get("view_definition", None), + view_dependencies=_from_dict(d, "view_dependencies", DependencyList), + ) - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.changes: - body["changes"] = self.changes - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissions: - """Deserializes the UpdatePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - full_name=d.get("full_name", None), - securable_type=d.get("securable_type", None), - ) +class TableOperation(Enum): + + READ = "READ" + READ_WRITE = "READ_WRITE" @dataclass -class UpdatePermissionsResponse: - privilege_assignments: Optional[List[PrivilegeAssignment]] = None - """The privileges assigned to each principal""" +class TableRowFilter: + function_name: str + """The full name of the row filter SQL UDF.""" + + input_column_names: List[str] + """The list of table columns to be passed as input to the row filter function. 
The column types + should match the types of the filter function arguments.""" def as_dict(self) -> dict: - """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = [v for v in self.input_column_names] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = self.input_column_names return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: - """Deserializes the UpdatePermissionsResponse from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) + def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter: + """Deserializes the TableRowFilter from a dictionary.""" + return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None)) @dataclass -class UpdateRegisteredModelRequest: - comment: Optional[str] = None - """The comment attached to the registered model""" - +class TableSummary: full_name: Optional[str] = None - """The three-level (fully qualified) name of the registered model""" - - new_name: Optional[str] = None - """New name for the registered model.""" + """The full name of the table.""" 
- owner: Optional[str] = None - """The identifier of the user who owns the registered model""" + table_type: Optional[TableType] = None def as_dict(self) -> dict: - """Serializes the UpdateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment if self.full_name is not None: body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.table_type is not None: + body["table_type"] = self.table_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment if self.full_name is not None: body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.table_type is not None: + body["table_type"] = self.table_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRegisteredModelRequest: - """Deserializes the UpdateRegisteredModelRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - full_name=d.get("full_name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> TableSummary: + """Deserializes the TableSummary from a dictionary.""" + return cls(full_name=d.get("full_name", None), table_type=_enum(d, "table_type", TableType)) + + +class TableType(Enum): + + EXTERNAL = "EXTERNAL" + EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE" + FOREIGN = "FOREIGN" + MANAGED = "MANAGED" + 
MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" + MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + STREAMING_TABLE = "STREAMING_TABLE" + VIEW = "VIEW" @dataclass -class UpdateResponse: +class TagKeyValue: + key: Optional[str] = None + """name of the tag""" + + value: Optional[str] = None + """value of the tag associated with the key, could be optional""" + def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body.""" body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes.""" body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: + """Deserializes the TagKeyValue from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass -class UpdateSchema: - comment: Optional[str] = None - """User-provided free-form text description.""" - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - """Whether predictive optimization should be enabled for this object and objects under it.""" - - full_name: Optional[str] = None - """Full name of the schema.""" +class TemporaryCredentials: + aws_temp_credentials: Optional[AwsCredentials] = None + """AWS temporary credentials for API authentication. 
Read more at + https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - new_name: Optional[str] = None - """New name for the schema.""" + azure_aad: Optional[AzureActiveDirectoryToken] = None + """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + Identity. Read more at + https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - owner: Optional[str] = None - """Username of current owner of schema.""" + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. The API client is advised to + cache the credential given this expiration time.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + gcp_oauth_token: Optional[GcpOauthToken] = None + """GCP temporary credentials for API authentication. Read more at + https://developers.google.com/identity/protocols/oauth2/service-account""" def as_dict(self) -> dict: - """Serializes the UpdateSchema into a dictionary suitable for use as a JSON request body.""" + """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: 
+ body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes.""" + """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSchema: - """Deserializes the UpdateSchema from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TemporaryCredentials: + """Deserializes the TemporaryCredentials from a dictionary.""" return cls( - comment=d.get("comment", None), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - full_name=d.get("full_name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties=d.get("properties", None), + aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), + azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), ) @dataclass -class UpdateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - 
"""The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None - """The Azure managed identity configuration.""" +class TriggeredUpdateStatus: + """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE + or the ONLINE_NO_PENDING_UPDATE state.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the online table. Note that this Delta + version may not be completely synced to the online table yet.""" - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the online + table.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + triggered_update_progress: Optional[PipelineProgress] = None + """Progress of the active data synchronization pipeline.""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" + def as_dict(self) -> dict: + """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress.as_dict() + return body - force: Optional[bool] = None - """Force update even if there are dependent external locations or external tables.""" + def as_shallow_dict(self) -> dict: + """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate 
attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress + return body - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus: + """Deserializes the TriggeredUpdateStatus from a dictionary.""" + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress), + ) - name: Optional[str] = None - """Name of the storage credential.""" - new_name: Optional[str] = None - """New name for the storage credential.""" +@dataclass +class UnassignResponse: + def as_dict(self) -> dict: + """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - owner: Optional[str] = None - """Username of current owner of credential.""" + def as_shallow_dict(self) -> dict: + """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: + """Deserializes the UnassignResponse from a dictionary.""" + return cls() - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the updated credential.""" +@dataclass +class UpdateAssignmentResponse: def as_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - 
body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: - """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> 
UpdateAssignmentResponse: + """Deserializes the UpdateAssignmentResponse from a dictionary.""" + return cls() @dataclass -class UpdateTableRequest: - """Update a table owner.""" - - full_name: Optional[str] = None - """Full name of the table.""" - - owner: Optional[str] = None +class UpdateCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" def as_dict(self) -> dict: - """Serializes the UpdateTableRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.owner is not None: - body["owner"] = self.owner + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateTableRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.owner is not None: - body["owner"] = self.owner + if self.workspaces: + body["workspaces"] = self.workspaces return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateTableRequest: - """Deserializes the UpdateTableRequest from a dictionary.""" - return cls(full_name=d.get("full_name", None), owner=d.get("owner", None)) + def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: + """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) @dataclass -class UpdateVolumeRequestContent: - comment: Optional[str] = None - """The comment attached to the volume""" +class UpdateMetastore: + delta_sharing_organization_name: Optional[str] = None + """The organization name of a 
Delta Sharing entity, to be used in Databricks-to-Databricks Delta + Sharing as the official name.""" - name: Optional[str] = None - """The three-level (fully qualified) name of the volume""" + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None + """The lifetime of delta sharing recipient token in seconds.""" + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None + """The scope of Delta Sharing enabled for the metastore.""" + + id: Optional[str] = None + """Unique ID of the metastore.""" new_name: Optional[str] = None - """New name for the volume.""" + """New name for the metastore.""" owner: Optional[str] = None - """The identifier of the user who owns the volume""" + """The owner of the metastore.""" + + privilege_model_version: Optional[str] = None + """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" + + storage_root_credential_id: Optional[str] = None + """UUID of storage credential to access the metastore storage_root.""" def as_dict(self) -> dict: - """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope.value + if self.id is not None: + body["id"] = self.id if self.new_name is not None: body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner + if 
self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.id is not None: + body["id"] = self.id if self.new_name is not None: body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateVolumeRequestContent: - """Deserializes the UpdateVolumeRequestContent from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: + """Deserializes the UpdateMetastore from a dictionary.""" return cls( - comment=d.get("comment", None), - name=d.get("name", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + 
), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + id=d.get("id", None), new_name=d.get("new_name", None), owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), ) @dataclass -class UpdateWorkspaceBindings: - assign_workspaces: Optional[List[int]] = None - """A list of workspace IDs.""" +class UpdateMetastoreAssignment: + default_catalog_name: Optional[str] = None + """The name of the default catalog in the metastore. This field is deprecated. Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" - name: Optional[str] = None - """The name of the catalog.""" + metastore_id: Optional[str] = None + """The unique ID of the metastore.""" - unassign_workspaces: Optional[List[int]] = None - """A list of workspace IDs.""" + workspace_id: Optional[int] = None + """A workspace ID.""" def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assign_workspaces: - body["assign_workspaces"] = [v for v in self.assign_workspaces] - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = [v for v in self.unassign_workspaces] + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = 
{} - if self.assign_workspaces: - body["assign_workspaces"] = self.assign_workspaces - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = self.unassign_workspaces + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindings: - """Deserializes the UpdateWorkspaceBindings from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment: + """Deserializes the UpdateMetastoreAssignment from a dictionary.""" return cls( - assign_workspaces=d.get("assign_workspaces", None), - name=d.get("name", None), - unassign_workspaces=d.get("unassign_workspaces", None), + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), ) @dataclass -class UpdateWorkspaceBindingsParameters: - add: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" - - remove: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" - - securable_name: Optional[str] = None - """The name of the securable.""" - - securable_type: Optional[str] = None - """The type of the securable to bind to a workspace (catalog, storage_credential, credential, or - external_location).""" +class UpdatePermissionsResponse: + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + """The privileges assigned to each principal""" - def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.add: - body["add"] = [v.as_dict() for v in self.add] - if self.remove: - body["remove"] = [v.as_dict() for v in 
self.remove] - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + def as_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: - body["add"] = self.add - if self.remove: - body["remove"] = self.remove - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsParameters: - """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" - return cls( - add=_repeated_dict(d, "add", WorkspaceBinding), - remove=_repeated_dict(d, "remove", WorkspaceBinding), - securable_name=d.get("securable_name", None), - securable_type=d.get("securable_type", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: + """Deserializes the UpdatePermissionsResponse from a dictionary.""" + return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) @dataclass -class UpdateWorkspaceBindingsResponse: - """A list of workspace IDs that are bound to the securable""" - - bindings: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" - +class UpdateResponse: def as_dict(self) -> dict: - """Serializes the 
UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bindings: - body["bindings"] = [v.as_dict() for v in self.bindings] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: - body["bindings"] = self.bindings return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: - """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) + def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: + """Deserializes the UpdateResponse from a dictionary.""" + return cls() @dataclass -class ValidateCredentialRequest: - """Next ID: 17""" - - aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration""" +class UpdateStorageCredential: + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None """The Azure managed identity configuration.""" - credential_name: Optional[str] = None - """Required. The name of an existing credential or long-lived cloud credential to validate.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" - external_location_name: Optional[str] = None - """The name of an existing external location to validate. Only applicable for storage credentials - (purpose is **STORAGE**.)""" + force: Optional[bool] = None + """Force update even if there are dependent external locations or external tables.""" - purpose: Optional[CredentialPurpose] = None - """The purpose of the credential. This should only be used when the credential is specified.""" + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + name: Optional[str] = None + """Name of the storage credential.""" + + new_name: Optional[str] = None + """New name for the storage credential.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" read_only: Optional[bool] = None - """Whether the credential is only usable for read operations. Only applicable for storage - credentials (purpose is **STORAGE**.)""" + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" - url: Optional[str] = None - """The external location url to validate. 
Only applicable when purpose is **STORAGE**.""" + skip_validation: Optional[bool] = None + """Supplying true to this argument skips validation of the updated credential.""" def as_dict(self) -> dict: - """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose.value + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: - """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateStorageCredential 
into a shallow dictionary of its immediate attributes.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role if self.azure_managed_identity: body["azure_managed_identity"] = self.azure_managed_identity - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialRequest: - """Deserializes the ValidateCredentialRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: + """Deserializes the UpdateStorageCredential from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - credential_name=d.get("credential_name", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", 
DatabricksGcpServiceAccount), - external_location_name=d.get("external_location_name", None), - purpose=_enum(d, "purpose", CredentialPurpose), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + force=d.get("force", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), read_only=d.get("read_only", None), - url=d.get("url", None), + skip_validation=d.get("skip_validation", None), ) +@dataclass +class UpdateWorkspaceBindingsResponse: + """A list of workspace IDs that are bound to the securable""" + + bindings: Optional[List[WorkspaceBinding]] = None + """List of workspace bindings.""" + + def as_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bindings: + body["bindings"] = [v.as_dict() for v in self.bindings] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bindings: + body["bindings"] = self.bindings + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: + """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" + return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) + + @dataclass class ValidateCredentialResponse: is_dir: Optional[bool] = None @@ -10062,99 +7859,6 @@ class ValidateCredentialResult(Enum): 
SKIP = "SKIP" -@dataclass -class ValidateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks created GCP service account configuration.""" - - external_location_name: Optional[str] = None - """The name of an existing external location to validate.""" - - read_only: Optional[bool] = None - """Whether the storage credential is only usable for read operations.""" - - storage_credential_name: Optional[str] = None - """Required. The name of an existing credential or long-lived cloud credential to validate.""" - - url: Optional[str] = None - """The external location url to validate.""" - - def as_dict(self) -> dict: - """Serializes the ValidateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is 
not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredential: - """Deserializes the ValidateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - external_location_name=d.get("external_location_name", None), - read_only=d.get("read_only", None), - storage_credential_name=d.get("storage_credential_name", None), - url=d.get("url", None), - ) - - @dataclass class 
ValidateStorageCredentialResponse: is_dir: Optional[bool] = None diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 8dd2c135..0ae28a8c 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -1183,37 +1183,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: ) -@dataclass -class UpdateCleanRoomRequest: - clean_room: Optional[CleanRoom] = None - - name: Optional[str] = None - """Name of the clean room.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.clean_room: - body["clean_room"] = self.clean_room.as_dict() - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.clean_room: - body["clean_room"] = self.clean_room - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCleanRoomRequest: - """Deserializes the UpdateCleanRoomRequest from a dictionary.""" - return cls(clean_room=_from_dict(d, "clean_room", CleanRoom), name=d.get("name", None)) - - class CleanRoomAssetsAPI: """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.""" diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index e04a09fc..89f87eb0 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -19,70 +19,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddInstanceProfile: - instance_profile_arn: str - """The AWS ARN of the instance profile to register with Databricks. 
This field is required.""" - - iam_role_arn: Optional[str] = None - """The AWS IAM role ARN of the role associated with the instance profile. This field is required if - your role name and instance profile name do not match and you want to use the instance profile - with [Databricks SQL Serverless]. - - Otherwise, this field is optional. - - [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - - is_meta_instance_profile: Optional[bool] = None - """Boolean flag indicating whether the instance profile should only be used in credential - passthrough scenarios. If true, it means the instance profile contains an meta IAM role which - could assume a wide range of roles. Therefore it should always be used with authorization. This - field is optional, the default value is `false`.""" - - skip_validation: Optional[bool] = None - """By default, Databricks validates that it has sufficient permissions to launch instances with the - instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation - fails with an error message that does not indicate an IAM related permission issue, (e.g. 
- “Your requested instance type is not supported in your requested availability zone”), you - can pass this flag to skip the validation and forcibly add the instance profile.""" - - def as_dict(self) -> dict: - """Serializes the AddInstanceProfile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes.""" - body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddInstanceProfile: - """Deserializes the AddInstanceProfile from a dictionary.""" - return cls( - iam_role_arn=d.get("iam_role_arn", None), - instance_profile_arn=d.get("instance_profile_arn", None), - is_meta_instance_profile=d.get("is_meta_instance_profile", None), - skip_validation=d.get("skip_validation", None), - ) - - @dataclass class AddResponse: def as_dict(self) -> dict: @@ -392,46 +328,6 @@ class AzureAvailability(Enum): SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" -@dataclass -class CancelCommand: - cluster_id: Optional[str] = None - - command_id: Optional[str] = None - - context_id: Optional[str] = None - - def as_dict(self) -> dict: - 
"""Serializes the CancelCommand into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelCommand into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelCommand: - """Deserializes the CancelCommand from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command_id=d.get("commandId", None), - context_id=d.get("contextId", None), - ) - - @dataclass class CancelResponse: def as_dict(self) -> dict: @@ -450,37 +346,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: return cls() -@dataclass -class ChangeClusterOwner: - cluster_id: str - - owner_username: str - """New owner of the cluster_id after this RPC.""" - - def as_dict(self) -> dict: - """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwner: - """Deserializes 
the ChangeClusterOwner from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), owner_username=d.get("owner_username", None)) - - @dataclass class ChangeClusterOwnerResponse: def as_dict(self) -> dict: @@ -1866,40 +1731,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsDescription: ) -@dataclass -class ClusterPermissionsRequest: - access_control_list: Optional[List[ClusterAccessControlRequest]] = None - - cluster_id: Optional[str] = None - """The cluster for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ClusterPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsRequest: - """Deserializes the ClusterPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlRequest), - cluster_id=d.get("cluster_id", None), - ) - - @dataclass class ClusterPolicyAccessControlRequest: group_name: Optional[str] = None @@ -2131,40 +1962,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsDescription: ) -@dataclass -class ClusterPolicyPermissionsRequest: - access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None - - cluster_policy_id: Optional[str] = None - """The cluster policy for which to get or manage permissions.""" - - def as_dict(self) -> dict: - 
"""Serializes the ClusterPolicyPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.cluster_policy_id is not None: - body["cluster_policy_id"] = self.cluster_policy_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.cluster_policy_id is not None: - body["cluster_policy_id"] = self.cluster_policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsRequest: - """Deserializes the ClusterPolicyPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlRequest), - cluster_policy_id=d.get("cluster_policy_id", None), - ) - - @dataclass class ClusterSettingsChange: """Represents a change to the cluster settings required for the cluster to become compliant with @@ -2630,56 +2427,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec: ) -@dataclass -class Command: - cluster_id: Optional[str] = None - """Running cluster id""" - - command: Optional[str] = None - """Executable code""" - - context_id: Optional[str] = None - """Running context id""" - - language: Optional[Language] = None - - def as_dict(self) -> dict: - """Serializes the Command into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the 
Command into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Command: - """Deserializes the Command from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command=d.get("command", None), - context_id=d.get("contextId", None), - language=_enum(d, "language", Language), - ) - - class CommandStatus(Enum): CANCELLED = "Cancelled" @@ -2766,1904 +2513,609 @@ def from_dict(cls, d: Dict[str, Any]) -> ContextStatusResponse: @dataclass -class CreateCluster: - spark_version: str - """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can - be retrieved by using the :method:clusters/sparkVersions API call.""" +class CreateClusterResponse: + cluster_id: Optional[str] = None - apply_policy_default_values: Optional[bool] = None - """When set to true, fixed and default values from the policy will be used for fields that are - omitted. When set to false, only fixed values from the policy will be applied.""" + def as_dict(self) -> dict: + """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + return body - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. 
Note: - autoscaling works best with DB runtime versions 3.0 or later.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + return body - autotermination_minutes: Optional[int] = None - """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, - this cluster will not be automatically terminated. If specified, the threshold must be between - 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic - termination.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: + """Deserializes the CreateClusterResponse from a dictionary.""" + return cls(cluster_id=d.get("cluster_id", None)) - aws_attributes: Optional[AwsAttributes] = None - """Attributes related to clusters running on Amazon Web Services. If not specified at cluster - creation, a set of default values will be used.""" - azure_attributes: Optional[AzureAttributes] = None - """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, - a set of default values will be used.""" +@dataclass +class CreateInstancePoolResponse: + instance_pool_id: Optional[str] = None + """The ID of the created instance pool.""" - clone_from: Optional[CloneCluster] = None - """When specified, this clones libraries from a source cluster during the creation of a new - cluster.""" + def as_dict(self) -> dict: + """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + return body - cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of - destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be - specified for one cluster. If the conf is given, the logs will be delivered to the destination - every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the - destination of executor logs is `$destination/$clusterId/executor`.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + return body - cluster_name: Optional[str] = None - """Cluster name requested by the user. This doesn't have to be unique. If not specified at - creation, the cluster name will be an empty string. For job clusters, the cluster name is - automatically set based on the job and job run IDs.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePoolResponse: + """Deserializes the CreateInstancePoolResponse from a dictionary.""" + return cls(instance_pool_id=d.get("instance_pool_id", None)) - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS - instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags - - - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster - tags""" - data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. 
* `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" - - docker_image: Optional[DockerImage] = None - """Custom docker image BYOC""" - - driver_instance_pool_id: Optional[str] = None - """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster - uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - - driver_node_type_id: Optional[str] = None - """The node type of the Spark driver. Note that this field is optional; if unset, the driver node - type will be set as the same value as `node_type_id` defined above. - - This field, along with node_type_id, should not be set if virtual_cluster_size is set. 
If both - driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id - and node_type_id take precedence.""" - - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk - space when its Spark workers are running low on disk space. This feature requires specific AWS - permissions to function correctly - refer to the User Guide for more details.""" +@dataclass +class CreatePolicyResponse: + policy_id: Optional[str] = None + """Canonical unique identifier for the cluster policy.""" - enable_local_disk_encryption: Optional[bool] = None - """Whether to enable LUKS on cluster VMs' local disks""" + def as_dict(self) -> dict: + """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.policy_id is not None: + body["policy_id"] = self.policy_id + return body - gcp_attributes: Optional[GcpAttributes] = None - """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster - creation, a set of default values will be used.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.policy_id is not None: + body["policy_id"] = self.policy_id + return body - init_scripts: Optional[List[InitScriptInfo]] = None - """The configuration for storing init scripts. Any number of destinations can be specified. The - scripts are executed sequentially in the order provided. 
If `cluster_log_conf` is specified, - init script logs are sent to `//init_scripts`.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse: + """Deserializes the CreatePolicyResponse from a dictionary.""" + return cls(policy_id=d.get("policy_id", None)) - instance_pool_id: Optional[str] = None - """The optional ID of the instance pool to which the cluster belongs.""" - is_single_node: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. - - When set to true, Databricks will automatically set single node related `custom_tags`, - `spark_conf`, and `num_workers`""" +@dataclass +class CreateResponse: + script_id: Optional[str] = None + """The global init script ID.""" - kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - - [simple form]: https://docs.databricks.com/compute/simple-form.html""" + def as_dict(self) -> dict: + """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.script_id is not None: + body["script_id"] = self.script_id + return body - node_type_id: Optional[str] = None - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.script_id is not None: + body["script_id"] = self.script_id + return body - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: + """Deserializes the CreateResponse from a dictionary.""" + return cls(script_id=d.get("script_id", None)) - policy_id: Optional[str] = None - """The ID of the cluster policy used to create the cluster if applicable.""" - runtime_engine: Optional[RuntimeEngine] = None - """Determines the cluster's runtime engine, either standard or Photon. - - This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove - `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. 
- - If left unspecified, the runtime engine defaults to standard unless the spark_version contains - -photon-, in which case Photon will be used.""" +@dataclass +class Created: + id: Optional[str] = None - single_user_name: Optional[str] = None - """Single user name if data_security_mode is `SINGLE_USER`""" + def as_dict(self) -> dict: + """Serializes the Created into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: + body["id"] = self.id + return body - spark_conf: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified Spark configuration key-value pairs. - Users can also pass in a string of extra JVM options to the driver and the executors via - `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" + def as_shallow_dict(self) -> dict: + """Serializes the Created into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + return body - spark_env_vars: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified environment variable key-value pairs. - Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) - while launching the driver and workers. - - In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them - to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default - databricks managed environmental variables are included as well. 
- - Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": - "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS - -Dspark.shuffle.service.enabled=true"}`""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Created: + """Deserializes the Created from a dictionary.""" + return cls(id=d.get("id", None)) - ssh_public_keys: Optional[List[str]] = None - """SSH public key contents that will be added to each Spark node in this cluster. The corresponding - private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can - be specified.""" - use_ml_runtime: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. - - `effective_spark_version` is determined by `spark_version` (DBR release), this field - `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" +@dataclass +class CustomPolicyTag: + key: str + """The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be + “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these + tags are preserved.""" - workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" + value: Optional[str] = None + """The value of the tag.""" def as_dict(self) -> dict: - """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body.""" + """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = 
self.azure_attributes.as_dict() - if self.clone_from: - body["clone_from"] = self.clone_from.as_dict() - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if 
self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCluster into a shallow dictionary of its immediate attributes.""" + """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.clone_from: - body["clone_from"] = self.clone_from - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = 
self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCluster: - """Deserializes the CreateCluster from a dictionary.""" - return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - clone_from=_from_dict(d, "clone_from", CloneCluster), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, 
"data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), - ) + def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: + """Deserializes the CustomPolicyTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass -class CreateClusterResponse: - cluster_id: Optional[str] = None +class DataPlaneEventDetails: + event_type: Optional[DataPlaneEventDetailsEventType] = None + + executor_failures: Optional[int] = None + + host_id: Optional[str] = None + + timestamp: Optional[int] = None def as_dict(self) -> dict: - """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.event_type is not None: + body["event_type"] = 
self.event_type.value + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: - """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.event_type is not None: + body["event_type"] = self.event_type + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: - """Deserializes the CreateClusterResponse from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: + """Deserializes the DataPlaneEventDetails from a dictionary.""" + return cls( + event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), + executor_failures=d.get("executor_failures", None), + host_id=d.get("host_id", None), + timestamp=d.get("timestamp", None), + ) + + +class DataPlaneEventDetailsEventType(Enum): + + NODE_BLACKLISTED = "NODE_BLACKLISTED" + NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" + + +class DataSecurityMode(Enum): + """Data security mode decides what data governance model to use when accessing data from a cluster. + + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: + Databricks will choose the most appropriate access mode depending on your compute configuration. + * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. 
* `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. + + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for + future Databricks Runtime versions: + + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * + `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high + concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy + Passthrough on standard clusters. 
* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that + doesn’t have UC nor passthrough enabled.""" + + DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" + DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" + DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" + LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" + LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" + LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" + LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" + NONE = "NONE" + SINGLE_USER = "SINGLE_USER" + USER_ISOLATION = "USER_ISOLATION" @dataclass -class CreateContext: - cluster_id: Optional[str] = None - """Running cluster id""" +class DbfsStorageInfo: + """A storage location in DBFS""" - language: Optional[Language] = None + destination: str + """dbfs destination, e.g. `dbfs:/my/path`""" def as_dict(self) -> dict: - """Serializes the CreateContext into a dictionary suitable for use as a JSON request body.""" + """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language.value + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: - """Serializes the CreateContext into a shallow dictionary of its immediate attributes.""" + """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language + if self.destination is not None: + body["destination"] = self.destination return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateContext: - """Deserializes the CreateContext from a dictionary.""" - return cls(cluster_id=d.get("clusterId", None), language=_enum(d, "language", Language)) + def from_dict(cls, d: Dict[str, 
Any]) -> DbfsStorageInfo: + """Deserializes the DbfsStorageInfo from a dictionary.""" + return cls(destination=d.get("destination", None)) @dataclass -class CreateInstancePool: - instance_pool_name: str - """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 - characters.""" - - node_type_id: str - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" +class DeleteClusterResponse: + def as_dict(self) -> dict: + """Serializes the DeleteClusterResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - aws_attributes: Optional[InstancePoolAwsAttributes] = None - """Attributes related to instance pools running on Amazon Web Services. If not specified at pool - creation, a set of default values will be used.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - azure_attributes: Optional[InstancePoolAzureAttributes] = None - """Attributes related to instance pools running on Azure. If not specified at pool creation, a set - of default values will be used.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse: + """Deserializes the DeleteClusterResponse from a dictionary.""" + return cls() - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances - and EBS volumes) with these tags in addition to `default_tags`. 
Notes: - - - Currently, Databricks allows at most 45 custom tags""" - disk_spec: Optional[DiskSpec] = None - """Defines the specification of the disks that will be attached to all spark containers.""" +@dataclass +class DeleteInstancePoolResponse: + def as_dict(self) -> dict: + """Serializes the DeleteInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire - additional disk space when its Spark workers are running low on disk space. In AWS, this feature - requires specific AWS permissions to function correctly - refer to the User Guide for more - details.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - gcp_attributes: Optional[InstancePoolGcpAttributes] = None - """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool - creation, a set of default values will be used.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse: + """Deserializes the DeleteInstancePoolResponse from a dictionary.""" + return cls() - idle_instance_autotermination_minutes: Optional[int] = None - """Automatically terminates the extra instances in the pool cache after they are inactive for this - time in minutes if min_idle_instances requirement is already met. If not set, the extra pool - instances will be automatically terminated after a default timeout. If specified, the threshold - must be between 0 and 10000 minutes. 
Users can also set this value to 0 to instantly remove idle - instances from the cache if min cache size could still hold.""" - max_capacity: Optional[int] = None - """Maximum number of outstanding instances to keep in the pool, including both instances used by - clusters and idle instances. Clusters that require further instance provisioning will fail - during upsize requests.""" - - min_idle_instances: Optional[int] = None - """Minimum number of idle instances to keep in the instance pool""" - - preloaded_docker_images: Optional[List[DockerImage]] = None - """Custom Docker Image BYOC""" - - preloaded_spark_versions: Optional[List[str]] = None - """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters - started with the preloaded Spark version will start faster. A list of available Spark versions - can be retrieved by using the :method:clusters/sparkVersions API call.""" - - def as_dict(self) -> dict: - """Serializes the CreateInstancePool into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if 
self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = self.preloaded_docker_images - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = self.preloaded_spark_versions - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePool: - """Deserializes the CreateInstancePool from a dictionary.""" - return cls( - aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), - custom_tags=d.get("custom_tags", 
None), - disk_spec=_from_dict(d, "disk_spec", DiskSpec), - enable_elastic_disk=d.get("enable_elastic_disk", None), - gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), - preloaded_spark_versions=d.get("preloaded_spark_versions", None), - ) - - -@dataclass -class CreateInstancePoolResponse: - instance_pool_id: Optional[str] = None - """The ID of the created instance pool.""" - - def as_dict(self) -> dict: - """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePoolResponse: - """Deserializes the CreateInstancePoolResponse from a dictionary.""" - return cls(instance_pool_id=d.get("instance_pool_id", None)) - - -@dataclass -class CreatePolicy: - definition: Optional[str] = None - """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
- - [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - description: Optional[str] = None - """Additional human-readable description of the cluster policy.""" - - libraries: Optional[List[Library]] = None - """A list of libraries to be installed on the next cluster restart that uses this policy. The - maximum number of libraries is 500.""" - - max_clusters_per_user: Optional[int] = None - """Max number of clusters per user that can be active using this policy. If not present, there is - no max limit.""" - - name: Optional[str] = None - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" - - policy_family_definition_overrides: Optional[str] = None - """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON - document must be passed as a string and cannot be embedded in the requests. - - You can use this to customize the policy definition inherited from the policy family. Policy - rules specified here are merged into the inherited policy definition. - - [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - policy_family_id: Optional[str] = None - """ID of the policy family. The cluster policy's policy definition inherits the policy family's - policy definition. - - Cannot be used with `definition`. 
Use `policy_family_definition_overrides` instead to customize - the policy definition.""" - - def as_dict(self) -> dict: - """Serializes the CreatePolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - return body +@dataclass +class DeletePolicyResponse: + def as_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body def as_shallow_dict(self) -> dict: - """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes.""" + """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePolicy: - 
"""Deserializes the CreatePolicy from a dictionary.""" - return cls( - definition=d.get("definition", None), - description=d.get("description", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - policy_family_id=d.get("policy_family_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: + """Deserializes the DeletePolicyResponse from a dictionary.""" + return cls() @dataclass -class CreatePolicyResponse: - policy_id: Optional[str] = None - """Canonical unique identifier for the cluster policy.""" - +class DeleteResponse: def as_dict(self) -> dict: - """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse: - """Deserializes the CreatePolicyResponse from a dictionary.""" - return cls(policy_id=d.get("policy_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" + return cls() @dataclass -class CreateResponse: - script_id: Optional[str] = None - """The global init script ID.""" - +class DestroyResponse: def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DestroyResponse into 
a dictionary suitable for use as a JSON request body.""" body = {} - if self.script_id is not None: - body["script_id"] = self.script_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.script_id is not None: - body["script_id"] = self.script_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" - return cls(script_id=d.get("script_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse: + """Deserializes the DestroyResponse from a dictionary.""" + return cls() @dataclass -class Created: - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the Created into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Created into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - return body +class DiskSpec: + """Describes the disks that are launched for each instance in the spark cluster. For example, if + the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then + Databricks will launch a total of 6 disks, 100 GiB each, for this cluster.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Created: - """Deserializes the Created from a dictionary.""" - return cls(id=d.get("id", None)) - - -@dataclass -class CustomPolicyTag: - key: str - """The key of the tag. 
- Must be unique among all custom tags of the same policy - Cannot be - “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these - tags are preserved.""" - - value: Optional[str] = None - """The value of the tag.""" - - def as_dict(self) -> dict: - """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: - """Deserializes the CustomPolicyTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) - - -@dataclass -class DataPlaneEventDetails: - event_type: Optional[DataPlaneEventDetailsEventType] = None - - executor_failures: Optional[int] = None - - host_id: Optional[str] = None - - timestamp: Optional[int] = None - - def as_dict(self) -> dict: - """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.event_type is not None: - body["event_type"] = self.event_type.value - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" - body = {} - if self.event_type is not None: - body["event_type"] = self.event_type - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if 
self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: - """Deserializes the DataPlaneEventDetails from a dictionary.""" - return cls( - event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), - executor_failures=d.get("executor_failures", None), - host_id=d.get("host_id", None), - timestamp=d.get("timestamp", None), - ) - - -class DataPlaneEventDetailsEventType(Enum): - - NODE_BLACKLISTED = "NODE_BLACKLISTED" - NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" - - -class DataSecurityMode(Enum): - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. 
- - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" - - DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" - DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" - DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" - LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" - LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" - LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" - LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" - NONE = "NONE" - SINGLE_USER = "SINGLE_USER" - USER_ISOLATION = "USER_ISOLATION" - - -@dataclass -class DbfsStorageInfo: - """A storage location in DBFS""" - - destination: str - """dbfs destination, e.g. 
`dbfs:/my/path`""" - - def as_dict(self) -> dict: - """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.destination is not None: - body["destination"] = self.destination - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.destination is not None: - body["destination"] = self.destination - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: - """Deserializes the DbfsStorageInfo from a dictionary.""" - return cls(destination=d.get("destination", None)) - - -@dataclass -class DeleteCluster: - cluster_id: str - """The cluster to be terminated.""" - - def as_dict(self) -> dict: - """Serializes the DeleteCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteCluster: - """Deserializes the DeleteCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - -@dataclass -class DeleteClusterResponse: - def as_dict(self) -> dict: - """Serializes the DeleteClusterResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse: - """Deserializes the DeleteClusterResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteInstancePool: - 
instance_pool_id: str - """The instance pool to be terminated.""" - - def as_dict(self) -> dict: - """Serializes the DeleteInstancePool into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePool: - """Deserializes the DeleteInstancePool from a dictionary.""" - return cls(instance_pool_id=d.get("instance_pool_id", None)) - - -@dataclass -class DeleteInstancePoolResponse: - def as_dict(self) -> dict: - """Serializes the DeleteInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse: - """Deserializes the DeleteInstancePoolResponse from a dictionary.""" - return cls() - - -@dataclass -class DeletePolicy: - policy_id: str - """The ID of the policy to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeletePolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePolicy: - """Deserializes the DeletePolicy from a 
dictionary.""" - return cls(policy_id=d.get("policy_id", None)) - - -@dataclass -class DeletePolicyResponse: - def as_dict(self) -> dict: - """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: - """Deserializes the DeletePolicyResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DestroyContext: - cluster_id: str - - context_id: str - - def as_dict(self) -> dict: - """Serializes the DestroyContext into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DestroyContext into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DestroyContext: - """Deserializes the DestroyContext from a dictionary.""" - return cls(cluster_id=d.get("clusterId", None), context_id=d.get("contextId", None)) - - 
-@dataclass -class DestroyResponse: - def as_dict(self) -> dict: - """Serializes the DestroyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse: - """Deserializes the DestroyResponse from a dictionary.""" - return cls() - - -@dataclass -class DiskSpec: - """Describes the disks that are launched for each instance in the spark cluster. For example, if - the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then - Databricks will launch a total of 6 disks, 100 GiB each, for this cluster.""" - - disk_count: Optional[int] = None - """The number of disks launched for each instance: - This feature is only enabled for supported - node types. - Users can choose up to the limit of the disks supported by the node type. - For - node types with no OS disk, at least one disk must be specified; otherwise, cluster creation - will fail. - - If disks are attached, Databricks will configure Spark to use only the disks for scratch - storage, because heterogenously sized scratch devices can lead to inefficient disk utilization. - If no disks are attached, Databricks will configure Spark to use instance store disks. - - Note: If disks are specified, then the Spark configuration `spark.local.dir` will be overridden. - - Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`, - `/remote_volume1`, and etc.""" - - disk_iops: Optional[int] = None - - disk_size: Optional[int] = None - """The size of each disk (in GiB) launched for each instance. Values must fall into the supported - range for a particular instance type. 
- - For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB - - For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB""" - - disk_throughput: Optional[int] = None - - disk_type: Optional[DiskType] = None - """The type of disks that will be launched with this cluster.""" - - def as_dict(self) -> dict: - """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.disk_count is not None: - body["disk_count"] = self.disk_count - if self.disk_iops is not None: - body["disk_iops"] = self.disk_iops - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.disk_throughput is not None: - body["disk_throughput"] = self.disk_throughput - if self.disk_type: - body["disk_type"] = self.disk_type.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.disk_count is not None: - body["disk_count"] = self.disk_count - if self.disk_iops is not None: - body["disk_iops"] = self.disk_iops - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.disk_throughput is not None: - body["disk_throughput"] = self.disk_throughput - if self.disk_type: - body["disk_type"] = self.disk_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DiskSpec: - """Deserializes the DiskSpec from a dictionary.""" - return cls( - disk_count=d.get("disk_count", None), - disk_iops=d.get("disk_iops", None), - disk_size=d.get("disk_size", None), - disk_throughput=d.get("disk_throughput", None), - disk_type=_from_dict(d, "disk_type", DiskType), - ) - - -@dataclass -class DiskType: - """Describes the disk type.""" - - azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None - """All Azure Disk types that Databricks supports. 
See - https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - - ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None - """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for - details.""" - - def as_dict(self) -> dict: - """Serializes the DiskType into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.azure_disk_volume_type is not None: - body["azure_disk_volume_type"] = self.azure_disk_volume_type.value - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" - body = {} - if self.azure_disk_volume_type is not None: - body["azure_disk_volume_type"] = self.azure_disk_volume_type - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DiskType: - """Deserializes the DiskType from a dictionary.""" - return cls( - azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType), - ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType), - ) - - -class DiskTypeAzureDiskVolumeType(Enum): - """All Azure Disk types that Databricks supports. See - https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - - PREMIUM_LRS = "PREMIUM_LRS" - STANDARD_LRS = "STANDARD_LRS" - - -class DiskTypeEbsVolumeType(Enum): - """All EBS volume types that Databricks supports. 
See https://aws.amazon.com/ebs/details/ for - details.""" - - GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" - THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" - - -@dataclass -class DockerBasicAuth: - password: Optional[str] = None - """Password of the user""" - - username: Optional[str] = None - """Name of the user""" - - def as_dict(self) -> dict: - """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.password is not None: - body["password"] = self.password - if self.username is not None: - body["username"] = self.username - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" - body = {} - if self.password is not None: - body["password"] = self.password - if self.username is not None: - body["username"] = self.username - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth: - """Deserializes the DockerBasicAuth from a dictionary.""" - return cls(password=d.get("password", None), username=d.get("username", None)) - - -@dataclass -class DockerImage: - basic_auth: Optional[DockerBasicAuth] = None - """Basic auth with username and password""" - - url: Optional[str] = None - """URL of the docker image.""" - - def as_dict(self) -> dict: - """Serializes the DockerImage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.basic_auth: - body["basic_auth"] = self.basic_auth.as_dict() - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" - body = {} - if self.basic_auth: - body["basic_auth"] = self.basic_auth - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DockerImage: - """Deserializes the DockerImage from a dictionary.""" - return 
cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None)) - - -class EbsVolumeType(Enum): - """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for - details.""" - - GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" - THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" - - -@dataclass -class EditCluster: - cluster_id: str - """ID of the cluster""" - - spark_version: str - """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can - be retrieved by using the :method:clusters/sparkVersions API call.""" - - apply_policy_default_values: Optional[bool] = None - """When set to true, fixed and default values from the policy will be used for fields that are - omitted. When set to false, only fixed values from the policy will be applied.""" - - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. Note: - autoscaling works best with DB runtime versions 3.0 or later.""" - - autotermination_minutes: Optional[int] = None - """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, - this cluster will not be automatically terminated. If specified, the threshold must be between - 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic - termination.""" - - aws_attributes: Optional[AwsAttributes] = None - """Attributes related to clusters running on Amazon Web Services. If not specified at cluster - creation, a set of default values will be used.""" - - azure_attributes: Optional[AzureAttributes] = None - """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, - a set of default values will be used.""" - - cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of - destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be - specified for one cluster. If the conf is given, the logs will be delivered to the destination - every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the - destination of executor logs is `$destination/$clusterId/executor`.""" - - cluster_name: Optional[str] = None - """Cluster name requested by the user. This doesn't have to be unique. If not specified at - creation, the cluster name will be an empty string. For job clusters, the cluster name is - automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS - instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags - - - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster - tags""" - - data_security_mode: Optional[DataSecurityMode] = None - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. 
* `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. - - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" - - docker_image: Optional[DockerImage] = None - """Custom docker image BYOC""" - - driver_instance_pool_id: Optional[str] = None - """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster - uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - - driver_node_type_id: Optional[str] = None - """The node type of the Spark driver. Note that this field is optional; if unset, the driver node - type will be set as the same value as `node_type_id` defined above. - - This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both - driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id - and node_type_id take precedence.""" - - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk - space when its Spark workers are running low on disk space. 
This feature requires specific AWS - permissions to function correctly - refer to the User Guide for more details.""" - - enable_local_disk_encryption: Optional[bool] = None - """Whether to enable LUKS on cluster VMs' local disks""" - - gcp_attributes: Optional[GcpAttributes] = None - """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster - creation, a set of default values will be used.""" - - init_scripts: Optional[List[InitScriptInfo]] = None - """The configuration for storing init scripts. Any number of destinations can be specified. The - scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, - init script logs are sent to `//init_scripts`.""" - - instance_pool_id: Optional[str] = None - """The optional ID of the instance pool to which the cluster belongs.""" - - is_single_node: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. - - When set to true, Databricks will automatically set single node related `custom_tags`, - `spark_conf`, and `num_workers`""" - - kind: Optional[Kind] = None - """The kind of compute described by this compute specification. - - Depending on `kind`, different validations and default values will be applied. - - Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no - specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * - [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * - [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to - `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - - By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. 
- - [simple form]: https://docs.databricks.com/compute/simple-form.html""" - - node_type_id: Optional[str] = None - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" - - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" - - policy_id: Optional[str] = None - """The ID of the cluster policy used to create the cluster if applicable.""" - - runtime_engine: Optional[RuntimeEngine] = None - """Determines the cluster's runtime engine, either standard or Photon. - - This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove - `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - - If left unspecified, the runtime engine defaults to standard unless the spark_version contains - -photon-, in which case Photon will be used.""" - - single_user_name: Optional[str] = None - """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified Spark configuration key-value pairs. 
- Users can also pass in a string of extra JVM options to the driver and the executors via - `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified environment variable key-value pairs. - Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) - while launching the driver and workers. - - In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them - to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default - databricks managed environmental variables are included as well. - - Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": - "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS - -Dspark.shuffle.service.enabled=true"}`""" - - ssh_public_keys: Optional[List[str]] = None - """SSH public key contents that will be added to each Spark node in this cluster. The corresponding - private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can - be specified.""" - - use_ml_runtime: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. 
- - `effective_spark_version` is determined by `spark_version` (DBR release), this field - `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - - workload_type: Optional[WorkloadType] = None - """Cluster Attributes showing for clusters workload types.""" - - def as_dict(self) -> dict: - """Serializes the EditCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if 
self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = 
self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + disk_count: Optional[int] = None + """The number of disks launched for each instance: - This feature is only enabled for supported + node types. 
- Users can choose up to the limit of the disks supported by the node type. - For + node types with no OS disk, at least one disk must be specified; otherwise, cluster creation + will fail. + + If disks are attached, Databricks will configure Spark to use only the disks for scratch + storage, because heterogenously sized scratch devices can lead to inefficient disk utilization. + If no disks are attached, Databricks will configure Spark to use instance store disks. + + Note: If disks are specified, then the Spark configuration `spark.local.dir` will be overridden. + + Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`, + `/remote_volume1`, and etc.""" + + disk_iops: Optional[int] = None + + disk_size: Optional[int] = None + """The size of each disk (in GiB) launched for each instance. Values must fall into the supported + range for a particular instance type. + + For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB + + For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB""" + + disk_throughput: Optional[int] = None + + disk_type: Optional[DiskType] = None + """The type of disks that will be launched with this cluster.""" + + def as_dict(self) -> dict: + """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is 
not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditCluster: - """Deserializes the EditCluster from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DiskSpec: + """Deserializes the DiskSpec from a dictionary.""" return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - cluster_id=d.get("cluster_id", None), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - 
spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), + disk_count=d.get("disk_count", None), + disk_iops=d.get("disk_iops", None), + disk_size=d.get("disk_size", None), + disk_throughput=d.get("disk_throughput", None), + disk_type=_from_dict(d, "disk_type", DiskType), ) @dataclass -class EditClusterResponse: +class DiskType: + """Describes the disk type.""" + + azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None + """All Azure Disk types that Databricks supports. See + https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" + + ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None + """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + details.""" + def as_dict(self) -> dict: - """Serializes the EditClusterResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DiskType into a dictionary suitable for use as a JSON request body.""" body = {} + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type.value + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" body = {} + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: - """Deserializes the EditClusterResponse from a dictionary.""" - 
return cls() + def from_dict(cls, d: Dict[str, Any]) -> DiskType: + """Deserializes the DiskType from a dictionary.""" + return cls( + azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType), + ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType), + ) + + +class DiskTypeAzureDiskVolumeType(Enum): + """All Azure Disk types that Databricks supports. See + https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" + + PREMIUM_LRS = "PREMIUM_LRS" + STANDARD_LRS = "STANDARD_LRS" + + +class DiskTypeEbsVolumeType(Enum): + """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + details.""" + + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" @dataclass -class EditInstancePool: - instance_pool_id: str - """Instance pool ID""" +class DockerBasicAuth: + password: Optional[str] = None + """Password of the user""" - instance_pool_name: str - """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 - characters.""" + username: Optional[str] = None + """Name of the user""" - node_type_id: str - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" + def as_dict(self) -> dict: + """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username + return body - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for pool resources. 
Databricks will tag all pool resources (e.g., AWS instances - and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags""" + def as_shallow_dict(self) -> dict: + """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" + body = {} + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username + return body - idle_instance_autotermination_minutes: Optional[int] = None - """Automatically terminates the extra instances in the pool cache after they are inactive for this - time in minutes if min_idle_instances requirement is already met. If not set, the extra pool - instances will be automatically terminated after a default timeout. If specified, the threshold - must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle - instances from the cache if min cache size could still hold.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth: + """Deserializes the DockerBasicAuth from a dictionary.""" + return cls(password=d.get("password", None), username=d.get("username", None)) - max_capacity: Optional[int] = None - """Maximum number of outstanding instances to keep in the pool, including both instances used by - clusters and idle instances. 
Clusters that require further instance provisioning will fail - during upsize requests.""" - min_idle_instances: Optional[int] = None - """Minimum number of idle instances to keep in the instance pool""" +@dataclass +class DockerImage: + basic_auth: Optional[DockerBasicAuth] = None + """Basic auth with username and password""" + + url: Optional[str] = None + """URL of the docker image.""" def as_dict(self) -> dict: - """Serializes the EditInstancePool into a dictionary suitable for use as a JSON request body.""" + """Serializes the DockerImage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id + if self.basic_auth: + body["basic_auth"] = self.basic_auth.as_dict() + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes.""" + """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: 
- body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id + if self.basic_auth: + body["basic_auth"] = self.basic_auth + if self.url is not None: + body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditInstancePool: - """Deserializes the EditInstancePool from a dictionary.""" - return cls( - custom_tags=d.get("custom_tags", None), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_id=d.get("instance_pool_id", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DockerImage: + """Deserializes the DockerImage from a dictionary.""" + return cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None)) + + +class EbsVolumeType(Enum): + """All EBS volume types that Databricks supports. 
See https://aws.amazon.com/ebs/details/ for + details.""" + + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" @dataclass -class EditInstancePoolResponse: +class EditClusterResponse: def as_dict(self) -> dict: - """Serializes the EditInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the EditClusterResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the EditInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: - """Deserializes the EditInstancePoolResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: + """Deserializes the EditClusterResponse from a dictionary.""" return cls() @dataclass -class EditPolicy: - policy_id: str - """The ID of the policy to update.""" - - definition: Optional[str] = None - """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - - [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - description: Optional[str] = None - """Additional human-readable description of the cluster policy.""" - - libraries: Optional[List[Library]] = None - """A list of libraries to be installed on the next cluster restart that uses this policy. The - maximum number of libraries is 500.""" - - max_clusters_per_user: Optional[int] = None - """Max number of clusters per user that can be active using this policy. If not present, there is - no max limit.""" - - name: Optional[str] = None - """Cluster Policy name requested by the user. This has to be unique. 
Length must be between 1 and - 100 characters.""" - - policy_family_definition_overrides: Optional[str] = None - """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON - document must be passed as a string and cannot be embedded in the requests. - - You can use this to customize the policy definition inherited from the policy family. Policy - rules specified here are merged into the inherited policy definition. - - [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - policy_family_id: Optional[str] = None - """ID of the policy family. The cluster policy's policy definition inherits the policy family's - policy definition. - - Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize - the policy definition.""" - +class EditInstancePoolResponse: def as_dict(self) -> dict: - """Serializes the EditPolicy into a dictionary suitable for use as a JSON request body.""" + """Serializes the EditInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: - """Serializes the EditPolicy into a shallow dictionary of its immediate attributes.""" + """Serializes 
the EditInstancePoolResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditPolicy: - """Deserializes the EditPolicy from a dictionary.""" - return cls( - definition=d.get("definition", None), - description=d.get("description", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - policy_family_id=d.get("policy_family_id", None), - policy_id=d.get("policy_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: + """Deserializes the EditInstancePoolResponse from a dictionary.""" + return cls() @dataclass @@ -4702,39 +3154,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EditResponse: return cls() -@dataclass -class EnforceClusterComplianceRequest: - cluster_id: str - """The ID of the cluster you want to enforce policy compliance on.""" - - validate_only: Optional[bool] = None - """If set, previews the changes that would be made to a cluster to enforce compliance but does not - update the cluster.""" - - def as_dict(self) -> dict: - """Serializes the EnforceClusterComplianceRequest into a dictionary 
suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceRequest: - """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), validate_only=d.get("validate_only", None)) - - @dataclass class EnforceClusterComplianceResponse: changes: Optional[List[ClusterSettingsChange]] = None @@ -5672,65 +4091,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetSparkVersionsResponse: return cls(versions=_repeated_dict(d, "versions", SparkVersion)) -@dataclass -class GlobalInitScriptCreateRequest: - name: str - """The name of the script""" - - script: str - """The Base64-encoded content of the script.""" - - enabled: Optional[bool] = None - """Specifies whether the script is enabled. The script runs only if enabled.""" - - position: Optional[int] = None - """The position of a global init script, where 0 represents the first script to run, 1 is the - second script to run, in ascending order. - - If you omit the numeric position for a new global init script, it defaults to last position. It - will run after all current scripts. Setting any value greater than the position of the last - script is equivalent to the last position. Example: Take three existing scripts with positions - 0, 1, and 2. Any position of (3) or greater puts the script in the last position. 
If an explicit - position value conflicts with an existing script value, your request succeeds, but the original - script at that position and all later scripts have their positions incremented by 1.""" - - def as_dict(self) -> dict: - """Serializes the GlobalInitScriptCreateRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptCreateRequest: - """Deserializes the GlobalInitScriptCreateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - ) - - @dataclass class GlobalInitScriptDetails: created_at: Optional[int] = None @@ -5907,73 +4267,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetailsWithContent: ) -@dataclass -class GlobalInitScriptUpdateRequest: - name: str - """The name of the script""" - - script: str - """The Base64-encoded content of the script.""" - - enabled: Optional[bool] = None - """Specifies whether the script is enabled. The script runs only if enabled.""" - - position: Optional[int] = None - """The position of a script, where 0 represents the first script to run, 1 is the second script to - run, in ascending order. 
To move the script to run first, set its position to 0. - - To move the script to the end, set its position to any value greater or equal to the position of - the last script. Example, three existing scripts with positions 0, 1, and 2. Any position value - of 2 or greater puts the script in the last position (2). - - If an explicit position value conflicts with an existing script, your request succeeds, but the - original script at that position and all later scripts have their positions incremented by 1.""" - - script_id: Optional[str] = None - """The ID of the global init script.""" - - def as_dict(self) -> dict: - """Serializes the GlobalInitScriptUpdateRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptUpdateRequest: - """Deserializes the GlobalInitScriptUpdateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - script_id=d.get("script_id", None), - ) - - @dataclass class InitScriptEventDetails: cluster: 
Optional[List[InitScriptInfoAndExecutionDetails]] = None @@ -6199,62 +4492,30 @@ def as_shallow_dict(self) -> dict: if self.gcs: body["gcs"] = self.gcs if self.s3: - body["s3"] = self.s3 - if self.status is not None: - body["status"] = self.status - if self.volumes: - body["volumes"] = self.volumes - if self.workspace: - body["workspace"] = self.workspace - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: - """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" - return cls( - abfss=_from_dict(d, "abfss", Adlsgen2Info), - dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), - error_message=d.get("error_message", None), - execution_duration_seconds=d.get("execution_duration_seconds", None), - file=_from_dict(d, "file", LocalFileInfo), - gcs=_from_dict(d, "gcs", GcsStorageInfo), - s3=_from_dict(d, "s3", S3StorageInfo), - status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), - volumes=_from_dict(d, "volumes", VolumesStorageInfo), - workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), - ) - - -@dataclass -class InstallLibraries: - cluster_id: str - """Unique identifier for the cluster on which to install these libraries.""" - - libraries: List[Library] - """The libraries to install.""" - - def as_dict(self) -> dict: - """Serializes the InstallLibraries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries + body["s3"] = self.s3 + if self.status is not None: + body["status"] = self.status + if self.volumes: + 
body["volumes"] = self.volumes + if self.workspace: + body["workspace"] = self.workspace return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InstallLibraries: - """Deserializes the InstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) + def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: + """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" + return cls( + abfss=_from_dict(d, "abfss", Adlsgen2Info), + dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), + error_message=d.get("error_message", None), + execution_duration_seconds=d.get("execution_duration_seconds", None), + file=_from_dict(d, "file", LocalFileInfo), + gcs=_from_dict(d, "gcs", GcsStorageInfo), + s3=_from_dict(d, "s3", S3StorageInfo), + status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), + volumes=_from_dict(d, "volumes", VolumesStorageInfo), + workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), + ) @dataclass @@ -6870,40 +5131,6 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsDescription: ) -@dataclass -class InstancePoolPermissionsRequest: - access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - - instance_pool_id: Optional[str] = None - """The instance pool for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the InstancePoolPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = 
self.access_control_list - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsRequest: - """Deserializes the InstancePoolPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlRequest), - instance_pool_id=d.get("instance_pool_id", None), - ) - - class InstancePoolState(Enum): """The state of a Cluster. The current allowable state transitions are as follows: @@ -8062,31 +6289,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PendingInstanceError: return cls(instance_id=d.get("instance_id", None), message=d.get("message", None)) -@dataclass -class PermanentDeleteCluster: - cluster_id: str - """The cluster to be deleted.""" - - def as_dict(self) -> dict: - """Serializes the PermanentDeleteCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteCluster: - """Deserializes the PermanentDeleteCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class PermanentDeleteClusterResponse: def as_dict(self) -> dict: @@ -8105,30 +6307,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteClusterResponse: return cls() -@dataclass -class PinCluster: - cluster_id: str - - def as_dict(self) -> dict: - """Serializes the PinCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the PinCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PinCluster: - """Deserializes the PinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class PinClusterResponse: def as_dict(self) -> dict: @@ -8391,31 +6569,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) -@dataclass -class RemoveInstanceProfile: - instance_profile_arn: str - """The ARN of the instance profile to remove. This field is required.""" - - def as_dict(self) -> dict: - """Serializes the RemoveInstanceProfile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RemoveInstanceProfile: - """Deserializes the RemoveInstanceProfile from a dictionary.""" - return cls(instance_profile_arn=d.get("instance_profile_arn", None)) - - @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -8434,57 +6587,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RemoveResponse: return cls() -@dataclass -class ResizeCluster: - cluster_id: str - """The cluster to be resized.""" - - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. 
Note: - autoscaling works best with DB runtime versions 3.0 or later.""" - - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" - - def as_dict(self) -> dict: - """Serializes the ResizeCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.autoscale: - body["autoscale"] = self.autoscale - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResizeCluster: - """Deserializes the ResizeCluster from a dictionary.""" - return cls( - autoscale=_from_dict(d, "autoscale", AutoScale), - cluster_id=d.get("cluster_id", None), - num_workers=d.get("num_workers", None), - ) - - @dataclass class ResizeClusterResponse: def as_dict(self) -> dict: @@ -8503,37 +6605,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ResizeClusterResponse: return cls() -@dataclass -class RestartCluster: - cluster_id: str - """The cluster to be started.""" - - restart_user: 
Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the RestartCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestartCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestartCluster: - """Deserializes the RestartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), restart_user=d.get("restart_user", None)) - - @dataclass class RestartClusterResponse: def as_dict(self) -> dict: @@ -8895,31 +6966,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SparkVersion: return cls(key=d.get("key", None), name=d.get("name", None)) -@dataclass -class StartCluster: - cluster_id: str - """The cluster to be started.""" - - def as_dict(self) -> dict: - """Serializes the StartCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the StartCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> StartCluster: - """Deserializes the StartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class StartClusterResponse: def as_dict(self) -> dict: @@ -9185,38 +7231,6 @@ class TerminationReasonType(Enum): SUCCESS = "SUCCESS" -@dataclass -class UninstallLibraries: - cluster_id: 
str - """Unique identifier for the cluster on which to uninstall these libraries.""" - - libraries: List[Library] - """The libraries to uninstall.""" - - def as_dict(self) -> dict: - """Serializes the UninstallLibraries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UninstallLibraries: - """Deserializes the UninstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) - - @dataclass class UninstallLibrariesResponse: def as_dict(self) -> dict: @@ -9235,30 +7249,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UninstallLibrariesResponse: return cls() -@dataclass -class UnpinCluster: - cluster_id: str - - def as_dict(self) -> dict: - """Serializes the UnpinCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UnpinCluster: - """Deserializes the UnpinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class UnpinClusterResponse: def as_dict(self) -> dict: @@ -9277,60 +7267,6 @@ def from_dict(cls, d: Dict[str, Any]) 
-> UnpinClusterResponse: return cls() -@dataclass -class UpdateCluster: - cluster_id: str - """ID of the cluster.""" - - update_mask: str - """Used to specify which cluster attributes and size fields to update. See - https://google.aip.dev/161 for more details. - - The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - cluster: Optional[UpdateClusterResource] = None - """The cluster to be updated.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster: - body["cluster"] = self.cluster.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster: - body["cluster"] = self.cluster - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCluster: - """Deserializes the UpdateCluster from a dictionary.""" - return cls( - cluster=_from_dict(d, "cluster", UpdateClusterResource), - cluster_id=d.get("cluster_id", None), - update_mask=d.get("update_mask", None), - ) - 
- @dataclass class UpdateClusterResource: autoscale: Optional[AutoScale] = None diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 44cb7680..ed2178d4 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -409,49 +409,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversation: ) -@dataclass -class GenieCreateConversationMessageRequest: - content: str - """User message content.""" - - conversation_id: Optional[str] = None - """The ID associated with the conversation.""" - - space_id: Optional[str] = None - """The ID associated with the Genie space where the conversation is started.""" - - def as_dict(self) -> dict: - """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieCreateConversationMessageRequest: - """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" - return cls( - content=d.get("content", None), - conversation_id=d.get("conversation_id", None), - space_id=d.get("space_id", None), - ) - - @dataclass class GenieGenerateDownloadFullQueryResultResponse: download_id: Optional[str] = None @@ -836,38 +793,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: return 
cls(description=d.get("description", None), space_id=d.get("space_id", None), title=d.get("title", None)) -@dataclass -class GenieStartConversationMessageRequest: - content: str - """The text of the message that starts the conversation.""" - - space_id: Optional[str] = None - """The ID associated with the Genie space where you want to start a conversation.""" - - def as_dict(self) -> dict: - """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationMessageRequest: - """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" - return cls(content=d.get("content", None), space_id=d.get("space_id", None)) - - @dataclass class GenieStartConversationResponse: message_id: str @@ -1185,102 +1110,6 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" -@dataclass -class MigrateDashboardRequest: - source_dashboard_id: str - """UUID of the dashboard to be migrated.""" - - display_name: Optional[str] = None - """Display name for the new Lakeview dashboard.""" - - parent_path: Optional[str] = None - """The workspace path of the folder to contain the migrated Lakeview dashboard.""" - - update_parameter_syntax: Optional[bool] = None - """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named - syntax (:param) when converting datasets in the dashboard.""" - - def as_dict(self) -> dict: - """Serializes the 
MigrateDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MigrateDashboardRequest: - """Deserializes the MigrateDashboardRequest from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - parent_path=d.get("parent_path", None), - source_dashboard_id=d.get("source_dashboard_id", None), - update_parameter_syntax=d.get("update_parameter_syntax", None), - ) - - -@dataclass -class PublishRequest: - dashboard_id: Optional[str] = None - """UUID identifying the dashboard to be published.""" - - embed_credentials: Optional[bool] = None - """Flag to indicate if the publisher's credentials should be embedded in the published dashboard. 
- These embedded credentials will be used to execute the published dashboard's queries.""" - - warehouse_id: Optional[str] = None - """The ID of the warehouse that can be used to override the warehouse which was set in the draft.""" - - def as_dict(self) -> dict: - """Serializes the PublishRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.embed_credentials is not None: - body["embed_credentials"] = self.embed_credentials - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PublishRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.embed_credentials is not None: - body["embed_credentials"] = self.embed_credentials - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PublishRequest: - """Deserializes the PublishRequest from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - embed_credentials=d.get("embed_credentials", None), - warehouse_id=d.get("warehouse_id", None), - ) - - @dataclass class PublishedDashboard: display_name: Optional[str] = None diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index 18644759..40ce3e35 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -348,39 +348,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse: return cls() -@dataclass -class GenerateDatabaseCredentialRequest: - """Generates a credential that can be used to access database instances""" - - instance_names: Optional[List[str]] = None - """Instances to which the token will be scoped.""" - - request_id: Optional[str] = None - 
- def as_dict(self) -> dict: - """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_names: - body["instance_names"] = [v for v in self.instance_names] - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_names: - body["instance_names"] = self.instance_names - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest: - """Deserializes the GenerateDatabaseCredentialRequest from a dictionary.""" - return cls(instance_names=d.get("instance_names", None), request_id=d.get("request_id", None)) - - @dataclass class ListDatabaseInstancesResponse: database_instances: Optional[List[DatabaseInstance]] = None diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index f912a510..63f718e1 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -14,38 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddBlock: - handle: int - """The handle on an open stream.""" - - data: str - """The base64-encoded data to append to the stream. 
This has a limit of 1 MB.""" - - def as_dict(self) -> dict: - """Serializes the AddBlock into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddBlock into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddBlock: - """Deserializes the AddBlock from a dictionary.""" - return cls(data=d.get("data", None), handle=d.get("handle", None)) - - @dataclass class AddBlockResponse: def as_dict(self) -> dict: @@ -64,31 +32,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AddBlockResponse: return cls() -@dataclass -class Close: - handle: int - """The handle on an open stream.""" - - def as_dict(self) -> dict: - """Serializes the Close into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.handle is not None: - body["handle"] = self.handle - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Close into a shallow dictionary of its immediate attributes.""" - body = {} - if self.handle is not None: - body["handle"] = self.handle - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Close: - """Deserializes the Close from a dictionary.""" - return cls(handle=d.get("handle", None)) - - @dataclass class CloseResponse: def as_dict(self) -> dict: @@ -107,38 +50,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: return cls() -@dataclass -class Create: - path: str - """The path of the new file. 
The path should be the absolute DBFS path.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing file/files.""" - - def as_dict(self) -> dict: - """Serializes the Create into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Create into a shallow dictionary of its immediate attributes.""" - body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Create: - """Deserializes the Create from a dictionary.""" - return cls(overwrite=d.get("overwrite", None), path=d.get("path", None)) - - @dataclass class CreateDirectoryResponse: def as_dict(self) -> dict: @@ -183,39 +94,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: return cls(handle=d.get("handle", None)) -@dataclass -class Delete: - path: str - """The path of the file or directory to delete. The path should be the absolute DBFS path.""" - - recursive: Optional[bool] = None - """Whether or not to recursively delete the directory's contents. 
Deleting empty directories can be - done without providing the recursive flag.""" - - def as_dict(self) -> dict: - """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Delete into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Delete: - """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) - - @dataclass class DeleteDirectoryResponse: def as_dict(self) -> dict: @@ -530,31 +408,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListStatusResponse: return cls(files=_repeated_dict(d, "files", FileInfo)) -@dataclass -class MkDirs: - path: str - """The path of the new directory. The path should be the absolute DBFS path.""" - - def as_dict(self) -> dict: - """Serializes the MkDirs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MkDirs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MkDirs: - """Deserializes the MkDirs from a dictionary.""" - return cls(path=d.get("path", None)) - - @dataclass class MkDirsResponse: def as_dict(self) -> dict: @@ -573,38 +426,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MkDirsResponse: return cls() -@dataclass -class Move: - source_path: str - """The source path of the file or directory. 
The path should be the absolute DBFS path.""" - - destination_path: str - """The destination path of the file or directory. The path should be the absolute DBFS path.""" - - def as_dict(self) -> dict: - """Serializes the Move into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Move into a shallow dictionary of its immediate attributes.""" - body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Move: - """Deserializes the Move from a dictionary.""" - return cls(destination_path=d.get("destination_path", None), source_path=d.get("source_path", None)) - - @dataclass class MoveResponse: def as_dict(self) -> dict: @@ -623,45 +444,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MoveResponse: return cls() -@dataclass -class Put: - path: str - """The path of the new file. 
The path should be the absolute DBFS path.""" - - contents: Optional[str] = None - """This parameter might be absent, and instead a posted file will be used.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing file/files.""" - - def as_dict(self) -> dict: - """Serializes the Put into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Put into a shallow dictionary of its immediate attributes.""" - body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Put: - """Deserializes the Put from a dictionary.""" - return cls(contents=d.get("contents", None), overwrite=d.get("overwrite", None), path=d.get("path", None)) - - @dataclass class PutResponse: def as_dict(self) -> dict: diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index a0f21da6..baad20eb 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -698,57 +698,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: ) -@dataclass -class MigratePermissionsRequest: - workspace_id: int - """WorkspaceId of the associated workspace where the permission migration will occur.""" - - from_workspace_group_name: str - """The name of the workspace group that permissions will be migrated from.""" - - to_account_group_name: str - """The name of the account group that permissions will be migrated to.""" - - size: Optional[int] = None - """The maximum number of permissions that will be migrated.""" - - def as_dict(self) -> dict: - 
"""Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.from_workspace_group_name is not None: - body["from_workspace_group_name"] = self.from_workspace_group_name - if self.size is not None: - body["size"] = self.size - if self.to_account_group_name is not None: - body["to_account_group_name"] = self.to_account_group_name - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.from_workspace_group_name is not None: - body["from_workspace_group_name"] = self.from_workspace_group_name - if self.size is not None: - body["size"] = self.size - if self.to_account_group_name is not None: - body["to_account_group_name"] = self.to_account_group_name - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsRequest: - """Deserializes the MigratePermissionsRequest from a dictionary.""" - return cls( - from_workspace_group_name=d.get("from_workspace_group_name", None), - size=d.get("size", None), - to_account_group_name=d.get("to_account_group_name", None), - workspace_id=d.get("workspace_id", None), - ) - - @dataclass class MigratePermissionsResponse: permissions_migrated: Optional[int] = None @@ -846,48 +795,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ObjectPermissions: ) -@dataclass -class PartialUpdate: - id: Optional[str] = None - """Unique ID in the Databricks workspace.""" - - operations: Optional[List[Patch]] = None - - schemas: Optional[List[PatchSchema]] = None - """The schema of the patch request. 
Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].""" - - def as_dict(self) -> dict: - """Serializes the PartialUpdate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.operations: - body["Operations"] = [v.as_dict() for v in self.operations] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.operations: - body["Operations"] = self.operations - if self.schemas: - body["schemas"] = self.schemas - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PartialUpdate: - """Deserializes the PartialUpdate from a dictionary.""" - return cls( - id=d.get("id", None), - operations=_repeated_dict(d, "Operations", Patch), - schemas=_repeated_enum(d, "schemas", PatchSchema), - ) - - @dataclass class PasswordAccessControlRequest: group_name: Optional[str] = None @@ -1119,30 +1026,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsDescription: ) -@dataclass -class PasswordPermissionsRequest: - access_control_list: Optional[List[PasswordAccessControlRequest]] = None - - def as_dict(self) -> dict: - """Serializes the PasswordPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsRequest: - """Deserializes the PasswordPermissionsRequest from 
a dictionary.""" - return cls(access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlRequest)) - - @dataclass class Patch: op: Optional[PatchOp] = None @@ -1767,94 +1650,6 @@ class ServicePrincipalSchema(Enum): URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" -@dataclass -class SetObjectPermissions: - access_control_list: Optional[List[AccessControlRequest]] = None - - request_object_id: Optional[str] = None - """The id of the request object.""" - - request_object_type: Optional[str] = None - """The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - - def as_dict(self) -> dict: - """Serializes the SetObjectPermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetObjectPermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetObjectPermissions: - """Deserializes the SetObjectPermissions from a dictionary.""" - return cls( - 
access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - request_object_type=d.get("request_object_type", None), - ) - - -@dataclass -class UpdateObjectPermissions: - access_control_list: Optional[List[AccessControlRequest]] = None - - request_object_id: Optional[str] = None - """The id of the request object.""" - - request_object_type: Optional[str] = None - """The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - - def as_dict(self) -> dict: - """Serializes the UpdateObjectPermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateObjectPermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateObjectPermissions: - """Deserializes the UpdateObjectPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - 
request_object_type=d.get("request_object_type", None), - ) - - @dataclass class UpdateResponse: def as_dict(self) -> dict: @@ -1873,84 +1668,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateRuleSetRequest: - name: str - """Name of the rule set.""" - - rule_set: RuleSetUpdateRequest - - def as_dict(self) -> dict: - """Serializes the UpdateRuleSetRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRuleSetRequest: - """Deserializes the UpdateRuleSetRequest from a dictionary.""" - return cls(name=d.get("name", None), rule_set=_from_dict(d, "rule_set", RuleSetUpdateRequest)) - - -@dataclass -class UpdateWorkspaceAssignments: - permissions: Optional[List[WorkspacePermission]] = None - """Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" - (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other - values will be ignored. 
Note that excluding this field, or providing unsupported values, will - have the same effect as providing an empty list, which will result in the deletion of all - permissions for the principal.""" - - principal_id: Optional[int] = None - """The ID of the user, service principal, or group.""" - - workspace_id: Optional[int] = None - """The workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permissions: - body["permissions"] = [v.value for v in self.permissions] - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permissions: - body["permissions"] = self.permissions - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceAssignments: - """Deserializes the UpdateWorkspaceAssignments from a dictionary.""" - return cls( - permissions=_repeated_enum(d, "permissions", WorkspacePermission), - principal_id=d.get("principal_id", None), - workspace_id=d.get("workspace_id", None), - ) - - @dataclass class User: active: Optional[bool] = None diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 37099ef2..b4e4d51f 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -449,39 +449,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: ) -@dataclass -class CancelAllRuns: - all_queued_runs: Optional[bool] = None - """Optional boolean parameter to cancel all queued runs. 
If no job_id is provided, all queued runs - in the workspace are canceled.""" - - job_id: Optional[int] = None - """The canonical identifier of the job to cancel all runs of.""" - - def as_dict(self) -> dict: - """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelAllRuns: - """Deserializes the CancelAllRuns from a dictionary.""" - return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None)) - - @dataclass class CancelAllRunsResponse: def as_dict(self) -> dict: @@ -500,31 +467,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: return cls() -@dataclass -class CancelRun: - run_id: int - """This field is required.""" - - def as_dict(self) -> dict: - """Serializes the CancelRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelRun: - """Deserializes the CancelRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class CancelRunResponse: def as_dict(self) -> dict: @@ -952,271 +894,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Continuous: return 
cls(pause_status=_enum(d, "pause_status", PauseStatus)) -@dataclass -class CreateJob: - access_control_list: Optional[List[JobAccessControlRequest]] = None - """List of permissions to set on the job.""" - - budget_policy_id: Optional[str] = None - """The id of the user specified budget policy to use for this job. If not specified, a default - budget policy may be applied when creating or modifying the job. See - `effective_budget_policy_id` for the budget policy used by this workload.""" - - continuous: Optional[Continuous] = None - """An optional continuous property for this job. The continuous property will ensure that there is - always one run executing. Only one of `schedule` and `continuous` can be used.""" - - deployment: Optional[JobDeployment] = None - """Deployment information for jobs managed by external sources.""" - - description: Optional[str] = None - """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - - edit_mode: Optional[JobEditMode] = None - """Edit mode of the job. - - * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is - in an editable state and can be modified.""" - - email_notifications: Optional[JobEmailNotifications] = None - """An optional set of email addresses that is notified when runs of this job begin or complete as - well as when this job is deleted.""" - - environments: Optional[List[JobEnvironment]] = None - """A list of task execution environment specifications that can be referenced by serverless tasks - of this job. An environment is required to be present for serverless tasks. For serverless - notebook tasks, the environment is accessible in the notebook environment panel. For other - serverless tasks, the task environment is required to be specified using environment_key in the - task settings.""" - - format: Optional[Format] = None - """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. 
- When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.""" - - git_source: Optional[GitSource] = None - """An optional specification for a remote Git repository containing the source code used by tasks. - Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - - If `git_source` is set, these tasks retrieve the file from the remote repository by default. - However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - - Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks - are used, `git_source` must be defined on the job.""" - - health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" - - job_clusters: Optional[List[JobCluster]] = None - """A list of job cluster specifications that can be shared and reused by tasks of this job. - Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - task settings.""" - - max_concurrent_runs: Optional[int] = None - """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to - be able to execute multiple runs of the same job concurrently. This is useful for example if you - trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each - other, or if you want to trigger multiple runs which differ by their input parameters. This - setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 - concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. - However, from then on, new runs are skipped unless there are fewer than 3 active runs. This - value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.""" - - name: Optional[str] = None - """An optional name for the job. 
The maximum length is 4096 bytes in UTF-8 encoding.""" - - notification_settings: Optional[JobNotificationSettings] = None - """Optional notification settings that are used when sending notifications to each of the - `email_notifications` and `webhook_notifications` for this job.""" - - parameters: Optional[List[JobParameterDefinition]] = None - """Job-level parameter definitions""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. This field determines the level of compute performance - or cost-efficiency for the run. - - * `STANDARD`: Enables cost-efficient execution of serverless workloads. * - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - - queue: Optional[QueueSettings] = None - """The queue settings of the job.""" - - run_as: Optional[JobRunAs] = None - """Write-only setting. Specifies the user or service principal that the job runs as. If not - specified, the job runs as the user who created the job. - - Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - - schedule: Optional[CronSchedule] = None - """An optional periodic schedule for this job. The default behavior is that the job only runs when - triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for - jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can - be added to the job.""" - - tasks: Optional[List[Task]] = None - """A list of task specifications to be executed by this job. It supports up to 1000 elements in - write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, - :method:jobs/submit). Read endpoints return only 100 tasks. 
If more than 100 tasks are - available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field - at the object root to determine if more results are available.""" - - timeout_seconds: Optional[int] = None - """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - - trigger: Optional[TriggerSettings] = None - """A configuration to trigger a run when certain conditions are met. The default behavior is that - the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API - request to `runNow`.""" - - webhook_notifications: Optional[WebhookNotifications] = None - """A collection of system notification IDs to notify when runs of this job begin or complete.""" - - def as_dict(self) -> dict: - """Serializes the CreateJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous.as_dict() - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode.value - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.format is not None: - body["format"] = self.format.value - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - 
body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger.as_dict() - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.format is not None: - body["format"] = self.format - if self.git_source: - body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = 
self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.parameters: - body["parameters"] = self.parameters - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.schedule: - body["schedule"] = self.schedule - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateJob: - """Deserializes the CreateJob from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - continuous=_from_dict(d, "continuous", Continuous), - deployment=_from_dict(d, "deployment", JobDeployment), - description=d.get("description", None), - edit_mode=_enum(d, "edit_mode", JobEditMode), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - format=_enum(d, "format", Format), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - max_concurrent_runs=d.get("max_concurrent_runs", None), - name=d.get("name", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - parameters=_repeated_dict(d, "parameters", JobParameterDefinition), - performance_target=_enum(d, "performance_target", PerformanceTarget), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - 
schedule=_from_dict(d, "schedule", CronSchedule), - tags=d.get("tags", None), - tasks=_repeated_dict(d, "tasks", Task), - timeout_seconds=d.get("timeout_seconds", None), - trigger=_from_dict(d, "trigger", TriggerSettings), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) - - @dataclass class CreateResponse: """Job was created successfully""" @@ -1825,31 +1502,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtTask: ) -@dataclass -class DeleteJob: - job_id: int - """The canonical identifier of the job to delete. This field is required.""" - - def as_dict(self) -> dict: - """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteJob: - """Deserializes the DeleteJob from a dictionary.""" - return cls(job_id=d.get("job_id", None)) - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -1868,31 +1520,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteRun: - run_id: int - """ID of the run to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: - """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get("run_id", 
None)) - - @dataclass class DeleteRunResponse: def as_dict(self) -> dict: @@ -1959,38 +1586,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceForJobResponseJo ) -@dataclass -class EnforcePolicyComplianceRequest: - job_id: int - """The ID of the job you want to enforce policy compliance on.""" - - validate_only: Optional[bool] = None - """If set, previews changes made to the job to comply with its policy, but does not update the job.""" - - def as_dict(self) -> dict: - """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceRequest: - """Deserializes the EnforcePolicyComplianceRequest from a dictionary.""" - return cls(job_id=d.get("job_id", None), validate_only=d.get("validate_only", None)) - - @dataclass class EnforcePolicyComplianceResponse: has_changes: Optional[bool] = None @@ -3313,40 +2908,6 @@ def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsDescription: ) -@dataclass -class JobPermissionsRequest: - access_control_list: Optional[List[JobAccessControlRequest]] = None - - job_id: Optional[str] = None - """The job for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if 
self.job_id is not None: - body["job_id"] = self.job_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsRequest: - """Deserializes the JobPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - job_id=d.get("job_id", None), - ) - - @dataclass class JobRunAs: """Write-only setting. Specifies the user or service principal that the job runs as. If not @@ -4583,241 +4144,43 @@ def as_shallow_dict(self) -> dict: if self.effective_performance_target is not None: body["effective_performance_target"] = self.effective_performance_target if self.end_time is not None: - body["end_time"] = self.end_time - if self.id is not None: - body["id"] = self.id - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state - if self.status: - body["status"] = self.status - if self.task_run_ids: - body["task_run_ids"] = self.task_run_ids - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem: - """Deserializes the RepairHistoryItem from a dictionary.""" - return cls( - effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - end_time=d.get("end_time", None), - id=d.get("id", None), - start_time=d.get("start_time", None), - state=_from_dict(d, "state", RunState), - status=_from_dict(d, "status", RunStatus), - task_run_ids=d.get("task_run_ids", None), - type=_enum(d, "type", RepairHistoryItemType), - ) - - -class RepairHistoryItemType(Enum): - """The repair history item 
type. Indicates whether a run is the original run or a repair run.""" - - ORIGINAL = "ORIGINAL" - REPAIR = "REPAIR" - - -@dataclass -class RepairRun: - run_id: int - """The job run ID of the run to repair. The run must not be in progress.""" - - dbt_commands: Optional[List[str]] = None - """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - - jar_params: Optional[List[str]] = None - """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", - "35"]`. The parameters are used to invoke the main function of the main class specified in the - Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot - be specified in conjunction with notebook_params. The JSON representation of this field (for - example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used in the run. for example `"param": "overriding_val"`""" - - latest_repair_id: Optional[int] = None - """The ID of the latest repair. This parameter is not required when repairing a run for the first - time, but must be provided on subsequent requests to repair the same run.""" - - notebook_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": - "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the - [dbutils.widgets.get] function. - - If not specified upon `run-now`, the triggered run uses the job’s base parameters. - - notebook_params cannot be specified in conjunction with jar_params. 
- - Use [Task parameter variables] to set parameters containing information about job runs. - - The JSON representation of this field (for example `{"notebook_params":{"name":"john - doe","age":"35"}}`) cannot exceed 10,000 bytes. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables - [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. The performance target determines the level of compute - performance or cost-efficiency for the run. This field overrides the performance target defined - on the job level. - - * `STANDARD`: Enables cost-efficient execution of serverless workloads. * - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - - pipeline_params: Optional[PipelineParams] = None - """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - - python_params: Optional[List[str]] = None - """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", - "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon - `run-now`, it would overwrite the parameters specified in job setting. The JSON representation - of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. 
- - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - rerun_all_failed_tasks: Optional[bool] = None - """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be - used.""" - - rerun_dependent_tasks: Optional[bool] = None - """If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were - previously successful. Can be also used in combination with `rerun_all_failed_tasks`.""" - - rerun_tasks: Optional[List[str]] = None - """The task keys of the task runs to repair.""" - - spark_submit_params: Optional[List[str]] = None - """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": - ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit - script as command-line parameters. If specified upon `run-now`, it would overwrite the - parameters specified in job setting. The JSON representation of this field (for example - `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john - doe", "age": "35"}`. 
The SQL alert task does not support custom parameters.""" - - def as_dict(self) -> dict: - """Serializes the RepairRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.latest_repair_id is not None: - body["latest_repair_id"] = self.latest_repair_id - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.rerun_all_failed_tasks is not None: - body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: - body["rerun_dependent_tasks"] = self.rerun_dependent_tasks - if self.rerun_tasks: - body["rerun_tasks"] = [v for v in self.rerun_tasks] - if self.run_id is not None: - body["run_id"] = self.run_id - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RepairRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.latest_repair_id is not None: - body["latest_repair_id"] = self.latest_repair_id - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if 
self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.rerun_all_failed_tasks is not None: - body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: - body["rerun_dependent_tasks"] = self.rerun_dependent_tasks - if self.rerun_tasks: - body["rerun_tasks"] = self.rerun_tasks - if self.run_id is not None: - body["run_id"] = self.run_id - if self.spark_submit_params: - body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params + body["end_time"] = self.end_time + if self.id is not None: + body["id"] = self.id + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.task_run_ids: + body["task_run_ids"] = self.task_run_ids + if self.type is not None: + body["type"] = self.type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RepairRun: - """Deserializes the RepairRun from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem: + """Deserializes the RepairHistoryItem from a dictionary.""" return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - job_parameters=d.get("job_parameters", None), - latest_repair_id=d.get("latest_repair_id", None), - notebook_params=d.get("notebook_params", None), - performance_target=_enum(d, "performance_target", PerformanceTarget), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None), 
- rerun_dependent_tasks=d.get("rerun_dependent_tasks", None), - rerun_tasks=d.get("rerun_tasks", None), - run_id=d.get("run_id", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + end_time=d.get("end_time", None), + id=d.get("id", None), + start_time=d.get("start_time", None), + state=_from_dict(d, "state", RunState), + status=_from_dict(d, "status", RunStatus), + task_run_ids=d.get("task_run_ids", None), + type=_enum(d, "type", RepairHistoryItemType), ) +class RepairHistoryItemType(Enum): + """The repair history item type. Indicates whether a run is the original run or a repair run.""" + + ORIGINAL = "ORIGINAL" + REPAIR = "REPAIR" + + @dataclass class RepairRunResponse: """Run repair was initiated.""" @@ -4846,41 +4209,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: return cls(repair_id=d.get("repair_id", None)) -@dataclass -class ResetJob: - job_id: int - """The canonical identifier of the job to reset. This field is required.""" - - new_settings: JobSettings - """The new settings of the job. These settings completely replace the old settings. - - Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. 
Changes to - other fields are applied to future runs only.""" - - def as_dict(self) -> dict: - """Serializes the ResetJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResetJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResetJob: - """Deserializes the ResetJob from a dictionary.""" - return cls(job_id=d.get("job_id", None), new_settings=_from_dict(d, "new_settings", JobSettings)) - - @dataclass class ResetResponse: def as_dict(self) -> dict: @@ -5657,247 +4985,48 @@ class RunIf(Enum): completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: ALl dependencies have failed""" - ALL_DONE = "ALL_DONE" - ALL_FAILED = "ALL_FAILED" - ALL_SUCCESS = "ALL_SUCCESS" - AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" - AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" - NONE_FAILED = "NONE_FAILED" - - -@dataclass -class RunJobOutput: - run_id: Optional[int] = None - """The run id of the triggered job run""" - - def as_dict(self) -> dict: - """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: - """Deserializes the RunJobOutput from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - 
-@dataclass -class RunJobTask: - job_id: int - """ID of the job to trigger.""" - - dbt_commands: Optional[List[str]] = None - """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - - jar_params: Optional[List[str]] = None - """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", - "35"]`. The parameters are used to invoke the main function of the main class specified in the - Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot - be specified in conjunction with notebook_params. The JSON representation of this field (for - example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used to trigger the job.""" - - notebook_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": - "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the - [dbutils.widgets.get] function. - - If not specified upon `run-now`, the triggered run uses the job’s base parameters. - - notebook_params cannot be specified in conjunction with jar_params. - - Use [Task parameter variables] to set parameters containing information about job runs. - - The JSON representation of this field (for example `{"notebook_params":{"name":"john - doe","age":"35"}}`) cannot exceed 10,000 bytes. 
- - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables - [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - - pipeline_params: Optional[PipelineParams] = None - """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - - python_params: Optional[List[str]] = None - """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", - "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon - `run-now`, it would overwrite the parameters specified in job setting. The JSON representation - of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - spark_submit_params: Optional[List[str]] = None - """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": - ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit - script as command-line parameters. If specified upon `run-now`, it would overwrite the - parameters specified in job setting. The JSON representation of this field (for example - `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. 
- - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john - doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - - def as_dict(self) -> dict: - """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.spark_submit_params: - 
body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: - """Deserializes the RunJobTask from a dictionary.""" - return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - job_id=d.get("job_id", None), - job_parameters=d.get("job_parameters", None), - notebook_params=d.get("notebook_params", None), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), - ) - - -class RunLifeCycleState(Enum): - """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is - queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context - are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The - task of this run has completed, and the cluster and execution context are being cleaned up. * - `TERMINATED`: The task of this run has completed, and the cluster and execution context have - been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous - run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An - exceptional state that indicates a failure in the Jobs service, such as network failure over a - long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service - terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is - blocked on an upstream dependency. 
* `WAITING_FOR_RETRY`: The run is waiting for a retry.""" - - BLOCKED = "BLOCKED" - INTERNAL_ERROR = "INTERNAL_ERROR" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - SKIPPED = "SKIPPED" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + ALL_DONE = "ALL_DONE" + ALL_FAILED = "ALL_FAILED" + ALL_SUCCESS = "ALL_SUCCESS" + AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" + AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" + NONE_FAILED = "NONE_FAILED" -class RunLifecycleStateV2State(Enum): - """The current state of the run.""" +@dataclass +class RunJobOutput: + run_id: Optional[int] = None + """The run id of the triggered job run""" - BLOCKED = "BLOCKED" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING = "WAITING" + def as_dict(self) -> dict: + """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.run_id is not None: + body["run_id"] = self.run_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: + body["run_id"] = self.run_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: + """Deserializes the RunJobOutput from a dictionary.""" + return cls(run_id=d.get("run_id", None)) @dataclass -class RunNow: +class RunJobTask: job_id: int - """The ID of the job to be executed""" + """ID of the job to trigger.""" dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - idempotency_token: Optional[str] = None - """An optional token to guarantee the idempotency of job run requests. 
If a run with the provided - token already exists, the request does not create a new run but returns the ID of the existing - run instead. If a run with the provided token is deleted, an error is returned. - - If you specify the idempotency token, upon failure you can retry until the request succeeds. - Databricks guarantees that exactly one run is launched with that idempotency token. - - This token must have at most 64 characters. - - For more information, see [How to ensure idempotency for jobs]. - - [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5910,7 +5039,7 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used in the run. for example `"param": "overriding_val"`""" + """Job-level parameters used to trigger the job.""" notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": @@ -5929,19 +5058,6 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - only: Optional[List[str]] = None - """A list of task keys to run inside of the job. If this field is not provided, all tasks in the - job will be run.""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. The performance target determines the level of compute - performance or cost-efficiency for the run. This field overrides the performance target defined - on the job level. 
- - * `STANDARD`: Enables cost-efficient execution of serverless workloads. * - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" @@ -5963,9 +5079,6 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - queue: Optional[QueueSettings] = None - """The queue settings of the run.""" - spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5988,12 +5101,10 @@ class RunNow: doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" def as_dict(self) -> dict: - """Serializes the RunNow into a dictionary suitable for use as a JSON request body.""" + """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" body = {} if self.dbt_commands: body["dbt_commands"] = [v for v in self.dbt_commands] - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token if self.jar_params: body["jar_params"] = [v for v in self.jar_params] if self.job_id is not None: @@ -6002,18 +5113,12 @@ def as_dict(self) -> dict: body["job_parameters"] = self.job_parameters if self.notebook_params: body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = [v for v in self.only] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value if self.pipeline_params: body["pipeline_params"] = self.pipeline_params.as_dict() if self.python_named_params: body["python_named_params"] = self.python_named_params if self.python_params: body["python_params"] = [v for v in self.python_params] - if self.queue: - body["queue"] = 
self.queue.as_dict() if self.spark_submit_params: body["spark_submit_params"] = [v for v in self.spark_submit_params] if self.sql_params: @@ -6021,12 +5126,10 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the RunNow into a shallow dictionary of its immediate attributes.""" + """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" body = {} if self.dbt_commands: body["dbt_commands"] = self.dbt_commands - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token if self.jar_params: body["jar_params"] = self.jar_params if self.job_id is not None: @@ -6035,18 +5138,12 @@ def as_shallow_dict(self) -> dict: body["job_parameters"] = self.job_parameters if self.notebook_params: body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = self.only - if self.performance_target is not None: - body["performance_target"] = self.performance_target if self.pipeline_params: body["pipeline_params"] = self.pipeline_params if self.python_named_params: body["python_named_params"] = self.python_named_params if self.python_params: body["python_params"] = self.python_params - if self.queue: - body["queue"] = self.queue if self.spark_submit_params: body["spark_submit_params"] = self.spark_submit_params if self.sql_params: @@ -6054,26 +5151,58 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunNow: - """Deserializes the RunNow from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: + """Deserializes the RunJobTask from a dictionary.""" return cls( dbt_commands=d.get("dbt_commands", None), - idempotency_token=d.get("idempotency_token", None), jar_params=d.get("jar_params", None), job_id=d.get("job_id", None), job_parameters=d.get("job_parameters", None), notebook_params=d.get("notebook_params", None), - only=d.get("only", None), - performance_target=_enum(d, "performance_target", 
PerformanceTarget), pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), python_named_params=d.get("python_named_params", None), python_params=d.get("python_params", None), - queue=_from_dict(d, "queue", QueueSettings), spark_submit_params=d.get("spark_submit_params", None), sql_params=d.get("sql_params", None), ) +class RunLifeCycleState(Enum): + """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is + queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context + are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The + task of this run has completed, and the cluster and execution context are being cleaned up. * + `TERMINATED`: The task of this run has completed, and the cluster and execution context have + been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous + run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An + exceptional state that indicates a failure in the Jobs service, such as network failure over a + long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service + terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is + blocked on an upstream dependency. 
* `WAITING_FOR_RETRY`: The run is waiting for a retry.""" + + BLOCKED = "BLOCKED" + INTERNAL_ERROR = "INTERNAL_ERROR" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + SKIPPED = "SKIPPED" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + + +class RunLifecycleStateV2State(Enum): + """The current state of the run.""" + + BLOCKED = "BLOCKED" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING = "WAITING" + + @dataclass class RunNowResponse: """Run was started successfully.""" @@ -7764,157 +6893,6 @@ class StorageMode(Enum): IMPORT = "IMPORT" -@dataclass -class SubmitRun: - access_control_list: Optional[List[JobAccessControlRequest]] = None - """List of permissions to set on the job.""" - - budget_policy_id: Optional[str] = None - """The user specified id of the budget policy to use for this one-time run. If not specified, the - run will be not be attributed to any budget policy.""" - - email_notifications: Optional[JobEmailNotifications] = None - """An optional set of email addresses notified when the run begins or completes.""" - - environments: Optional[List[JobEnvironment]] = None - """A list of task execution environment specifications that can be referenced by tasks of this run.""" - - git_source: Optional[GitSource] = None - """An optional specification for a remote Git repository containing the source code used by tasks. - Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - - If `git_source` is set, these tasks retrieve the file from the remote repository by default. - However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - - Note: dbt and SQL File tasks support only version-controlled sources. 
If dbt or SQL File tasks - are used, `git_source` must be defined on the job.""" - - health: Optional[JobsHealthRules] = None - """An optional set of health rules that can be defined for this job.""" - - idempotency_token: Optional[str] = None - """An optional token that can be used to guarantee the idempotency of job run requests. If a run - with the provided token already exists, the request does not create a new run but returns the ID - of the existing run instead. If a run with the provided token is deleted, an error is returned. - - If you specify the idempotency token, upon failure you can retry until the request succeeds. - Databricks guarantees that exactly one run is launched with that idempotency token. - - This token must have at most 64 characters. - - For more information, see [How to ensure idempotency for jobs]. - - [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - - notification_settings: Optional[JobNotificationSettings] = None - """Optional notification settings that are used when sending notifications to each of the - `email_notifications` and `webhook_notifications` for this run.""" - - queue: Optional[QueueSettings] = None - """The queue settings of the one-time run.""" - - run_as: Optional[JobRunAs] = None - """Specifies the user or service principal that the job runs as. If not specified, the job runs as - the user who submits the request.""" - - run_name: Optional[str] = None - """An optional name for the run. The default value is `Untitled`.""" - - tasks: Optional[List[SubmitTask]] = None - - timeout_seconds: Optional[int] = None - """An optional timeout applied to each run of this job. 
A value of `0` means no timeout.""" - - webhook_notifications: Optional[WebhookNotifications] = None - """A collection of system notification IDs to notify when the run begins or completes.""" - - def as_dict(self) -> dict: - """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SubmitRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.git_source: - 
body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SubmitRun: - """Deserializes the SubmitRun from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - idempotency_token=d.get("idempotency_token", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - run_name=d.get("run_name", None), - tasks=_repeated_dict(d, "tasks", SubmitTask), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) - - @dataclass class SubmitRunResponse: """Run was created and started successfully.""" @@ -9166,59 +8144,6 @@ class TriggerType(Enum): TABLE = "TABLE" -@dataclass -class UpdateJob: - job_id: int - """The canonical identifier of the job to update. 
This field is required.""" - - fields_to_remove: Optional[List[str]] = None - """Remove top-level fields in the job settings. Removing nested fields is not supported, except for - tasks and job clusters (`tasks/task_1`). This field is optional.""" - - new_settings: Optional[JobSettings] = None - """The new settings for the job. - - Top-level fields specified in `new_settings` are completely replaced, except for arrays which - are merged. That is, new and existing entries are completely replaced based on the respective - key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - - Partially updating nested fields is not supported. - - Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other - fields are applied to future runs only.""" - - def as_dict(self) -> dict: - """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.fields_to_remove: - body["fields_to_remove"] = [v for v in self.fields_to_remove] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.fields_to_remove: - body["fields_to_remove"] = self.fields_to_remove - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateJob: - """Deserializes the UpdateJob from a dictionary.""" - return cls( - fields_to_remove=d.get("fields_to_remove", None), - job_id=d.get("job_id", None), - new_settings=_from_dict(d, "new_settings", JobSettings), - ) - - @dataclass class UpdateResponse: def as_dict(self) -> dict: diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 
9ac4c153..0537854f 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -15,36 +15,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddExchangeForListingRequest: - listing_id: str - - exchange_id: str - - def as_dict(self) -> dict: - """Serializes the AddExchangeForListingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingRequest: - """Deserializes the AddExchangeForListingRequest from a dictionary.""" - return cls(exchange_id=d.get("exchange_id", None), listing_id=d.get("listing_id", None)) - - @dataclass class AddExchangeForListingResponse: exchange_for_listing: Optional[ExchangeListing] = None @@ -233,30 +203,6 @@ class Cost(Enum): PAID = "PAID" -@dataclass -class CreateExchangeFilterRequest: - filter: ExchangeFilter - - def as_dict(self) -> dict: - """Serializes the CreateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter: - body["filter"] = self.filter.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter: - body["filter"] = self.filter - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterRequest: - """Deserializes the CreateExchangeFilterRequest from a 
dictionary.""" - return cls(filter=_from_dict(d, "filter", ExchangeFilter)) - - @dataclass class CreateExchangeFilterResponse: filter_id: Optional[str] = None @@ -281,30 +227,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterResponse: return cls(filter_id=d.get("filter_id", None)) -@dataclass -class CreateExchangeRequest: - exchange: Exchange - - def as_dict(self) -> dict: - """Serializes the CreateExchangeRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeRequest: - """Deserializes the CreateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, "exchange", Exchange)) - - @dataclass class CreateExchangeResponse: exchange_id: Optional[str] = None @@ -329,53 +251,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeResponse: return cls(exchange_id=d.get("exchange_id", None)) -@dataclass -class CreateFileRequest: - file_parent: FileParent - - marketplace_file_type: MarketplaceFileType - - mime_type: str - - display_name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the CreateFileRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent.as_dict() - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type.value - if self.mime_type is not None: - body["mime_type"] = self.mime_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateFileRequest into a shallow dictionary of 
its immediate attributes.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type - if self.mime_type is not None: - body["mime_type"] = self.mime_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFileRequest: - """Deserializes the CreateFileRequest from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - file_parent=_from_dict(d, "file_parent", FileParent), - marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType), - mime_type=d.get("mime_type", None), - ) - - @dataclass class CreateFileResponse: file_info: Optional[FileInfo] = None @@ -407,92 +282,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateFileResponse: return cls(file_info=_from_dict(d, "file_info", FileInfo), upload_url=d.get("upload_url", None)) -@dataclass -class CreateInstallationRequest: - accepted_consumer_terms: Optional[ConsumerTerms] = None - - catalog_name: Optional[str] = None - - listing_id: Optional[str] = None - - recipient_type: Optional[DeltaSharingRecipientType] = None - - repo_detail: Optional[RepoInstallation] = None - """for git repo installations""" - - share_name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the CreateInstallationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - if self.repo_detail: - body["repo_detail"] = self.repo_detail.as_dict() - if self.share_name is not None: - 
body["share_name"] = self.share_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type - if self.repo_detail: - body["repo_detail"] = self.repo_detail - if self.share_name is not None: - body["share_name"] = self.share_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateInstallationRequest: - """Deserializes the CreateInstallationRequest from a dictionary.""" - return cls( - accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), - catalog_name=d.get("catalog_name", None), - listing_id=d.get("listing_id", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - repo_detail=_from_dict(d, "repo_detail", RepoInstallation), - share_name=d.get("share_name", None), - ) - - -@dataclass -class CreateListingRequest: - listing: Listing - - def as_dict(self) -> dict: - """Serializes the CreateListingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.listing: - body["listing"] = self.listing.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.listing: - body["listing"] = self.listing - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateListingRequest: - """Deserializes the CreateListingRequest from a dictionary.""" - return cls(listing=_from_dict(d, "listing", Listing)) - - @dataclass class CreateListingResponse: listing_id: Optional[str] = None @@ 
-517,90 +306,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateListingResponse: return cls(listing_id=d.get("listing_id", None)) -@dataclass -class CreatePersonalizationRequest: - """Data request messages also creates a lead (maybe)""" - - intended_use: str - - accepted_consumer_terms: ConsumerTerms - - comment: Optional[str] = None - - company: Optional[str] = None - - first_name: Optional[str] = None - - is_from_lighthouse: Optional[bool] = None - - last_name: Optional[str] = None - - listing_id: Optional[str] = None - - recipient_type: Optional[DeltaSharingRecipientType] = None - - def as_dict(self) -> dict: - """Serializes the CreatePersonalizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = 
self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequest: - """Deserializes the CreatePersonalizationRequest from a dictionary.""" - return cls( - accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), - comment=d.get("comment", None), - company=d.get("company", None), - first_name=d.get("first_name", None), - intended_use=d.get("intended_use", None), - is_from_lighthouse=d.get("is_from_lighthouse", None), - last_name=d.get("last_name", None), - listing_id=d.get("listing_id", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - ) - - @dataclass class CreatePersonalizationRequestResponse: id: Optional[str] = None @@ -625,30 +330,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequestResponse: return cls(id=d.get("id", None)) -@dataclass -class CreateProviderRequest: - provider: ProviderInfo - - def as_dict(self) -> dict: - """Serializes the CreateProviderRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.provider: - body["provider"] = self.provider.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.provider: - body["provider"] = self.provider - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateProviderRequest: - """Deserializes the CreateProviderRequest from a dictionary.""" - return cls(provider=_from_dict(d, "provider", ProviderInfo)) - - @dataclass class CreateProviderResponse: id: Optional[str] = None @@ 
-3061,36 +2742,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenInfo: ) -@dataclass -class UpdateExchangeFilterRequest: - filter: ExchangeFilter - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter: - body["filter"] = self.filter.as_dict() - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter: - body["filter"] = self.filter - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterRequest: - """Deserializes the UpdateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, "filter", ExchangeFilter), id=d.get("id", None)) - - @dataclass class UpdateExchangeFilterResponse: filter: Optional[ExchangeFilter] = None @@ -3115,36 +2766,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterResponse: return cls(filter=_from_dict(d, "filter", ExchangeFilter)) -@dataclass -class UpdateExchangeRequest: - exchange: Exchange - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateExchangeRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeRequest: - """Deserializes the UpdateExchangeRequest from a dictionary.""" - 
return cls(exchange=_from_dict(d, "exchange", Exchange), id=d.get("id", None)) - - @dataclass class UpdateExchangeResponse: exchange: Optional[Exchange] = None @@ -3169,53 +2790,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeResponse: return cls(exchange=_from_dict(d, "exchange", Exchange)) -@dataclass -class UpdateInstallationRequest: - installation: InstallationDetail - - installation_id: Optional[str] = None - - listing_id: Optional[str] = None - - rotate_token: Optional[bool] = None - - def as_dict(self) -> dict: - """Serializes the UpdateInstallationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.installation: - body["installation"] = self.installation.as_dict() - if self.installation_id is not None: - body["installation_id"] = self.installation_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.rotate_token is not None: - body["rotate_token"] = self.rotate_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.installation: - body["installation"] = self.installation - if self.installation_id is not None: - body["installation_id"] = self.installation_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.rotate_token is not None: - body["rotate_token"] = self.rotate_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationRequest: - """Deserializes the UpdateInstallationRequest from a dictionary.""" - return cls( - installation=_from_dict(d, "installation", InstallationDetail), - installation_id=d.get("installation_id", None), - listing_id=d.get("listing_id", None), - rotate_token=d.get("rotate_token", None), - ) - - @dataclass class UpdateInstallationResponse: installation: Optional[InstallationDetail] = None @@ -3240,36 +2814,6 @@ def from_dict(cls, d: Dict[str, Any]) 
-> UpdateInstallationResponse: return cls(installation=_from_dict(d, "installation", InstallationDetail)) -@dataclass -class UpdateListingRequest: - listing: Listing - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateListingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.listing: - body["listing"] = self.listing.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.listing: - body["listing"] = self.listing - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateListingRequest: - """Deserializes the UpdateListingRequest from a dictionary.""" - return cls(id=d.get("id", None), listing=_from_dict(d, "listing", Listing)) - - @dataclass class UpdateListingResponse: listing: Optional[Listing] = None @@ -3294,60 +2838,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateListingResponse: return cls(listing=_from_dict(d, "listing", Listing)) -@dataclass -class UpdatePersonalizationRequestRequest: - status: PersonalizationRequestStatus - - listing_id: Optional[str] = None - - reason: Optional[str] = None - - request_id: Optional[str] = None - - share: Optional[ShareInfo] = None - - def as_dict(self) -> dict: - """Serializes the UpdatePersonalizationRequestRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.reason is not None: - body["reason"] = self.reason - if self.request_id is not None: - body["request_id"] = self.request_id - if self.share: - body["share"] = self.share.as_dict() - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the 
UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.reason is not None: - body["reason"] = self.reason - if self.request_id is not None: - body["request_id"] = self.request_id - if self.share: - body["share"] = self.share - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestRequest: - """Deserializes the UpdatePersonalizationRequestRequest from a dictionary.""" - return cls( - listing_id=d.get("listing_id", None), - reason=d.get("reason", None), - request_id=d.get("request_id", None), - share=_from_dict(d, "share", ShareInfo), - status=_enum(d, "status", PersonalizationRequestStatus), - ) - - @dataclass class UpdatePersonalizationRequestResponse: request: Optional[PersonalizationRequest] = None @@ -3372,39 +2862,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestResponse: return cls(request=_from_dict(d, "request", PersonalizationRequest)) -@dataclass -class UpdateProviderAnalyticsDashboardRequest: - id: Optional[str] = None - """id is immutable property and can't be updated.""" - - version: Optional[int] = None - """this is the version of the dashboard template we want to update our user to current expectation - is that it should be equal to latest version of the dashboard template""" - - def as_dict(self) -> dict: - """Serializes the UpdateProviderAnalyticsDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if 
self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRequest: - """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary.""" - return cls(id=d.get("id", None), version=d.get("version", None)) - - @dataclass class UpdateProviderAnalyticsDashboardResponse: id: str @@ -3443,36 +2900,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRespons return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None)) -@dataclass -class UpdateProviderRequest: - provider: ProviderInfo - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateProviderRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderRequest: - """Deserializes the UpdateProviderRequest from a dictionary.""" - return cls(id=d.get("id", None), provider=_from_dict(d, "provider", ProviderInfo)) - - @dataclass class UpdateProviderResponse: provider: Optional[ProviderInfo] = None diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index c8acbfd6..98a552c2 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -179,73 +179,6 @@ class ActivityType(Enum): SYSTEM_TRANSITION = "SYSTEM_TRANSITION" -@dataclass -class ApproveTransitionRequest: - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: Stage - """Target stage of 
the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - archive_existing_versions: bool - """Specifies whether to archive all current model versions in the target stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the ApproveTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: - """Deserializes the ApproveTransitionRequest from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) - - @dataclass class ApproveTransitionRequestResponse: activity: Optional[Activity] = None @@ -350,45 +283,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
CommentObject: ) -@dataclass -class CreateComment: - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - comment: str - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the CreateComment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateComment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateComment: - """Deserializes the CreateComment from a dictionary.""" - return cls(comment=d.get("comment", None), name=d.get("name", None), version=d.get("version", None)) - - @dataclass class CreateCommentResponse: comment: Optional[CommentObject] = None @@ -414,53 +308,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCommentResponse: return cls(comment=_from_dict(d, "comment", CommentObject)) -@dataclass -class CreateExperiment: - name: str - """Experiment name.""" - - artifact_location: Optional[str] = None - """Location where all artifacts for the experiment are stored. If not provided, the remote server - will select an appropriate default.""" - - tags: Optional[List[ExperimentTag]] = None - """A collection of tags to set on the experiment. Maximum tag size and number of tags per request - depends on the storage backend. All storage backends are guaranteed to support tag keys up to - 250 bytes in size and tag values up to 5000 bytes in size. 
All storage backends are also - guaranteed to support up to 20 tags per request.""" - - def as_dict(self) -> dict: - """Serializes the CreateExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExperiment: - """Deserializes the CreateExperiment from a dictionary.""" - return cls( - artifact_location=d.get("artifact_location", None), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", ExperimentTag), - ) - - @dataclass class CreateExperimentResponse: experiment_id: Optional[str] = None @@ -486,177 +333,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExperimentResponse: return cls(experiment_id=d.get("experiment_id", None)) -@dataclass -class CreateForecastingExperimentRequest: - train_data_path: str - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used as training data for the forecasting model.""" - - target_column: str - """The column in the input training table used as the prediction target for model training. The - values in this column are used as the ground truth for model training.""" - - time_column: str - """The column in the input training table that represents each row's timestamp.""" - - forecast_granularity: str - """The time interval between consecutive rows in the time series data. 
Possible values include: '1 - second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly', 'Daily', - 'Weekly', 'Monthly', 'Quarterly', 'Yearly'.""" - - forecast_horizon: int - """The number of time steps into the future to make predictions, calculated as a multiple of - forecast_granularity. This value represents how far ahead the model should forecast.""" - - custom_weights_column: Optional[str] = None - """The column in the training table used to customize weights for each time series.""" - - experiment_path: Optional[str] = None - """The path in the workspace to store the created experiment.""" - - future_feature_data_path: Optional[str] = None - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used to store future feature data for predictions.""" - - holiday_regions: Optional[List[str]] = None - """The region code(s) to automatically add holiday features. Currently supports only one region.""" - - include_features: Optional[List[str]] = None - """Specifies the list of feature columns to include in model training. These columns must exist in - the training data and be of type string, numerical, or boolean. If not specified, no additional - features will be included. Note: Certain columns are automatically handled: - Automatically - excluded: split_column, target_column, custom_weights_column. - Automatically included: - time_column.""" - - max_runtime: Optional[int] = None - """The maximum duration for the experiment in minutes. 
The experiment stops automatically if it - exceeds this limit.""" - - prediction_data_path: Optional[str] = None - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used to store predictions.""" - - primary_metric: Optional[str] = None - """The evaluation metric used to optimize the forecasting model.""" - - register_to: Optional[str] = None - """The fully qualified path of a Unity Catalog model, formatted as - catalog_name.schema_name.model_name, used to store the best model.""" - - split_column: Optional[str] = None - """// The column in the training table used for custom data splits. Values must be 'train', - 'validate', or 'test'.""" - - timeseries_identifier_columns: Optional[List[str]] = None - """The column in the training table used to group the dataset for predicting individual time - series.""" - - training_frameworks: Optional[List[str]] = None - """List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', - 'DeepAR'. 
An empty list includes all supported frameworks.""" - - def as_dict(self) -> dict: - """Serializes the CreateForecastingExperimentRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_weights_column is not None: - body["custom_weights_column"] = self.custom_weights_column - if self.experiment_path is not None: - body["experiment_path"] = self.experiment_path - if self.forecast_granularity is not None: - body["forecast_granularity"] = self.forecast_granularity - if self.forecast_horizon is not None: - body["forecast_horizon"] = self.forecast_horizon - if self.future_feature_data_path is not None: - body["future_feature_data_path"] = self.future_feature_data_path - if self.holiday_regions: - body["holiday_regions"] = [v for v in self.holiday_regions] - if self.include_features: - body["include_features"] = [v for v in self.include_features] - if self.max_runtime is not None: - body["max_runtime"] = self.max_runtime - if self.prediction_data_path is not None: - body["prediction_data_path"] = self.prediction_data_path - if self.primary_metric is not None: - body["primary_metric"] = self.primary_metric - if self.register_to is not None: - body["register_to"] = self.register_to - if self.split_column is not None: - body["split_column"] = self.split_column - if self.target_column is not None: - body["target_column"] = self.target_column - if self.time_column is not None: - body["time_column"] = self.time_column - if self.timeseries_identifier_columns: - body["timeseries_identifier_columns"] = [v for v in self.timeseries_identifier_columns] - if self.train_data_path is not None: - body["train_data_path"] = self.train_data_path - if self.training_frameworks: - body["training_frameworks"] = [v for v in self.training_frameworks] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateForecastingExperimentRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.custom_weights_column is not None: - body["custom_weights_column"] = self.custom_weights_column - if self.experiment_path is not None: - body["experiment_path"] = self.experiment_path - if self.forecast_granularity is not None: - body["forecast_granularity"] = self.forecast_granularity - if self.forecast_horizon is not None: - body["forecast_horizon"] = self.forecast_horizon - if self.future_feature_data_path is not None: - body["future_feature_data_path"] = self.future_feature_data_path - if self.holiday_regions: - body["holiday_regions"] = self.holiday_regions - if self.include_features: - body["include_features"] = self.include_features - if self.max_runtime is not None: - body["max_runtime"] = self.max_runtime - if self.prediction_data_path is not None: - body["prediction_data_path"] = self.prediction_data_path - if self.primary_metric is not None: - body["primary_metric"] = self.primary_metric - if self.register_to is not None: - body["register_to"] = self.register_to - if self.split_column is not None: - body["split_column"] = self.split_column - if self.target_column is not None: - body["target_column"] = self.target_column - if self.time_column is not None: - body["time_column"] = self.time_column - if self.timeseries_identifier_columns: - body["timeseries_identifier_columns"] = self.timeseries_identifier_columns - if self.train_data_path is not None: - body["train_data_path"] = self.train_data_path - if self.training_frameworks: - body["training_frameworks"] = self.training_frameworks - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentRequest: - """Deserializes the CreateForecastingExperimentRequest from a dictionary.""" - return cls( - custom_weights_column=d.get("custom_weights_column", None), - experiment_path=d.get("experiment_path", None), - forecast_granularity=d.get("forecast_granularity", None), - forecast_horizon=d.get("forecast_horizon", None), - 
future_feature_data_path=d.get("future_feature_data_path", None), - holiday_regions=d.get("holiday_regions", None), - include_features=d.get("include_features", None), - max_runtime=d.get("max_runtime", None), - prediction_data_path=d.get("prediction_data_path", None), - primary_metric=d.get("primary_metric", None), - register_to=d.get("register_to", None), - split_column=d.get("split_column", None), - target_column=d.get("target_column", None), - time_column=d.get("time_column", None), - timeseries_identifier_columns=d.get("timeseries_identifier_columns", None), - train_data_path=d.get("train_data_path", None), - training_frameworks=d.get("training_frameworks", None), - ) - - @dataclass class CreateForecastingExperimentResponse: experiment_id: Optional[str] = None @@ -682,73 +358,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentResponse: return cls(experiment_id=d.get("experiment_id", None)) -@dataclass -class CreateLoggedModelRequest: - experiment_id: str - """The ID of the experiment that owns the model.""" - - model_type: Optional[str] = None - """The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - - name: Optional[str] = None - """The name of the model (optional). 
If not specified one will be generated.""" - - params: Optional[List[LoggedModelParameter]] = None - """Parameters attached to the model.""" - - source_run_id: Optional[str] = None - """The ID of the run that created the model.""" - - tags: Optional[List[LoggedModelTag]] = None - """Tags attached to the model.""" - - def as_dict(self) -> dict: - """Serializes the CreateLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateLoggedModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.params: - body["params"] = self.params - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelRequest: - """Deserializes the CreateLoggedModelRequest from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - model_type=d.get("model_type", None), - name=d.get("name", None), - params=_repeated_dict(d, "params", LoggedModelParameter), - source_run_id=d.get("source_run_id", None), - tags=_repeated_dict(d, "tags", LoggedModelTag), - ) - - @dataclass class CreateLoggedModelResponse: model: Optional[LoggedModel] = None @@ -774,47 
+383,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) -@dataclass -class CreateModelRequest: - name: str - """Register models under this name""" - - description: Optional[str] = None - """Optional description for registered model.""" - - tags: Optional[List[ModelTag]] = None - """Additional metadata for registered model.""" - - def as_dict(self) -> dict: - """Serializes the CreateModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateModelRequest: - """Deserializes the CreateModelRequest from a dictionary.""" - return cls( - description=d.get("description", None), name=d.get("name", None), tags=_repeated_dict(d, "tags", ModelTag) - ) - - @dataclass class CreateModelResponse: registered_model: Optional[Model] = None @@ -840,256 +408,28 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateModelResponse: @dataclass -class CreateModelVersionRequest: - name: str - """Register model under this name""" - - source: str - """URI indicating the location of the model artifacts.""" +class CreateModelVersionResponse: + model_version: Optional[ModelVersion] = None + """Return new version number generated for this model in registry.""" - description: Optional[str] = None - """Optional description for model version.""" + def as_dict(self) -> dict: + """Serializes the 
CreateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.model_version: + body["model_version"] = self.model_version.as_dict() + return body - run_id: Optional[str] = None - """MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking - server""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_version: + body["model_version"] = self.model_version + return body - run_link: Optional[str] = None - """MLflow run link - this is the exact link of the run that generated this model version, - potentially hosted at another instance of MLflow.""" - - tags: Optional[List[ModelVersionTag]] = None - """Additional metadata for model version.""" - - def as_dict(self) -> dict: - """Serializes the CreateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
CreateModelVersionRequest: - """Deserializes the CreateModelVersionRequest from a dictionary.""" - return cls( - description=d.get("description", None), - name=d.get("name", None), - run_id=d.get("run_id", None), - run_link=d.get("run_link", None), - source=d.get("source", None), - tags=_repeated_dict(d, "tags", ModelVersionTag), - ) - - -@dataclass -class CreateModelVersionResponse: - model_version: Optional[ModelVersion] = None - """Return new version number generated for this model in registry.""" - - def as_dict(self) -> dict: - """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_version: - body["model_version"] = self.model_version.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_version: - body["model_version"] = self.model_version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: - """Deserializes the CreateModelVersionResponse from a dictionary.""" - return cls(model_version=_from_dict(d, "model_version", ModelVersion)) - - -@dataclass -class CreateRegistryWebhook: - events: List[RegistryWebhookEvent] - """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was - created for the associated model. - - * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - - * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - - * `COMMENT_CREATED`: A user wrote a comment on a registered model. - - * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be - specified for a registry-wide webhook, which can be created by not specifying a model name in - the create request. - - * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. 
- - * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - - * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - - * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - - * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to - staging. - - * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned - to production. - - * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - - description: Optional[str] = None - """User-specified description for the webhook.""" - - http_url_spec: Optional[HttpUrlSpec] = None - - job_spec: Optional[JobSpec] = None - - model_name: Optional[str] = None - """If model name is not specified, a registry-wide webhook is created that listens for the - specified events across all versions of all registered models.""" - - status: Optional[RegistryWebhookStatus] = None - """Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. 
- - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event.""" - - def as_dict(self) -> dict: - """Serializes the CreateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.job_spec: - body["job_spec"] = self.job_spec - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRegistryWebhook: - """Deserializes the CreateRegistryWebhook from a dictionary.""" - return cls( - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), - job_spec=_from_dict(d, "job_spec", JobSpec), - model_name=d.get("model_name", None), - status=_enum(d, "status", RegistryWebhookStatus), - ) - - -@dataclass -class CreateRun: - experiment_id: Optional[str] = None - """ID of the associated experiment.""" - - run_name: Optional[str] = None - """The name of the run.""" - - start_time: Optional[int] = None - """Unix timestamp in milliseconds of when the run started.""" 
- - tags: Optional[List[RunTag]] = None - """Additional metadata for run.""" - - user_id: Optional[str] = None - """ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed - in a future MLflow release. Use 'mlflow.user' tag instead.""" - - def as_dict(self) -> dict: - """Serializes the CreateRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.start_time is not None: - body["start_time"] = self.start_time - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.start_time is not None: - body["start_time"] = self.start_time - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRun: - """Deserializes the CreateRun from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - run_name=d.get("run_name", None), - start_time=d.get("start_time", None), - tags=_repeated_dict(d, "tags", RunTag), - user_id=d.get("user_id", None), - ) + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: + """Deserializes the CreateModelVersionResponse from a dictionary.""" + return cls(model_version=_from_dict(d, "model_version", ModelVersion)) @dataclass @@ -1117,65 +457,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRunResponse: return cls(run=_from_dict(d, "run", Run)) -@dataclass -class 
CreateTransitionRequest: - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: Stage - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the CreateTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest: - """Deserializes the CreateTransitionRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) - - @dataclass class CreateTransitionRequestResponse: request: Optional[TransitionRequest] = None @@ -1352,34 +633,9 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteCommentResponse: @dataclass -class DeleteExperiment: - experiment_id: str - """ID of the associated experiment.""" - +class DeleteExperimentResponse: def as_dict(self) -> dict: - """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request 
body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteExperiment: - """Deserializes the DeleteExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) - - -@dataclass -class DeleteExperimentResponse: - def as_dict(self) -> dict: - """Serializes the DeleteExperimentResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @@ -1520,31 +776,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteOnlineStoreResponse: return cls() -@dataclass -class DeleteRun: - run_id: str - """ID of the run to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: - """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class DeleteRunResponse: def as_dict(self) -> dict: @@ -1563,51 +794,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: return cls() -@dataclass -class DeleteRuns: - experiment_id: str - """The ID of the experiment containing the runs to delete.""" - - max_timestamp_millis: int - """The maximum creation timestamp in 
milliseconds since the UNIX epoch for deleting runs. Only runs - created prior to or at this timestamp are deleted.""" - - max_runs: Optional[int] = None - """An optional positive integer indicating the maximum number of runs to delete. The maximum - allowed value for max_runs is 10000.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRuns: - """Deserializes the DeleteRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - max_timestamp_millis=d.get("max_timestamp_millis", None), - ) - - @dataclass class DeleteRunsResponse: runs_deleted: Optional[int] = None @@ -1633,38 +819,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteRunsResponse: return cls(runs_deleted=d.get("runs_deleted", None)) -@dataclass -class DeleteTag: - run_id: str - """ID of the run that the tag was logged under. Must be provided.""" - - key: str - """Name of the tag. Maximum size is 255 bytes. 
Must be provided.""" - - def as_dict(self) -> dict: - """Serializes the DeleteTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteTag: - """Deserializes the DeleteTag from a dictionary.""" - return cls(key=d.get("key", None), run_id=d.get("run_id", None)) - - @dataclass class DeleteTagResponse: def as_dict(self) -> dict: @@ -2038,40 +1192,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsDescription: ) -@dataclass -class ExperimentPermissionsRequest: - access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - - experiment_id: Optional[str] = None - """The experiment for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ExperimentPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsRequest: - """Deserializes the ExperimentPermissionsRequest from a 
dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlRequest), - experiment_id=d.get("experiment_id", None), - ) - - @dataclass class ExperimentTag: """A tag for an experiment.""" @@ -2147,39 +1267,6 @@ def from_dict(cls, d: Dict[str, Any]) -> FileInfo: return cls(file_size=d.get("file_size", None), is_dir=d.get("is_dir", None), path=d.get("path", None)) -@dataclass -class FinalizeLoggedModelRequest: - status: LoggedModelStatus - """Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that - something went wrong when logging the model weights / agent code.""" - - model_id: Optional[str] = None - """The ID of the logged model to finalize.""" - - def as_dict(self) -> dict: - """Serializes the FinalizeLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the FinalizeLoggedModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelRequest: - """Deserializes the FinalizeLoggedModelRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), status=_enum(d, "status", LoggedModelStatus)) - - @dataclass class FinalizeLoggedModelResponse: model: Optional[LoggedModel] = None @@ -2334,38 +1421,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetExperimentResponse: return cls(experiment=_from_dict(d, "experiment", Experiment)) -@dataclass -class GetLatestVersionsRequest: - name: str - """Registered model unique name identifier.""" - - stages: Optional[List[str]] = None - """List of stages.""" 
- - def as_dict(self) -> dict: - """Serializes the GetLatestVersionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = [v for v in self.stages] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = self.stages - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsRequest: - """Deserializes the GetLatestVersionsRequest from a dictionary.""" - return cls(name=d.get("name", None), stages=d.get("stages", None)) - - @dataclass class GetLatestVersionsResponse: model_versions: Optional[List[ModelVersion]] = None @@ -2988,452 +2043,113 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTransitionRequestsResponse: @dataclass -class LogBatch: - metrics: Optional[List[Metric]] = None - """Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, - and tags in total.""" - - params: Optional[List[Param]] = None - """Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, - and tags in total.""" - - run_id: Optional[str] = None - """ID of the run to log under""" - - tags: Optional[List[RunTag]] = None - """Tags to log. 
A single request can contain up to 100 tags, and up to 1000 metrics, params, and - tags in total.""" - +class LogBatchResponse: def as_dict(self) -> dict: - """Serializes the LogBatch into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogBatchResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: - """Serializes the LogBatch into a shallow dictionary of its immediate attributes.""" + """Serializes the LogBatchResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.params: - body["params"] = self.params - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = self.tags return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogBatch: - """Deserializes the LogBatch from a dictionary.""" - return cls( - metrics=_repeated_dict(d, "metrics", Metric), - params=_repeated_dict(d, "params", Param), - run_id=d.get("run_id", None), - tags=_repeated_dict(d, "tags", RunTag), - ) + def from_dict(cls, d: Dict[str, Any]) -> LogBatchResponse: + """Deserializes the LogBatchResponse from a dictionary.""" + return cls() @dataclass -class LogBatchResponse: +class LogInputsResponse: def as_dict(self) -> dict: - """Serializes the LogBatchResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogInputsResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the LogBatchResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the 
LogInputsResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogBatchResponse: - """Deserializes the LogBatchResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> LogInputsResponse: + """Deserializes the LogInputsResponse from a dictionary.""" return cls() @dataclass -class LogInputs: - run_id: str - """ID of the run to log under""" - - datasets: Optional[List[DatasetInput]] = None - """Dataset inputs""" - - models: Optional[List[ModelInput]] = None - """Model inputs""" - +class LogLoggedModelParamsRequestResponse: def as_dict(self) -> dict: - """Serializes the LogInputs into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogLoggedModelParamsRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.run_id is not None: - body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: - """Serializes the LogInputs into a shallow dictionary of its immediate attributes.""" + """Serializes the LogLoggedModelParamsRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.datasets: - body["datasets"] = self.datasets - if self.models: - body["models"] = self.models - if self.run_id is not None: - body["run_id"] = self.run_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogInputs: - """Deserializes the LogInputs from a dictionary.""" - return cls( - datasets=_repeated_dict(d, "datasets", DatasetInput), - models=_repeated_dict(d, "models", ModelInput), - run_id=d.get("run_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequestResponse: + """Deserializes the LogLoggedModelParamsRequestResponse from a dictionary.""" + return cls() @dataclass -class 
LogInputsResponse: +class LogMetricResponse: def as_dict(self) -> dict: - """Serializes the LogInputsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogMetricResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the LogInputsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogInputsResponse: - """Deserializes the LogInputsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse: + """Deserializes the LogMetricResponse from a dictionary.""" return cls() @dataclass -class LogLoggedModelParamsRequest: - model_id: Optional[str] = None - """The ID of the logged model to log params for.""" - - params: Optional[List[LoggedModelParameter]] = None - """Parameters to attach to the model.""" - +class LogModelResponse: def as_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = [v.as_dict() for v in self.params] return body def as_shallow_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = self.params return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequest: - """Deserializes the LogLoggedModelParamsRequest from a 
dictionary.""" - return cls(model_id=d.get("model_id", None), params=_repeated_dict(d, "params", LoggedModelParameter)) + def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse: + """Deserializes the LogModelResponse from a dictionary.""" + return cls() @dataclass -class LogLoggedModelParamsRequestResponse: +class LogOutputsResponse: def as_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequestResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogOutputsResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequestResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the LogOutputsResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequestResponse: - """Deserializes the LogLoggedModelParamsRequestResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse: + """Deserializes the LogOutputsResponse from a dictionary.""" return cls() -@dataclass -class LogMetric: - key: str - """Name of the metric.""" - - value: float - """Double value of the metric being logged.""" - - timestamp: int - """Unix timestamp in milliseconds at the time metric was logged.""" - - dataset_digest: Optional[str] = None - """Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that - uniquely identifies it within datasets of the same name.""" - - dataset_name: Optional[str] = None - """The name of the dataset associated with the metric. E.g. “my.uc.table@2” - “nyc-taxi-dataset”, “fantastic-elk-3”""" - - model_id: Optional[str] = None - """ID of the logged model associated with the metric, if applicable""" - - run_id: Optional[str] = None - """ID of the run under which to log the metric. 
Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the metric. This field will - be removed in a future MLflow version.""" - - step: Optional[int] = None - """Step at which to log the metric""" - - def as_dict(self) -> dict: - """Serializes the LogMetric into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogMetric into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogMetric: - """Deserializes the LogMetric from a dictionary.""" - return cls( - dataset_digest=d.get("dataset_digest", None), - dataset_name=d.get("dataset_name", None), - 
key=d.get("key", None), - model_id=d.get("model_id", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - step=d.get("step", None), - timestamp=d.get("timestamp", None), - value=d.get("value", None), - ) - - -@dataclass -class LogMetricResponse: - def as_dict(self) -> dict: - """Serializes the LogMetricResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse: - """Deserializes the LogMetricResponse from a dictionary.""" - return cls() - - -@dataclass -class LogModel: - model_json: Optional[str] = None - """MLmodel file in json format.""" - - run_id: Optional[str] = None - """ID of the run to log under""" - - def as_dict(self) -> dict: - """Serializes the LogModel into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_json is not None: - body["model_json"] = self.model_json - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogModel into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_json is not None: - body["model_json"] = self.model_json - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogModel: - """Deserializes the LogModel from a dictionary.""" - return cls(model_json=d.get("model_json", None), run_id=d.get("run_id", None)) - - -@dataclass -class LogModelResponse: - def as_dict(self) -> dict: - """Serializes the LogModelResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogModelResponse into a shallow dictionary of its 
immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse: - """Deserializes the LogModelResponse from a dictionary.""" - return cls() - - -@dataclass -class LogOutputsRequest: - run_id: str - """The ID of the Run from which to log outputs.""" - - models: Optional[List[ModelOutput]] = None - """The model outputs from the Run.""" - - def as_dict(self) -> dict: - """Serializes the LogOutputsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogOutputsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.models: - body["models"] = self.models - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogOutputsRequest: - """Deserializes the LogOutputsRequest from a dictionary.""" - return cls(models=_repeated_dict(d, "models", ModelOutput), run_id=d.get("run_id", None)) - - -@dataclass -class LogOutputsResponse: - def as_dict(self) -> dict: - """Serializes the LogOutputsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogOutputsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse: - """Deserializes the LogOutputsResponse from a dictionary.""" - return cls() - - -@dataclass -class LogParam: - key: str - """Name of the param. Maximum size is 255 bytes.""" - - value: str - """String value of the param being logged. Maximum size is 500 bytes.""" - - run_id: Optional[str] = None - """ID of the run under which to log the param. 
Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will - be removed in a future MLflow version.""" - - def as_dict(self) -> dict: - """Serializes the LogParam into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogParam into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogParam: - """Deserializes the LogParam from a dictionary.""" - return cls( - key=d.get("key", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - value=d.get("value", None), - ) - - @dataclass class LogParamResponse: def as_dict(self) -> dict: @@ -4545,40 +3261,6 @@ class PublishSpecPublishMode(Enum): TRIGGERED = "TRIGGERED" -@dataclass -class PublishTableRequest: - publish_spec: PublishSpec - """The specification for publishing the online table from the source table.""" - - source_table_name: Optional[str] = None - """The full three-part (catalog, schema, table) name of the source table.""" - - def as_dict(self) -> dict: - """Serializes the PublishTableRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.publish_spec: - body["publish_spec"] = self.publish_spec.as_dict() - if self.source_table_name is not None: - body["source_table_name"] = self.source_table_name - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the PublishTableRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.publish_spec: - body["publish_spec"] = self.publish_spec - if self.source_table_name is not None: - body["source_table_name"] = self.source_table_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PublishTableRequest: - """Deserializes the PublishTableRequest from a dictionary.""" - return cls( - publish_spec=_from_dict(d, "publish_spec", PublishSpec), source_table_name=d.get("source_table_name", None) - ) - - @dataclass class PublishTableResponse: online_table_name: Optional[str] = None @@ -4846,40 +3528,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsDescription: ) -@dataclass -class RegisteredModelPermissionsRequest: - access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None - - registered_model_id: Optional[str] = None - """The registered model for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsRequest: - """Deserializes the RegisteredModelPermissionsRequest from a dictionary.""" - return cls( - 
access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlRequest), - registered_model_id=d.get("registered_model_id", None), - ) - - @dataclass class RegistryWebhook: creation_timestamp: Optional[int] = None @@ -5033,68 +3681,9 @@ class RegistryWebhookStatus(Enum): @dataclass -class RejectTransitionRequest: - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: Stage - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the RejectTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: - """Deserializes the RejectTransitionRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) - - -@dataclass -class RejectTransitionRequestResponse: - activity: Optional[Activity] = None - """Activity recorded for the 
action.""" +class RejectTransitionRequestResponse: + activity: Optional[Activity] = None + """Activity recorded for the action.""" def as_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" @@ -5116,38 +3705,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequestResponse: return cls(activity=_from_dict(d, "activity", Activity)) -@dataclass -class RenameModelRequest: - name: str - """Registered model unique name identifier.""" - - new_name: Optional[str] = None - """If provided, updates the name for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the RenameModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RenameModelRequest: - """Deserializes the RenameModelRequest from a dictionary.""" - return cls(name=d.get("name", None), new_name=d.get("new_name", None)) - - @dataclass class RenameModelResponse: registered_model: Optional[Model] = None @@ -5172,31 +3729,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RenameModelResponse: return cls(registered_model=_from_dict(d, "registered_model", Model)) -@dataclass -class RestoreExperiment: - experiment_id: str - """ID of the associated experiment.""" - - def as_dict(self) -> dict: - """Serializes the RestoreExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreExperiment: - """Deserializes the RestoreExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) - - @dataclass class RestoreExperimentResponse: def as_dict(self) -> dict: @@ -5215,31 +3747,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreExperimentResponse: return cls() -@dataclass -class RestoreRun: - run_id: str - """ID of the run to restore.""" - - def as_dict(self) -> dict: - """Serializes the RestoreRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestoreRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreRun: - """Deserializes the RestoreRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class RestoreRunResponse: def as_dict(self) -> dict: @@ -5258,51 +3765,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreRunResponse: return cls() -@dataclass -class RestoreRuns: - experiment_id: str - """The ID of the experiment containing the runs to restore.""" - - min_timestamp_millis: int - """The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only - runs deleted no earlier than this timestamp are restored.""" - - max_runs: Optional[int] = None - """An optional positive integer indicating the maximum number of runs to restore. 
The maximum - allowed value for max_runs is 10000.""" - - def as_dict(self) -> dict: - """Serializes the RestoreRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreRuns: - """Deserializes the RestoreRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - min_timestamp_millis=d.get("min_timestamp_millis", None), - ) - - @dataclass class RestoreRunsResponse: runs_restored: Optional[int] = None @@ -5604,68 +4066,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTag: return cls(key=d.get("key", None), value=d.get("value", None)) -@dataclass -class SearchExperiments: - filter: Optional[str] = None - """String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")""" - - max_results: Optional[int] = None - """Maximum number of experiments desired. Max threshold is 3000.""" - - order_by: Optional[List[str]] = None - """List of columns for ordering search results, which can include experiment name and last updated - timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. 
Tiebreaks are - done by experiment id DESC.""" - - page_token: Optional[str] = None - """Token indicating the page of experiments to fetch""" - - view_type: Optional[ViewType] = None - """Qualifier for type of experiments to be returned. If unspecified, return only active - experiments.""" - - def as_dict(self) -> dict: - """Serializes the SearchExperiments into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchExperiments: - """Deserializes the SearchExperiments from a dictionary.""" - return cls( - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - view_type=_enum(d, "view_type", ViewType), - ) - - @dataclass class SearchExperimentsResponse: experiments: Optional[List[Experiment]] = None @@ -5788,80 +4188,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsOrderBy: ) -@dataclass -class SearchLoggedModelsRequest: - datasets: Optional[List[SearchLoggedModelsDataset]] = None - """List of datasets on which to apply the metrics 
filter clauses. For example, a filter with - `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all - logged models with accuracy > 0.9 on the test_dataset. Metric values from ANY dataset matching - the criteria are considered. If no datasets are specified, then metrics across all datasets are - considered in the filter.""" - - experiment_ids: Optional[List[str]] = None - """The IDs of the experiments in which to search for logged models.""" - - filter: Optional[str] = None - """A filter expression over logged model info and data that allows returning a subset of logged - models. The syntax is a subset of SQL that supports AND'ing together binary operations. - - Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``.""" - - max_results: Optional[int] = None - """The maximum number of Logged Models to return. The maximum limit is 50.""" - - order_by: Optional[List[SearchLoggedModelsOrderBy]] = None - """The list of columns for ordering the results, with additional fields for sorting criteria.""" - - page_token: Optional[str] = None - """The token indicating the page of logged models to fetch.""" - - def as_dict(self) -> dict: - """Serializes the SearchLoggedModelsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v.as_dict() for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchLoggedModelsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.datasets: - body["datasets"] = self.datasets - if 
self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsRequest: - """Deserializes the SearchLoggedModelsRequest from a dictionary.""" - return cls( - datasets=_repeated_dict(d, "datasets", SearchLoggedModelsDataset), - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=_repeated_dict(d, "order_by", SearchLoggedModelsOrderBy), - page_token=d.get("page_token", None), - ) - - @dataclass class SearchLoggedModelsResponse: models: Optional[List[LoggedModel]] = None @@ -5964,86 +4290,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SearchModelsResponse: ) -@dataclass -class SearchRuns: - experiment_ids: Optional[List[str]] = None - """List of experiment IDs to search over.""" - - filter: Optional[str] = None - """A filter expression over params, metrics, and tags, that allows returning a subset of runs. The - syntax is a subset of SQL that supports ANDing together binary operations between a param, - metric, or tag and a constant. - - Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - - You can select columns with special characters (hyphen, space, period, etc.) by using double - quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - - Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.""" - - max_results: Optional[int] = None - """Maximum number of runs desired. 
Max threshold is 50000""" - - order_by: Optional[List[str]] = None - """List of columns to be ordered by, including attributes, params, metrics, and tags with an - optional `"DESC"` or `"ASC"` annotation, where `"ASC"` is the default. Example: `["params.input - DESC", "metrics.alpha ASC", "metrics.rmse"]`. Tiebreaks are done by start_time `DESC` followed - by `run_id` for runs with the same start time (and this is the default ordering criterion if - order_by is not provided).""" - - page_token: Optional[str] = None - """Token for the current page of runs.""" - - run_view_type: Optional[ViewType] = None - """Whether to display only active, only deleted, or all runs. Defaults to only active runs.""" - - def as_dict(self) -> dict: - """Serializes the SearchRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchRuns: - """Deserializes the SearchRuns from a 
dictionary.""" - return cls( - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - run_view_type=_enum(d, "run_view_type", ViewType), - ) - - @dataclass class SearchRunsResponse: next_page_token: Optional[str] = None @@ -6077,302 +4323,86 @@ def from_dict(cls, d: Dict[str, Any]) -> SearchRunsResponse: @dataclass -class SetExperimentTag: - experiment_id: str - """ID of the experiment under which to log the tag. Must be provided.""" - - key: str - """Name of the tag. Keys up to 250 bytes in size are supported.""" - - value: str - """String value of the tag being logged. Values up to 64KB in size are supported.""" - +class SetExperimentTagResponse: def as_dict(self) -> dict: - """Serializes the SetExperimentTag into a dictionary suitable for use as a JSON request body.""" + """Serializes the SetExperimentTagResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes.""" + """Serializes the SetExperimentTagResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTag: - """Deserializes the SetExperimentTag from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), key=d.get("key", None), value=d.get("value", None)) + def from_dict(cls, d: Dict[str, Any]) -> 
SetExperimentTagResponse: + """Deserializes the SetExperimentTagResponse from a dictionary.""" + return cls() @dataclass -class SetExperimentTagResponse: +class SetLoggedModelTagsResponse: def as_dict(self) -> dict: - """Serializes the SetExperimentTagResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the SetLoggedModelTagsResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the SetExperimentTagResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the SetLoggedModelTagsResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTagResponse: - """Deserializes the SetExperimentTagResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsResponse: + """Deserializes the SetLoggedModelTagsResponse from a dictionary.""" return cls() @dataclass -class SetLoggedModelTagsRequest: - model_id: Optional[str] = None - """The ID of the logged model to set the tags on.""" - - tags: Optional[List[LoggedModelTag]] = None - """The tags to set on the logged model.""" - +class SetModelTagResponse: def as_dict(self) -> dict: - """Serializes the SetLoggedModelTagsRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the SetModelTagResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: - """Serializes the SetLoggedModelTagsRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the SetModelTagResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if 
self.tags: - body["tags"] = self.tags return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsRequest: - """Deserializes the SetLoggedModelTagsRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), tags=_repeated_dict(d, "tags", LoggedModelTag)) + def from_dict(cls, d: Dict[str, Any]) -> SetModelTagResponse: + """Deserializes the SetModelTagResponse from a dictionary.""" + return cls() @dataclass -class SetLoggedModelTagsResponse: +class SetModelVersionTagResponse: def as_dict(self) -> dict: - """Serializes the SetLoggedModelTagsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the SetModelVersionTagResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the SetLoggedModelTagsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the SetModelVersionTagResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsResponse: - """Deserializes the SetLoggedModelTagsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagResponse: + """Deserializes the SetModelVersionTagResponse from a dictionary.""" return cls() @dataclass -class SetModelTagRequest: - name: str - """Unique name of the model.""" - - key: str - """Name of the tag. Maximum size depends on storage backend. If a tag with this name already - exists, its preexisting value will be replaced by the specified `value`. All storage backends - are guaranteed to support key values up to 250 bytes in size.""" - - value: str - """String value of the tag being logged. Maximum size depends on storage backend. 
All storage - backends are guaranteed to support key values up to 5000 bytes in size.""" - +class SetTagResponse: def as_dict(self) -> dict: - """Serializes the SetModelTagRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the SetTagResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelTagRequest: - """Deserializes the SetModelTagRequest from a dictionary.""" - return cls(key=d.get("key", None), name=d.get("name", None), value=d.get("value", None)) - - -@dataclass -class SetModelTagResponse: - def as_dict(self) -> dict: - """Serializes the SetModelTagResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetModelTagResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelTagResponse: - """Deserializes the SetModelTagResponse from a dictionary.""" - return cls() - - -@dataclass -class SetModelVersionTagRequest: - name: str - """Unique name of the model.""" - - version: str - """Model version number.""" - - key: str - """Name of the tag. Maximum size depends on storage backend. If a tag with this name already - exists, its preexisting value will be replaced by the specified `value`. 
All storage backends - are guaranteed to support key values up to 250 bytes in size.""" - - value: str - """String value of the tag being logged. Maximum size depends on storage backend. All storage - backends are guaranteed to support key values up to 5000 bytes in size.""" - - def as_dict(self) -> dict: - """Serializes the SetModelVersionTagRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagRequest: - """Deserializes the SetModelVersionTagRequest from a dictionary.""" - return cls( - key=d.get("key", None), name=d.get("name", None), value=d.get("value", None), version=d.get("version", None) - ) - - -@dataclass -class SetModelVersionTagResponse: - def as_dict(self) -> dict: - """Serializes the SetModelVersionTagResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetModelVersionTagResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagResponse: - """Deserializes the SetModelVersionTagResponse from a dictionary.""" - return cls() - - -@dataclass -class SetTag: - key: str - """Name of the tag. 
Keys up to 250 bytes in size are supported.""" - - value: str - """String value of the tag being logged. Values up to 64KB in size are supported.""" - - run_id: Optional[str] = None - """ID of the run under which to log the tag. Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be - removed in a future MLflow version.""" - - def as_dict(self) -> dict: - """Serializes the SetTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetTag: - """Deserializes the SetTag from a dictionary.""" - return cls( - key=d.get("key", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - value=d.get("value", None), - ) - - -@dataclass -class SetTagResponse: - def as_dict(self) -> dict: - """Serializes the SetTagResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetTagResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the SetTagResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @@ -6446,39 +4476,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhook: return 
cls(body=d.get("body", None), status_code=d.get("status_code", None)) -@dataclass -class TestRegistryWebhookRequest: - id: str - """Webhook ID""" - - event: Optional[RegistryWebhookEvent] = None - """If `event` is specified, the test trigger uses the specified event. If `event` is not specified, - the test trigger uses a randomly chosen event associated with the webhook.""" - - def as_dict(self) -> dict: - """Serializes the TestRegistryWebhookRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.event is not None: - body["event"] = self.event.value - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.event is not None: - body["event"] = self.event - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookRequest: - """Deserializes the TestRegistryWebhookRequest from a dictionary.""" - return cls(event=_enum(d, "event", RegistryWebhookEvent), id=d.get("id", None)) - - @dataclass class TestRegistryWebhookResponse: webhook: Optional[TestRegistryWebhook] = None @@ -6504,73 +4501,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: return cls(webhook=_from_dict(d, "webhook", TestRegistryWebhook)) -@dataclass -class TransitionModelVersionStageDatabricks: - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: Stage - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. 
- - * `Archived`: Archived stage.""" - - archive_existing_versions: bool - """Specifies whether to archive all current model versions in the target stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the TransitionModelVersionStageDatabricks into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TransitionModelVersionStageDatabricks: - """Deserializes the TransitionModelVersionStageDatabricks from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) - - @dataclass class TransitionRequest: """Transition request details.""" @@ -6664,38 +4594,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: return cls(model_version=_from_dict(d, "model_version", ModelVersionDatabricks)) -@dataclass -class UpdateComment: - 
id: str - """Unique identifier of an activity""" - - comment: str - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the UpdateComment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateComment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateComment: - """Deserializes the UpdateComment from a dictionary.""" - return cls(comment=d.get("comment", None), id=d.get("id", None)) - - @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None @@ -6721,38 +4619,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCommentResponse: return cls(comment=_from_dict(d, "comment", CommentObject)) -@dataclass -class UpdateExperiment: - experiment_id: str - """ID of the associated experiment.""" - - new_name: Optional[str] = None - """If provided, the experiment's name is changed to the new name. 
The new name must be unique.""" - - def as_dict(self) -> dict: - """Serializes the UpdateExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExperiment: - """Deserializes the UpdateExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), new_name=d.get("new_name", None)) - - @dataclass class UpdateExperimentResponse: def as_dict(self) -> dict: @@ -6771,38 +4637,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExperimentResponse: return cls() -@dataclass -class UpdateModelRequest: - name: str - """Registered model unique name identifier.""" - - description: Optional[str] = None - """If provided, updates the description for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the UpdateModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelRequest: - """Deserializes the UpdateModelRequest from a dictionary.""" - return 
cls(description=d.get("description", None), name=d.get("name", None)) - - @dataclass class UpdateModelResponse: def as_dict(self) -> dict: @@ -6821,45 +4655,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelResponse: return cls() -@dataclass -class UpdateModelVersionRequest: - name: str - """Name of the registered model""" - - version: str - """Model version number""" - - description: Optional[str] = None - """If provided, updates the description for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: - """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(description=d.get("description", None), name=d.get("name", None), version=d.get("version", None)) - - @dataclass class UpdateModelVersionResponse: def as_dict(self) -> dict: @@ -6878,164 +4673,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionResponse: return cls() -@dataclass -class UpdateRegistryWebhook: - id: str - """Webhook ID""" - - description: Optional[str] = None - """User-specified description for the webhook.""" - - events: Optional[List[RegistryWebhookEvent]] = None - """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was - created for the 
associated model. - - * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - - * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - - * `COMMENT_CREATED`: A user wrote a comment on a registered model. - - * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be - specified for a registry-wide webhook, which can be created by not specifying a model name in - the create request. - - * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - - * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - - * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - - * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - - * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to - staging. - - * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned - to production. - - * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - - http_url_spec: Optional[HttpUrlSpec] = None - - job_spec: Optional[JobSpec] = None - - status: Optional[RegistryWebhookStatus] = None - """Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. 
- - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRegistryWebhook: - """Deserializes the UpdateRegistryWebhook from a dictionary.""" - return cls( - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), - id=d.get("id", None), - job_spec=_from_dict(d, "job_spec", JobSpec), - status=_enum(d, "status", RegistryWebhookStatus), - ) - - -@dataclass -class UpdateRun: - end_time: Optional[int] = None - """Unix timestamp in milliseconds of when the run ended.""" - - run_id: Optional[str] = None - """ID of the run to update. 
Must be provided.""" - - run_name: Optional[str] = None - """Updated name of the run.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run to update. This field will be removed in a - future MLflow version.""" - - status: Optional[UpdateRunStatus] = None - """Updated status of the run.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRun: - """Deserializes the UpdateRun from a dictionary.""" - return cls( - end_time=d.get("end_time", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_uuid=d.get("run_uuid", None), - status=_enum(d, "status", UpdateRunStatus), - ) - - @dataclass class UpdateRunResponse: run_info: Optional[RunInfo] = None diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 174ee21a..5ce142ec 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -14,75 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class 
CreateCustomAppIntegration: - confidential: Optional[bool] = None - """This field indicates whether an OAuth client secret is required to authenticate this client.""" - - name: Optional[str] = None - """Name of the custom OAuth app""" - - redirect_urls: Optional[List[str]] = None - """List of OAuth redirect urls""" - - scopes: Optional[List[str]] = None - """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, - openid, profile, email.""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy""" - - user_authorized_scopes: Optional[List[str]] = None - """Scopes that will need to be consented by end user to mint the access token. If the user does not - authorize the access token will not be minted. Must be a subset of scopes.""" - - def as_dict(self) -> dict: - """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - 
body["user_authorized_scopes"] = self.user_authorized_scopes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegration: - """Deserializes the CreateCustomAppIntegration from a dictionary.""" - return cls( - confidential=d.get("confidential", None), - name=d.get("name", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) - - @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None @@ -127,40 +58,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegrationOutput: ) -@dataclass -class CreatePublishedAppIntegration: - app_id: Optional[str] = None - """App id of the OAuth published app integration. For example power-bi, tableau-deskop""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy""" - - def as_dict(self) -> dict: - """Serializes the CreatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegration: - """Deserializes the CreatePublishedAppIntegration from a dictionary.""" - return cls( - app_id=d.get("app_id", None), token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy) - ) - - @dataclass class CreatePublishedAppIntegrationOutput: 
integration_id: Optional[str] = None @@ -186,39 +83,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegrationOutput: return cls(integration_id=d.get("integration_id", None)) -@dataclass -class CreateServicePrincipalSecretRequest: - lifetime: Optional[str] = None - """The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a - default lifetime of 730 days (63072000s).""" - - service_principal_id: Optional[int] = None - """The service principal ID.""" - - def as_dict(self) -> dict: - """Serializes the CreateServicePrincipalSecretRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateServicePrincipalSecretRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretRequest: - """Deserializes the CreateServicePrincipalSecretRequest from a dictionary.""" - return cls(lifetime=d.get("lifetime", None), service_principal_id=d.get("service_principal_id", None)) - - @dataclass class CreateServicePrincipalSecretResponse: create_time: Optional[str] = None @@ -1033,66 +897,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy: ) -@dataclass -class UpdateCustomAppIntegration: - integration_id: Optional[str] = None - - redirect_urls: Optional[List[str]] = None - """List of OAuth redirect urls to be updated in the custom OAuth app integration""" - - scopes: Optional[List[str]] = None - """List of OAuth scopes to be updated in the custom OAuth app integration, 
similar to redirect URIs - this will fully replace the existing values instead of appending""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the custom OAuth app integration""" - - user_authorized_scopes: Optional[List[str]] = None - """Scopes that will need to be consented by end user to mint the access token. If the user does not - authorize the access token will not be minted. Must be a subset of scopes.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - body["user_authorized_scopes"] = self.user_authorized_scopes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegration: - """Deserializes the UpdateCustomAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", 
TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) - - @dataclass class UpdateCustomAppIntegrationOutput: def as_dict(self) -> dict: @@ -1111,40 +915,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegrationOutput: return cls() -@dataclass -class UpdatePublishedAppIntegration: - integration_id: Optional[str] = None - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the published OAuth app integration""" - - def as_dict(self) -> dict: - """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegration: - """Deserializes the UpdatePublishedAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - ) - - @dataclass class UpdatePublishedAppIntegrationOutput: def as_dict(self) -> dict: diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ca0a7604..8733a9e7 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -21,279 +21,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class CreatePipeline: - allow_duplicate_names: Optional[bool] = None - """If false, deployment will 
fail if name conflicts with that of another pipeline.""" - - budget_policy_id: Optional[str] = None - """Budget policy of this pipeline.""" - - catalog: Optional[str] = None - """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, - tables in this pipeline are published to a `target` schema inside `catalog` (for example, - `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity - Catalog.""" - - channel: Optional[str] = None - """DLT Release Channel that specifies which version to use.""" - - clusters: Optional[List[PipelineCluster]] = None - """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None - """String-String configuration for this pipeline execution.""" - - continuous: Optional[bool] = None - """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - - deployment: Optional[PipelineDeployment] = None - """Deployment type of this pipeline.""" - - development: Optional[bool] = None - """Whether the pipeline is in Development mode. Defaults to false.""" - - dry_run: Optional[bool] = None - - edition: Optional[str] = None - """Pipeline product edition.""" - - environment: Optional[PipelinesEnvironment] = None - """Environment specification for this pipeline used to install dependencies.""" - - event_log: Optional[EventLogSpec] = None - """Event log configuration for this pipeline""" - - filters: Optional[Filters] = None - """Filters on which Pipeline packages to include in the deployed graph.""" - - gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support change data capture.""" - - id: Optional[str] = None - """Unique identifier for this pipeline.""" - - ingestion_definition: Optional[IngestionPipelineDefinition] = None - """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the - 'libraries', 'schema', 'target', or 'catalog' settings.""" - - libraries: Optional[List[PipelineLibrary]] = None - """Libraries or code needed by this deployment.""" - - name: Optional[str] = None - """Friendly identifier for this pipeline.""" - - notifications: Optional[List[Notifications]] = None - """List of notification settings for this pipeline.""" - - photon: Optional[bool] = None - """Whether Photon is enabled for this pipeline.""" - - restart_window: Optional[RestartWindow] = None - """Restart window of this pipeline.""" - - root_path: Optional[str] = None - """Root path for this pipeline. This is used as the root directory when editing the pipeline in the - Databricks user interface and it is added to sys.path when executing Python sources during - pipeline execution.""" - - run_as: Optional[RunAs] = None - """Write-only setting, available only in Create/Update calls. Specifies the user or service - principal that the pipeline runs as. If not specified, the pipeline runs as the user who created - the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error - is thrown.""" - - schema: Optional[str] = None - """The default schema (database) where tables are read from or published to.""" - - serverless: Optional[bool] = None - """Whether serverless compute is enabled for this pipeline.""" - - storage: Optional[str] = None - """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, - and are therefore subject to the same limitations. A maximum of 25 tags can be added to the - pipeline.""" - - target: Optional[str] = None - """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` - must be specified. To publish to Unity Catalog, also specify `catalog`. 
This legacy field is - deprecated for pipeline creation in favor of the `schema` field.""" - - trigger: Optional[PipelineTrigger] = None - """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" - - def as_dict(self) -> dict: - """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.development is not None: - body["development"] = self.development - if self.dry_run is not None: - body["dry_run"] = self.dry_run - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment.as_dict() - if self.event_log: - body["event_log"] = self.event_log.as_dict() - if self.filters: - body["filters"] = self.filters.as_dict() - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition.as_dict() - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = [v.as_dict() for v in self.notifications] - if self.photon is not None: - body["photon"] = self.photon - if self.restart_window: - body["restart_window"] = self.restart_window.as_dict() - 
if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = self.clusters - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.development is not None: - body["development"] = self.development - if self.dry_run is not None: - body["dry_run"] = self.dry_run - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment - if self.event_log: - body["event_log"] = self.event_log - if self.filters: - body["filters"] = self.filters - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition - if self.libraries: - body["libraries"] = self.libraries - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = 
self.notifications - if self.photon is not None: - body["photon"] = self.photon - if self.restart_window: - body["restart_window"] = self.restart_window - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline: - """Deserializes the CreatePipeline from a dictionary.""" - return cls( - allow_duplicate_names=d.get("allow_duplicate_names", None), - budget_policy_id=d.get("budget_policy_id", None), - catalog=d.get("catalog", None), - channel=d.get("channel", None), - clusters=_repeated_dict(d, "clusters", PipelineCluster), - configuration=d.get("configuration", None), - continuous=d.get("continuous", None), - deployment=_from_dict(d, "deployment", PipelineDeployment), - development=d.get("development", None), - dry_run=d.get("dry_run", None), - edition=d.get("edition", None), - environment=_from_dict(d, "environment", PipelinesEnvironment), - event_log=_from_dict(d, "event_log", EventLogSpec), - filters=_from_dict(d, "filters", Filters), - gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), - id=d.get("id", None), - ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), - libraries=_repeated_dict(d, "libraries", PipelineLibrary), - name=d.get("name", None), - notifications=_repeated_dict(d, "notifications", Notifications), - photon=d.get("photon", None), - restart_window=_from_dict(d, "restart_window", RestartWindow), - root_path=d.get("root_path", None), - run_as=_from_dict(d, "run_as", RunAs), - 
schema=d.get("schema", None), - serverless=d.get("serverless", None), - storage=d.get("storage", None), - tags=d.get("tags", None), - target=d.get("target", None), - trigger=_from_dict(d, "trigger", PipelineTrigger), - ) - - @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -394,321 +121,38 @@ class DayOfWeek(Enum): """Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified all days of the week will be used.""" - FRIDAY = "FRIDAY" - MONDAY = "MONDAY" - SATURDAY = "SATURDAY" - SUNDAY = "SUNDAY" - THURSDAY = "THURSDAY" - TUESDAY = "TUESDAY" - WEDNESDAY = "WEDNESDAY" - - -@dataclass -class DeletePipelineResponse: - def as_dict(self) -> dict: - """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse: - """Deserializes the DeletePipelineResponse from a dictionary.""" - return cls() - - -class DeploymentKind(Enum): - """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a - Databricks Asset Bundle.""" - - BUNDLE = "BUNDLE" - - -@dataclass -class EditPipeline: - allow_duplicate_names: Optional[bool] = None - """If false, deployment will fail if name has changed and conflicts the name of another pipeline.""" - - budget_policy_id: Optional[str] = None - """Budget policy of this pipeline.""" - - catalog: Optional[str] = None - """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, - tables in this pipeline are published to a `target` schema inside `catalog` (for example, - `catalog`.`target`.`table`). 
If `target` is not specified, no data is published to Unity - Catalog.""" - - channel: Optional[str] = None - """DLT Release Channel that specifies which version to use.""" - - clusters: Optional[List[PipelineCluster]] = None - """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None - """String-String configuration for this pipeline execution.""" - - continuous: Optional[bool] = None - """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - - deployment: Optional[PipelineDeployment] = None - """Deployment type of this pipeline.""" - - development: Optional[bool] = None - """Whether the pipeline is in Development mode. Defaults to false.""" - - edition: Optional[str] = None - """Pipeline product edition.""" - - environment: Optional[PipelinesEnvironment] = None - """Environment specification for this pipeline used to install dependencies.""" - - event_log: Optional[EventLogSpec] = None - """Event log configuration for this pipeline""" - - expected_last_modified: Optional[int] = None - """If present, the last-modified time of the pipeline settings before the edit. If the settings - were modified after that time, then the request will fail with a conflict.""" - - filters: Optional[Filters] = None - """Filters on which Pipeline packages to include in the deployed graph.""" - - gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support change data capture.""" - - id: Optional[str] = None - """Unique identifier for this pipeline.""" - - ingestion_definition: Optional[IngestionPipelineDefinition] = None - """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the - 'libraries', 'schema', 'target', or 'catalog' settings.""" - - libraries: Optional[List[PipelineLibrary]] = None - """Libraries or code needed by this deployment.""" - - name: Optional[str] = None - """Friendly identifier for this pipeline.""" - - notifications: Optional[List[Notifications]] = None - """List of notification settings for this pipeline.""" - - photon: Optional[bool] = None - """Whether Photon is enabled for this pipeline.""" - - pipeline_id: Optional[str] = None - """Unique identifier for this pipeline.""" - - restart_window: Optional[RestartWindow] = None - """Restart window of this pipeline.""" - - root_path: Optional[str] = None - """Root path for this pipeline. This is used as the root directory when editing the pipeline in the - Databricks user interface and it is added to sys.path when executing Python sources during - pipeline execution.""" - - run_as: Optional[RunAs] = None - """Write-only setting, available only in Create/Update calls. Specifies the user or service - principal that the pipeline runs as. If not specified, the pipeline runs as the user who created - the pipeline. - - Only `user_name` or `service_principal_name` can be specified. If both are specified, an error - is thrown.""" - - schema: Optional[str] = None - """The default schema (database) where tables are read from or published to.""" - - serverless: Optional[bool] = None - """Whether serverless compute is enabled for this pipeline.""" - - storage: Optional[str] = None - """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, - and are therefore subject to the same limitations. A maximum of 25 tags can be added to the - pipeline.""" - - target: Optional[str] = None - """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` - must be specified. 
To publish to Unity Catalog, also specify `catalog`. This legacy field is - deprecated for pipeline creation in favor of the `schema` field.""" - - trigger: Optional[PipelineTrigger] = None - """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" + FRIDAY = "FRIDAY" + MONDAY = "MONDAY" + SATURDAY = "SATURDAY" + SUNDAY = "SUNDAY" + THURSDAY = "THURSDAY" + TUESDAY = "TUESDAY" + WEDNESDAY = "WEDNESDAY" + +@dataclass +class DeletePipelineResponse: def as_dict(self) -> dict: - """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.development is not None: - body["development"] = self.development - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment.as_dict() - if self.event_log: - body["event_log"] = self.event_log.as_dict() - if self.expected_last_modified is not None: - body["expected_last_modified"] = self.expected_last_modified - if self.filters: - body["filters"] = self.filters.as_dict() - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition.as_dict() - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = 
self.ingestion_definition.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = [v.as_dict() for v in self.notifications] - if self.photon is not None: - body["photon"] = self.photon - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.restart_window: - body["restart_window"] = self.restart_window.as_dict() - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the EditPipeline into a shallow dictionary of its immediate attributes.""" + """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = self.clusters - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.development is not None: - body["development"] = self.development - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = 
self.environment - if self.event_log: - body["event_log"] = self.event_log - if self.expected_last_modified is not None: - body["expected_last_modified"] = self.expected_last_modified - if self.filters: - body["filters"] = self.filters - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition - if self.libraries: - body["libraries"] = self.libraries - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = self.notifications - if self.photon is not None: - body["photon"] = self.photon - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.restart_window: - body["restart_window"] = self.restart_window - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditPipeline: - """Deserializes the EditPipeline from a dictionary.""" - return cls( - allow_duplicate_names=d.get("allow_duplicate_names", None), - budget_policy_id=d.get("budget_policy_id", None), - catalog=d.get("catalog", None), - channel=d.get("channel", None), - clusters=_repeated_dict(d, "clusters", PipelineCluster), - configuration=d.get("configuration", None), - continuous=d.get("continuous", None), - deployment=_from_dict(d, "deployment", PipelineDeployment), - development=d.get("development", None), - edition=d.get("edition", None), - environment=_from_dict(d, "environment", 
PipelinesEnvironment), - event_log=_from_dict(d, "event_log", EventLogSpec), - expected_last_modified=d.get("expected_last_modified", None), - filters=_from_dict(d, "filters", Filters), - gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), - id=d.get("id", None), - ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), - libraries=_repeated_dict(d, "libraries", PipelineLibrary), - name=d.get("name", None), - notifications=_repeated_dict(d, "notifications", Notifications), - photon=d.get("photon", None), - pipeline_id=d.get("pipeline_id", None), - restart_window=_from_dict(d, "restart_window", RestartWindow), - root_path=d.get("root_path", None), - run_as=_from_dict(d, "run_as", RunAs), - schema=d.get("schema", None), - serverless=d.get("serverless", None), - storage=d.get("storage", None), - tags=d.get("tags", None), - target=d.get("target", None), - trigger=_from_dict(d, "trigger", PipelineTrigger), - ) + def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse: + """Deserializes the DeletePipelineResponse from a dictionary.""" + return cls() + + +class DeploymentKind(Enum): + """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a + Databricks Asset Bundle.""" + + BUNDLE = "BUNDLE" @dataclass @@ -2312,40 +1756,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsDescription: ) -@dataclass -class PipelinePermissionsRequest: - access_control_list: Optional[List[PipelineAccessControlRequest]] = None - - pipeline_id: Optional[str] = None - """The pipeline for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - return 
body - - def as_shallow_dict(self) -> dict: - """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsRequest: - """Deserializes the PipelinePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest), - pipeline_id=d.get("pipeline_id", None), - ) - - @dataclass class PipelineSpec: budget_policy_id: Optional[str] = None @@ -3098,77 +2508,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StackFrame: ) -@dataclass -class StartUpdate: - cause: Optional[StartUpdateCause] = None - """What triggered this update.""" - - full_refresh: Optional[bool] = None - """If true, this update will reset all tables before running.""" - - full_refresh_selection: Optional[List[str]] = None - """A list of tables to update with fullRefresh. If both refresh_selection and - full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means - that the states of the table will be reset before the refresh.""" - - pipeline_id: Optional[str] = None - - refresh_selection: Optional[List[str]] = None - """A list of tables to update without fullRefresh. If both refresh_selection and - full_refresh_selection are empty, this is a full graph update. 
Full Refresh on a table means - that the states of the table will be reset before the refresh.""" - - validate_only: Optional[bool] = None - """If true, this update only validates the correctness of pipeline source code but does not - materialize or publish any datasets.""" - - def as_dict(self) -> dict: - """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cause is not None: - body["cause"] = self.cause.value - if self.full_refresh is not None: - body["full_refresh"] = self.full_refresh - if self.full_refresh_selection: - body["full_refresh_selection"] = [v for v in self.full_refresh_selection] - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.refresh_selection: - body["refresh_selection"] = [v for v in self.refresh_selection] - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the StartUpdate into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cause is not None: - body["cause"] = self.cause - if self.full_refresh is not None: - body["full_refresh"] = self.full_refresh - if self.full_refresh_selection: - body["full_refresh_selection"] = self.full_refresh_selection - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.refresh_selection: - body["refresh_selection"] = self.refresh_selection - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> StartUpdate: - """Deserializes the StartUpdate from a dictionary.""" - return cls( - cause=_enum(d, "cause", StartUpdateCause), - full_refresh=d.get("full_refresh", None), - full_refresh_selection=d.get("full_refresh_selection", None), - pipeline_id=d.get("pipeline_id", None), - refresh_selection=d.get("refresh_selection", None), - validate_only=d.get("validate_only", None), 
- ) - - class StartUpdateCause(Enum): """What triggered this update.""" diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 12a5d61f..e0c1c537 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -225,40 +225,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: return cls(sts_role=_from_dict(d, "sts_role", CreateCredentialStsRole)) -@dataclass -class CreateCredentialRequest: - credentials_name: str - """The human-readable name of the credential configuration object.""" - - aws_credentials: CreateCredentialAwsCredentials - - def as_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials.as_dict() - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: - """Deserializes the CreateCredentialRequest from a dictionary.""" - return cls( - aws_credentials=_from_dict(d, "aws_credentials", CreateCredentialAwsCredentials), - credentials_name=d.get("credentials_name", None), - ) - - @dataclass class CreateCredentialStsRole: role_arn: Optional[str] = None @@ -276,449 +242,37 @@ def as_shallow_dict(self) -> dict: body = {} if self.role_arn is not None: body["role_arn"] = self.role_arn - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: - """Deserializes the CreateCredentialStsRole from a 
dictionary.""" - return cls(role_arn=d.get("role_arn", None)) - - -@dataclass -class CreateCustomerManagedKeyRequest: - use_cases: List[KeyUseCase] - """The cases that the key can be used for.""" - - aws_key_info: Optional[CreateAwsKeyInfo] = None - - gcp_key_info: Optional[CreateGcpKeyInfo] = None - - def as_dict(self) -> dict: - """Serializes the CreateCustomerManagedKeyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info.as_dict() - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info.as_dict() - if self.use_cases: - body["use_cases"] = [v.value for v in self.use_cases] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info - if self.use_cases: - body["use_cases"] = self.use_cases - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomerManagedKeyRequest: - """Deserializes the CreateCustomerManagedKeyRequest from a dictionary.""" - return cls( - aws_key_info=_from_dict(d, "aws_key_info", CreateAwsKeyInfo), - gcp_key_info=_from_dict(d, "gcp_key_info", CreateGcpKeyInfo), - use_cases=_repeated_enum(d, "use_cases", KeyUseCase), - ) - - -@dataclass -class CreateGcpKeyInfo: - kms_key_id: str - """The GCP KMS key's resource name""" - - def as_dict(self) -> dict: - """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id - return 
body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: - """Deserializes the CreateGcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get("kms_key_id", None)) - - -@dataclass -class CreateNetworkRequest: - network_name: str - """The human-readable name of the network configuration.""" - - gcp_network_info: Optional[GcpNetworkInfo] = None - """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and - secondary IP ranges).""" - - security_group_ids: Optional[List[str]] = None - """IDs of one to five security groups associated with this network. Security group IDs **cannot** - be used in multiple network configurations.""" - - subnet_ids: Optional[List[str]] = None - """IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in - multiple network configurations.""" - - vpc_endpoints: Optional[NetworkVpcEndpoints] = None - """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over - [AWS PrivateLink]. - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - - vpc_id: Optional[str] = None - """The ID of the VPC associated with this network. 
VPC IDs can be used in multiple network - configurations.""" - - def as_dict(self) -> dict: - """Serializes the CreateNetworkRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info.as_dict() - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = [v for v in self.security_group_ids] - if self.subnet_ids: - body["subnet_ids"] = [v for v in self.subnet_ids] - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = self.security_group_ids - if self.subnet_ids: - body["subnet_ids"] = self.subnet_ids - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkRequest: - """Deserializes the CreateNetworkRequest from a dictionary.""" - return cls( - gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), - network_name=d.get("network_name", None), - security_group_ids=d.get("security_group_ids", None), - subnet_ids=d.get("subnet_ids", None), - vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), - vpc_id=d.get("vpc_id", None), - ) - - -@dataclass -class CreateStorageConfigurationRequest: - storage_configuration_name: str - """The human-readable name of the storage configuration.""" - - root_bucket_info: RootBucketInfo - """Root S3 bucket 
information.""" - - def as_dict(self) -> dict: - """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info.as_dict() - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageConfigurationRequest: - """Deserializes the CreateStorageConfigurationRequest from a dictionary.""" - return cls( - root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), - storage_configuration_name=d.get("storage_configuration_name", None), - ) - - -@dataclass -class CreateVpcEndpointRequest: - vpc_endpoint_name: str - """The human-readable name of the storage configuration.""" - - aws_vpc_endpoint_id: Optional[str] = None - """The ID of the VPC endpoint object in AWS.""" - - gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None - """The Google Cloud specific information for this Private Service Connect endpoint.""" - - region: Optional[str] = None - """The AWS region in which this VPC endpoint object exists.""" - - def as_dict(self) -> dict: - """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict() - if self.region is not None: - body["region"] = 
self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info - if self.region is not None: - body["region"] = self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVpcEndpointRequest: - """Deserializes the CreateVpcEndpointRequest from a dictionary.""" - return cls( - aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None), - gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo), - region=d.get("region", None), - vpc_endpoint_name=d.get("vpc_endpoint_name", None), - ) - - -@dataclass -class CreateWorkspaceRequest: - workspace_name: str - """The workspace's human-readable name.""" - - aws_region: Optional[str] = None - """The AWS region of the workspace's data plane.""" - - cloud: Optional[str] = None - """The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field - to `gcp`.""" - - cloud_resource_container: Optional[CloudResourceContainer] = None - """The general workspace configurations that are specific to cloud providers.""" - - credentials_id: Optional[str] = None - """ID of the workspace's credential configuration object.""" - - custom_tags: Optional[Dict[str, str]] = None - """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a - string of utf-8 characters. The value can be an empty string, with maximum length of 255 - characters. 
The key can be of maximum length of 127 characters, and cannot be empty.""" - - deployment_name: Optional[str] = None - """The deployment name defines part of the subdomain for the workspace. The workspace URL for the - web application and REST APIs is `.cloud.databricks.com`. For - example, if the deployment name is `abcsales`, your workspace URL will be - `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the - set of characters that are allowed in a subdomain. - - To set this value, you must have a deployment name prefix. Contact your Databricks account team - to add an account deployment name prefix to your account. - - Workspace deployment names follow the account prefix and a hyphen. For example, if your - account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the - JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL - would be `acme-workspace-1.cloud.databricks.com`. - - You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the - deployment name to only include the deployment prefix. For example, if your account's deployment - prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes - `acme` only and the workspace URL is `acme.cloud.databricks.com`. - - This value must be unique across all non-deleted deployments across all AWS regions. - - If a new workspace omits this property, the server generates a unique deployment name for you - with the pattern `dbc-xxxxxxxx-xxxx`.""" - - gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None - """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. - It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP - range configurations must be mutually exclusive. An attempt to create a workspace fails if - Databricks detects an IP range overlap. 
- - Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and - all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, - `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - - The sizes of these IP ranges affect the maximum number of nodes for the workspace. - - **Important**: Confirm the IP ranges used by your Databricks workspace before creating the - workspace. You cannot change them after your workspace is deployed. If the IP address ranges for - your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To - determine the address range sizes that you need, Databricks provides a calculator as a Microsoft - Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" - - gke_config: Optional[GkeConfig] = None - """The configurations for the GKE cluster of a Databricks workspace.""" - - is_no_public_ip_enabled: Optional[bool] = None - """Whether no public IP is enabled for the workspace.""" - - location: Optional[str] = None - """The Google Cloud region of the workspace data plane in your Google account. For example, - `us-east4`.""" - - managed_services_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's managed services encryption key configuration object. This is used to - help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, - and query history. The provided key configuration object property `use_cases` must contain - `MANAGED_SERVICES`.""" - - network_id: Optional[str] = None - - pricing_tier: Optional[PricingTier] = None - """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. 
- - [AWS Pricing]: https://databricks.com/product/aws-pricing""" - - private_access_settings_id: Optional[str] = None - """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be - specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace - connection), back-end (data plane to control plane connection), or both connection types. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/ - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" + return body - storage_configuration_id: Optional[str] = None - """The ID of the workspace's storage configuration object.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: + """Deserializes the CreateCredentialStsRole from a dictionary.""" + return cls(role_arn=d.get("role_arn", None)) - storage_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's storage encryption key configuration object. This is used to encrypt - the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. 
- The provided key configuration object property `use_cases` must contain `STORAGE`.""" + +@dataclass +class CreateGcpKeyInfo: + kms_key_id: str + """The GCP KMS key's resource name""" def as_dict(self) -> dict: - """Serializes the CreateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container.as_dict() - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() - if self.gke_config: - body["gke_config"] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is 
not None: - body["workspace_name"] = self.workspace_name + if self.kms_key_id is not None: + body["kms_key_id"] = self.kms_key_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config - if self.gke_config: - body["gke_config"] = self.gke_config - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name + if self.kms_key_id is 
not None: + body["kms_key_id"] = self.kms_key_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWorkspaceRequest: - """Deserializes the CreateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - cloud=d.get("cloud", None), - cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - deployment_name=d.get("deployment_name", None), - gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), - gke_config=_from_dict(d, "gke_config", GkeConfig), - is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), - location=d.get("location", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_id=d.get("network_id", None), - pricing_tier=_enum(d, "pricing_tier", PricingTier), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_name=d.get("workspace_name", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: + """Deserializes the CreateGcpKeyInfo from a dictionary.""" + return cls(kms_key_id=d.get("kms_key_id", None)) @dataclass @@ -1738,199 +1292,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateWorkspaceRequest: - aws_region: Optional[str] = None - """The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is - available only for updating failed workspaces.""" - - credentials_id: Optional[str] = None - """ID of the workspace's credential configuration object. 
This parameter is available for updating - both failed and running workspaces.""" - - custom_tags: Optional[Dict[str, str]] = None - """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a - string of utf-8 characters. The value can be an empty string, with maximum length of 255 - characters. The key can be of maximum length of 127 characters, and cannot be empty.""" - - managed_services_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's managed services encryption key configuration object. This parameter - is available only for updating failed workspaces.""" - - network_connectivity_config_id: Optional[str] = None - - network_id: Optional[str] = None - """The ID of the workspace's network configuration object. Used only if you already use a - customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC - to a customer-managed VPC by updating the workspace to add a network configuration ID.""" - - private_access_settings_id: Optional[str] = None - """The ID of the workspace's private access settings configuration object. This parameter is - available only for updating failed workspaces.""" - - storage_configuration_id: Optional[str] = None - """The ID of the workspace's storage configuration object. This parameter is available only for - updating failed workspaces.""" - - storage_customer_managed_key_id: Optional[str] = None - """The ID of the key configuration object for workspace storage. 
This parameter is available for - updating both failed and running workspaces.""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = 
self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceRequest: - """Deserializes the UpdateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_connectivity_config_id=d.get("network_connectivity_config_id", None), - network_id=d.get("network_id", None), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class UpsertPrivateAccessSettingsRequest: - private_access_settings_name: str - """The human-readable name of the private access settings object.""" - - region: str - """The cloud region for workspaces associated with this private access settings object.""" - - allowed_vpc_endpoint_ids: Optional[List[str]] = None - """An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when - registering the VPC endpoint configuration in your Databricks account. This is not the ID of the - VPC endpoint in AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. 
This is an allow list of VPC - endpoints that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via - public internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html""" - - private_access_level: Optional[PrivateAccessLevel] = None - """The private access level controls which VPC endpoints can connect to the UI or API of any - workspace that attaches this private access settings object. * `ACCOUNT` level access (the - default) allows only VPC endpoints that are registered in your Databricks account connect to - your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your - workspace. For details, see `allowed_vpc_endpoint_ids`.""" - - private_access_settings_id: Optional[str] = None - """Databricks Account API private access settings ID.""" - - public_access_enabled: Optional[bool] = None - """Determines if the workspace can be accessed over public internet. For fully private workspaces, - you can optionally specify `false`, but only if you implement both the front-end and the - back-end PrivateLink connections. 
Otherwise, specify `true`, which means that public access is - enabled.""" - - def as_dict(self) -> dict: - """Serializes the UpsertPrivateAccessSettingsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids] - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpsertPrivateAccessSettingsRequest: - """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary.""" - return cls( - allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None), - private_access_level=_enum(d, 
"private_access_level", PrivateAccessLevel), - private_access_settings_id=d.get("private_access_settings_id", None), - private_access_settings_name=d.get("private_access_settings_name", None), - public_access_enabled=d.get("public_access_enabled", None), - region=d.get("region", None), - ) - - @dataclass class VpcEndpoint: account_id: Optional[str] = None diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 23d7db40..0eae4245 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -842,145 +842,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CohereConfig: ) -@dataclass -class CreatePtEndpointRequest: - name: str - """The name of the serving endpoint. This field is required and must be unique across a Databricks - workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - - config: PtEndpointCoreConfig - """The core config of the serving endpoint.""" - - ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint.""" - - budget_policy_id: Optional[str] = None - """The budget policy associated with the endpoint.""" - - tags: Optional[List[EndpointTag]] = None - """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - - def as_dict(self) -> dict: - """Serializes the CreatePtEndpointRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePtEndpointRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePtEndpointRequest: - """Deserializes the CreatePtEndpointRequest from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", PtEndpointCoreConfig), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) - - -@dataclass -class CreateServingEndpoint: - name: str - """The name of the serving endpoint. This field is required and must be unique across a Databricks - workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - - ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned - throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only - support inference tables.""" - - budget_policy_id: Optional[str] = None - """The budget policy to be applied to the serving endpoint.""" - - config: Optional[EndpointCoreConfigInput] = None - """The core config of the serving endpoint.""" - - rate_limits: Optional[List[RateLimit]] = None - """Rate limits to be applied to the serving endpoint. 
NOTE: this field is deprecated, please use AI - Gateway to manage rate limits.""" - - route_optimized: Optional[bool] = None - """Enable route optimization for the serving endpoint.""" - - tags: Optional[List[EndpointTag]] = None - """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - - def as_dict(self) -> dict: - """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateServingEndpoint: - """Deserializes the CreateServingEndpoint from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", EndpointCoreConfigInput), - name=d.get("name", None), - 
rate_limits=_repeated_dict(d, "rate_limits", RateLimit), - route_optimized=d.get("route_optimized", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) - - @dataclass class CustomProviderConfig: """Configs needed to create a custom provider model route.""" @@ -1583,76 +1444,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExportMetricsResponse: return cls(contents=d.get("contents", None)) -@dataclass -class ExternalFunctionRequest: - """Simple Proto message for testing""" - - connection_name: str - """The connection name to use. This is required to identify the external connection.""" - - method: ExternalFunctionRequestHttpMethod - """The HTTP method to use (e.g., 'GET', 'POST').""" - - path: str - """The relative path for the API endpoint. This is required.""" - - headers: Optional[str] = None - """Additional headers for the request. If not provided, only auth headers from connections would be - passed.""" - - json: Optional[str] = None - """The JSON payload to send in the request body.""" - - params: Optional[str] = None - """Query parameters for the request.""" - - def as_dict(self) -> dict: - """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - if self.method is not None: - body["method"] = self.method.value - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - 
if self.method is not None: - body["method"] = self.method - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalFunctionRequest: - """Deserializes the ExternalFunctionRequest from a dictionary.""" - return cls( - connection_name=d.get("connection_name", None), - headers=d.get("headers", None), - json=d.get("json", None), - method=_enum(d, "method", ExternalFunctionRequestHttpMethod), - params=d.get("params", None), - path=d.get("path", None), - ) - - class ExternalFunctionRequestHttpMethod(Enum): DELETE = "DELETE" @@ -2273,49 +2064,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PaLmConfig: ) -@dataclass -class PatchServingEndpointTags: - add_tags: Optional[List[EndpointTag]] = None - """List of endpoint tags to add""" - - delete_tags: Optional[List[str]] = None - """List of tag keys to delete""" - - name: Optional[str] = None - """The name of the serving endpoint who's tags to patch. 
This field is required.""" - - def as_dict(self) -> dict: - """Serializes the PatchServingEndpointTags into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.add_tags: - body["add_tags"] = [v.as_dict() for v in self.add_tags] - if self.delete_tags: - body["delete_tags"] = [v for v in self.delete_tags] - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes.""" - body = {} - if self.add_tags: - body["add_tags"] = self.add_tags - if self.delete_tags: - body["delete_tags"] = self.delete_tags - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchServingEndpointTags: - """Deserializes the PatchServingEndpointTags from a dictionary.""" - return cls( - add_tags=_repeated_dict(d, "add_tags", EndpointTag), - delete_tags=d.get("delete_tags", None), - name=d.get("name", None), - ) - - @dataclass class PayloadTable: name: Optional[str] = None @@ -2442,77 +2190,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PtServedModel: ) -@dataclass -class PutAiGatewayRequest: - fallback_config: Optional[FallbackConfig] = None - """Configuration for traffic fallback which auto fallbacks to other served entities if the request - to a served entity fails with certain error codes, to increase availability.""" - - guardrails: Optional[AiGatewayGuardrails] = None - """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and - responses.""" - - inference_table_config: Optional[AiGatewayInferenceTableConfig] = None - """Configuration for payload logging using inference tables. Use these tables to monitor and audit - data being sent to and received from model APIs and to improve model quality.""" - - name: Optional[str] = None - """The name of the serving endpoint whose AI Gateway is being updated. 
This field is required.""" - - rate_limits: Optional[List[AiGatewayRateLimit]] = None - """Configuration for rate limits which can be set to limit endpoint traffic.""" - - usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None - """Configuration to enable usage tracking using system tables. These tables allow you to monitor - operational usage on endpoints and their associated costs.""" - - def as_dict(self) -> dict: - """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config.as_dict() - if self.guardrails: - body["guardrails"] = self.guardrails.as_dict() - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config - if self.guardrails: - body["guardrails"] = self.guardrails - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayRequest: - """Deserializes the PutAiGatewayRequest from a dictionary.""" - return cls( - fallback_config=_from_dict(d, "fallback_config", FallbackConfig), - guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), - 
inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), - name=d.get("name", None), - rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), - usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), - ) - - @dataclass class PutAiGatewayResponse: fallback_config: Optional[FallbackConfig] = None @@ -2576,38 +2253,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayResponse: ) -@dataclass -class PutRequest: - name: Optional[str] = None - """The name of the serving endpoint whose rate limits are being updated. This field is required.""" - - rate_limits: Optional[List[RateLimit]] = None - """The list of endpoint rate limits.""" - - def as_dict(self) -> dict: - """Serializes the PutRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutRequest: - """Deserializes the PutRequest from a dictionary.""" - return cls(name=d.get("name", None), rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) - - @dataclass class PutResponse: rate_limits: Optional[List[RateLimit]] = None @@ -2633,153 +2278,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutResponse: return cls(rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) -@dataclass -class QueryEndpointInput: - dataframe_records: Optional[List[Any]] = None - """Pandas Dataframe input in the records orientation.""" - - dataframe_split: Optional[DataframeSplitInput] = None - """Pandas Dataframe input in the split orientation.""" - 
- extra_params: Optional[Dict[str, str]] = None - """The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & - foundation model__ serving endpoints. This is a map of strings and should only be used with - other external/foundation model query fields.""" - - input: Optional[Any] = None - """The input string (or array of strings) field used ONLY for __embeddings external & foundation - model__ serving endpoints and is the only field (along with extra_params if needed) used by - embeddings queries.""" - - inputs: Optional[Any] = None - """Tensor-based input in columnar format.""" - - instances: Optional[List[Any]] = None - """Tensor-based input in row format.""" - - max_tokens: Optional[int] = None - """The max tokens field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. This is an integer and should only be used with other chat/completions query - fields.""" - - messages: Optional[List[ChatMessage]] = None - """The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is - a map of strings and should only be used with other chat query fields.""" - - n: Optional[int] = None - """The n (number of candidates) field used ONLY for __completions__ and __chat external & - foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and - should only be used with other chat/completions query fields.""" - - name: Optional[str] = None - """The name of the serving endpoint. This field is required.""" - - prompt: Optional[Any] = None - """The prompt string (or array of strings) field used ONLY for __completions external & foundation - model__ serving endpoints and should only be used with other completions query fields.""" - - stop: Optional[List[str]] = None - """The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. 
This is a list of strings and should only be used with other chat/completions - query fields.""" - - stream: Optional[bool] = None - """The stream field used ONLY for __completions__ and __chat external & foundation model__ serving - endpoints. This is a boolean defaulting to false and should only be used with other - chat/completions query fields.""" - - temperature: Optional[float] = None - """The temperature field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be - used with other chat/completions query fields.""" - - def as_dict(self) -> dict: - """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dataframe_records: - body["dataframe_records"] = [v for v in self.dataframe_records] - if self.dataframe_split: - body["dataframe_split"] = self.dataframe_split.as_dict() - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = [v for v in self.instances] - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = [v.as_dict() for v in self.messages] - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = [v for v in self.stop] - if self.stream is not None: - body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dataframe_records: - body["dataframe_records"] = self.dataframe_records - if self.dataframe_split: - body["dataframe_split"] = 
self.dataframe_split - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = self.instances - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = self.messages - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = self.stop - if self.stream is not None: - body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointInput: - """Deserializes the QueryEndpointInput from a dictionary.""" - return cls( - dataframe_records=d.get("dataframe_records", None), - dataframe_split=_from_dict(d, "dataframe_split", DataframeSplitInput), - extra_params=d.get("extra_params", None), - input=d.get("input", None), - inputs=d.get("inputs", None), - instances=d.get("instances", None), - max_tokens=d.get("max_tokens", None), - messages=_repeated_dict(d, "messages", ChatMessage), - n=d.get("n", None), - name=d.get("name", None), - prompt=d.get("prompt", None), - stop=d.get("stop", None), - stream=d.get("stream", None), - temperature=d.get("temperature", None), - ) - - @dataclass class QueryEndpointResponse: choices: Optional[List[V1ResponseChoiceElement]] = None @@ -4295,40 +3793,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsDescription: ) -@dataclass -class ServingEndpointPermissionsRequest: - access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None - - serving_endpoint_id: Optional[str] = None - """The serving endpoint for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ServingEndpointPermissionsRequest into a dictionary suitable for use 
as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsRequest: - """Deserializes the ServingEndpointPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlRequest), - serving_endpoint_id=d.get("serving_endpoint_id", None), - ) - - class ServingModelWorkloadType(Enum): """Please keep this in sync with with workload types in InferenceEndpointEntities.scala""" @@ -4364,37 +3828,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, "routes", Route)) -@dataclass -class UpdateProvisionedThroughputEndpointConfigRequest: - config: PtEndpointCoreConfig - - name: Optional[str] = None - """The name of the pt endpoint to update. 
This field is required.""" - - def as_dict(self) -> dict: - """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProvisionedThroughputEndpointConfigRequest: - """Deserializes the UpdateProvisionedThroughputEndpointConfigRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", PtEndpointCoreConfig), name=d.get("name", None)) - - @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 6d1318a2..f48af893 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -737,53 +737,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Config: ) -@dataclass -class CreateIpAccessList: - """Details required to configure a block list or allow list.""" - - label: str - """Label for the IP access list. This **cannot** be empty.""" - - list_type: ListType - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - - ip_addresses: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the CreateIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessList: - """Deserializes the CreateIpAccessList from a dictionary.""" - return cls( - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - @dataclass class CreateIpAccessListResponse: """An IP access list was successfully created.""" @@ -848,83 +801,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkConnectivityConfiguration: return cls(name=d.get("name", None), region=d.get("region", None)) -@dataclass -class CreateNotificationDestinationRequest: - config: Optional[Config] = None - """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" - - display_name: Optional[str] = None - """The display name for the notification destination.""" - - def as_dict(self) -> dict: - """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateNotificationDestinationRequest: - """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", Config), display_name=d.get("display_name", None)) - - -@dataclass -class CreateOboTokenRequest: - """Configuration details for creating on-behalf tokens.""" - - application_id: str - """Application ID of the service principal.""" - - comment: Optional[str] = None - """Comment that describes the purpose of the token.""" - - lifetime_seconds: Optional[int] = None - """The number of seconds before the token expires.""" - - def as_dict(self) -> dict: - """Serializes the CreateOboTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.application_id is not 
None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenRequest: - """Deserializes the CreateOboTokenRequest from a dictionary.""" - return cls( - application_id=d.get("application_id", None), - comment=d.get("comment", None), - lifetime_seconds=d.get("lifetime_seconds", None), - ) - - @dataclass class CreateOboTokenResponse: """An on-behalf token was successfully created for the service principal.""" @@ -1031,40 +907,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePrivateEndpointRule: ) -@dataclass -class CreateTokenRequest: - comment: Optional[str] = None - """Optional description to attach to the token.""" - - lifetime_seconds: Optional[int] = None - """The lifetime of the token, in seconds. - - If the lifetime is not specified, this token remains valid indefinitely.""" - - def as_dict(self) -> dict: - """Serializes the CreateTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTokenRequest: - """Deserializes the CreateTokenRequest from a dictionary.""" - return cls(comment=d.get("comment", None), lifetime_seconds=d.get("lifetime_seconds", None)) - - @dataclass class CreateTokenResponse: token_info: Optional[PublicTokenInfo] = None @@ -2973,51 
+2815,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExchangeToken: ) -@dataclass -class ExchangeTokenRequest: - """Exchange a token with the IdP""" - - partition_id: PartitionId - """The partition of Credentials store""" - - token_type: List[TokenType] - """A list of token types being requested""" - - scopes: List[str] - """Array of scopes for the token request.""" - - def as_dict(self) -> dict: - """Serializes the ExchangeTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.partition_id: - body["partitionId"] = self.partition_id.as_dict() - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_type: - body["tokenType"] = [v.value for v in self.token_type] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.partition_id: - body["partitionId"] = self.partition_id - if self.scopes: - body["scopes"] = self.scopes - if self.token_type: - body["tokenType"] = self.token_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenRequest: - """Deserializes the ExchangeTokenRequest from a dictionary.""" - return cls( - partition_id=_from_dict(d, "partitionId", PartitionId), - scopes=d.get("scopes", None), - token_type=_repeated_enum(d, "tokenType", TokenType), - ) - - @dataclass class ExchangeTokenResponse: """Exhanged tokens were successfully returned.""" @@ -4671,69 +4468,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: ) -@dataclass -class ReplaceIpAccessList: - """Details required to replace an IP access list.""" - - label: str - """Label for the IP access list. This **cannot** be empty.""" - - list_type: ListType - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - - enabled: bool - """Specifies whether this IP access list is enabled.""" - - ip_access_list_id: Optional[str] = None - """The ID for the corresponding IP access list""" - - ip_addresses: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the ReplaceIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceIpAccessList: - """Deserializes the ReplaceIpAccessList from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - @dataclass class ReplaceResponse: def as_dict(self) -> dict: @@ -4832,31 +4566,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsSetting: ) -@dataclass -class RevokeTokenRequest: - token_id: str - """The ID of the token to be revoked.""" - - def 
as_dict(self) -> dict: - """Serializes the RevokeTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.token_id is not None: - body["token_id"] = self.token_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.token_id is not None: - body["token_id"] = self.token_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenRequest: - """Deserializes the RevokeTokenRequest from a dictionary.""" - return cls(token_id=d.get("token_id", None)) - - @dataclass class RevokeTokenResponse: def as_dict(self) -> dict: @@ -5321,30 +5030,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsDescription: ) -@dataclass -class TokenPermissionsRequest: - access_control_list: Optional[List[TokenAccessControlRequest]] = None - - def as_dict(self) -> dict: - """Serializes the TokenPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsRequest: - """Deserializes the TokenPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlRequest)) - - class TokenType(Enum): """The type of token request. 
As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.""" @@ -5358,1214 +5043,63 @@ class TokenType(Enum): @dataclass -class UpdateAccountIpAccessEnableRequest: - """Details required to update a setting.""" +class UpdatePrivateEndpointRule: + """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + portal after initialization.""" - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" + domain_names: Optional[List[str]] = None + """Only used by private endpoints to customer-managed private endpoint services. + + Domain names of target private link service. When updating this field, the full list of target + domain_names must be specified.""" - setting: AccountIpAccessEnable + enabled: Optional[bool] = None + """Only used by private endpoints towards an AWS S3 service. + + Update this field to activate/deactivate this private endpoint to allow egress access from + serverless compute resources.""" - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. + resource_names: Optional[List[str]] = None + """Only used by private endpoints towards AWS S3 service. - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" + The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names + must be in the same region as the NCC/endpoint service. When updating this field, we perform + full update on this field. 
Please ensure a full list of desired resource_names is provided.""" def as_dict(self) -> dict: - """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdatePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.enabled is not None: + body["enabled"] = self.enabled + if self.resource_names: + body["resource_names"] = [v for v in self.resource_names] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdatePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.domain_names: + body["domain_names"] = self.domain_names + if self.enabled is not None: + body["enabled"] = self.enabled + if self.resource_names: + body["resource_names"] = self.resource_names return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountIpAccessEnableRequest: - """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: + """Deserializes the UpdatePrivateEndpointRule from a dictionary.""" return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AccountIpAccessEnable), + domain_names=d.get("domain_names", None), + 
enabled=d.get("enabled", None), + resource_names=d.get("resource_names", None), ) @dataclass -class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AibiDashboardEmbeddingAccessPolicySetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: - """Deserializes the 
UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingAccessPolicySetting), - ) - - -@dataclass -class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AibiDashboardEmbeddingApprovedDomainsSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: - """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingApprovedDomainsSetting), - ) - - -@dataclass -class UpdateAutomaticClusterUpdateSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AutomaticClusterUpdateSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAutomaticClusterUpdateSettingRequest: - """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AutomaticClusterUpdateSetting), - ) - - -@dataclass -class UpdateComplianceSecurityProfileSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: ComplianceSecurityProfileSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). 
- The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateComplianceSecurityProfileSettingRequest: - """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", ComplianceSecurityProfileSetting), - ) - - -@dataclass -class UpdateCspEnablementAccountSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. 
Added for AIP compliance.""" - - setting: CspEnablementAccountSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCspEnablementAccountSettingRequest: - """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", CspEnablementAccountSetting), - ) - - -@dataclass -class UpdateDashboardEmailSubscriptionsRequest: - """Details required to update a setting.""" - - allow_missing: 
bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DashboardEmailSubscriptions - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDashboardEmailSubscriptionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDashboardEmailSubscriptionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDashboardEmailSubscriptionsRequest: - """Deserializes the UpdateDashboardEmailSubscriptionsRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DashboardEmailSubscriptions), - ) - - -@dataclass -class UpdateDefaultNamespaceSettingRequest: - 
"""Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DefaultNamespaceSetting - """This represents the setting configuration for the default namespace in the Databricks workspace. - Setting the default catalog for the workspace determines the catalog that is used when queries - do not reference a fully qualified 3 level name. For example, if the default catalog is set to - 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - applies when using Unity Catalog-enabled compute.""" - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDefaultNamespaceSettingRequest: - """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DefaultNamespaceSetting), - ) - - -@dataclass -class UpdateDisableLegacyAccessRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyAccess - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. 
- - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyAccessRequest: - """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyAccess), - ) - - -@dataclass -class UpdateDisableLegacyDbfsRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyDbfs - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyDbfsRequest: - """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyDbfs), - ) - - -@dataclass -class UpdateDisableLegacyFeaturesRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyFeatures - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyFeaturesRequest: - """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyFeatures), - ) - - -@dataclass -class UpdateEnableExportNotebookRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnableExportNotebook - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableExportNotebookRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableExportNotebookRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableExportNotebookRequest: - """Deserializes the UpdateEnableExportNotebookRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableExportNotebook), - ) - - -@dataclass -class UpdateEnableNotebookTableClipboardRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnableNotebookTableClipboard - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). 
- The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableNotebookTableClipboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableNotebookTableClipboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableNotebookTableClipboardRequest: - """Deserializes the UpdateEnableNotebookTableClipboardRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableNotebookTableClipboard), - ) - - -@dataclass -class UpdateEnableResultsDownloadingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. 
Added for AIP compliance.""" - - setting: EnableResultsDownloading - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableResultsDownloadingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableResultsDownloadingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableResultsDownloadingRequest: - """Deserializes the UpdateEnableResultsDownloadingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableResultsDownloading), - ) - - -@dataclass -class UpdateEnhancedSecurityMonitoringSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - 
"""This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnhancedSecurityMonitoringSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: - """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnhancedSecurityMonitoringSetting), - ) - - -@dataclass -class 
UpdateEsmEnablementAccountSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EsmEnablementAccountSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEsmEnablementAccountSettingRequest: - """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, 
"setting", EsmEnablementAccountSetting), - ) - - -@dataclass -class UpdateIpAccessList: - """Details required to update an IP access list.""" - - enabled: Optional[bool] = None - """Specifies whether this IP access list is enabled.""" - - ip_access_list_id: Optional[str] = None - """The ID for the corresponding IP access list""" - - ip_addresses: Optional[List[str]] = None - - label: Optional[str] = None - """Label for the IP access list. This **cannot** be empty.""" - - list_type: Optional[ListType] = None - """Type of IP access list. Valid values are as follows and are case-sensitive: - - * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or - range. IP addresses in the block list are excluded even if they are included in an allow list.""" - - def as_dict(self) -> dict: - """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateIpAccessList: - """Deserializes the UpdateIpAccessList from a dictionary.""" - return cls( - 
enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredAccountRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredAccount - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
UpdateLlmProxyPartnerPoweredAccountRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredAccountRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredAccount), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredEnforceRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredEnforce - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredEnforceRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredEnforceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredEnforce), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredWorkspaceRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredWorkspace - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredWorkspaceRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredWorkspace), - ) - - -@dataclass -class UpdateNotificationDestinationRequest: - config: Optional[Config] = None - """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" - - display_name: Optional[str] = None - """The display name for the notification destination.""" - - id: Optional[str] = None - """UUID identifying notification destination.""" - - def as_dict(self) -> dict: - """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateNotificationDestinationRequest: - """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" - return cls( - config=_from_dict(d, "config", Config), display_name=d.get("display_name", None), id=d.get("id", None) - ) - - -@dataclass -class UpdatePersonalComputeSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: PersonalComputeSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalComputeSettingRequest: - """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", PersonalComputeSetting), - ) - - -@dataclass -class UpdatePrivateEndpointRule: - """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure - portal after initialization.""" - - domain_names: Optional[List[str]] = None - """Only used by private endpoints to customer-managed private endpoint services. - - Domain names of target private link service. When updating this field, the full list of target - domain_names must be specified.""" - - enabled: Optional[bool] = None - """Only used by private endpoints towards an AWS S3 service. 
- - Update this field to activate/deactivate this private endpoint to allow egress access from - serverless compute resources.""" - - resource_names: Optional[List[str]] = None - """Only used by private endpoints towards AWS S3 service. - - The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names - must be in the same region as the NCC/endpoint service. When updating this field, we perform - full update on this field. Please ensure a full list of desired resource_names is provided.""" - - def as_dict(self) -> dict: - """Serializes the UpdatePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.domain_names: - body["domain_names"] = [v for v in self.domain_names] - if self.enabled is not None: - body["enabled"] = self.enabled - if self.resource_names: - body["resource_names"] = [v for v in self.resource_names] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" - body = {} - if self.domain_names: - body["domain_names"] = self.domain_names - if self.enabled is not None: - body["enabled"] = self.enabled - if self.resource_names: - body["resource_names"] = self.resource_names - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: - """Deserializes the UpdatePrivateEndpointRule from a dictionary.""" - return cls( - domain_names=d.get("domain_names", None), - enabled=d.get("enabled", None), - resource_names=d.get("resource_names", None), - ) - - -@dataclass -class UpdateResponse: +class UpdateResponse: def as_dict(self) -> dict: """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -6582,110 +5116,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateRestrictWorkspaceAdminsSettingRequest: - """Details required to update a setting.""" - 
- allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: RestrictWorkspaceAdminsSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRestrictWorkspaceAdminsSettingRequest: - """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", RestrictWorkspaceAdminsSetting), - ) - - -@dataclass -class 
UpdateSqlResultsDownloadRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: SqlResultsDownload - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateSqlResultsDownloadRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateSqlResultsDownloadRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSqlResultsDownloadRequest: - """Deserializes the UpdateSqlResultsDownloadRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", SqlResultsDownload), - ) - - WorkspaceConf = 
Dict[str, str] diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index ca52c3a9..111168c2 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -55,193 +55,6 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" -@dataclass -class CreateProvider: - name: str - """The name of the Provider.""" - - authentication_type: AuthenticationType - """The delta sharing authentication type.""" - - comment: Optional[str] = None - """Description about the provider.""" - - recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN**, - **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - - def as_dict(self) -> dict: - """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateProvider into a shallow dictionary of its immediate attributes.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateProvider: - """Deserializes the CreateProvider from a dictionary.""" - return cls( - authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - name=d.get("name", None), - 
recipient_profile_str=d.get("recipient_profile_str", None), - ) - - -@dataclass -class CreateRecipient: - name: str - """Name of Recipient.""" - - authentication_type: AuthenticationType - """The delta sharing authentication type.""" - - comment: Optional[str] = None - """Description about the recipient.""" - - data_recipient_global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore id provided by the data recipient. This field is only present - when the __authentication_type__ is **DATABRICKS**. The identifier is of format - __cloud__:__region__:__metastore-uuid__.""" - - expiration_time: Optional[int] = None - """Expiration timestamp of the token, in epoch milliseconds.""" - - ip_access_list: Optional[IpAccessList] = None - """IP Access List""" - - owner: Optional[str] = None - """Username of the recipient owner.""" - - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs. When provided in update request, the - specified properties will override the existing properties. To add and remove properties, one - would need to perform a read-modify-write.""" - - sharing_code: Optional[str] = None - """The one-time sharing code provided by the data recipient. 
This field is only present when the - __authentication_type__ is **DATABRICKS**.""" - - def as_dict(self) -> dict: - """Serializes the CreateRecipient into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRecipient: - 
"""Deserializes the CreateRecipient from a dictionary.""" - return cls( - authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), - name=d.get("name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - sharing_code=d.get("sharing_code", None), - ) - - -@dataclass -class CreateShare: - name: str - """Name of the share.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - storage_root: Optional[str] = None - """Storage root URL for the share.""" - - def as_dict(self) -> dict: - """Serializes the CreateShare into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateShare into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateShare: - """Deserializes the CreateShare from a dictionary.""" - return cls(comment=d.get("comment", None), name=d.get("name", None), storage_root=d.get("storage_root", None)) - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -1979,42 +1792,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RetrieveTokenResponse: ) -@dataclass -class RotateRecipientToken: - existing_token_expire_in_seconds: int - """The 
expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of - existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to - expire the existing token immediately, negative number will return an error.""" - - name: Optional[str] = None - """The name of the Recipient.""" - - def as_dict(self) -> dict: - """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes.""" - body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RotateRecipientToken: - """Deserializes the RotateRecipientToken from a dictionary.""" - return cls( - existing_token_expire_in_seconds=d.get("existing_token_expire_in_seconds", None), name=d.get("name", None) - ) - - @dataclass class SecurablePropertiesKvPairs: """An object with __properties__ containing map of key-value properties attached to the securable.""" @@ -2557,253 +2334,6 @@ class TableInternalAttributesSharedTableType(Enum): VIEW = "VIEW" -@dataclass -class UpdateProvider: - comment: Optional[str] = None - """Description about the provider.""" - - name: Optional[str] = None - """Name of the provider.""" - - new_name: Optional[str] = None - """New name for the provider.""" - - owner: Optional[str] = None - """Username of Provider owner.""" - - recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN**, - 
**OAUTH_CLIENT_CREDENTIALS** or not provided.""" - - def as_dict(self) -> dict: - """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProvider: - """Deserializes the UpdateProvider from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - recipient_profile_str=d.get("recipient_profile_str", None), - ) - - -@dataclass -class UpdateRecipient: - comment: Optional[str] = None - """Description about the recipient.""" - - expiration_time: Optional[int] = None - """Expiration timestamp of the token, in epoch milliseconds.""" - - ip_access_list: Optional[IpAccessList] = None - """IP Access List""" - - name: Optional[str] = None - """Name of the recipient.""" - - new_name: Optional[str] = None - """New name for the recipient. 
.""" - - owner: Optional[str] = None - """Username of the recipient owner.""" - - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs. When provided in update request, the - specified properties will override the existing properties. To add and remove properties, one - would need to perform a read-modify-write.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRecipient: - """Deserializes the UpdateRecipient from a dictionary.""" - return cls( - comment=d.get("comment", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), 
- name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - ) - - -@dataclass -class UpdateShare: - comment: Optional[str] = None - """User-provided free-form text description.""" - - name: Optional[str] = None - """The name of the share.""" - - new_name: Optional[str] = None - """New name for the share.""" - - owner: Optional[str] = None - """Username of current owner of share.""" - - storage_root: Optional[str] = None - """Storage root URL for the share.""" - - updates: Optional[List[SharedDataObjectUpdate]] = None - """Array of shared data object updates.""" - - def as_dict(self) -> dict: - """Serializes the UpdateShare into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = [v.as_dict() for v in self.updates] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateShare into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = self.updates - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateShare: - """Deserializes the UpdateShare from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - 
new_name=d.get("new_name", None), - owner=d.get("owner", None), - storage_root=d.get("storage_root", None), - updates=_repeated_dict(d, "updates", SharedDataObjectUpdate), - ) - - -@dataclass -class UpdateSharePermissions: - changes: Optional[List[PermissionsChange]] = None - """Array of permissions change objects.""" - - name: Optional[str] = None - """The name of the share.""" - - omit_permissions_list: Optional[bool] = None - """Optional. Whether to return the latest permissions list of the share in the response.""" - - def as_dict(self) -> dict: - """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.changes: - body["changes"] = self.changes - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissions: - """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - name=d.get("name", None), - omit_permissions_list=d.get("omit_permissions_list", None), - ) - - @dataclass class UpdateSharePermissionsResponse: privilege_assignments: Optional[List[PrivilegeAssignment]] = None diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 9e7eb240..ce7e09d9 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1371,101 +1371,6 @@ class ComparisonOperator(Enum): NOT_EQUAL = "NOT_EQUAL" 
-@dataclass -class CreateAlert: - name: str - """Name of the alert.""" - - options: AlertOptions - """Alert configuration options.""" - - query_id: str - """Query ID.""" - - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" - - rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. If `null`, alert will never be triggered again.""" - - def as_dict(self) -> dict: - """Serializes the CreateAlert into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateAlert into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateAlert: - """Deserializes the CreateAlert from a dictionary.""" - return cls( - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - parent=d.get("parent", None), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) - - -@dataclass -class CreateAlertRequest: - alert: Optional[CreateAlertRequestAlert] = None - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. 
Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - def as_dict(self) -> dict: - """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequest: - """Deserializes the CreateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", CreateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - ) - - @dataclass class CreateAlertRequestAlert: condition: Optional[AlertCondition] = None @@ -1555,41 +1460,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequestAlert: ) -@dataclass -class CreateQueryRequest: - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve query display name conflicts. 
Otherwise, fail the request if the - query's display name conflicts with an existing query's display name.""" - - query: Optional[CreateQueryRequestQuery] = None - - def as_dict(self) -> dict: - """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequest: - """Deserializes the CreateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - query=_from_dict(d, "query", CreateQueryRequestQuery), - ) - - @dataclass class CreateQueryRequestQuery: apply_auto_limit: Optional[bool] = None @@ -1696,92 +1566,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequestQuery: ) -@dataclass -class CreateQueryVisualizationsLegacyRequest: - """Add visualization to a query""" - - query_id: str - """The identifier returned by :method:queries/create""" - - type: str - """The type of visualization: chart, table, pivot table, and so on.""" - - options: Any - """The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON.""" - - description: Optional[str] = None - """A short description of this visualization. 
This is not displayed in the UI.""" - - name: Optional[str] = None - """The name of the visualization that appears on dashboards and the query screen.""" - - def as_dict(self) -> dict: - """Serializes the CreateQueryVisualizationsLegacyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.type is not None: - body["type"] = self.type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateQueryVisualizationsLegacyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateQueryVisualizationsLegacyRequest: - """Deserializes the CreateQueryVisualizationsLegacyRequest from a dictionary.""" - return cls( - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - query_id=d.get("query_id", None), - type=d.get("type", None), - ) - - -@dataclass -class CreateVisualizationRequest: - visualization: Optional[CreateVisualizationRequestVisualization] = None - - def as_dict(self) -> dict: - """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.visualization: - body["visualization"] = self.visualization.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate 
attributes.""" - body = {} - if self.visualization: - body["visualization"] = self.visualization - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequest: - """Deserializes the CreateVisualizationRequest from a dictionary.""" - return cls(visualization=_from_dict(d, "visualization", CreateVisualizationRequestVisualization)) - - @dataclass class CreateVisualizationRequestVisualization: display_name: Optional[str] = None @@ -1843,159 +1627,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization ) -@dataclass -class CreateWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for - non-serverless warehouses - 0 indicates no autostop. - - Defaults to 120 mins""" - - channel: Optional[Channel] = None - """Channel Details""" - - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. If you want to increase the number of concurrent queries, - please tune max_num_clusters. - - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" - - creator_name: Optional[str] = None - """warehouse creator name""" - - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. - - Defaults to false.""" - - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute""" - - instance_profile_arn: Optional[str] = None - """Deprecated. 
Instance profile used to pass IAM role to the cluster""" - - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" - - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" - - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - - spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" - - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. - - Supported values: - Number of tags < 45.""" - - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" - - def as_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - 
body["enable_serverless_compute"] = self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseRequest: - """Deserializes the CreateWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", CreateWarehouseRequestWarehouseType), - ) - - class CreateWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" @@ -2030,73 +1661,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseResponse: return cls(id=d.get("id", None)) -@dataclass -class CreateWidget: - dashboard_id: str - """Dashboard ID returned by :method:dashboards/create.""" - - options: WidgetOptions - - width: int - """Width of a widget""" - - id: Optional[str] = None - """Widget ID returned by :method:dashboardwidgets/create""" - - text: Optional[str] = None - """If this is a textbox widget, the application displays this text. This field is ignored if the - widget contains a visualization in the `visualization` field.""" - - visualization_id: Optional[str] = None - """Query Vizualization ID returned by :method:queryvisualizations/create.""" - - def as_dict(self) -> dict: - """Serializes the CreateWidget into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options.as_dict() - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateWidget into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWidget: 
- """Deserializes the CreateWidget from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - id=d.get("id", None), - options=_from_dict(d, "options", WidgetOptions), - text=d.get("text", None), - visualization_id=d.get("visualization_id", None), - width=d.get("width", None), - ) - - @dataclass class CronSchedule: pause_status: Optional[SchedulePauseStatus] = None @@ -2284,169 +1848,52 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Dashboard: """Deserializes the Dashboard from a dictionary.""" return cls( - can_edit=d.get("can_edit", None), - created_at=d.get("created_at", None), - dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), - id=d.get("id", None), - is_archived=d.get("is_archived", None), - is_draft=d.get("is_draft", None), - is_favorite=d.get("is_favorite", None), - name=d.get("name", None), - options=_from_dict(d, "options", DashboardOptions), - parent=d.get("parent", None), - permission_tier=_enum(d, "permission_tier", PermissionLevel), - slug=d.get("slug", None), - tags=d.get("tags", None), - updated_at=d.get("updated_at", None), - user=_from_dict(d, "user", User), - user_id=d.get("user_id", None), - widgets=_repeated_dict(d, "widgets", Widget), - ) - - -@dataclass -class DashboardEditContent: - dashboard_id: Optional[str] = None - - name: Optional[str] = None - """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DashboardEditContent: - """Deserializes the DashboardEditContent from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - name=d.get("name", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - - -@dataclass -class DashboardOptions: - moved_to_trash_at: Optional[str] = None - """The timestamp when this dashboard was moved to trash. Only present when the `is_archived` - property is `true`. 
Trashed items are deleted after thirty days.""" - - def as_dict(self) -> dict: - """Serializes the DashboardOptions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DashboardOptions: - """Deserializes the DashboardOptions from a dictionary.""" - return cls(moved_to_trash_at=d.get("moved_to_trash_at", None)) - - -@dataclass -class DashboardPostContent: - name: str - """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - - dashboard_filters_enabled: Optional[bool] = None - """Indicates whether the dashboard filters are enabled""" - - is_favorite: Optional[bool] = None - """Indicates whether this dashboard object should appear in the current user's favorites list.""" - - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the DashboardPostContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DashboardPostContent: - """Deserializes the DashboardPostContent from a dictionary.""" - return cls( + can_edit=d.get("can_edit", None), + created_at=d.get("created_at", None), dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), + id=d.get("id", None), + is_archived=d.get("is_archived", None), + is_draft=d.get("is_draft", None), is_favorite=d.get("is_favorite", None), name=d.get("name", None), + options=_from_dict(d, "options", DashboardOptions), parent=d.get("parent", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), + permission_tier=_enum(d, 
"permission_tier", PermissionLevel), + slug=d.get("slug", None), tags=d.get("tags", None), + updated_at=d.get("updated_at", None), + user=_from_dict(d, "user", User), + user_id=d.get("user_id", None), + widgets=_repeated_dict(d, "widgets", Widget), ) +@dataclass +class DashboardOptions: + moved_to_trash_at: Optional[str] = None + """The timestamp when this dashboard was moved to trash. Only present when the `is_archived` + property is `true`. Trashed items are deleted after thirty days.""" + + def as_dict(self) -> dict: + """Serializes the DashboardOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DashboardOptions: + """Deserializes the DashboardOptions from a dictionary.""" + return cls(moved_to_trash_at=d.get("moved_to_trash_at", None)) + + @dataclass class DataSource: """A JSON object representing a DBSQL data source / SQL warehouse.""" @@ -2729,239 +2176,20 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: - """Deserializes the DeleteWarehouseResponse from a dictionary.""" - return cls() - - -class Disposition(Enum): - - EXTERNAL_LINKS = "EXTERNAL_LINKS" - INLINE = "INLINE" - - -@dataclass -class EditAlert: - name: str - """Name of the alert.""" - - options: AlertOptions - """Alert configuration options.""" - - query_id: str - """Query ID.""" - - alert_id: Optional[str] = None - - 
rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. If `null`, alert will never be triggered again.""" - - def as_dict(self) -> dict: - """Serializes the EditAlert into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditAlert into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditAlert: - """Deserializes the EditAlert from a dictionary.""" - return cls( - alert_id=d.get("alert_id", None), - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) - - -@dataclass -class EditWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - - Defaults to 120 mins""" - - channel: Optional[Channel] = None - """Channel Details""" - - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. 
If you want to increase the number of concurrent queries, - please tune max_num_clusters. - - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" - - creator_name: Optional[str] = None - """warehouse creator name""" - - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. - - Defaults to false.""" - - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute.""" - - id: Optional[str] = None - """Required. Id of the warehouse to configure.""" - - instance_profile_arn: Optional[str] = None - """Deprecated. Instance profile used to pass IAM role to the cluster""" - - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" - - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" - - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - - spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" - - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. 
- - Supported values: - Number of tags < 45.""" - - warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" - - def as_dict(self) -> dict: - """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - 
body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseRequest: - """Deserializes the EditWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - id=d.get("id", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", EditWarehouseRequestWarehouseType), - ) + """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: + """Deserializes the 
DeleteWarehouseResponse from a dictionary.""" + return cls() + + +class Disposition(Enum): + + EXTERNAL_LINKS = "EXTERNAL_LINKS" + INLINE = "INLINE" class EditWarehouseRequestWarehouseType(Enum): @@ -3416,195 +2644,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EnumValue: ) -@dataclass -class ExecuteStatementRequest: - statement: str - """The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.""" - - warehouse_id: str - """Warehouse upon which to execute a statement. See also [What are SQL warehouses?] - - [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html""" - - byte_limit: Optional[int] = None - """Applies the given byte limit to the statement's result size. Byte counts are based on internal - data representations and might not match the final size in the requested `format`. If the result - was truncated due to the byte limit, then `truncated` in the response is set to `true`. When - using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` - is not explcitly set.""" - - catalog: Optional[str] = None - """Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. - - [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" - - disposition: Optional[Disposition] = None - - format: Optional[Format] = None - """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and - `CSV`. - - Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS` - disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition. - - When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of - values, where each value is either the *string representation* of a value, or `null`. 
For - example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM - range(3)` would look like this: - - ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - - When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result - contains compact JSON with no indentation or extra whitespace. - - When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result - will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - - When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be - a CSV according to [RFC 4180] standard. All the columns values will have *string representation* - similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the - first chunk in the result would contain a header row with column names. For example, the output - of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look - like this: - - ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - - [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format - [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180""" - - on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None - """When `wait_timeout > 0s`, the call will block up to the specified time. If the statement - execution doesn't finish within this time, `on_wait_timeout` determines whether the execution - should continue or be canceled. When set to `CONTINUE`, the statement execution continues - asynchronously and the call returns a statement ID which can be used for polling with - :method:statementexecution/getStatement. 
When set to `CANCEL`, the statement execution is - canceled and the call returns with a `CANCELED` state.""" - - parameters: Optional[List[StatementParameterListItem]] = None - """A list of parameters to pass into a SQL statement containing parameter markers. A parameter - consists of a name, a value, and optionally a type. To represent a NULL value, the `value` field - may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is - interpreted as a string. - - If the type is given, parameters will be checked for type correctness according to the given - type. A value is correct if the provided string can be converted to the requested type using the - `cast` function. The exact semantics are described in the section [`cast` function] of the SQL - language reference. - - For example, the following statement contains two parameters, `my_name` and `my_date`: - - SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - - The parameters can be passed in the request body as follows: - - { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", - "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": - "2020-01-01", "type": "DATE" } ] } - - Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL - Statement Execution API. - - Also see the section [Parameter markers] of the SQL language reference. 
- - [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html - [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - - row_limit: Optional[int] = None - """Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, - it also sets the `truncated` field in the response to indicate whether the result was trimmed - due to the limit or not.""" - - schema: Optional[str] = None - """Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - - [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html""" - - wait_timeout: Optional[str] = None - """The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be - set to 0 or to a value between 5 and 50. - - When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for - the execution to finish. In this case, the call returns directly with `PENDING` state and a - statement ID which can be used for polling with :method:statementexecution/getStatement. - - When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and - wait for the statement execution to finish. If the execution finishes within this time, the call - returns immediately with a manifest and result data (or a `FAILED` state in case of an execution - error). 
If the statement takes longer to execute, `on_wait_timeout` determines what should - happen after the timeout is reached.""" - - def as_dict(self) -> dict: - """Serializes the ExecuteStatementRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition.value - if self.format is not None: - body["format"] = self.format.value - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout.value - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition - if self.format is not None: - body["format"] = self.format - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout - if self.parameters: - body["parameters"] = self.parameters - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not 
None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExecuteStatementRequest: - """Deserializes the ExecuteStatementRequest from a dictionary.""" - return cls( - byte_limit=d.get("byte_limit", None), - catalog=d.get("catalog", None), - disposition=_enum(d, "disposition", Disposition), - format=_enum(d, "format", Format), - on_wait_timeout=_enum(d, "on_wait_timeout", ExecuteStatementRequestOnWaitTimeout), - parameters=_repeated_dict(d, "parameters", StatementParameterListItem), - row_limit=d.get("row_limit", None), - schema=d.get("schema", None), - statement=d.get("statement", None), - wait_timeout=d.get("wait_timeout", None), - warehouse_id=d.get("warehouse_id", None), - ) - - class ExecuteStatementRequestOnWaitTimeout(Enum): """When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution @@ -5629,93 +4668,6 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryBackedValue: ) -@dataclass -class QueryEditContent: - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" - - options: Optional[Any] = None - """Exclusively used for storing a list parameter definitions. A parameter is an object with - `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. 
It - can be overridden at runtime.""" - - query: Optional[str] = None - """The text of the query to be run.""" - - query_id: Optional[str] = None - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query - if self.query_id is not None: - body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query - if self.query_id is not None: - body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryEditContent: - """Deserializes the QueryEditContent from a dictionary.""" - return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", 
None), - name=d.get("name", None), - options=d.get("options", None), - query=d.get("query", None), - query_id=d.get("query_id", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - - @dataclass class QueryFilter: query_start_time_range: Optional[TimeRange] = None @@ -6388,98 +5340,10 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryParameter: date_value=_from_dict(d, "date_value", DateValue), enum_value=_from_dict(d, "enum_value", EnumValue), name=d.get("name", None), - numeric_value=_from_dict(d, "numeric_value", NumericValue), - query_backed_value=_from_dict(d, "query_backed_value", QueryBackedValue), - text_value=_from_dict(d, "text_value", TextValue), - title=d.get("title", None), - ) - - -@dataclass -class QueryPostContent: - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" - - options: Optional[Any] = None - """Exclusively used for storing a list parameter definitions. A parameter is an object with - `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It - can be overridden at runtime.""" - - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" - - query: Optional[str] = None - """The text of the query to be run.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query is not None: - body["query"] = self.query - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query is not None: - body["query"] = self.query - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryPostContent: - """Deserializes the QueryPostContent from a dictionary.""" - return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - parent=d.get("parent", None), - query=d.get("query", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), + numeric_value=_from_dict(d, 
"numeric_value", NumericValue), + query_backed_value=_from_dict(d, "query_backed_value", QueryBackedValue), + text_value=_from_dict(d, "text_value", TextValue), + title=d.get("title", None), ) @@ -6840,50 +5704,6 @@ class ServiceErrorCode(Enum): WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE" -@dataclass -class SetRequest: - """Set object ACL""" - - access_control_list: Optional[List[AccessControl]] = None - - object_id: Optional[str] = None - """Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.""" - - object_type: Optional[ObjectTypePlural] = None - """The type of object permission to set.""" - - def as_dict(self) -> dict: - """Serializes the SetRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetRequest: - """Deserializes the SetRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControl), - object_id=d.get("objectId", None), - object_type=_enum(d, "objectType", ObjectTypePlural), - ) - - @dataclass class SetResponse: access_control_list: Optional[List[AccessControl]] = None @@ -6926,102 +5746,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetResponse: ) -@dataclass -class 
SetWorkspaceWarehouseConfigRequest: - channel: Optional[Channel] = None - """Optional: Channel selection details""" - - config_param: Optional[RepeatedEndpointConfPairs] = None - """Deprecated: Use sql_configuration_parameters""" - - data_access_config: Optional[List[EndpointConfPair]] = None - """Spark confs for external hive metastore configuration JSON serialized size must be less than <= - 512K""" - - enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None - """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in - CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be - specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses - to be converted to another type. Used by frontend to save specific type availability in the - warehouse create and edit form UI.""" - - global_param: Optional[RepeatedEndpointConfPairs] = None - """Deprecated: Use sql_configuration_parameters""" - - google_service_account: Optional[str] = None - """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" - - instance_profile_arn: Optional[str] = None - """AWS Only: Instance profile used to pass IAM role to the cluster""" - - security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None - """Security policy for warehouses""" - - sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None - """SQL configuration parameters""" - - def as_dict(self) -> dict: - """Serializes the SetWorkspaceWarehouseConfigRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.channel: - body["channel"] = self.channel.as_dict() - if self.config_param: - body["config_param"] = self.config_param.as_dict() - if self.data_access_config: - body["data_access_config"] = [v.as_dict() for v in self.data_access_config] - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = 
[v.as_dict() for v in self.enabled_warehouse_types] - if self.global_param: - body["global_param"] = self.global_param.as_dict() - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy.value - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.channel: - body["channel"] = self.channel - if self.config_param: - body["config_param"] = self.config_param - if self.data_access_config: - body["data_access_config"] = self.data_access_config - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = self.enabled_warehouse_types - if self.global_param: - body["global_param"] = self.global_param - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigRequest: - """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary.""" - return cls( - channel=_from_dict(d, "channel", Channel), - config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), - data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), - enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", 
WarehouseTypePair), - global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), - google_service_account=d.get("google_service_account", None), - instance_profile_arn=d.get("instance_profile_arn", None), - security_policy=_enum(d, "security_policy", SetWorkspaceWarehouseConfigRequestSecurityPolicy), - sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs), - ) - - class SetWorkspaceWarehouseConfigRequestSecurityPolicy(Enum): """Security policy for warehouses""" @@ -7562,109 +6286,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipObjectId: return cls(new_owner=d.get("new_owner", None)) -@dataclass -class TransferOwnershipRequest: - """Transfer object ownership""" - - new_owner: Optional[str] = None - """Email address for the new owner, who must exist in the workspace.""" - - object_id: Optional[TransferOwnershipObjectId] = None - """The ID of the object on which to change ownership.""" - - object_type: Optional[OwnableObjectType] = None - """The type of object on which to change ownership.""" - - def as_dict(self) -> dict: - """Serializes the TransferOwnershipRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner - if self.object_id: - body["objectId"] = self.object_id.as_dict() - if self.object_type is not None: - body["objectType"] = self.object_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TransferOwnershipRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner - if self.object_id: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipRequest: - """Deserializes the TransferOwnershipRequest from a dictionary.""" - return 
cls( - new_owner=d.get("new_owner", None), - object_id=_from_dict(d, "objectId", TransferOwnershipObjectId), - object_type=_enum(d, "objectType", OwnableObjectType), - ) - - -@dataclass -class UpdateAlertRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - alert: Optional[UpdateAlertRequestAlert] = None - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. 
Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequest: - """Deserializes the UpdateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", UpdateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - update_mask=d.get("update_mask", None), - ) - - @dataclass class UpdateAlertRequestAlert: condition: Optional[AlertCondition] = None @@ -7754,64 +6375,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequestAlert: ) -@dataclass -class UpdateQueryRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - id: Optional[str] = None - - query: Optional[UpdateQueryRequestQuery] = None - - def as_dict(self) -> dict: - """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query.as_dict() - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequest: - """Deserializes the UpdateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - query=_from_dict(d, "query", UpdateQueryRequestQuery), - update_mask=d.get("update_mask", None), - ) - - @dataclass class UpdateQueryRequestQuery: apply_auto_limit: Optional[bool] = None @@ -7936,55 +6499,6 @@ def from_dict(cls, d: 
Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateVisualizationRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - id: Optional[str] = None - - visualization: Optional[UpdateVisualizationRequestVisualization] = None - - def as_dict(self) -> dict: - """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequest: - """Deserializes the UpdateVisualizationRequest from a dictionary.""" - return cls( - id=d.get("id", None), - update_mask=d.get("update_mask", None), - visualization=_from_dict(d, "visualization", UpdateVisualizationRequestVisualization), - ) - - @dataclass class UpdateVisualizationRequestVisualization: display_name: 
Optional[str] = None @@ -8394,40 +6908,6 @@ def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsDescription: ) -@dataclass -class WarehousePermissionsRequest: - access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - - warehouse_id: Optional[str] = None - """The SQL warehouse for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the WarehousePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsRequest: - """Deserializes the WarehousePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlRequest), - warehouse_id=d.get("warehouse_id", None), - ) - - @dataclass class WarehouseTypePair: enabled: Optional[bool] = None diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index ea3af776..858a3cbf 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -44,120 +44,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: return cls(name=d.get("name", None)) -@dataclass -class CreateEndpoint: - name: str - """Name of the vector search endpoint""" - - endpoint_type: EndpointType - """Type of endpoint""" - - budget_policy_id: Optional[str] = None - """The budget policy id to be applied""" - - def 
as_dict(self) -> dict: - """Serializes the CreateEndpoint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type.value - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateEndpoint: - """Deserializes the CreateEndpoint from a dictionary.""" - return cls( - budget_policy_id=d.get("budget_policy_id", None), - endpoint_type=_enum(d, "endpoint_type", EndpointType), - name=d.get("name", None), - ) - - -@dataclass -class CreateVectorIndexRequest: - name: str - """Name of the index""" - - endpoint_name: str - """Name of the endpoint to be used for serving the index""" - - primary_key: str - """Primary key of the index""" - - index_type: VectorIndexType - """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs - with a source Delta Table, automatically and incrementally updating the index as the underlying - data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write - of vectors and metadata through our REST and SDK APIs. With this model, the user manages index - updates.""" - - delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None - """Specification for Delta Sync Index. 
Required if `index_type` is `DELTA_SYNC`.""" - - direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None - """Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.""" - - def as_dict(self) -> dict: - """Serializes the CreateVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type.value - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVectorIndexRequest: - """Deserializes the CreateVectorIndexRequest from a dictionary.""" - return cls( - delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecRequest), - direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), - endpoint_name=d.get("endpoint_name", None), - index_type=_enum(d, "index_type", 
VectorIndexType), - name=d.get("name", None), - primary_key=d.get("primary_key", None), - ) - - @dataclass class CustomTag: key: str @@ -899,38 +785,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MiniVectorIndex: ) -@dataclass -class PatchEndpointBudgetPolicyRequest: - budget_policy_id: str - """The budget policy id to be applied""" - - endpoint_name: Optional[str] = None - """Name of the vector search endpoint""" - - def as_dict(self) -> dict: - """Serializes the PatchEndpointBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchEndpointBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyRequest: - """Deserializes the PatchEndpointBudgetPolicyRequest from a dictionary.""" - return cls(budget_policy_id=d.get("budget_policy_id", None), endpoint_name=d.get("endpoint_name", None)) - - @dataclass class PatchEndpointBudgetPolicyResponse: effective_budget_policy_id: Optional[str] = None @@ -967,149 +821,6 @@ class PipelineType(Enum): TRIGGERED = "TRIGGERED" -@dataclass -class QueryVectorIndexNextPageRequest: - """Request payload for getting next page of results.""" - - endpoint_name: Optional[str] = None - """Name of the endpoint.""" - - index_name: Optional[str] = None - """Name of the vector index to query.""" - - page_token: Optional[str] = None - """Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.""" - - def as_dict(self) -> dict: - 
"""Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexNextPageRequest: - """Deserializes the QueryVectorIndexNextPageRequest from a dictionary.""" - return cls( - endpoint_name=d.get("endpoint_name", None), - index_name=d.get("index_name", None), - page_token=d.get("page_token", None), - ) - - -@dataclass -class QueryVectorIndexRequest: - columns: List[str] - """List of column names to include in the response.""" - - columns_to_rerank: Optional[List[str]] = None - """Column names used to retrieve data to send to the reranker.""" - - filters_json: Optional[str] = None - """JSON string representing query filters. - - Example filters: - - - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than - equal to 5. - `{"id": 5}`: Filter for id equal to 5.""" - - index_name: Optional[str] = None - """Name of the vector index to query.""" - - num_results: Optional[int] = None - """Number of results to return. Defaults to 10.""" - - query_text: Optional[str] = None - """Query text. 
Required for Delta Sync Index using model endpoint.""" - - query_type: Optional[str] = None - """The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.""" - - query_vector: Optional[List[float]] = None - """Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed - vectors.""" - - score_threshold: Optional[float] = None - """Threshold for the approximate nearest neighbor search. Defaults to 0.0.""" - - def as_dict(self) -> dict: - """Serializes the QueryVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.columns: - body["columns"] = [v for v in self.columns] - if self.columns_to_rerank: - body["columns_to_rerank"] = [v for v in self.columns_to_rerank] - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if self.query_vector: - body["query_vector"] = [v for v in self.query_vector] - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.columns: - body["columns"] = self.columns - if self.columns_to_rerank: - body["columns_to_rerank"] = self.columns_to_rerank - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if 
self.query_vector: - body["query_vector"] = self.query_vector - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexRequest: - """Deserializes the QueryVectorIndexRequest from a dictionary.""" - return cls( - columns=d.get("columns", None), - columns_to_rerank=d.get("columns_to_rerank", None), - filters_json=d.get("filters_json", None), - index_name=d.get("index_name", None), - num_results=d.get("num_results", None), - query_text=d.get("query_text", None), - query_type=d.get("query_type", None), - query_vector=d.get("query_vector", None), - score_threshold=d.get("score_threshold", None), - ) - - @dataclass class QueryVectorIndexResponse: manifest: Optional[ResultManifest] = None @@ -1223,49 +934,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) -@dataclass -class ScanVectorIndexRequest: - index_name: Optional[str] = None - """Name of the vector index to scan.""" - - last_primary_key: Optional[str] = None - """Primary key of the last entry returned in the previous scan.""" - - num_results: Optional[int] = None - """Number of results to return. 
Defaults to 10.""" - - def as_dict(self) -> dict: - """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexRequest: - """Deserializes the ScanVectorIndexRequest from a dictionary.""" - return cls( - index_name=d.get("index_name", None), - last_primary_key=d.get("last_primary_key", None), - num_results=d.get("num_results", None), - ) - - @dataclass class ScanVectorIndexResponse: """Response to a scan vector index request.""" @@ -1343,38 +1011,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncIndexResponse: return cls() -@dataclass -class UpdateEndpointCustomTagsRequest: - custom_tags: List[CustomTag] - """The new custom tags for the vector search endpoint""" - - endpoint_name: Optional[str] = None - """Name of the vector search endpoint""" - - def as_dict(self) -> dict: - """Serializes the UpdateEndpointCustomTagsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEndpointCustomTagsRequest into a shallow 
dictionary of its immediate attributes.""" - body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsRequest: - """Deserializes the UpdateEndpointCustomTagsRequest from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), endpoint_name=d.get("endpoint_name", None)) - - @dataclass class UpdateEndpointCustomTagsResponse: custom_tags: Optional[List[CustomTag]] = None @@ -1448,38 +1084,6 @@ class UpsertDataStatus(Enum): SUCCESS = "SUCCESS" -@dataclass -class UpsertDataVectorIndexRequest: - inputs_json: str - """JSON string representing the data to be upserted.""" - - index_name: Optional[str] = None - """Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.""" - - def as_dict(self) -> dict: - """Serializes the UpsertDataVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexRequest: - """Deserializes the UpsertDataVectorIndexRequest from a dictionary.""" - return cls(index_name=d.get("index_name", None), inputs_json=d.get("inputs_json", None)) - - @dataclass class UpsertDataVectorIndexResponse: result: Optional[UpsertDataResult] = None diff --git a/databricks/sdk/service/workspace.py 
b/databricks/sdk/service/workspace.py index 556bf1fd..d51cdca3 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -86,58 +86,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureKeyVaultSecretScopeMetadata: return cls(dns_name=d.get("dns_name", None), resource_id=d.get("resource_id", None)) -@dataclass -class CreateCredentialsRequest: - git_provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - git_username: Optional[str] = None - """The username or email provided with your Git provider account, depending on which provider you - are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please - see your provider's Personal Access Token authentication documentation to see what is supported.""" - - personal_access_token: Optional[str] = None - """The personal access token used to authenticate to the corresponding Git provider. For certain - providers, support may exist for other types of scoped access tokens. [Learn more]. 
- - [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - - def as_dict(self) -> dict: - """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsRequest: - """Deserializes the CreateCredentialsRequest from a dictionary.""" - return cls( - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) - - @dataclass class CreateCredentialsResponse: credential_id: int @@ -182,61 +130,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsResponse: ) -@dataclass -class CreateRepoRequest: - url: str - """URL of the Git repository to be linked.""" - - provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - path: Optional[str] = None - """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. 
If - repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.""" - - sparse_checkout: Optional[SparseCheckout] = None - """If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable - sparse checkout after the repo is created.""" - - def as_dict(self) -> dict: - """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRepoRequest: - """Deserializes the CreateRepoRequest from a dictionary.""" - return cls( - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), - ) - - @dataclass class CreateRepoResponse: branch: Optional[str] = None @@ -312,57 +205,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: ) -@dataclass -class CreateScope: - scope: str - """Scope name requested by the user. 
Scope names are unique.""" - - backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None - """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" - - initial_manage_principal: Optional[str] = None - """The principal that is initially granted `MANAGE` permission to the created scope.""" - - scope_backend_type: Optional[ScopeBackendType] = None - """The backend type the scope will be created with. If not specified, will default to `DATABRICKS`""" - - def as_dict(self) -> dict: - """Serializes the CreateScope into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault.as_dict() - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateScope into a shallow dictionary of its immediate attributes.""" - body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateScope: - """Deserializes the CreateScope from a dictionary.""" - return cls( - backend_azure_keyvault=_from_dict(d, "backend_azure_keyvault", AzureKeyVaultSecretScopeMetadata), - initial_manage_principal=d.get("initial_manage_principal", None), - scope=d.get("scope", None), - scope_backend_type=_enum(d, "scope_backend_type", ScopeBackendType), - ) - - @dataclass class CreateScopeResponse: def 
as_dict(self) -> dict: @@ -425,72 +267,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: ) -@dataclass -class Delete: - path: str - """The absolute path of the notebook or directory.""" - - recursive: Optional[bool] = None - """The flag that specifies whether to delete the object recursively. It is `false` by default. - Please note this deleting directory is not atomic. If it fails in the middle, some of objects - under this directory may be deleted and cannot be undone.""" - - def as_dict(self) -> dict: - """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Delete into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Delete: - """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) - - -@dataclass -class DeleteAcl: - scope: str - """The name of the scope to remove permissions from.""" - - principal: str - """The principal to remove an existing ACL from.""" - - def as_dict(self) -> dict: - """Serializes the DeleteAcl into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def 
from_dict(cls, d: Dict[str, Any]) -> DeleteAcl: - """Deserializes the DeleteAcl from a dictionary.""" - return cls(principal=d.get("principal", None), scope=d.get("scope", None)) - - @dataclass class DeleteAclResponse: def as_dict(self) -> dict: @@ -563,31 +339,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteScope: - scope: str - """Name of the scope to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteScope into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScope into a shallow dictionary of its immediate attributes.""" - body = {} - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteScope: - """Deserializes the DeleteScope from a dictionary.""" - return cls(scope=d.get("scope", None)) - - @dataclass class DeleteScopeResponse: def as_dict(self) -> dict: @@ -606,38 +357,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: return cls() -@dataclass -class DeleteSecret: - scope: str - """The name of the scope that contains the secret to delete.""" - - key: str - """Name of the secret to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteSecret into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteSecret: - """Deserializes the DeleteSecret from a dictionary.""" - 
return cls(key=d.get("key", None), scope=d.get("scope", None)) - - @dataclass class DeleteSecretResponse: def as_dict(self) -> dict: @@ -905,80 +624,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceObjectPermissionLevelsRespo return cls(permission_levels=_repeated_dict(d, "permission_levels", WorkspaceObjectPermissionsDescription)) -@dataclass -class Import: - path: str - """The absolute path of the object or directory. Importing a directory is only supported for the - `DBC` and `SOURCE` formats.""" - - content: Optional[str] = None - """The base64-encoded content. This has a limit of 10 MB. - - If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is - thrown. This parameter might be absent, and instead a posted file is used.""" - - format: Optional[ImportFormat] = None - """This specifies the format of the file to be imported. - - The value is case sensitive. - - - `AUTO`: The item is imported depending on an analysis of the item's extension and the header - content provided in the request. If the item is imported as a notebook, then the item's - extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source - code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported - as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive - format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown - format.""" - - language: Optional[Language] = None - """The language of the object. This value is set only if the object type is `NOTEBOOK`.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing object. It is `false` by default. 
For - `DBC` format, `overwrite` is not supported since it may contain a directory.""" - - def as_dict(self) -> dict: - """Serializes the Import into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format.value - if self.language is not None: - body["language"] = self.language.value - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Import into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format - if self.language is not None: - body["language"] = self.language - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Import: - """Deserializes the Import from a dictionary.""" - return cls( - content=d.get("content", None), - format=_enum(d, "format", ImportFormat), - language=_enum(d, "language", Language), - overwrite=d.get("overwrite", None), - path=d.get("path", None), - ) - - class ImportFormat(Enum): """The format for workspace import and export.""" @@ -1176,32 +821,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListSecretsResponse: return cls(secrets=_repeated_dict(d, "secrets", SecretMetadata)) -@dataclass -class Mkdirs: - path: str - """The absolute path of the directory. If the parent directories do not exist, it will also create - them. 
If the directory already exists, this command will do nothing and succeed.""" - - def as_dict(self) -> dict: - """Serializes the Mkdirs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Mkdirs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Mkdirs: - """Deserializes the Mkdirs from a dictionary.""" - return cls(path=d.get("path", None)) - - @dataclass class MkdirsResponse: def as_dict(self) -> dict: @@ -1320,49 +939,6 @@ class ObjectType(Enum): REPO = "REPO" -@dataclass -class PutAcl: - scope: str - """The name of the scope to apply permissions to.""" - - principal: str - """The principal in which the permission is applied.""" - - permission: AclPermission - """The permission level applied to the principal.""" - - def as_dict(self) -> dict: - """Serializes the PutAcl into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAcl into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAcl: - """Deserializes the PutAcl from a dictionary.""" - return cls( - permission=_enum(d, "permission", AclPermission), - principal=d.get("principal", None), - scope=d.get("scope", None), - ) - - @dataclass 
class PutAclResponse: def as_dict(self) -> dict: @@ -1381,57 +957,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: return cls() -@dataclass -class PutSecret: - scope: str - """The name of the scope to which the secret will be associated with.""" - - key: str - """A unique name to identify the secret.""" - - bytes_value: Optional[str] = None - """If specified, value will be stored as bytes.""" - - string_value: Optional[str] = None - """If specified, note that the value will be stored in UTF-8 (MB4) form.""" - - def as_dict(self) -> dict: - """Serializes the PutSecret into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutSecret into a shallow dictionary of its immediate attributes.""" - body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutSecret: - """Deserializes the PutSecret from a dictionary.""" - return cls( - bytes_value=d.get("bytes_value", None), - key=d.get("key", None), - scope=d.get("scope", None), - string_value=d.get("string_value", None), - ) - - @dataclass class PutSecretResponse: def as_dict(self) -> dict: @@ -1760,40 +1285,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsDescription: ) -@dataclass -class RepoPermissionsRequest: - access_control_list: Optional[List[RepoAccessControlRequest]] = None - - repo_id: Optional[str] = None - """The repo for which to get or manage 
permissions.""" - - def as_dict(self) -> dict: - """Serializes the RepoPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.repo_id is not None: - body["repo_id"] = self.repo_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.repo_id is not None: - body["repo_id"] = self.repo_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsRequest: - """Deserializes the RepoPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlRequest), - repo_id=d.get("repo_id", None), - ) - - class ScopeBackendType(Enum): AZURE_KEYVAULT = "AZURE_KEYVAULT" @@ -1933,66 +1424,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SparseCheckoutUpdate: return cls(patterns=d.get("patterns", None)) -@dataclass -class UpdateCredentialsRequest: - git_provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - credential_id: Optional[int] = None - """The ID for the corresponding credential to access.""" - - git_username: Optional[str] = None - """The username or email provided with your Git provider account, depending on which provider you - are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - CodeCommit, BitBucket or BitBucket Server, username must be used. 
For all other providers please - see your provider's Personal Access Token authentication documentation to see what is supported.""" - - personal_access_token: Optional[str] = None - """The personal access token used to authenticate to the corresponding Git provider. For certain - providers, support may exist for other types of scoped access tokens. [Learn more]. - - [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - - def as_dict(self) -> dict: - """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsRequest: - """Deserializes the UpdateCredentialsRequest from a dictionary.""" - return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) - - @dataclass class UpdateCredentialsResponse: def as_dict(self) -> dict: @@ -2011,60 +1442,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
UpdateCredentialsResponse: return cls() -@dataclass -class UpdateRepoRequest: - branch: Optional[str] = None - """Branch that the local version of the repo is checked out to.""" - - repo_id: Optional[int] = None - """ID of the Git folder (repo) object in the workspace.""" - - sparse_checkout: Optional[SparseCheckoutUpdate] = None - """If specified, update the sparse checkout settings. The update will fail if sparse checkout is - not enabled for the repo.""" - - tag: Optional[str] = None - """Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the - repo in a detached HEAD state. Before committing new changes, you must update the repo to a - branch instead of the detached HEAD.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.tag is not None: - body["tag"] = self.tag - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.tag is not None: - body["tag"] = self.tag - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoRequest: - """Deserializes the UpdateRepoRequest from a dictionary.""" - return cls( - branch=d.get("branch", None), - repo_id=d.get("repo_id", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckoutUpdate), - tag=d.get("tag", None), - ) - - @dataclass class UpdateRepoResponse: def as_dict(self) -> dict: @@ -2317,48 +1694,6 @@ def from_dict(cls, d: 
Dict[str, Any]) -> WorkspaceObjectPermissionsDescription: ) -@dataclass -class WorkspaceObjectPermissionsRequest: - access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None - - workspace_object_id: Optional[str] = None - """The workspace object for which to get or manage permissions.""" - - workspace_object_type: Optional[str] = None - """The workspace object type for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the WorkspaceObjectPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsRequest: - """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlRequest), - workspace_object_id=d.get("workspace_object_id", None), - workspace_object_type=d.get("workspace_object_type", None), - ) - - class GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user. 
diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst index 6ec4fb81..78816845 100644 --- a/docs/account/iam/service_principals.rst +++ b/docs/account/iam/service_principals.rst @@ -23,10 +23,7 @@ a = AccountClient() - sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}") - - # cleanup - a.service_principals.delete(id=sp_create.id) + spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}") Creates a new service principal in the Databricks account. diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index fa9c2ee3..2a804317 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -74,9 +74,9 @@ spn_id = spn.id - workspace_id = os.environ["DUMMY_WORKSPACE_ID"] + workspace_id = os.environ["TEST_WORKSPACE_ID"] - _ = a.workspace_assignment.update( + a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index faf3b52b..dd35d5fe 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,15 +24,15 @@ a = AccountClient() - role = a.credentials.create( + creds = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=role.credentials_id) + a.credentials.delete(credentials_id=creds.credentials_id) Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. 
Databricks uses this to set up network infrastructure properly to host Databricks diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index c8958928..3d4b7dd4 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,7 +16,6 @@ .. code-block:: - import os import time from databricks.sdk import AccountClient @@ -24,13 +23,13 @@ a = AccountClient() - storage = a.storage.create( + bucket = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), ) # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst index b04e12c3..37e42e3e 100644 --- a/docs/dbdataclasses/aibuilder.rst +++ b/docs/dbdataclasses/aibuilder.rst @@ -4,18 +4,10 @@ AI Builder These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.aibuilder`` module. .. py:currentmodule:: databricks.sdk.service.aibuilder -.. autoclass:: CancelCustomLlmOptimizationRunRequest - :members: - :undoc-members: - .. autoclass:: CancelOptimizeResponse :members: :undoc-members: -.. autoclass:: CreateCustomLlmRequest - :members: - :undoc-members: - .. autoclass:: CustomLlm :members: :undoc-members: @@ -28,10 +20,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartCustomLlmOptimizationRunRequest - :members: - :undoc-members: - .. 
py:class:: State States of Custom LLM optimization lifecycle. @@ -57,7 +45,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: Table :members: :undoc-members: - -.. autoclass:: UpdateCustomLlmRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index bbd625c6..e850c938 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -72,10 +72,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: AppPermissionsRequest - :members: - :undoc-members: - .. autoclass:: AppResource :members: :undoc-members: @@ -218,11 +214,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: ListAppsResponse :members: :undoc-members: - -.. autoclass:: StartAppRequest - :members: - :undoc-members: - -.. autoclass:: StopAppRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index ca8408bd..c652eb79 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -61,10 +61,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateBillingUsageDashboardRequest - :members: - :undoc-members: - .. autoclass:: CreateBillingUsageDashboardResponse :members: :undoc-members: @@ -81,18 +77,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateBudgetConfigurationRequest - :members: - :undoc-members: - .. autoclass:: CreateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: CreateBudgetPolicyRequest - :members: - :undoc-members: - .. 
autoclass:: CreateLogDeliveryConfigurationParams :members: :undoc-members: @@ -212,18 +200,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateBudgetConfigurationRequest - :members: - :undoc-members: - .. autoclass:: UpdateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest - :members: - :undoc-members: - .. py:class:: UsageDashboardType .. py:attribute:: USAGE_DASHBOARD_TYPE_GLOBAL @@ -232,10 +212,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE :value: "USAGE_DASHBOARD_TYPE_WORKSPACE" -.. autoclass:: WrappedCreateLogDeliveryConfiguration - :members: - :undoc-members: - .. autoclass:: WrappedLogDeliveryConfiguration :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 9ebf9b05..1efdc348 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -4,18 +4,6 @@ Unity Catalog These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.catalog`` module. .. py:currentmodule:: databricks.sdk.service.catalog -.. autoclass:: AccountsCreateMetastore - :members: - :undoc-members: - -.. autoclass:: AccountsCreateMetastoreAssignment - :members: - :undoc-members: - -.. autoclass:: AccountsCreateStorageCredential - :members: - :undoc-members: - .. autoclass:: AccountsMetastoreAssignment :members: :undoc-members: @@ -28,18 +16,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: AccountsUpdateMetastore - :members: - :undoc-members: - -.. autoclass:: AccountsUpdateMetastoreAssignment - :members: - :undoc-members: - -.. autoclass:: AccountsUpdateStorageCredential - :members: - :undoc-members: - .. 
autoclass:: ArtifactAllowlistInfo :members: :undoc-members: @@ -312,22 +288,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCatalog - :members: - :undoc-members: - -.. autoclass:: CreateConnection - :members: - :undoc-members: - -.. autoclass:: CreateCredentialRequest - :members: - :undoc-members: - -.. autoclass:: CreateExternalLocation - :members: - :undoc-members: - .. autoclass:: CreateFunction :members: :undoc-members: @@ -339,10 +299,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: S :value: "S" -.. autoclass:: CreateFunctionRequest - :members: - :undoc-members: - .. py:class:: CreateFunctionRoutineBody Function language. When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**. @@ -381,34 +337,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateMonitor - :members: - :undoc-members: - -.. autoclass:: CreateRegisteredModelRequest - :members: - :undoc-members: - .. autoclass:: CreateResponse :members: :undoc-members: -.. autoclass:: CreateSchema - :members: - :undoc-members: - .. autoclass:: CreateStorageCredential :members: :undoc-members: -.. autoclass:: CreateTableConstraint - :members: - :undoc-members: - -.. autoclass:: CreateVolumeRequestContent - :members: - :undoc-members: - .. autoclass:: CredentialInfo :members: :undoc-members: @@ -623,10 +559,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INHERIT :value: "INHERIT" -.. autoclass:: EnableRequest - :members: - :undoc-members: - .. 
autoclass:: EnableResponse :members: :undoc-members: @@ -737,14 +669,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenerateTemporaryServiceCredentialRequest - :members: - :undoc-members: - -.. autoclass:: GenerateTemporaryTableCredentialRequest - :members: - :undoc-members: - .. autoclass:: GenerateTemporaryTableCredentialResponse :members: :undoc-members: @@ -1264,10 +1188,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RegenerateDashboardRequest - :members: - :undoc-members: - .. autoclass:: RegenerateDashboardResponse :members: :undoc-members: @@ -1342,14 +1262,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VOLUME :value: "VOLUME" -.. autoclass:: SetArtifactAllowlist - :members: - :undoc-members: - -.. autoclass:: SetRegisteredModelAliasRequest - :members: - :undoc-members: - .. autoclass:: SseEncryptionDetails :members: :undoc-members: @@ -1448,30 +1360,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCatalog - :members: - :undoc-members: - .. autoclass:: UpdateCatalogWorkspaceBindingsResponse :members: :undoc-members: -.. autoclass:: UpdateConnection - :members: - :undoc-members: - -.. autoclass:: UpdateCredentialRequest - :members: - :undoc-members: - -.. autoclass:: UpdateExternalLocation - :members: - :undoc-members: - -.. autoclass:: UpdateFunction - :members: - :undoc-members: - .. autoclass:: UpdateMetastore :members: :undoc-members: @@ -1480,62 +1372,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateModelVersionRequest - :members: - :undoc-members: - -.. autoclass:: UpdateMonitor - :members: - :undoc-members: - -.. 
autoclass:: UpdatePermissions - :members: - :undoc-members: - .. autoclass:: UpdatePermissionsResponse :members: :undoc-members: -.. autoclass:: UpdateRegisteredModelRequest - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: -.. autoclass:: UpdateSchema - :members: - :undoc-members: - .. autoclass:: UpdateStorageCredential :members: :undoc-members: -.. autoclass:: UpdateTableRequest - :members: - :undoc-members: - -.. autoclass:: UpdateVolumeRequestContent - :members: - :undoc-members: - -.. autoclass:: UpdateWorkspaceBindings - :members: - :undoc-members: - -.. autoclass:: UpdateWorkspaceBindingsParameters - :members: - :undoc-members: - .. autoclass:: UpdateWorkspaceBindingsResponse :members: :undoc-members: -.. autoclass:: ValidateCredentialRequest - :members: - :undoc-members: - .. autoclass:: ValidateCredentialResponse :members: :undoc-members: @@ -1553,10 +1405,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SKIP :value: "SKIP" -.. autoclass:: ValidateStorageCredential - :members: - :undoc-members: - .. autoclass:: ValidateStorageCredentialResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst index b07745b6..4ffe75ab 100644 --- a/docs/dbdataclasses/cleanrooms.rst +++ b/docs/dbdataclasses/cleanrooms.rst @@ -175,7 +175,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: ListCleanRoomsResponse :members: :undoc-members: - -.. autoclass:: UpdateCleanRoomRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 54b17b74..15763ae5 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -4,10 +4,6 @@ Compute These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.compute`` module. .. 
py:currentmodule:: databricks.sdk.service.compute -.. autoclass:: AddInstanceProfile - :members: - :undoc-members: - .. autoclass:: AddResponse :members: :undoc-members: @@ -55,18 +51,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SPOT_WITH_FALLBACK_AZURE :value: "SPOT_WITH_FALLBACK_AZURE" -.. autoclass:: CancelCommand - :members: - :undoc-members: - .. autoclass:: CancelResponse :members: :undoc-members: -.. autoclass:: ChangeClusterOwner - :members: - :undoc-members: - .. autoclass:: ChangeClusterOwnerResponse :members: :undoc-members: @@ -148,10 +136,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ClusterPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ClusterPolicyAccessControlRequest :members: :undoc-members: @@ -179,10 +163,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ClusterPolicyPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ClusterSettingsChange :members: :undoc-members: @@ -220,10 +200,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Command - :members: - :undoc-members: - .. py:class:: CommandStatus .. py:attribute:: CANCELLED @@ -263,30 +239,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCluster - :members: - :undoc-members: - .. autoclass:: CreateClusterResponse :members: :undoc-members: -.. autoclass:: CreateContext - :members: - :undoc-members: - -.. autoclass:: CreateInstancePool - :members: - :undoc-members: - .. autoclass:: CreateInstancePoolResponse :members: :undoc-members: -.. autoclass:: CreatePolicy - :members: - :undoc-members: - .. 
autoclass:: CreatePolicyResponse :members: :undoc-members: @@ -357,26 +317,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteCluster - :members: - :undoc-members: - .. autoclass:: DeleteClusterResponse :members: :undoc-members: -.. autoclass:: DeleteInstancePool - :members: - :undoc-members: - .. autoclass:: DeleteInstancePoolResponse :members: :undoc-members: -.. autoclass:: DeletePolicy - :members: - :undoc-members: - .. autoclass:: DeletePolicyResponse :members: :undoc-members: @@ -385,10 +333,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DestroyContext - :members: - :undoc-members: - .. autoclass:: DestroyResponse :members: :undoc-members: @@ -439,26 +383,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: THROUGHPUT_OPTIMIZED_HDD :value: "THROUGHPUT_OPTIMIZED_HDD" -.. autoclass:: EditCluster - :members: - :undoc-members: - .. autoclass:: EditClusterResponse :members: :undoc-members: -.. autoclass:: EditInstancePool - :members: - :undoc-members: - .. autoclass:: EditInstancePoolResponse :members: :undoc-members: -.. autoclass:: EditPolicy - :members: - :undoc-members: - .. autoclass:: EditPolicyResponse :members: :undoc-members: @@ -467,10 +399,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnforceClusterComplianceRequest - :members: - :undoc-members: - .. autoclass:: EnforceClusterComplianceResponse :members: :undoc-members: @@ -652,10 +580,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GlobalInitScriptCreateRequest - :members: - :undoc-members: - .. 
autoclass:: GlobalInitScriptDetails :members: :undoc-members: @@ -664,10 +588,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GlobalInitScriptUpdateRequest - :members: - :undoc-members: - .. autoclass:: InitScriptEventDetails :members: :undoc-members: @@ -705,10 +625,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: InstallLibraries - :members: - :undoc-members: - .. autoclass:: InstallLibrariesResponse :members: :undoc-members: @@ -779,10 +695,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: InstancePoolPermissionsRequest - :members: - :undoc-members: - .. py:class:: InstancePoolState The state of a Cluster. The current allowable state transitions are as follows: @@ -975,18 +887,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PermanentDeleteCluster - :members: - :undoc-members: - .. autoclass:: PermanentDeleteClusterResponse :members: :undoc-members: -.. autoclass:: PinCluster - :members: - :undoc-members: - .. autoclass:: PinClusterResponse :members: :undoc-members: @@ -1007,26 +911,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RemoveInstanceProfile - :members: - :undoc-members: - .. autoclass:: RemoveResponse :members: :undoc-members: -.. autoclass:: ResizeCluster - :members: - :undoc-members: - .. autoclass:: ResizeClusterResponse :members: :undoc-members: -.. autoclass:: RestartCluster - :members: - :undoc-members: - .. autoclass:: RestartClusterResponse :members: :undoc-members: @@ -1079,10 +971,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. 
autoclass:: StartCluster - :members: - :undoc-members: - .. autoclass:: StartClusterResponse :members: :undoc-members: @@ -1656,26 +1544,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: UninstallLibraries - :members: - :undoc-members: - .. autoclass:: UninstallLibrariesResponse :members: :undoc-members: -.. autoclass:: UnpinCluster - :members: - :undoc-members: - .. autoclass:: UnpinClusterResponse :members: :undoc-members: -.. autoclass:: UpdateCluster - :members: - :undoc-members: - .. autoclass:: UpdateClusterResource :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index c2ddc82f..1cd047db 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -41,10 +41,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenieCreateConversationMessageRequest - :members: - :undoc-members: - .. autoclass:: GenieGenerateDownloadFullQueryResultResponse :members: :undoc-members: @@ -77,10 +73,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenieStartConversationMessageRequest - :members: - :undoc-members: - .. autoclass:: GenieStartConversationResponse :members: :undoc-members: @@ -308,14 +300,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUBMITTED :value: "SUBMITTED" -.. autoclass:: MigrateDashboardRequest - :members: - :undoc-members: - -.. autoclass:: PublishRequest - :members: - :undoc-members: - .. 
autoclass:: PublishedDashboard :members: :undoc-members: diff --git a/docs/dbdataclasses/database.rst b/docs/dbdataclasses/database.rst index 86340b5e..1e7b0d7a 100644 --- a/docs/dbdataclasses/database.rst +++ b/docs/dbdataclasses/database.rst @@ -56,10 +56,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenerateDatabaseCredentialRequest - :members: - :undoc-members: - .. autoclass:: ListDatabaseInstancesResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/files.rst b/docs/dbdataclasses/files.rst index 2b0d9845..42be15a7 100644 --- a/docs/dbdataclasses/files.rst +++ b/docs/dbdataclasses/files.rst @@ -4,26 +4,14 @@ File Management These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.files`` module. .. py:currentmodule:: databricks.sdk.service.files -.. autoclass:: AddBlock - :members: - :undoc-members: - .. autoclass:: AddBlockResponse :members: :undoc-members: -.. autoclass:: Close - :members: - :undoc-members: - .. autoclass:: CloseResponse :members: :undoc-members: -.. autoclass:: Create - :members: - :undoc-members: - .. autoclass:: CreateDirectoryResponse :members: :undoc-members: @@ -32,10 +20,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Delete - :members: - :undoc-members: - .. autoclass:: DeleteDirectoryResponse :members: :undoc-members: @@ -72,26 +56,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: MkDirs - :members: - :undoc-members: - .. autoclass:: MkDirsResponse :members: :undoc-members: -.. autoclass:: Move - :members: - :undoc-members: - .. autoclass:: MoveResponse :members: :undoc-members: -.. autoclass:: Put - :members: - :undoc-members: - .. 
autoclass:: PutResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index a471503a..96abd374 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -94,10 +94,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: MigratePermissionsRequest - :members: - :undoc-members: - .. autoclass:: MigratePermissionsResponse :members: :undoc-members: @@ -110,10 +106,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PartialUpdate - :members: - :undoc-members: - .. autoclass:: PasswordAccessControlRequest :members: :undoc-members: @@ -141,10 +133,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PasswordPermissionsRequest - :members: - :undoc-members: - .. autoclass:: Patch :members: :undoc-members: @@ -295,26 +283,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL" -.. autoclass:: SetObjectPermissions - :members: - :undoc-members: - -.. autoclass:: UpdateObjectPermissions - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: -.. autoclass:: UpdateRuleSetRequest - :members: - :undoc-members: - -.. autoclass:: UpdateWorkspaceAssignments - :members: - :undoc-members: - .. autoclass:: User :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index af4915a6..81e586f8 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -20,18 +20,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelAllRuns - :members: - :undoc-members: - .. 
autoclass:: CancelAllRunsResponse :members: :undoc-members: -.. autoclass:: CancelRun - :members: - :undoc-members: - .. autoclass:: CancelRunResponse :members: :undoc-members: @@ -176,10 +168,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateJob - :members: - :undoc-members: - .. autoclass:: CreateResponse :members: :undoc-members: @@ -254,18 +242,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteJob - :members: - :undoc-members: - .. autoclass:: DeleteResponse :members: :undoc-members: -.. autoclass:: DeleteRun - :members: - :undoc-members: - .. autoclass:: DeleteRunResponse :members: :undoc-members: @@ -274,10 +254,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnforcePolicyComplianceRequest - :members: - :undoc-members: - .. autoclass:: EnforcePolicyComplianceResponse :members: :undoc-members: @@ -454,10 +430,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: JobPermissionsRequest - :members: - :undoc-members: - .. autoclass:: JobRunAs :members: :undoc-members: @@ -632,18 +604,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPAIR :value: "REPAIR" -.. autoclass:: RepairRun - :members: - :undoc-members: - .. autoclass:: RepairRunResponse :members: :undoc-members: -.. autoclass:: ResetJob - :members: - :undoc-members: - .. autoclass:: ResetResponse :members: :undoc-members: @@ -779,10 +743,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WAITING :value: "WAITING" -.. autoclass:: RunNow - :members: - :undoc-members: - .. 
autoclass:: RunNowResponse :members: :undoc-members: @@ -972,10 +932,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: IMPORT :value: "IMPORT" -.. autoclass:: SubmitRun - :members: - :undoc-members: - .. autoclass:: SubmitRunResponse :members: :undoc-members: @@ -1148,10 +1104,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE :value: "TABLE" -.. autoclass:: UpdateJob - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 02e48c38..f243184c 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -4,10 +4,6 @@ Marketplace These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.marketplace`` module. .. py:currentmodule:: databricks.sdk.service.marketplace -.. autoclass:: AddExchangeForListingRequest - :members: - :undoc-members: - .. autoclass:: AddExchangeForListingResponse :members: :undoc-members: @@ -127,54 +123,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PAID :value: "PAID" -.. autoclass:: CreateExchangeFilterRequest - :members: - :undoc-members: - .. autoclass:: CreateExchangeFilterResponse :members: :undoc-members: -.. autoclass:: CreateExchangeRequest - :members: - :undoc-members: - .. autoclass:: CreateExchangeResponse :members: :undoc-members: -.. autoclass:: CreateFileRequest - :members: - :undoc-members: - .. autoclass:: CreateFileResponse :members: :undoc-members: -.. autoclass:: CreateInstallationRequest - :members: - :undoc-members: - -.. autoclass:: CreateListingRequest - :members: - :undoc-members: - .. autoclass:: CreateListingResponse :members: :undoc-members: -.. autoclass:: CreatePersonalizationRequest - :members: - :undoc-members: - .. 
autoclass:: CreatePersonalizationRequestResponse :members: :undoc-members: -.. autoclass:: CreateProviderRequest - :members: - :undoc-members: - .. autoclass:: CreateProviderResponse :members: :undoc-members: @@ -535,58 +503,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateExchangeFilterRequest - :members: - :undoc-members: - .. autoclass:: UpdateExchangeFilterResponse :members: :undoc-members: -.. autoclass:: UpdateExchangeRequest - :members: - :undoc-members: - .. autoclass:: UpdateExchangeResponse :members: :undoc-members: -.. autoclass:: UpdateInstallationRequest - :members: - :undoc-members: - .. autoclass:: UpdateInstallationResponse :members: :undoc-members: -.. autoclass:: UpdateListingRequest - :members: - :undoc-members: - .. autoclass:: UpdateListingResponse :members: :undoc-members: -.. autoclass:: UpdatePersonalizationRequestRequest - :members: - :undoc-members: - .. autoclass:: UpdatePersonalizationRequestResponse :members: :undoc-members: -.. autoclass:: UpdateProviderAnalyticsDashboardRequest - :members: - :undoc-members: - .. autoclass:: UpdateProviderAnalyticsDashboardResponse :members: :undoc-members: -.. autoclass:: UpdateProviderRequest - :members: - :undoc-members: - .. autoclass:: UpdateProviderResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 75a9798d..2b45ffbe 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -53,10 +53,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SYSTEM_TRANSITION :value: "SYSTEM_TRANSITION" -.. autoclass:: ApproveTransitionRequest - :members: - :undoc-members: - .. autoclass:: ApproveTransitionRequestResponse :members: :undoc-members: @@ -76,70 +72,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. 
autoclass:: CreateComment - :members: - :undoc-members: - .. autoclass:: CreateCommentResponse :members: :undoc-members: -.. autoclass:: CreateExperiment - :members: - :undoc-members: - .. autoclass:: CreateExperimentResponse :members: :undoc-members: -.. autoclass:: CreateForecastingExperimentRequest - :members: - :undoc-members: - .. autoclass:: CreateForecastingExperimentResponse :members: :undoc-members: -.. autoclass:: CreateLoggedModelRequest - :members: - :undoc-members: - .. autoclass:: CreateLoggedModelResponse :members: :undoc-members: -.. autoclass:: CreateModelRequest - :members: - :undoc-members: - .. autoclass:: CreateModelResponse :members: :undoc-members: -.. autoclass:: CreateModelVersionRequest - :members: - :undoc-members: - .. autoclass:: CreateModelVersionResponse :members: :undoc-members: -.. autoclass:: CreateRegistryWebhook - :members: - :undoc-members: - -.. autoclass:: CreateRun - :members: - :undoc-members: - .. autoclass:: CreateRunResponse :members: :undoc-members: -.. autoclass:: CreateTransitionRequest - :members: - :undoc-members: - .. autoclass:: CreateTransitionRequestResponse :members: :undoc-members: @@ -160,10 +120,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteExperiment - :members: - :undoc-members: - .. autoclass:: DeleteExperimentResponse :members: :undoc-members: @@ -196,26 +152,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteRun - :members: - :undoc-members: - .. autoclass:: DeleteRunResponse :members: :undoc-members: -.. autoclass:: DeleteRuns - :members: - :undoc-members: - .. autoclass:: DeleteRunsResponse :members: :undoc-members: -.. autoclass:: DeleteTag - :members: - :undoc-members: - .. 
autoclass:: DeleteTagResponse :members: :undoc-members: @@ -279,10 +223,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExperimentPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ExperimentTag :members: :undoc-members: @@ -291,10 +231,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: FinalizeLoggedModelRequest - :members: - :undoc-members: - .. autoclass:: FinalizeLoggedModelResponse :members: :undoc-members: @@ -332,10 +268,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetLatestVersionsRequest - :members: - :undoc-members: - .. autoclass:: GetLatestVersionsResponse :members: :undoc-members: @@ -412,58 +344,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: LogBatch - :members: - :undoc-members: - .. autoclass:: LogBatchResponse :members: :undoc-members: -.. autoclass:: LogInputs - :members: - :undoc-members: - .. autoclass:: LogInputsResponse :members: :undoc-members: -.. autoclass:: LogLoggedModelParamsRequest - :members: - :undoc-members: - .. autoclass:: LogLoggedModelParamsRequestResponse :members: :undoc-members: -.. autoclass:: LogMetric - :members: - :undoc-members: - .. autoclass:: LogMetricResponse :members: :undoc-members: -.. autoclass:: LogModel - :members: - :undoc-members: - .. autoclass:: LogModelResponse :members: :undoc-members: -.. autoclass:: LogOutputsRequest - :members: - :undoc-members: - .. autoclass:: LogOutputsResponse :members: :undoc-members: -.. autoclass:: LogParam - :members: - :undoc-members: - .. autoclass:: LogParamResponse :members: :undoc-members: @@ -609,10 +513,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TRIGGERED :value: "TRIGGERED" -.. 
autoclass:: PublishTableRequest - :members: - :undoc-members: - .. autoclass:: PublishTableResponse :members: :undoc-members: @@ -656,10 +556,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RegisteredModelPermissionsRequest - :members: - :undoc-members: - .. autoclass:: RegistryWebhook :members: :undoc-members: @@ -717,42 +613,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TEST_MODE :value: "TEST_MODE" -.. autoclass:: RejectTransitionRequest - :members: - :undoc-members: - .. autoclass:: RejectTransitionRequestResponse :members: :undoc-members: -.. autoclass:: RenameModelRequest - :members: - :undoc-members: - .. autoclass:: RenameModelResponse :members: :undoc-members: -.. autoclass:: RestoreExperiment - :members: - :undoc-members: - .. autoclass:: RestoreExperimentResponse :members: :undoc-members: -.. autoclass:: RestoreRun - :members: - :undoc-members: - .. autoclass:: RestoreRunResponse :members: :undoc-members: -.. autoclass:: RestoreRuns - :members: - :undoc-members: - .. autoclass:: RestoreRunsResponse :members: :undoc-members: @@ -796,10 +672,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchExperiments - :members: - :undoc-members: - .. autoclass:: SearchExperimentsResponse :members: :undoc-members: @@ -812,10 +684,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchLoggedModelsRequest - :members: - :undoc-members: - .. autoclass:: SearchLoggedModelsResponse :members: :undoc-members: @@ -828,50 +696,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchRuns - :members: - :undoc-members: - .. autoclass:: SearchRunsResponse :members: :undoc-members: -.. 
autoclass:: SetExperimentTag - :members: - :undoc-members: - .. autoclass:: SetExperimentTagResponse :members: :undoc-members: -.. autoclass:: SetLoggedModelTagsRequest - :members: - :undoc-members: - .. autoclass:: SetLoggedModelTagsResponse :members: :undoc-members: -.. autoclass:: SetModelTagRequest - :members: - :undoc-members: - .. autoclass:: SetModelTagResponse :members: :undoc-members: -.. autoclass:: SetModelVersionTagRequest - :members: - :undoc-members: - .. autoclass:: SetModelVersionTagResponse :members: :undoc-members: -.. autoclass:: SetTag - :members: - :undoc-members: - .. autoclass:: SetTagResponse :members: :undoc-members: @@ -915,18 +759,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TestRegistryWebhookRequest - :members: - :undoc-members: - .. autoclass:: TestRegistryWebhookResponse :members: :undoc-members: -.. autoclass:: TransitionModelVersionStageDatabricks - :members: - :undoc-members: - .. autoclass:: TransitionRequest :members: :undoc-members: @@ -935,46 +771,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateComment - :members: - :undoc-members: - .. autoclass:: UpdateCommentResponse :members: :undoc-members: -.. autoclass:: UpdateExperiment - :members: - :undoc-members: - .. autoclass:: UpdateExperimentResponse :members: :undoc-members: -.. autoclass:: UpdateModelRequest - :members: - :undoc-members: - .. autoclass:: UpdateModelResponse :members: :undoc-members: -.. autoclass:: UpdateModelVersionRequest - :members: - :undoc-members: - .. autoclass:: UpdateModelVersionResponse :members: :undoc-members: -.. autoclass:: UpdateRegistryWebhook - :members: - :undoc-members: - -.. autoclass:: UpdateRun - :members: - :undoc-members: - .. 
autoclass:: UpdateRunResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index 4097add9..00c96115 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -4,26 +4,14 @@ OAuth These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.oauth2`` module. .. py:currentmodule:: databricks.sdk.service.oauth2 -.. autoclass:: CreateCustomAppIntegration - :members: - :undoc-members: - .. autoclass:: CreateCustomAppIntegrationOutput :members: :undoc-members: -.. autoclass:: CreatePublishedAppIntegration - :members: - :undoc-members: - .. autoclass:: CreatePublishedAppIntegrationOutput :members: :undoc-members: -.. autoclass:: CreateServicePrincipalSecretRequest - :members: - :undoc-members: - .. autoclass:: CreateServicePrincipalSecretResponse :members: :undoc-members: @@ -88,18 +76,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCustomAppIntegration - :members: - :undoc-members: - .. autoclass:: UpdateCustomAppIntegrationOutput :members: :undoc-members: -.. autoclass:: UpdatePublishedAppIntegration - :members: - :undoc-members: - .. autoclass:: UpdatePublishedAppIntegrationOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 44679fc4..6a305deb 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -4,10 +4,6 @@ Delta Live Tables These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.pipelines`` module. .. py:currentmodule:: databricks.sdk.service.pipelines -.. autoclass:: CreatePipeline - :members: - :undoc-members: - .. 
autoclass:: CreatePipelineResponse :members: :undoc-members: @@ -56,10 +52,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: BUNDLE :value: "BUNDLE" -.. autoclass:: EditPipeline - :members: - :undoc-members: - .. autoclass:: EditPipelineResponse :members: :undoc-members: @@ -282,10 +274,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PipelinePermissionsRequest - :members: - :undoc-members: - .. autoclass:: PipelineSpec :members: :undoc-members: @@ -371,10 +359,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartUpdate - :members: - :undoc-members: - .. py:class:: StartUpdateCause What triggered this update. diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 4c909d48..69e237d5 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -28,38 +28,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCredentialRequest - :members: - :undoc-members: - .. autoclass:: CreateCredentialStsRole :members: :undoc-members: -.. autoclass:: CreateCustomerManagedKeyRequest - :members: - :undoc-members: - .. autoclass:: CreateGcpKeyInfo :members: :undoc-members: -.. autoclass:: CreateNetworkRequest - :members: - :undoc-members: - -.. autoclass:: CreateStorageConfigurationRequest - :members: - :undoc-members: - -.. autoclass:: CreateVpcEndpointRequest - :members: - :undoc-members: - -.. autoclass:: CreateWorkspaceRequest - :members: - :undoc-members: - .. autoclass:: Credential :members: :undoc-members: @@ -225,14 +201,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateWorkspaceRequest - :members: - :undoc-members: - -.. 
autoclass:: UpsertPrivateAccessSettingsRequest - :members: - :undoc-members: - .. autoclass:: VpcEndpoint :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 01249dce..89b953b5 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -124,14 +124,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreatePtEndpointRequest - :members: - :undoc-members: - -.. autoclass:: CreateServingEndpoint - :members: - :undoc-members: - .. autoclass:: CustomProviderConfig :members: :undoc-members: @@ -217,10 +209,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExternalFunctionRequest - :members: - :undoc-members: - .. py:class:: ExternalFunctionRequestHttpMethod .. py:attribute:: DELETE @@ -315,10 +303,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PatchServingEndpointTags - :members: - :undoc-members: - .. autoclass:: PayloadTable :members: :undoc-members: @@ -331,26 +315,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PutAiGatewayRequest - :members: - :undoc-members: - .. autoclass:: PutAiGatewayResponse :members: :undoc-members: -.. autoclass:: PutRequest - :members: - :undoc-members: - .. autoclass:: PutResponse :members: :undoc-members: -.. autoclass:: QueryEndpointInput - :members: - :undoc-members: - .. autoclass:: QueryEndpointResponse :members: :undoc-members: @@ -509,10 +481,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ServingEndpointPermissionsRequest - :members: - :undoc-members: - .. 
py:class:: ServingModelWorkloadType Please keep this in sync with with workload types in InferenceEndpointEntities.scala @@ -536,10 +504,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateProvisionedThroughputEndpointConfigRequest - :members: - :undoc-members: - .. autoclass:: V1ResponseChoiceElement :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 0f97314d..ff62a464 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -168,10 +168,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateIpAccessList - :members: - :undoc-members: - .. autoclass:: CreateIpAccessListResponse :members: :undoc-members: @@ -180,14 +176,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateNotificationDestinationRequest - :members: - :undoc-members: - -.. autoclass:: CreateOboTokenRequest - :members: - :undoc-members: - .. autoclass:: CreateOboTokenResponse :members: :undoc-members: @@ -196,10 +184,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateTokenRequest - :members: - :undoc-members: - .. autoclass:: CreateTokenResponse :members: :undoc-members: @@ -514,10 +498,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExchangeTokenRequest - :members: - :undoc-members: - .. autoclass:: ExchangeTokenResponse :members: :undoc-members: @@ -716,10 +696,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ReplaceIpAccessList - :members: - :undoc-members: - .. 
autoclass:: ReplaceResponse :members: :undoc-members: @@ -740,10 +716,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RevokeTokenRequest - :members: - :undoc-members: - .. autoclass:: RevokeTokenResponse :members: :undoc-members: @@ -795,10 +767,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TokenPermissionsRequest - :members: - :undoc-members: - .. py:class:: TokenType The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported. @@ -818,94 +786,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN :value: "AZURE_ACTIVE_DIRECTORY_TOKEN" -.. autoclass:: UpdateAccountIpAccessEnableRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAutomaticClusterUpdateSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateComplianceSecurityProfileSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateCspEnablementAccountSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDashboardEmailSubscriptionsRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDefaultNamespaceSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyAccessRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyDbfsRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyFeaturesRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnableExportNotebookRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnableNotebookTableClipboardRequest - :members: - :undoc-members: - -.. 
autoclass:: UpdateEnableResultsDownloadingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnhancedSecurityMonitoringSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEsmEnablementAccountSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateIpAccessList - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredAccountRequest - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredEnforceRequest - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredWorkspaceRequest - :members: - :undoc-members: - -.. autoclass:: UpdateNotificationDestinationRequest - :members: - :undoc-members: - -.. autoclass:: UpdatePersonalComputeSettingRequest - :members: - :undoc-members: - .. autoclass:: UpdatePrivateEndpointRule :members: :undoc-members: @@ -914,14 +794,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateRestrictWorkspaceAdminsSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateSqlResultsDownloadRequest - :members: - :undoc-members: - .. autoclass:: WorkspaceNetworkOption :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index cd1cc8b9..811abbd8 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -90,18 +90,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VARIANT :value: "VARIANT" -.. autoclass:: CreateProvider - :members: - :undoc-members: - -.. autoclass:: CreateRecipient - :members: - :undoc-members: - -.. autoclass:: CreateShare - :members: - :undoc-members: - .. autoclass:: DeleteResponse :members: :undoc-members: @@ -394,10 +382,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RotateRecipientToken - :members: - :undoc-members: - .. 
autoclass:: SecurablePropertiesKvPairs :members: :undoc-members: @@ -515,22 +499,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEW :value: "VIEW" -.. autoclass:: UpdateProvider - :members: - :undoc-members: - -.. autoclass:: UpdateRecipient - :members: - :undoc-members: - -.. autoclass:: UpdateShare - :members: - :undoc-members: - -.. autoclass:: UpdateSharePermissions - :members: - :undoc-members: - .. autoclass:: UpdateSharePermissionsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 2c2578d9..7dd3f7f8 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -282,42 +282,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NOT_EQUAL :value: "NOT_EQUAL" -.. autoclass:: CreateAlert - :members: - :undoc-members: - -.. autoclass:: CreateAlertRequest - :members: - :undoc-members: - .. autoclass:: CreateAlertRequestAlert :members: :undoc-members: -.. autoclass:: CreateQueryRequest - :members: - :undoc-members: - .. autoclass:: CreateQueryRequestQuery :members: :undoc-members: -.. autoclass:: CreateQueryVisualizationsLegacyRequest - :members: - :undoc-members: - -.. autoclass:: CreateVisualizationRequest - :members: - :undoc-members: - .. autoclass:: CreateVisualizationRequestVisualization :members: :undoc-members: -.. autoclass:: CreateWarehouseRequest - :members: - :undoc-members: - .. py:class:: CreateWarehouseRequestWarehouseType Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. @@ -335,10 +311,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateWidget - :members: - :undoc-members: - .. 
autoclass:: CronSchedule :members: :undoc-members: @@ -347,18 +319,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DashboardEditContent - :members: - :undoc-members: - .. autoclass:: DashboardOptions :members: :undoc-members: -.. autoclass:: DashboardPostContent - :members: - :undoc-members: - .. autoclass:: DataSource :members: :undoc-members: @@ -463,14 +427,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INLINE :value: "INLINE" -.. autoclass:: EditAlert - :members: - :undoc-members: - -.. autoclass:: EditWarehouseRequest - :members: - :undoc-members: - .. py:class:: EditWarehouseRequestWarehouseType Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. @@ -529,10 +485,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExecuteStatementRequest - :members: - :undoc-members: - .. py:class:: ExecuteStatementRequestOnWaitTimeout When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state. @@ -811,10 +763,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: QueryEditContent - :members: - :undoc-members: - .. 
autoclass:: QueryFilter :members: :undoc-members: @@ -839,10 +787,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: QueryPostContent - :members: - :undoc-members: - .. py:class:: QueryStatementType .. py:attribute:: ALTER @@ -1033,18 +977,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" -.. autoclass:: SetRequest - :members: - :undoc-members: - .. autoclass:: SetResponse :members: :undoc-members: -.. autoclass:: SetWorkspaceWarehouseConfigRequest - :members: - :undoc-members: - .. py:class:: SetWorkspaceWarehouseConfigRequestSecurityPolicy Security policy for warehouses @@ -1445,22 +1381,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TransferOwnershipRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAlertRequest - :members: - :undoc-members: - .. autoclass:: UpdateAlertRequestAlert :members: :undoc-members: -.. autoclass:: UpdateQueryRequest - :members: - :undoc-members: - .. autoclass:: UpdateQueryRequestQuery :members: :undoc-members: @@ -1469,10 +1393,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateVisualizationRequest - :members: - :undoc-members: - .. autoclass:: UpdateVisualizationRequestVisualization :members: :undoc-members: @@ -1524,10 +1444,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: WarehousePermissionsRequest - :members: - :undoc-members: - .. 
autoclass:: WarehouseTypePair :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 5433f267..d68e083d 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -8,14 +8,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateEndpoint - :members: - :undoc-members: - -.. autoclass:: CreateVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: CustomTag :members: :undoc-members: @@ -115,10 +107,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PatchEndpointBudgetPolicyRequest - :members: - :undoc-members: - .. autoclass:: PatchEndpointBudgetPolicyResponse :members: :undoc-members: @@ -133,14 +121,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TRIGGERED :value: "TRIGGERED" -.. autoclass:: QueryVectorIndexNextPageRequest - :members: - :undoc-members: - -.. autoclass:: QueryVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: QueryVectorIndexResponse :members: :undoc-members: @@ -153,10 +133,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ScanVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: ScanVectorIndexResponse :members: :undoc-members: @@ -169,10 +145,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateEndpointCustomTagsRequest - :members: - :undoc-members: - .. autoclass:: UpdateEndpointCustomTagsResponse :members: :undoc-members: @@ -192,10 +164,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: UpsertDataVectorIndexRequest - :members: - :undoc-members: - .. 
autoclass:: UpsertDataVectorIndexResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index e20f4ac7..f2ed6dad 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -23,26 +23,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCredentialsRequest - :members: - :undoc-members: - .. autoclass:: CreateCredentialsResponse :members: :undoc-members: -.. autoclass:: CreateRepoRequest - :members: - :undoc-members: - .. autoclass:: CreateRepoResponse :members: :undoc-members: -.. autoclass:: CreateScope - :members: - :undoc-members: - .. autoclass:: CreateScopeResponse :members: :undoc-members: @@ -51,14 +39,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Delete - :members: - :undoc-members: - -.. autoclass:: DeleteAcl - :members: - :undoc-members: - .. autoclass:: DeleteAclResponse :members: :undoc-members: @@ -75,18 +55,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteScope - :members: - :undoc-members: - .. autoclass:: DeleteScopeResponse :members: :undoc-members: -.. autoclass:: DeleteSecret - :members: - :undoc-members: - .. autoclass:: DeleteSecretResponse :members: :undoc-members: @@ -140,10 +112,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Import - :members: - :undoc-members: - .. py:class:: ImportFormat The format for workspace import and export. @@ -213,10 +181,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Mkdirs - :members: - :undoc-members: - .. 
autoclass:: MkdirsResponse :members: :undoc-members: @@ -247,18 +211,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPO :value: "REPO" -.. autoclass:: PutAcl - :members: - :undoc-members: - .. autoclass:: PutAclResponse :members: :undoc-members: -.. autoclass:: PutSecret - :members: - :undoc-members: - .. autoclass:: PutSecretResponse :members: :undoc-members: @@ -303,10 +259,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RepoPermissionsRequest - :members: - :undoc-members: - .. py:class:: ScopeBackendType .. py:attribute:: AZURE_KEYVAULT @@ -331,18 +283,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCredentialsRequest - :members: - :undoc-members: - .. autoclass:: UpdateCredentialsResponse :members: :undoc-members: -.. autoclass:: UpdateRepoRequest - :members: - :undoc-members: - .. autoclass:: UpdateRepoResponse :members: :undoc-members: @@ -382,7 +326,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: WorkspaceObjectPermissionsDescription :members: :undoc-members: - -.. autoclass:: WorkspaceObjectPermissionsRequest - :members: - :undoc-members: diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 9a18ede8..c486ab0d 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -24,10 +24,10 @@ w = WorkspaceClient() - created = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created.name, force=True) + w.catalogs.delete(name=created_catalog.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. 
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index c3b5217a..b31089e4 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,22 +30,20 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) - external_location = w.external_locations.create( + created = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=storage_credential.name, - comment="created via SDK", - url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", + credential_name=credential.name, + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) - w.external_locations.delete(name=external_location.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -182,24 +180,24 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) _ = w.external_locations.update( name=created.name, credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 19406920..2d4dc160 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,13 +30,13 @@ w = WorkspaceClient() - created = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(name=credential.name) Creates a new storage credential. @@ -123,10 +123,11 @@ .. 
code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list() + all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore @@ -162,17 +163,17 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) _ = w.storage_credentials.update( name=created.name, comment=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Updates a storage credential on the metastore. diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 9632dc0d..fdb5164f 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -117,7 +117,7 @@ created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name) - summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name) + all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup w.schemas.delete(full_name=created_schema.full_name) diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 5e1abee9..760e296f 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -711,11 +711,10 @@ .. 
code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index 73793909..764a81ab 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -69,6 +69,9 @@ group = w.groups.create(display_name=f"sdk-{time.time_ns()}") w.groups.delete(id=group.id) + + # cleanup + w.groups.delete(id=group.id) Deletes a group from the Databricks workspace. diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst index 0d0d447b..ce8978af 100644 --- a/docs/workspace/iam/service_principals.rst +++ b/docs/workspace/iam/service_principals.rst @@ -20,13 +20,19 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import iam w = WorkspaceClient() - created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}") + groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest()) + + spn = w.service_principals.create( + display_name=f"sdk-{time.time_ns()}", + groups=[iam.ComplexValue(value=groups["admins"])], + ) # cleanup - w.service_principals.delete(id=created.id) + w.service_principals.delete(id=spn.id) Creates a new service principal in the Databricks workspace. 
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst index b48b26b0..198e9f57 100644 --- a/docs/workspace/iam/users.rst +++ b/docs/workspace/iam/users.rst @@ -78,12 +78,9 @@ w = WorkspaceClient() - user = w.users.create( - display_name=f"sdk-{time.time_ns()}", - user_name=f"sdk-{time.time_ns()}@example.com", - ) + other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com") - w.users.delete(id=user.id) + w.users.delete(id=other_owner.id) Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 288ab0ad..10c84398 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -355,21 +355,23 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - run = w.jobs.submit( - run_name=f"sdk-{time.time_ns()}", + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", tasks=[ - jobs.SubmitTask( + jobs.Task( + description="test", existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key=f"sdk-{time.time_ns()}", + task_key="test", + timeout_seconds=0, ) ], - ).result() + ) - output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) + by_id = w.jobs.get(job_id=created_job.job_id) # cleanup - w.jobs.delete_run(run_id=run.run_id) + w.jobs.delete(job_id=created_job.job_id) Get a single job. 
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 0146f790..1232cc17 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -90,9 +90,7 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") Creates a new registered model with the name specified in the request body. @@ -123,7 +121,7 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a model version. diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 19f791a2..076120c4 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -102,12 +102,25 @@ .. code-block:: + import time + from databricks.sdk import WorkspaceClient - from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.providers.list(sharing.ListProvidersRequest()) + public_share_recipient = """{ + "shareCredentialsVersion":1, + "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz", + "endpoint":"https://sharing.delta.io/delta-sharing/" + } + """ + + created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient) + + shares = w.providers.list_shares(name=created.name) + + # cleanup + w.providers.delete(name=created.name) Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. 
There diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 2c369968..03dae035 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -178,7 +178,7 @@ content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, language=workspace.Language.SQL, - overwrite=True, + overwrite=true_, path=notebook_path, ) @@ -223,16 +223,14 @@ .. code-block:: - import os - import time - from databricks.sdk import WorkspaceClient w = WorkspaceClient() - notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - - objects = w.workspace.list(path=os.path.dirname(notebook)) + names = [] + for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True): + names.append(i.path) + assert len(names) > 0 List workspace objects