From 2a0df41c458c5ba331803db6f9f807ef0f61d230 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Fri, 2 May 2025 09:01:44 +0000 Subject: [PATCH 1/2] bump --- .codegen/_openapi_sha | 2 +- NEXT_CHANGELOG.md | 19 ++++ databricks/sdk/service/compute.py | 7 ++ databricks/sdk/service/dashboards.py | 2 +- databricks/sdk/service/files.py | 3 +- databricks/sdk/service/ml.py | 21 ++++- databricks/sdk/service/pipelines.py | 82 ++++++++++------ databricks/sdk/service/sql.py | 136 +-------------------------- 8 files changed, 109 insertions(+), 163 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index e7f752fb5..2cd3357e2 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -ce962ccd0a078a5a9d89494fe38d237ce377d5f3 \ No newline at end of file +f20747a9e1b158ea126960dcb30ac66f53435f2d \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5e8ed133b..9946d835b 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,22 @@ ### Internal Changes ### API Changes +* Added `future_feature_data_path` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`. +* Added `exclude_columns` and `include_columns` fields for `databricks.sdk.service.pipelines.TableSpecificConfig`. +* Added `create_time`, `custom_description`, `custom_summary`, `display_name`, `evaluation`, `id`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_text`, `run_as_user_name`, `schedule`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.AlertV2`. +* Added `network_check_control_plane_failure`, `network_check_dns_server_failure`, `network_check_metadata_endpoint_failure`, `network_check_multiple_components_failure`, `network_check_nic_failure`, `network_check_storage_failure` and `secret_permission_denied` enum values for `databricks.sdk.service.compute.TerminationReasonCode`. +* [Breaking] Changed `pipeline_id` field for `databricks.sdk.service.pipelines.EditPipeline` to be required. +* Changed `connection_name`, `gateway_storage_catalog` and `gateway_storage_schema` fields for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition` to be required. +* [Breaking] Changed `connection_name`, `gateway_storage_catalog` and `gateway_storage_schema` fields for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition` to be required. +* Changed `kind` field for `databricks.sdk.service.pipelines.PipelineDeployment` to be required. +* [Breaking] Changed `kind` field for `databricks.sdk.service.pipelines.PipelineDeployment` to be required. +* Changed `destination_catalog`, `destination_schema` and `source_url` fields for `databricks.sdk.service.pipelines.ReportSpec` to be required. +* [Breaking] Changed `destination_catalog`, `destination_schema` and `source_url` fields for `databricks.sdk.service.pipelines.ReportSpec` to be required. +* Changed `destination_catalog`, `destination_schema` and `source_schema` fields for `databricks.sdk.service.pipelines.SchemaSpec` to be required. +* [Breaking] Changed `destination_catalog`, `destination_schema` and `source_schema` fields for `databricks.sdk.service.pipelines.SchemaSpec` to be required. +* [Breaking] Changed `destination_catalog`, `destination_schema` and `source_table` fields for `databricks.sdk.service.pipelines.TableSpec` to be required. +* Changed `destination_catalog`, `destination_schema` and `source_table` fields for `databricks.sdk.service.pipelines.TableSpec` to be required. 
+* [Breaking] Changed `results` field for `databricks.sdk.service.sql.ListAlertsV2Response` to type `databricks.sdk.service.sql.AlertV2List` dataclass. +* [Breaking] Changed pagination for [AlertsV2API.list_alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html#databricks.sdk.service.sql.AlertsV2API.list_alerts) method. +* [Breaking] Changed waiter for [GenieAPI.create_message](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html#databricks.sdk.service.dashboards.GenieAPI.create_message) method. +* [Breaking] Removed `create_time`, `custom_description`, `custom_summary`, `display_name`, `evaluation`, `id`, `lifecycle_state`, `owner_user_name`, `query_text`, `run_as_user_name`, `schedule`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.ListAlertsV2ResponseAlert`. diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 9a60ebc87..b5e7306ad 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -9167,6 +9167,12 @@ class TerminationReasonCode(Enum): METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT" + NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE" + NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE" + NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE" + NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE" + NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE" + NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" NO_MATCHED_K8S = "NO_MATCHED_K8S" @@ -9179,6 +9185,7 @@ class TerminationReasonCode(Enum): REQUEST_THROTTLED = "REQUEST_THROTTLED" RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED" SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" + SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 1683ba1b1..c340b7465 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -2097,7 +2097,7 @@ def create_message(self, space_id: str, conversation_id: str, content: str) -> W self.wait_get_message_genie_completed, response=GenieMessage.from_dict(op_response), conversation_id=conversation_id, - message_id=op_response["id"], + message_id=op_response["message_id"], space_id=space_id, ) diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 8d60b842f..52496e84b 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -1251,7 +1251,8 @@ def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool The absolute path of the file. :param contents: BinaryIO :param overwrite: bool (optional) - If true, an existing file will be overwritten. + If true or unspecified, an existing file will be overwritten. If false, an error will be returned if + the path points to an existing file. 
""" diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 46ce607e1..7ec4d9cc8 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -592,7 +592,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExperimentResponse: @dataclass class CreateForecastingExperimentRequest: train_data_path: str - """The fully qualified name of a Unity Catalog table, formatted as + """The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model.""" target_column: str @@ -617,6 +617,10 @@ class CreateForecastingExperimentRequest: experiment_path: Optional[str] = None """The path in the workspace to store the created experiment.""" + future_feature_data_path: Optional[str] = None + """The fully qualified path of a Unity Catalog table, formatted as + catalog_name.schema_name.table_name, used to store future feature data for predictions.""" + holiday_regions: Optional[List[str]] = None """The region code(s) to automatically add holiday features. Currently supports only one region.""" @@ -665,6 +669,8 @@ def as_dict(self) -> dict: body["forecast_granularity"] = self.forecast_granularity if self.forecast_horizon is not None: body["forecast_horizon"] = self.forecast_horizon + if self.future_feature_data_path is not None: + body["future_feature_data_path"] = self.future_feature_data_path if self.holiday_regions: body["holiday_regions"] = [v for v in self.holiday_regions] if self.include_features: @@ -702,6 +708,8 @@ def as_shallow_dict(self) -> dict: body["forecast_granularity"] = self.forecast_granularity if self.forecast_horizon is not None: body["forecast_horizon"] = self.forecast_horizon + if self.future_feature_data_path is not None: + body["future_feature_data_path"] = self.future_feature_data_path if self.holiday_regions: body["holiday_regions"] = self.holiday_regions if self.include_features: @@ -736,6 +744,7 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentRequest: experiment_path=d.get("experiment_path", None), forecast_granularity=d.get("forecast_granularity", None), forecast_horizon=d.get("forecast_horizon", None), + future_feature_data_path=d.get("future_feature_data_path", None), holiday_regions=d.get("holiday_regions", None), include_features=d.get("include_features", None), max_runtime=d.get("max_runtime", None), @@ -7322,6 +7331,7 @@ def create_experiment( *, custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, + future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, @@ -7337,7 +7347,7 @@ def create_experiment( Creates a serverless forecasting experiment. Returns the experiment ID. :param train_data_path: str - The fully qualified name of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, + The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model. :param target_column: str The column in the input training table used as the prediction target for model training. The values @@ -7355,6 +7365,9 @@ def create_experiment( The column in the training table used to customize weights for each time series. :param experiment_path: str (optional) The path in the workspace to store the created experiment. 
+ :param future_feature_data_path: str (optional) + The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, + used to store future feature data for predictions. :param holiday_regions: List[str] (optional) The region code(s) to automatically add holiday features. Currently supports only one region. :param include_features: List[str] (optional) @@ -7395,6 +7408,8 @@ def create_experiment( body["forecast_granularity"] = forecast_granularity if forecast_horizon is not None: body["forecast_horizon"] = forecast_horizon + if future_feature_data_path is not None: + body["future_feature_data_path"] = future_feature_data_path if holiday_regions is not None: body["holiday_regions"] = [v for v in holiday_regions] if include_features is not None: @@ -7441,6 +7456,7 @@ def create_experiment_and_wait( *, custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, + future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, @@ -7457,6 +7473,7 @@ def create_experiment_and_wait( experiment_path=experiment_path, forecast_granularity=forecast_granularity, forecast_horizon=forecast_horizon, + future_feature_data_path=future_feature_data_path, holiday_regions=holiday_regions, include_features=include_features, max_runtime=max_runtime, diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 7e3ee328b..b52846109 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -758,7 +758,7 @@ def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec: @dataclass class FileLibrary: path: Optional[str] = None - """The absolute path of the file.""" + """The absolute path of the source code.""" def as_dict(self) -> dict: """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body.""" @@ -1029,25 +1029,25 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionConfig: @dataclass class IngestionGatewayPipelineDefinition: - connection_id: Optional[str] = None - """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this - gateway pipeline uses to communicate with the source.""" - - connection_name: Optional[str] = None + connection_name: str """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" - gateway_storage_catalog: Optional[str] = None + gateway_storage_catalog: str """Required, Immutable. The name of the catalog for the gateway pipeline's storage location.""" + gateway_storage_schema: str + """Required, Immutable. The name of the schema for the gateway pipelines's storage location.""" + + connection_id: Optional[str] = None + """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this + gateway pipeline uses to communicate with the source.""" + gateway_storage_name: Optional[str] = None """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will automatically create the storage location under the catalog and schema.""" - gateway_storage_schema: Optional[str] = None - """Required, Immutable. 
The name of the schema for the gateway pipelines's storage location.""" - def as_dict(self) -> dict: """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1293,7 +1293,7 @@ class MaturityLevel(Enum): @dataclass class NotebookLibrary: path: Optional[str] = None - """The absolute path of the notebook.""" + """The absolute path of the source code.""" def as_dict(self) -> dict: """Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body.""" @@ -1892,7 +1892,7 @@ class PipelineClusterAutoscaleMode(Enum): @dataclass class PipelineDeployment: - kind: Optional[DeploymentKind] = None + kind: DeploymentKind """The deployment method that manages the pipeline.""" metadata_file_path: Optional[str] = None @@ -2584,18 +2584,18 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelineTrigger: @dataclass class ReportSpec: - destination_catalog: Optional[str] = None + source_url: str + """Required. Report URL in the source system.""" + + destination_catalog: str """Required. Destination catalog to store table.""" - destination_schema: Optional[str] = None + destination_schema: str """Required. Destination schema to store table.""" destination_table: Optional[str] = None """Required. Destination table name. The pipeline fails if a table with that name already exists.""" - source_url: Optional[str] = None - """Required. Report URL in the source system.""" - table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object.""" @@ -2731,10 +2731,13 @@ def from_dict(cls, d: Dict[str, Any]) -> RunAs: @dataclass class SchemaSpec: - destination_catalog: Optional[str] = None + source_schema: str + """Required. Schema name in the source database.""" + + destination_catalog: str """Required. Destination catalog to store tables.""" - destination_schema: Optional[str] = None + destination_schema: str """Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails If a table with the same name already exists.""" @@ -2742,9 +2745,6 @@ class SchemaSpec: source_catalog: Optional[str] = None """The source catalog name. Might be optional depending on the type of source.""" - source_schema: Optional[str] = None - """Required. Schema name in the source database.""" - table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the @@ -2924,6 +2924,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StackFrame: @dataclass class StartUpdate: cause: Optional[StartUpdateCause] = None + """What triggered this update.""" full_refresh: Optional[bool] = None """If true, this update will reset all tables before running.""" @@ -2992,6 +2993,7 @@ def from_dict(cls, d: Dict[str, Any]) -> StartUpdate: class StartUpdateCause(Enum): + """What triggered this update.""" API_CALL = "API_CALL" JOB_TASK = "JOB_TASK" @@ -3045,10 +3047,13 @@ def from_dict(cls, d: Dict[str, Any]) -> StopPipelineResponse: @dataclass class TableSpec: - destination_catalog: Optional[str] = None + source_table: str + """Required. Table name in the source database.""" + + destination_catalog: str """Required. 
Destination catalog to store table.""" - destination_schema: Optional[str] = None + destination_schema: str """Required. Destination schema to store table.""" destination_table: Optional[str] = None @@ -3061,9 +3066,6 @@ class TableSpec: source_schema: Optional[str] = None """Schema name in the source database. Might be optional depending on the type of source.""" - source_table: Optional[str] = None - """Required. Table name in the source database.""" - table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.""" @@ -3122,6 +3124,18 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpec: @dataclass class TableSpecificConfig: + exclude_columns: Optional[List[str]] = None + """A list of column names to be excluded for the ingestion. When not specified, include_columns + fully controls what columns to be ingested. When specified, all other columns including future + ones will be automatically included for ingestion. This field in mutually exclusive with + `include_columns`.""" + + include_columns: Optional[List[str]] = None + """A list of column names to be included for the ingestion. When not specified, all columns except + ones in exclude_columns will be included. Future columns will be automatically included. When + specified, all other future columns will be automatically excluded from ingestion. This field in + mutually exclusive with `exclude_columns`.""" + primary_keys: Optional[List[str]] = None """The primary key of the table used to apply changes.""" @@ -3139,6 +3153,10 @@ class TableSpecificConfig: def as_dict(self) -> dict: """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.exclude_columns: + body["exclude_columns"] = [v for v in self.exclude_columns] + if self.include_columns: + body["include_columns"] = [v for v in self.include_columns] if self.primary_keys: body["primary_keys"] = [v for v in self.primary_keys] if self.salesforce_include_formula_fields is not None: @@ -3152,6 +3170,10 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes.""" body = {} + if self.exclude_columns: + body["exclude_columns"] = self.exclude_columns + if self.include_columns: + body["include_columns"] = self.include_columns if self.primary_keys: body["primary_keys"] = self.primary_keys if self.salesforce_include_formula_fields is not None: @@ -3166,6 +3188,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: """Deserializes the TableSpecificConfig from a dictionary.""" return cls( + exclude_columns=d.get("exclude_columns", None), + include_columns=d.get("include_columns", None), primary_keys=d.get("primary_keys", None), salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None), scd_type=_enum(d, "scd_type", TableSpecificConfigScdType), @@ -3325,6 +3349,7 @@ class UpdateStateInfo: creation_time: Optional[str] = None state: Optional[UpdateStateInfoState] = None + """The update state.""" update_id: Optional[str] = None @@ -3361,6 +3386,7 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateStateInfo: class UpdateStateInfoState(Enum): + """The update state.""" CANCELED = "CANCELED" COMPLETED = "COMPLETED" @@ -3687,6 +3713,7 @@ def list_pipeline_events( Retrieves events for a pipeline. 
:param pipeline_id: str + The pipeline to return events for. :param filter: str (optional) Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp @@ -3867,6 +3894,7 @@ def start_update( :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) + What triggered this update. :param full_refresh: bool (optional) If true, this update will reset all tables before running. :param full_refresh_selection: List[str] (optional) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index a3746381c..c9baeee54 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -4791,7 +4791,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponseAlert: class ListAlertsV2Response: next_page_token: Optional[str] = None - results: Optional[List[ListAlertsV2ResponseAlert]] = None + results: Optional[List[AlertV2]] = None def as_dict(self) -> dict: """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body.""" @@ -4814,131 +4814,7 @@ def as_shallow_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response: """Deserializes the ListAlertsV2Response from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - results=_repeated_dict(d, "results", ListAlertsV2ResponseAlert), - ) - - -@dataclass -class ListAlertsV2ResponseAlert: - create_time: Optional[str] = None - """The timestamp indicating when the alert was created.""" - - custom_description: Optional[str] = None - """Custom description for the alert. support mustache template.""" - - custom_summary: Optional[str] = None - """Custom summary for the alert. support mustache template.""" - - display_name: Optional[str] = None - """The display name of the alert.""" - - evaluation: Optional[AlertV2Evaluation] = None - - id: Optional[str] = None - """UUID identifying the alert.""" - - lifecycle_state: Optional[LifecycleState] = None - """Indicates whether the query is trashed.""" - - owner_user_name: Optional[str] = None - """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - - query_text: Optional[str] = None - """Text of the query to be run.""" - - run_as_user_name: Optional[str] = None - """The run as username. 
This field is set to "Unavailable" if the user has been deleted.""" - - schedule: Optional[CronSchedule] = None - - update_time: Optional[str] = None - """The timestamp indicating when the alert was updated.""" - - warehouse_id: Optional[str] = None - """ID of the SQL warehouse attached to the alert.""" - - def as_dict(self) -> dict: - """Serializes the ListAlertsV2ResponseAlert into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_description is not None: - body["custom_description"] = self.custom_description - if self.custom_summary is not None: - body["custom_summary"] = self.custom_summary - if self.display_name is not None: - body["display_name"] = self.display_name - if self.evaluation: - body["evaluation"] = self.evaluation.as_dict() - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListAlertsV2ResponseAlert into a shallow dictionary of its immediate attributes.""" - body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_description is not None: - body["custom_description"] = self.custom_description - if self.custom_summary is not None: - body["custom_summary"] = self.custom_summary - if self.display_name is not None: - body["display_name"] = self.display_name - if self.evaluation: - body["evaluation"] = self.evaluation - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.schedule: - body["schedule"] = self.schedule - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2ResponseAlert: - """Deserializes the ListAlertsV2ResponseAlert from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - custom_description=d.get("custom_description", None), - custom_summary=d.get("custom_summary", None), - display_name=d.get("display_name", None), - evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), - id=d.get("id", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - owner_user_name=d.get("owner_user_name", None), - query_text=d.get("query_text", None), - run_as_user_name=d.get("run_as_user_name", None), - schedule=_from_dict(d, "schedule", CronSchedule), - update_time=d.get("update_time", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", 
AlertV2)) class ListOrder(Enum): @@ -8966,9 +8842,7 @@ def get_alert(self, id: str) -> AlertV2: res = self._api.do("GET", f"/api/2.0/alerts/{id}", headers=headers) return AlertV2.from_dict(res) - def list_alerts( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ListAlertsV2ResponseAlert]: + def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]: """List alerts. Gets a list of alerts accessible to the user, ordered by creation time. @@ -8976,7 +8850,7 @@ def list_alerts( :param page_size: int (optional) :param page_token: str (optional) - :returns: Iterator over :class:`ListAlertsV2ResponseAlert` + :returns: Iterator over :class:`AlertV2` """ query = {} @@ -8992,7 +8866,7 @@ def list_alerts( json = self._api.do("GET", "/api/2.0/alerts", query=query, headers=headers) if "results" in json: for v in json["results"]: - yield ListAlertsV2ResponseAlert.from_dict(v) + yield AlertV2.from_dict(v) if "next_page_token" not in json or not json["next_page_token"]: return query["page_token"] = json["next_page_token"] From 7071173b6179f203720b2375ead282a57bf7261b Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Fri, 2 May 2025 09:22:09 +0000 Subject: [PATCH 2/2] nits --- NEXT_CHANGELOG.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 9946d835b..d39173c98 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,7 +13,6 @@ ### API Changes * Added `future_feature_data_path` field for `databricks.sdk.service.ml.CreateForecastingExperimentRequest`. * Added `exclude_columns` and `include_columns` fields for `databricks.sdk.service.pipelines.TableSpecificConfig`. -* Added `create_time`, `custom_description`, `custom_summary`, `display_name`, `evaluation`, `id`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_text`, `run_as_user_name`, `schedule`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.AlertV2`. * Added `network_check_control_plane_failure`, `network_check_dns_server_failure`, `network_check_metadata_endpoint_failure`, `network_check_multiple_components_failure`, `network_check_nic_failure`, `network_check_storage_failure` and `secret_permission_denied` enum values for `databricks.sdk.service.compute.TerminationReasonCode`. * [Breaking] Changed `pipeline_id` field for `databricks.sdk.service.pipelines.EditPipeline` to be required. * Changed `connection_name`, `gateway_storage_catalog` and `gateway_storage_schema` fields for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition` to be required. @@ -28,5 +27,4 @@ * Changed `destination_catalog`, `destination_schema` and `source_table` fields for `databricks.sdk.service.pipelines.TableSpec` to be required. * [Breaking] Changed `results` field for `databricks.sdk.service.sql.ListAlertsV2Response` to type `databricks.sdk.service.sql.AlertV2List` dataclass. * [Breaking] Changed pagination for [AlertsV2API.list_alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/alerts_v2.html#databricks.sdk.service.sql.AlertsV2API.list_alerts) method. -* [Breaking] Changed waiter for [GenieAPI.create_message](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html#databricks.sdk.service.dashboards.GenieAPI.create_message) method. 
-* [Breaking] Removed `create_time`, `custom_description`, `custom_summary`, `display_name`, `evaluation`, `id`, `lifecycle_state`, `owner_user_name`, `query_text`, `run_as_user_name`, `schedule`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.ListAlertsV2ResponseAlert`. +* Fixed waiter for [GenieAPI.create_message](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html#databricks.sdk.service.dashboards.GenieAPI.create_message) method.
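The forecasting hunk above adds `future_feature_data_path` to `CreateForecastingExperimentRequest` and threads it through `create_experiment` / `create_experiment_and_wait`. A minimal sketch of how a caller might pass it, assuming the service is exposed as `w.forecasting` and that `time_column`, `forecast_granularity` and `forecast_horizon` remain the other required parameters; every table name, column name and granularity value below is a placeholder, not something taken from this patch:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Train on one Unity Catalog table and point the experiment at a second table
# that carries known-in-advance ("future") feature values for the horizon.
# All identifiers below are illustrative placeholders.
experiment = w.forecasting.create_experiment_and_wait(
    train_data_path="main.forecasting.sales_history",
    target_column="units_sold",
    time_column="sale_date",
    forecast_granularity="Daily",  # placeholder granularity value
    forecast_horizon=30,
    future_feature_data_path="main.forecasting.planned_promotions",  # new field
)
print(experiment)
```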
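`TableSpecificConfig` gains the mutually exclusive `exclude_columns` / `include_columns` filters, and `TableSpec` now requires `source_table`, `destination_catalog` and `destination_schema`. A sketch of the resulting dataclass usage, with placeholder catalog, schema, table and column names:

```python
from databricks.sdk.service.pipelines import TableSpec, TableSpecificConfig

# Ingest only the listed columns; anything not named here, including columns
# added to the source later, stays out. exclude_columns is the inverse filter
# and must not be combined with include_columns.
table = TableSpec(
    source_table="customers",        # placeholder source table
    destination_catalog="main",      # placeholder destination catalog
    destination_schema="ingest",     # placeholder destination schema
    table_configuration=TableSpecificConfig(
        include_columns=["id", "email", "updated_at"],
        primary_keys=["id"],
    ),
)

# The new fields serialize into the request body alongside the existing ones.
print(table.as_dict()["table_configuration"])
```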
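With the pagination change, `AlertsV2API.list_alerts` yields `AlertV2` records directly and the interim `ListAlertsV2ResponseAlert` type is removed. A sketch of the call after this patch, assuming the service is exposed as `w.alerts_v2`:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The iterator pages through GET /api/2.0/alerts via next_page_token and now
# yields full AlertV2 objects rather than the removed response-only type.
for alert in w.alerts_v2.list_alerts(page_size=50):
    print(alert.id, alert.display_name, alert.lifecycle_state)
```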
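The Genie fix swaps the waiter's lookup from `op_response["id"]` to `op_response["message_id"]`, so blocking on a newly created message should resolve again. A sketch, assuming the API is exposed as `w.genie` and using placeholder IDs:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# create_message returns a Wait wrapper; .result() polls
# wait_get_message_genie_completed using the corrected message_id.
message = w.genie.create_message(
    space_id="01ef0000000000000000000000000000",        # placeholder space ID
    conversation_id="01ef0000000000000000000000000001",  # placeholder conversation ID
    content="How many orders shipped last week?",
).result()
print(message.content)
```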