From 471c38b1d7cd9364203586a0be7e6770a733cbfd Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Wed, 22 Jan 2025 14:00:38 +0100 Subject: [PATCH 1/3] [Release] Release v0.56.0 ### Bug Fixes * Support Query parameters for all HTTP operations ([#1124](https://github.com/databricks/databricks-sdk-go/pull/1124)). ### Internal Changes * Add download target to MakeFile ([#1125](https://github.com/databricks/databricks-sdk-go/pull/1125)). * Delete examples/mocking module ([#1126](https://github.com/databricks/databricks-sdk-go/pull/1126)). * Scope the traversing directory in the Recursive list workspace test ([#1120](https://github.com/databricks/databricks-sdk-go/pull/1120)). ### API Changes: * Added [w.AccessControl](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/iam#AccessControlAPI) workspace-level service. * Added `HttpRequest` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service. * Added `ReviewState`, `Reviews` and `RunnerCollaborators` fields for [cleanrooms.CleanRoomAssetNotebook](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetNotebook). * Added `CleanRoomsNotebookOutput` field for [jobs.RunOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#RunOutput). * Added `RunAsRepl` field for [jobs.SparkJarTask](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#SparkJarTask). * Added `Scopes` field for [oauth2.UpdateCustomAppIntegration](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/oauth2#UpdateCustomAppIntegration). * Added `Contents` field for [serving.GetOpenApiResponse](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GetOpenApiResponse). 
* Added `Activated`, `ActivationUrl`, `AuthenticationType`, `Cloud`, `Comment`, `CreatedAt`, `CreatedBy`, `DataRecipientGlobalMetastoreId`, `IpAccessList`, `MetastoreId`, `Name`, `Owner`, `PropertiesKvpairs`, `Region`, `SharingCode`, `Tokens`, `UpdatedAt` and `UpdatedBy` fields for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Added `ExpirationTime` field for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service return type to become non-empty. * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service return type to [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service to return [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Changed `Create` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service with new required argument order. * Changed `GetOpenApi` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service return type to become non-empty. 
* Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service to return [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service return type to [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). * Changed [serving.EndpointTagList](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTagList) to [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. * Changed `Behavior` field for [serving.AiGatewayGuardrailPiiBehavior](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior) to no longer be required. * Changed `Behavior` field for [serving.AiGatewayGuardrailPiiBehavior](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior) to no longer be required. * Changed `Config` field for [serving.CreateServingEndpoint](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#CreateServingEndpoint) to no longer be required. * Changed `ProjectId` and `Region` fields for [serving.GoogleCloudVertexAiConfig](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig) to be required. 
* Changed `ProjectId` and `Region` fields for [serving.GoogleCloudVertexAiConfig](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig) to be required. * Changed `WorkloadType` field for [serving.ServedEntityInput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityInput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). * Changed `WorkloadType` field for [serving.ServedEntityOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). * Changed `WorkloadType` field for [serving.ServedModelOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedModelOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). 
OpenAPI SHA: 0be1b914249781b5e903b7676fd02255755bc851, Date: 2025-01-22 --- .codegen/_openapi_sha | 2 +- CHANGELOG.md | 46 ++ experimental/mocks/mock_workspace_client.go | 9 + .../iam/mock_access_control_interface.go | 96 ++++ .../mock_serving_endpoints_interface.go | 129 ++++- .../sharing/mock_recipients_interface.go | 87 +--- service/cleanrooms/model.go | 64 ++- service/compute/model.go | 12 +- service/dashboards/model.go | 18 +- service/files/api.go | 7 +- service/files/interface.go | 7 +- service/iam/api.go | 21 +- service/iam/impl.go | 15 + service/iam/interface.go | 7 + service/iam/model.go | 104 ++++ service/jobs/model.go | 46 +- service/oauth2/model.go | 4 + service/pkg.go | 5 +- service/serving/api.go | 13 +- service/serving/impl.go | 25 +- service/serving/interface.go | 10 +- service/serving/model.go | 462 ++++++++++-------- service/sharing/api.go | 38 +- service/sharing/impl.go | 8 +- service/sharing/interface.go | 4 +- service/sharing/model.go | 60 +-- version/version.go | 2 +- workspace_client.go | 11 +- 28 files changed, 909 insertions(+), 403 deletions(-) create mode 100644 experimental/mocks/service/iam/mock_access_control_interface.go diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index dfe78790a..588cf9d63 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -779817ed8d63031f5ea761fbd25ee84f38feec0d \ No newline at end of file +0be1b914249781b5e903b7676fd02255755bc851 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index a9036ffa8..a73d7f324 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # Version changelog +## [Release] Release v0.56.0 + +### Bug Fixes + + * Support Query parameters for all HTTP operations ([#1124](https://github.com/databricks/databricks-sdk-go/pull/1124)). + + +### Internal Changes + + * Add download target to MakeFile ([#1125](https://github.com/databricks/databricks-sdk-go/pull/1125)). 
+ * Delete examples/mocking module ([#1126](https://github.com/databricks/databricks-sdk-go/pull/1126)). + * Scope the traversing directory in the Recursive list workspace test ([#1120](https://github.com/databricks/databricks-sdk-go/pull/1120)). + + +### API Changes: + + * Added [w.AccessControl](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/iam#AccessControlAPI) workspace-level service. + * Added `HttpRequest` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service. + * Added `ReviewState`, `Reviews` and `RunnerCollaborators` fields for [cleanrooms.CleanRoomAssetNotebook](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetNotebook). + * Added `CleanRoomsNotebookOutput` field for [jobs.RunOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#RunOutput). + * Added `RunAsRepl` field for [jobs.SparkJarTask](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#SparkJarTask). + * Added `Scopes` field for [oauth2.UpdateCustomAppIntegration](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/oauth2#UpdateCustomAppIntegration). + * Added `Contents` field for [serving.GetOpenApiResponse](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GetOpenApiResponse). + * Added `Activated`, `ActivationUrl`, `AuthenticationType`, `Cloud`, `Comment`, `CreatedAt`, `CreatedBy`, `DataRecipientGlobalMetastoreId`, `IpAccessList`, `MetastoreId`, `Name`, `Owner`, `PropertiesKvpairs`, `Region`, `SharingCode`, `Tokens`, `UpdatedAt` and `UpdatedBy` fields for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). + * Added `ExpirationTime` field for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). 
+ * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service return type to become non-empty. + * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service return type to [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). + * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service to return [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). + * Changed `Create` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service with new required argument order. + * Changed `GetOpenApi` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service return type to become non-empty. + * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service to return [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). + * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service return type to [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). + * Changed [serving.EndpointTagList](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTagList) to [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). 
+ * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. + * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. + * Changed `Behavior` field for [serving.AiGatewayGuardrailPiiBehavior](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior) to no longer be required. + * Changed `Behavior` field for [serving.AiGatewayGuardrailPiiBehavior](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior) to no longer be required. + * Changed `Config` field for [serving.CreateServingEndpoint](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#CreateServingEndpoint) to no longer be required. + * Changed `ProjectId` and `Region` fields for [serving.GoogleCloudVertexAiConfig](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig) to be required. + * Changed `ProjectId` and `Region` fields for [serving.GoogleCloudVertexAiConfig](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig) to be required. + * Changed `WorkloadType` field for [serving.ServedEntityInput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityInput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). + * Changed `WorkloadType` field for [serving.ServedEntityOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). 
+ * Changed `WorkloadType` field for [serving.ServedModelOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedModelOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). + +OpenAPI SHA: 0be1b914249781b5e903b7676fd02255755bc851, Date: 2025-01-22 + ## [Release] Release v0.55.0 ### Internal Changes diff --git a/experimental/mocks/mock_workspace_client.go b/experimental/mocks/mock_workspace_client.go index 20f8533c9..c46e8663d 100755 --- a/experimental/mocks/mock_workspace_client.go +++ b/experimental/mocks/mock_workspace_client.go @@ -41,6 +41,7 @@ func NewMockWorkspaceClient(t interface { WorkspaceClient: &databricks.WorkspaceClient{ Config: nil, + AccessControl: iam.NewMockAccessControlInterface(t), AccountAccessControlProxy: iam.NewMockAccountAccessControlProxyInterface(t), Alerts: sql.NewMockAlertsInterface(t), AlertsLegacy: sql.NewMockAlertsLegacyInterface(t), @@ -242,6 +243,14 @@ func (m *MockWorkspaceClient) GetMockRestrictWorkspaceAdminsAPI() *settings.Mock return api } +func (m *MockWorkspaceClient) GetMockAccessControlAPI() *iam.MockAccessControlInterface { + api, ok := m.WorkspaceClient.AccessControl.(*iam.MockAccessControlInterface) + if !ok { + panic(fmt.Sprintf("expected AccessControl to be *iam.MockAccessControlInterface, actual was %T", m.WorkspaceClient.AccessControl)) + } + return api +} + func (m *MockWorkspaceClient) GetMockAccountAccessControlProxyAPI() *iam.MockAccountAccessControlProxyInterface { api, ok := m.WorkspaceClient.AccountAccessControlProxy.(*iam.MockAccountAccessControlProxyInterface) if !ok { diff --git a/experimental/mocks/service/iam/mock_access_control_interface.go b/experimental/mocks/service/iam/mock_access_control_interface.go new file mode 100644 index 000000000..67324947c --- /dev/null +++ b/experimental/mocks/service/iam/mock_access_control_interface.go @@ -0,0 +1,96 @@ +// Code generated by 
mockery v2.43.0. DO NOT EDIT. + +package iam + +import ( + context "context" + + iam "github.com/databricks/databricks-sdk-go/service/iam" + mock "github.com/stretchr/testify/mock" +) + +// MockAccessControlInterface is an autogenerated mock type for the AccessControlInterface type +type MockAccessControlInterface struct { + mock.Mock +} + +type MockAccessControlInterface_Expecter struct { + mock *mock.Mock +} + +func (_m *MockAccessControlInterface) EXPECT() *MockAccessControlInterface_Expecter { + return &MockAccessControlInterface_Expecter{mock: &_m.Mock} +} + +// CheckPolicy provides a mock function with given fields: ctx, request +func (_m *MockAccessControlInterface) CheckPolicy(ctx context.Context, request iam.CheckPolicyRequest) (*iam.CheckPolicyResponse, error) { + ret := _m.Called(ctx, request) + + if len(ret) == 0 { + panic("no return value specified for CheckPolicy") + } + + var r0 *iam.CheckPolicyResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, iam.CheckPolicyRequest) (*iam.CheckPolicyResponse, error)); ok { + return rf(ctx, request) + } + if rf, ok := ret.Get(0).(func(context.Context, iam.CheckPolicyRequest) *iam.CheckPolicyResponse); ok { + r0 = rf(ctx, request) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*iam.CheckPolicyResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, iam.CheckPolicyRequest) error); ok { + r1 = rf(ctx, request) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockAccessControlInterface_CheckPolicy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CheckPolicy' +type MockAccessControlInterface_CheckPolicy_Call struct { + *mock.Call +} + +// CheckPolicy is a helper method to define mock.On call +// - ctx context.Context +// - request iam.CheckPolicyRequest +func (_e *MockAccessControlInterface_Expecter) CheckPolicy(ctx interface{}, request interface{}) *MockAccessControlInterface_CheckPolicy_Call { + return 
&MockAccessControlInterface_CheckPolicy_Call{Call: _e.mock.On("CheckPolicy", ctx, request)} +} + +func (_c *MockAccessControlInterface_CheckPolicy_Call) Run(run func(ctx context.Context, request iam.CheckPolicyRequest)) *MockAccessControlInterface_CheckPolicy_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(iam.CheckPolicyRequest)) + }) + return _c +} + +func (_c *MockAccessControlInterface_CheckPolicy_Call) Return(_a0 *iam.CheckPolicyResponse, _a1 error) *MockAccessControlInterface_CheckPolicy_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockAccessControlInterface_CheckPolicy_Call) RunAndReturn(run func(context.Context, iam.CheckPolicyRequest) (*iam.CheckPolicyResponse, error)) *MockAccessControlInterface_CheckPolicy_Call { + _c.Call.Return(run) + return _c +} + +// NewMockAccessControlInterface creates a new instance of MockAccessControlInterface. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockAccessControlInterface(t interface { + mock.TestingT + Cleanup(func()) +}) *MockAccessControlInterface { + mock := &MockAccessControlInterface{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/experimental/mocks/service/serving/mock_serving_endpoints_interface.go b/experimental/mocks/service/serving/mock_serving_endpoints_interface.go index 782f10dfc..84c106815 100644 --- a/experimental/mocks/service/serving/mock_serving_endpoints_interface.go +++ b/experimental/mocks/service/serving/mock_serving_endpoints_interface.go @@ -611,21 +611,33 @@ func (_c *MockServingEndpointsInterface_GetByName_Call) RunAndReturn(run func(co } // GetOpenApi provides a mock function with given fields: ctx, request -func (_m *MockServingEndpointsInterface) GetOpenApi(ctx context.Context, request serving.GetOpenApiRequest) error { +func (_m *MockServingEndpointsInterface) GetOpenApi(ctx context.Context, request serving.GetOpenApiRequest) (*serving.GetOpenApiResponse, error) { ret := _m.Called(ctx, request) if len(ret) == 0 { panic("no return value specified for GetOpenApi") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, serving.GetOpenApiRequest) error); ok { + var r0 *serving.GetOpenApiResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, serving.GetOpenApiRequest) (*serving.GetOpenApiResponse, error)); ok { + return rf(ctx, request) + } + if rf, ok := ret.Get(0).(func(context.Context, serving.GetOpenApiRequest) *serving.GetOpenApiResponse); ok { r0 = rf(ctx, request) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*serving.GetOpenApiResponse) + } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, serving.GetOpenApiRequest) error); ok { + r1 = rf(ctx, request) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } // MockServingEndpointsInterface_GetOpenApi_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 
'GetOpenApi' @@ -647,32 +659,44 @@ func (_c *MockServingEndpointsInterface_GetOpenApi_Call) Run(run func(ctx contex return _c } -func (_c *MockServingEndpointsInterface_GetOpenApi_Call) Return(_a0 error) *MockServingEndpointsInterface_GetOpenApi_Call { - _c.Call.Return(_a0) +func (_c *MockServingEndpointsInterface_GetOpenApi_Call) Return(_a0 *serving.GetOpenApiResponse, _a1 error) *MockServingEndpointsInterface_GetOpenApi_Call { + _c.Call.Return(_a0, _a1) return _c } -func (_c *MockServingEndpointsInterface_GetOpenApi_Call) RunAndReturn(run func(context.Context, serving.GetOpenApiRequest) error) *MockServingEndpointsInterface_GetOpenApi_Call { +func (_c *MockServingEndpointsInterface_GetOpenApi_Call) RunAndReturn(run func(context.Context, serving.GetOpenApiRequest) (*serving.GetOpenApiResponse, error)) *MockServingEndpointsInterface_GetOpenApi_Call { _c.Call.Return(run) return _c } // GetOpenApiByName provides a mock function with given fields: ctx, name -func (_m *MockServingEndpointsInterface) GetOpenApiByName(ctx context.Context, name string) error { +func (_m *MockServingEndpointsInterface) GetOpenApiByName(ctx context.Context, name string) (*serving.GetOpenApiResponse, error) { ret := _m.Called(ctx, name) if len(ret) == 0 { panic("no return value specified for GetOpenApiByName") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string) error); ok { + var r0 *serving.GetOpenApiResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) (*serving.GetOpenApiResponse, error)); ok { + return rf(ctx, name) + } + if rf, ok := ret.Get(0).(func(context.Context, string) *serving.GetOpenApiResponse); ok { r0 = rf(ctx, name) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*serving.GetOpenApiResponse) + } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } // 
MockServingEndpointsInterface_GetOpenApiByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetOpenApiByName' @@ -694,12 +718,12 @@ func (_c *MockServingEndpointsInterface_GetOpenApiByName_Call) Run(run func(ctx return _c } -func (_c *MockServingEndpointsInterface_GetOpenApiByName_Call) Return(_a0 error) *MockServingEndpointsInterface_GetOpenApiByName_Call { - _c.Call.Return(_a0) +func (_c *MockServingEndpointsInterface_GetOpenApiByName_Call) Return(_a0 *serving.GetOpenApiResponse, _a1 error) *MockServingEndpointsInterface_GetOpenApiByName_Call { + _c.Call.Return(_a0, _a1) return _c } -func (_c *MockServingEndpointsInterface_GetOpenApiByName_Call) RunAndReturn(run func(context.Context, string) error) *MockServingEndpointsInterface_GetOpenApiByName_Call { +func (_c *MockServingEndpointsInterface_GetOpenApiByName_Call) RunAndReturn(run func(context.Context, string) (*serving.GetOpenApiResponse, error)) *MockServingEndpointsInterface_GetOpenApiByName_Call { _c.Call.Return(run) return _c } @@ -940,6 +964,65 @@ func (_c *MockServingEndpointsInterface_GetPermissionsByServingEndpointId_Call) return _c } +// HttpRequest provides a mock function with given fields: ctx, request +func (_m *MockServingEndpointsInterface) HttpRequest(ctx context.Context, request serving.ExternalFunctionRequest) (*serving.ExternalFunctionResponse, error) { + ret := _m.Called(ctx, request) + + if len(ret) == 0 { + panic("no return value specified for HttpRequest") + } + + var r0 *serving.ExternalFunctionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, serving.ExternalFunctionRequest) (*serving.ExternalFunctionResponse, error)); ok { + return rf(ctx, request) + } + if rf, ok := ret.Get(0).(func(context.Context, serving.ExternalFunctionRequest) *serving.ExternalFunctionResponse); ok { + r0 = rf(ctx, request) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*serving.ExternalFunctionResponse) + } + } + + if rf, ok := 
ret.Get(1).(func(context.Context, serving.ExternalFunctionRequest) error); ok { + r1 = rf(ctx, request) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// MockServingEndpointsInterface_HttpRequest_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HttpRequest' +type MockServingEndpointsInterface_HttpRequest_Call struct { + *mock.Call +} + +// HttpRequest is a helper method to define mock.On call +// - ctx context.Context +// - request serving.ExternalFunctionRequest +func (_e *MockServingEndpointsInterface_Expecter) HttpRequest(ctx interface{}, request interface{}) *MockServingEndpointsInterface_HttpRequest_Call { + return &MockServingEndpointsInterface_HttpRequest_Call{Call: _e.mock.On("HttpRequest", ctx, request)} +} + +func (_c *MockServingEndpointsInterface_HttpRequest_Call) Run(run func(ctx context.Context, request serving.ExternalFunctionRequest)) *MockServingEndpointsInterface_HttpRequest_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(serving.ExternalFunctionRequest)) + }) + return _c +} + +func (_c *MockServingEndpointsInterface_HttpRequest_Call) Return(_a0 *serving.ExternalFunctionResponse, _a1 error) *MockServingEndpointsInterface_HttpRequest_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *MockServingEndpointsInterface_HttpRequest_Call) RunAndReturn(run func(context.Context, serving.ExternalFunctionRequest) (*serving.ExternalFunctionResponse, error)) *MockServingEndpointsInterface_HttpRequest_Call { + _c.Call.Return(run) + return _c +} + // List provides a mock function with given fields: ctx func (_m *MockServingEndpointsInterface) List(ctx context.Context) listing.Iterator[serving.ServingEndpoint] { ret := _m.Called(ctx) @@ -1166,23 +1249,23 @@ func (_c *MockServingEndpointsInterface_LogsByNameAndServedModelName_Call) RunAn } // Patch provides a mock function with given fields: ctx, request -func (_m *MockServingEndpointsInterface) Patch(ctx 
context.Context, request serving.PatchServingEndpointTags) ([]serving.EndpointTag, error) { +func (_m *MockServingEndpointsInterface) Patch(ctx context.Context, request serving.PatchServingEndpointTags) (*serving.EndpointTags, error) { ret := _m.Called(ctx, request) if len(ret) == 0 { panic("no return value specified for Patch") } - var r0 []serving.EndpointTag + var r0 *serving.EndpointTags var r1 error - if rf, ok := ret.Get(0).(func(context.Context, serving.PatchServingEndpointTags) ([]serving.EndpointTag, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, serving.PatchServingEndpointTags) (*serving.EndpointTags, error)); ok { return rf(ctx, request) } - if rf, ok := ret.Get(0).(func(context.Context, serving.PatchServingEndpointTags) []serving.EndpointTag); ok { + if rf, ok := ret.Get(0).(func(context.Context, serving.PatchServingEndpointTags) *serving.EndpointTags); ok { r0 = rf(ctx, request) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]serving.EndpointTag) + r0 = ret.Get(0).(*serving.EndpointTags) } } @@ -1214,12 +1297,12 @@ func (_c *MockServingEndpointsInterface_Patch_Call) Run(run func(ctx context.Con return _c } -func (_c *MockServingEndpointsInterface_Patch_Call) Return(_a0 []serving.EndpointTag, _a1 error) *MockServingEndpointsInterface_Patch_Call { +func (_c *MockServingEndpointsInterface_Patch_Call) Return(_a0 *serving.EndpointTags, _a1 error) *MockServingEndpointsInterface_Patch_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *MockServingEndpointsInterface_Patch_Call) RunAndReturn(run func(context.Context, serving.PatchServingEndpointTags) ([]serving.EndpointTag, error)) *MockServingEndpointsInterface_Patch_Call { +func (_c *MockServingEndpointsInterface_Patch_Call) RunAndReturn(run func(context.Context, serving.PatchServingEndpointTags) (*serving.EndpointTags, error)) *MockServingEndpointsInterface_Patch_Call { _c.Call.Return(run) return _c } diff --git a/experimental/mocks/service/sharing/mock_recipients_interface.go 
b/experimental/mocks/service/sharing/mock_recipients_interface.go index 7f724bb2d..9a4a4dec4 100644 --- a/experimental/mocks/service/sharing/mock_recipients_interface.go +++ b/experimental/mocks/service/sharing/mock_recipients_interface.go @@ -403,65 +403,6 @@ func (_c *MockRecipientsInterface_ListAll_Call) RunAndReturn(run func(context.Co return _c } -// RecipientInfoNameToMetastoreIdMap provides a mock function with given fields: ctx, request -func (_m *MockRecipientsInterface) RecipientInfoNameToMetastoreIdMap(ctx context.Context, request sharing.ListRecipientsRequest) (map[string]string, error) { - ret := _m.Called(ctx, request) - - if len(ret) == 0 { - panic("no return value specified for RecipientInfoNameToMetastoreIdMap") - } - - var r0 map[string]string - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, sharing.ListRecipientsRequest) (map[string]string, error)); ok { - return rf(ctx, request) - } - if rf, ok := ret.Get(0).(func(context.Context, sharing.ListRecipientsRequest) map[string]string); ok { - r0 = rf(ctx, request) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(map[string]string) - } - } - - if rf, ok := ret.Get(1).(func(context.Context, sharing.ListRecipientsRequest) error); ok { - r1 = rf(ctx, request) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RecipientInfoNameToMetastoreIdMap' -type MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call struct { - *mock.Call -} - -// RecipientInfoNameToMetastoreIdMap is a helper method to define mock.On call -// - ctx context.Context -// - request sharing.ListRecipientsRequest -func (_e *MockRecipientsInterface_Expecter) RecipientInfoNameToMetastoreIdMap(ctx interface{}, request interface{}) *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call { - return 
&MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call{Call: _e.mock.On("RecipientInfoNameToMetastoreIdMap", ctx, request)} -} - -func (_c *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call) Run(run func(ctx context.Context, request sharing.ListRecipientsRequest)) *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(sharing.ListRecipientsRequest)) - }) - return _c -} - -func (_c *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call) Return(_a0 map[string]string, _a1 error) *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call) RunAndReturn(run func(context.Context, sharing.ListRecipientsRequest) (map[string]string, error)) *MockRecipientsInterface_RecipientInfoNameToMetastoreIdMap_Call { - _c.Call.Return(run) - return _c -} - // RotateToken provides a mock function with given fields: ctx, request func (_m *MockRecipientsInterface) RotateToken(ctx context.Context, request sharing.RotateRecipientToken) (*sharing.RecipientInfo, error) { ret := _m.Called(ctx, request) @@ -640,21 +581,33 @@ func (_c *MockRecipientsInterface_SharePermissionsByName_Call) RunAndReturn(run } // Update provides a mock function with given fields: ctx, request -func (_m *MockRecipientsInterface) Update(ctx context.Context, request sharing.UpdateRecipient) error { +func (_m *MockRecipientsInterface) Update(ctx context.Context, request sharing.UpdateRecipient) (*sharing.RecipientInfo, error) { ret := _m.Called(ctx, request) if len(ret) == 0 { panic("no return value specified for Update") } - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, sharing.UpdateRecipient) error); ok { + var r0 *sharing.RecipientInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, sharing.UpdateRecipient) (*sharing.RecipientInfo, error)); 
ok { + return rf(ctx, request) + } + if rf, ok := ret.Get(0).(func(context.Context, sharing.UpdateRecipient) *sharing.RecipientInfo); ok { r0 = rf(ctx, request) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sharing.RecipientInfo) + } } - return r0 + if rf, ok := ret.Get(1).(func(context.Context, sharing.UpdateRecipient) error); ok { + r1 = rf(ctx, request) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } // MockRecipientsInterface_Update_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Update' @@ -676,12 +629,12 @@ func (_c *MockRecipientsInterface_Update_Call) Run(run func(ctx context.Context, return _c } -func (_c *MockRecipientsInterface_Update_Call) Return(_a0 error) *MockRecipientsInterface_Update_Call { - _c.Call.Return(_a0) +func (_c *MockRecipientsInterface_Update_Call) Return(_a0 *sharing.RecipientInfo, _a1 error) *MockRecipientsInterface_Update_Call { + _c.Call.Return(_a0, _a1) return _c } -func (_c *MockRecipientsInterface_Update_Call) RunAndReturn(run func(context.Context, sharing.UpdateRecipient) error) *MockRecipientsInterface_Update_Call { +func (_c *MockRecipientsInterface_Update_Call) RunAndReturn(run func(context.Context, sharing.UpdateRecipient) (*sharing.RecipientInfo, error)) *MockRecipientsInterface_Update_Call { _c.Call.Return(run) return _c } diff --git a/service/cleanrooms/model.go b/service/cleanrooms/model.go index 61b7c4368..cf4a225a5 100755 --- a/service/cleanrooms/model.go +++ b/service/cleanrooms/model.go @@ -196,6 +196,12 @@ type CleanRoomAssetNotebook struct { // Base 64 representation of the notebook contents. This is the same format // as returned by :method:workspace/export with the format of **HTML**. 
NotebookContent string `json:"notebook_content,omitempty"` + // top-level status derived from all reviews + ReviewState CleanRoomNotebookReviewNotebookReviewState `json:"review_state,omitempty"` + // All existing approvals or rejections + Reviews []CleanRoomNotebookReview `json:"reviews,omitempty"` + // collaborators that can run the notebook + RunnerCollaborators []CleanRoomCollaborator `json:"runner_collaborators,omitempty"` ForceSendFields []string `json:"-"` } @@ -212,6 +218,8 @@ type CleanRoomAssetStatusEnum string const CleanRoomAssetStatusEnumActive CleanRoomAssetStatusEnum = `ACTIVE` +const CleanRoomAssetStatusEnumPending CleanRoomAssetStatusEnum = `PENDING` + const CleanRoomAssetStatusEnumPermissionDenied CleanRoomAssetStatusEnum = `PERMISSION_DENIED` // String representation for [fmt.Print] @@ -222,11 +230,11 @@ func (f *CleanRoomAssetStatusEnum) String() string { // Set raw string value and validate it against allowed values func (f *CleanRoomAssetStatusEnum) Set(v string) error { switch v { - case `ACTIVE`, `PERMISSION_DENIED`: + case `ACTIVE`, `PENDING`, `PERMISSION_DENIED`: *f = CleanRoomAssetStatusEnum(v) return nil default: - return fmt.Errorf(`value "%s" is not one of "ACTIVE", "PERMISSION_DENIED"`, v) + return fmt.Errorf(`value "%s" is not one of "ACTIVE", "PENDING", "PERMISSION_DENIED"`, v) } } @@ -304,7 +312,7 @@ type CleanRoomCollaborator struct { // requirements]. // // [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements - CollaboratorAlias string `json:"collaborator_alias,omitempty"` + CollaboratorAlias string `json:"collaborator_alias"` // Generated display name for the collaborator. In the case of a single // metastore clean room, it is the clean room name. For x-metastore clean // rooms, it is the organization name of the metastore. 
It is not restricted @@ -338,6 +346,56 @@ func (s CleanRoomCollaborator) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +type CleanRoomNotebookReview struct { + // review comment + Comment string `json:"comment,omitempty"` + // timestamp of when the review was submitted + CreatedAtMillis int64 `json:"created_at_millis,omitempty"` + // review outcome + ReviewState CleanRoomNotebookReviewNotebookReviewState `json:"review_state,omitempty"` + // collaborator alias of the reviewer + ReviewerCollaboratorAlias string `json:"reviewer_collaborator_alias,omitempty"` + + ForceSendFields []string `json:"-"` +} + +func (s *CleanRoomNotebookReview) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s CleanRoomNotebookReview) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + +type CleanRoomNotebookReviewNotebookReviewState string + +const CleanRoomNotebookReviewNotebookReviewStateApproved CleanRoomNotebookReviewNotebookReviewState = `APPROVED` + +const CleanRoomNotebookReviewNotebookReviewStatePending CleanRoomNotebookReviewNotebookReviewState = `PENDING` + +const CleanRoomNotebookReviewNotebookReviewStateRejected CleanRoomNotebookReviewNotebookReviewState = `REJECTED` + +// String representation for [fmt.Print] +func (f *CleanRoomNotebookReviewNotebookReviewState) String() string { + return string(*f) +} + +// Set raw string value and validate it against allowed values +func (f *CleanRoomNotebookReviewNotebookReviewState) Set(v string) error { + switch v { + case `APPROVED`, `PENDING`, `REJECTED`: + *f = CleanRoomNotebookReviewNotebookReviewState(v) + return nil + default: + return fmt.Errorf(`value "%s" is not one of "APPROVED", "PENDING", "REJECTED"`, v) + } +} + +// Type always returns CleanRoomNotebookReviewNotebookReviewState to satisfy [pflag.Value] interface +func (f *CleanRoomNotebookReviewNotebookReviewState) Type() string { + return "CleanRoomNotebookReviewNotebookReviewState" +} + // Stores information about a 
single task run. type CleanRoomNotebookTaskRun struct { // Job run info of the task in the runner's local workspace. This field is diff --git a/service/compute/model.go b/service/compute/model.go index 6414a21eb..5fe81ced7 100755 --- a/service/compute/model.go +++ b/service/compute/model.go @@ -2732,6 +2732,14 @@ func (f *EventDetailsCause) Type() string { type EventType string +const EventTypeAddNodesFailed EventType = `ADD_NODES_FAILED` + +const EventTypeAutomaticClusterUpdate EventType = `AUTOMATIC_CLUSTER_UPDATE` + +const EventTypeAutoscalingBackoff EventType = `AUTOSCALING_BACKOFF` + +const EventTypeAutoscalingFailed EventType = `AUTOSCALING_FAILED` + const EventTypeAutoscalingStatsReport EventType = `AUTOSCALING_STATS_REPORT` const EventTypeCreating EventType = `CREATING` @@ -2790,11 +2798,11 @@ func (f *EventType) String() string { // Set raw string value and validate it against allowed values func (f *EventType) Set(v string) error { switch v { - case `AUTOSCALING_STATS_REPORT`, `CREATING`, `DBFS_DOWN`, `DID_NOT_EXPAND_DISK`, `DRIVER_HEALTHY`, `DRIVER_NOT_RESPONDING`, `DRIVER_UNAVAILABLE`, `EDITED`, `EXPANDED_DISK`, `FAILED_TO_EXPAND_DISK`, `INIT_SCRIPTS_FINISHED`, `INIT_SCRIPTS_STARTED`, `METASTORE_DOWN`, `NODES_LOST`, `NODE_BLACKLISTED`, `NODE_EXCLUDED_DECOMMISSIONED`, `PINNED`, `RESIZING`, `RESTARTING`, `RUNNING`, `SPARK_EXCEPTION`, `STARTING`, `TERMINATING`, `UNPINNED`, `UPSIZE_COMPLETED`: + case `ADD_NODES_FAILED`, `AUTOMATIC_CLUSTER_UPDATE`, `AUTOSCALING_BACKOFF`, `AUTOSCALING_FAILED`, `AUTOSCALING_STATS_REPORT`, `CREATING`, `DBFS_DOWN`, `DID_NOT_EXPAND_DISK`, `DRIVER_HEALTHY`, `DRIVER_NOT_RESPONDING`, `DRIVER_UNAVAILABLE`, `EDITED`, `EXPANDED_DISK`, `FAILED_TO_EXPAND_DISK`, `INIT_SCRIPTS_FINISHED`, `INIT_SCRIPTS_STARTED`, `METASTORE_DOWN`, `NODES_LOST`, `NODE_BLACKLISTED`, `NODE_EXCLUDED_DECOMMISSIONED`, `PINNED`, `RESIZING`, `RESTARTING`, `RUNNING`, `SPARK_EXCEPTION`, `STARTING`, `TERMINATING`, `UNPINNED`, `UPSIZE_COMPLETED`: *f = EventType(v) 
return nil default: - return fmt.Errorf(`value "%s" is not one of "AUTOSCALING_STATS_REPORT", "CREATING", "DBFS_DOWN", "DID_NOT_EXPAND_DISK", "DRIVER_HEALTHY", "DRIVER_NOT_RESPONDING", "DRIVER_UNAVAILABLE", "EDITED", "EXPANDED_DISK", "FAILED_TO_EXPAND_DISK", "INIT_SCRIPTS_FINISHED", "INIT_SCRIPTS_STARTED", "METASTORE_DOWN", "NODES_LOST", "NODE_BLACKLISTED", "NODE_EXCLUDED_DECOMMISSIONED", "PINNED", "RESIZING", "RESTARTING", "RUNNING", "SPARK_EXCEPTION", "STARTING", "TERMINATING", "UNPINNED", "UPSIZE_COMPLETED"`, v) + return fmt.Errorf(`value "%s" is not one of "ADD_NODES_FAILED", "AUTOMATIC_CLUSTER_UPDATE", "AUTOSCALING_BACKOFF", "AUTOSCALING_FAILED", "AUTOSCALING_STATS_REPORT", "CREATING", "DBFS_DOWN", "DID_NOT_EXPAND_DISK", "DRIVER_HEALTHY", "DRIVER_NOT_RESPONDING", "DRIVER_UNAVAILABLE", "EDITED", "EXPANDED_DISK", "FAILED_TO_EXPAND_DISK", "INIT_SCRIPTS_FINISHED", "INIT_SCRIPTS_STARTED", "METASTORE_DOWN", "NODES_LOST", "NODE_BLACKLISTED", "NODE_EXCLUDED_DECOMMISSIONED", "PINNED", "RESIZING", "RESTARTING", "RUNNING", "SPARK_EXCEPTION", "STARTING", "TERMINATING", "UNPINNED", "UPSIZE_COMPLETED"`, v) } } diff --git a/service/dashboards/model.go b/service/dashboards/model.go index 310008e61..495e6b8db 100755 --- a/service/dashboards/model.go +++ b/service/dashboards/model.go @@ -324,8 +324,10 @@ type GenieMessage struct { // MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching // metadata from the data sources. * `FILTERING_CONTEXT`: Running smart // context step to determine relevant context. * `ASKING_AI`: Waiting for - // the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing - // AI provided SQL query. Get the SQL query result by calling + // the LLM to respond to the users question. * `PENDING_WAREHOUSE`: Waiting + // for warehouse before the SQL query can start executing. * + // `EXECUTING_QUERY`: Executing AI provided SQL query. 
Get the SQL query + // result by calling // [getMessageQueryResult](:method:genie/getMessageQueryResult) API. // **Important: The message status will stay in the `EXECUTING_QUERY` until // a client calls @@ -672,8 +674,9 @@ func (f *MessageErrorType) Type() string { // MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching // metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context // step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to -// respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL -// query. Get the SQL query result by calling +// respond to the users question. * `PENDING_WAREHOUSE`: Waiting for warehouse +// before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI +// provided SQL query. Get the SQL query result by calling // [getMessageQueryResult](:method:genie/getMessageQueryResult) API. // **Important: The message status will stay in the `EXECUTING_QUERY` until a // client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. @@ -713,6 +716,9 @@ const MessageStatusFetchingMetadata MessageStatus = `FETCHING_METADATA` // Running smart context step to determine relevant context. const MessageStatusFilteringContext MessageStatus = `FILTERING_CONTEXT` +// Waiting for warehouse before the SQL query can start executing. +const MessageStatusPendingWarehouse MessageStatus = `PENDING_WAREHOUSE` + // SQL result is not available anymore. The user needs to execute the query // again. 
const MessageStatusQueryResultExpired MessageStatus = `QUERY_RESULT_EXPIRED` @@ -728,11 +734,11 @@ func (f *MessageStatus) String() string { // Set raw string value and validate it against allowed values func (f *MessageStatus) Set(v string) error { switch v { - case `ASKING_AI`, `CANCELLED`, `COMPLETED`, `EXECUTING_QUERY`, `FAILED`, `FETCHING_METADATA`, `FILTERING_CONTEXT`, `QUERY_RESULT_EXPIRED`, `SUBMITTED`: + case `ASKING_AI`, `CANCELLED`, `COMPLETED`, `EXECUTING_QUERY`, `FAILED`, `FETCHING_METADATA`, `FILTERING_CONTEXT`, `PENDING_WAREHOUSE`, `QUERY_RESULT_EXPIRED`, `SUBMITTED`: *f = MessageStatus(v) return nil default: - return fmt.Errorf(`value "%s" is not one of "ASKING_AI", "CANCELLED", "COMPLETED", "EXECUTING_QUERY", "FAILED", "FETCHING_METADATA", "FILTERING_CONTEXT", "QUERY_RESULT_EXPIRED", "SUBMITTED"`, v) + return fmt.Errorf(`value "%s" is not one of "ASKING_AI", "CANCELLED", "COMPLETED", "EXECUTING_QUERY", "FAILED", "FETCHING_METADATA", "FILTERING_CONTEXT", "PENDING_WAREHOUSE", "QUERY_RESULT_EXPIRED", "SUBMITTED"`, v) } } diff --git a/service/files/api.go b/service/files/api.go index c12a519dc..b57bd759f 100755 --- a/service/files/api.go +++ b/service/files/api.go @@ -449,10 +449,15 @@ func NewFiles(client *client.DatabricksClient) *FilesAPI { // // The Files API has two distinct endpoints, one for working with files // (`/fs/files`) and another one for working with directories -// (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD, +// (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, // PUT, and DELETE to manage files and directories specified using their URI // path. The path is always absolute. // +// Some Files API client features are currently experimental. To enable them, +// set `enable_experimental_files_api_client = True` in your configuration +// profile or use the environment variable +// `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. 
+// // [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html type FilesAPI struct { filesImpl diff --git a/service/files/interface.go b/service/files/interface.go index 5a573f9f3..c67524d4d 100755 --- a/service/files/interface.go +++ b/service/files/interface.go @@ -147,10 +147,15 @@ type DbfsService interface { // // The Files API has two distinct endpoints, one for working with files // (`/fs/files`) and another one for working with directories -// (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD, +// (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, // PUT, and DELETE to manage files and directories specified using their URI // path. The path is always absolute. // +// Some Files API client features are currently experimental. To enable them, +// set `enable_experimental_files_api_client = True` in your configuration +// profile or use the environment variable +// `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. +// // [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html type FilesService interface { diff --git a/service/iam/api.go b/service/iam/api.go index 388b0a2f9..008607f65 100755 --- a/service/iam/api.go +++ b/service/iam/api.go @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -// These APIs allow you to manage Account Access Control, Account Access Control Proxy, Account Groups, Account Service Principals, Account Users, Current User, Groups, Permission Migration, Permissions, Service Principals, Users, Workspace Assignment, etc. +// These APIs allow you to manage Access Control, Account Access Control, Account Access Control Proxy, Account Groups, Account Service Principals, Account Users, Current User, Groups, Permission Migration, Permissions, Service Principals, Users, Workspace Assignment, etc. 
package iam import ( @@ -12,6 +12,25 @@ import ( "github.com/databricks/databricks-sdk-go/useragent" ) +type AccessControlInterface interface { + + // Check access policy to a resource. + CheckPolicy(ctx context.Context, request CheckPolicyRequest) (*CheckPolicyResponse, error) +} + +func NewAccessControl(client *client.DatabricksClient) *AccessControlAPI { + return &AccessControlAPI{ + accessControlImpl: accessControlImpl{ + client: client, + }, + } +} + +// Rule based Access Control for Databricks Resources. +type AccessControlAPI struct { + accessControlImpl +} + type AccountAccessControlInterface interface { // Get assignable roles for a resource. diff --git a/service/iam/impl.go b/service/iam/impl.go index f41d601c3..8e9f360d3 100755 --- a/service/iam/impl.go +++ b/service/iam/impl.go @@ -10,6 +10,21 @@ import ( "github.com/databricks/databricks-sdk-go/client" ) +// unexported type that holds implementations of just AccessControl API methods +type accessControlImpl struct { + client *client.DatabricksClient +} + +func (a *accessControlImpl) CheckPolicy(ctx context.Context, request CheckPolicyRequest) (*CheckPolicyResponse, error) { + var checkPolicyResponse CheckPolicyResponse + path := "/api/2.0/access-control/check-policy-v2" + queryParams := make(map[string]any) + headers := make(map[string]string) + headers["Accept"] = "application/json" + err := a.client.Do(ctx, http.MethodGet, path, headers, queryParams, request, &checkPolicyResponse) + return &checkPolicyResponse, err +} + // unexported type that holds implementations of just AccountAccessControl API methods type accountAccessControlImpl struct { client *client.DatabricksClient diff --git a/service/iam/interface.go b/service/iam/interface.go index 0a76288c2..6b44c4503 100755 --- a/service/iam/interface.go +++ b/service/iam/interface.go @@ -6,6 +6,13 @@ import ( "context" ) +// Rule based Access Control for Databricks Resources. 
+type AccessControlService interface { + + // Check access policy to a resource. + CheckPolicy(ctx context.Context, request CheckPolicyRequest) (*CheckPolicyResponse, error) +} + // These APIs manage access rules on resources in an account. Currently, only // grant rules are supported. A grant rule specifies a role assigned to a set of // principals. A list of rules attached to a resource is called a rule set. diff --git a/service/iam/model.go b/service/iam/model.go index 05357a329..1dbdcb937 100755 --- a/service/iam/model.go +++ b/service/iam/model.go @@ -52,6 +52,57 @@ func (s AccessControlResponse) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +// represents an identity trying to access a resource - user or a service +// principal group can be a principal of a permission set assignment but an +// actor is always a user or a service principal +type Actor struct { + ActorId int64 `json:"actor_id,omitempty" url:"actor_id,omitempty"` + + ForceSendFields []string `json:"-"` +} + +func (s *Actor) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s Actor) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + +// Check access policy to a resource +type CheckPolicyRequest struct { + Actor Actor `json:"-" url:"actor"` + + AuthzIdentity RequestAuthzIdentity `json:"-" url:"authz_identity"` + + ConsistencyToken ConsistencyToken `json:"-" url:"consistency_token"` + + Permission string `json:"-" url:"permission"` + // Ex: (servicePrincipal/use, + // accounts//servicePrincipals/) Ex: + // (servicePrincipal.ruleSet/update, + // accounts//servicePrincipals//ruleSets/default) + Resource string `json:"-" url:"resource"` + + ResourceInfo *ResourceInfo `json:"-" url:"resource_info,omitempty"` +} + +type CheckPolicyResponse struct { + ConsistencyToken ConsistencyToken `json:"consistency_token"` + + IsPermitted bool `json:"is_permitted,omitempty"` + + ForceSendFields []string `json:"-"` +} + +func (s *CheckPolicyResponse) 
UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s CheckPolicyResponse) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + type ComplexValue struct { Display string `json:"display,omitempty"` @@ -74,6 +125,10 @@ func (s ComplexValue) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +type ConsistencyToken struct { + Value string `json:"value"` +} + // Delete a group type DeleteAccountGroupRequest struct { // Unique ID for a group in the Databricks account. @@ -1169,6 +1224,55 @@ func (s PrincipalOutput) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +// Defines the identity to be used for authZ of the request on the server side. +// See one pager for more information: http://go/acl/service-identity +type RequestAuthzIdentity string + +const RequestAuthzIdentityRequestAuthzIdentityServiceIdentity RequestAuthzIdentity = `REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY` + +const RequestAuthzIdentityRequestAuthzIdentityUserContext RequestAuthzIdentity = `REQUEST_AUTHZ_IDENTITY_USER_CONTEXT` + +// String representation for [fmt.Print] +func (f *RequestAuthzIdentity) String() string { + return string(*f) +} + +// Set raw string value and validate it against allowed values +func (f *RequestAuthzIdentity) Set(v string) error { + switch v { + case `REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY`, `REQUEST_AUTHZ_IDENTITY_USER_CONTEXT`: + *f = RequestAuthzIdentity(v) + return nil + default: + return fmt.Errorf(`value "%s" is not one of "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY", "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT"`, v) + } +} + +// Type always returns RequestAuthzIdentity to satisfy [pflag.Value] interface +func (f *RequestAuthzIdentity) Type() string { + return "RequestAuthzIdentity" +} + +type ResourceInfo struct { + // Id of the current resource. + Id string `json:"id" url:"id"` + // The legacy acl path of the current resource. 
+ LegacyAclPath string `json:"legacy_acl_path,omitempty" url:"legacy_acl_path,omitempty"` + // Parent resource info for the current resource. The parent may have + // another parent. + ParentResourceInfo *ResourceInfo `json:"parent_resource_info,omitempty" url:"parent_resource_info,omitempty"` + + ForceSendFields []string `json:"-"` +} + +func (s *ResourceInfo) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s ResourceInfo) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + type ResourceMeta struct { // Identifier for group type. Can be local workspace group // (`WorkspaceGroup`) or account group (`Group`). diff --git a/service/jobs/model.go b/service/jobs/model.go index e23efe94c..c94c00d51 100755 --- a/service/jobs/model.go +++ b/service/jobs/model.go @@ -240,6 +240,8 @@ const CleanRoomTaskRunLifeCycleStateQueued CleanRoomTaskRunLifeCycleState = `QUE const CleanRoomTaskRunLifeCycleStateRunning CleanRoomTaskRunLifeCycleState = `RUNNING` +const CleanRoomTaskRunLifeCycleStateRunLifeCycleStateUnspecified CleanRoomTaskRunLifeCycleState = `RUN_LIFE_CYCLE_STATE_UNSPECIFIED` + const CleanRoomTaskRunLifeCycleStateSkipped CleanRoomTaskRunLifeCycleState = `SKIPPED` const CleanRoomTaskRunLifeCycleStateTerminated CleanRoomTaskRunLifeCycleState = `TERMINATED` @@ -256,11 +258,11 @@ func (f *CleanRoomTaskRunLifeCycleState) String() string { // Set raw string value and validate it against allowed values func (f *CleanRoomTaskRunLifeCycleState) Set(v string) error { switch v { - case `BLOCKED`, `INTERNAL_ERROR`, `PENDING`, `QUEUED`, `RUNNING`, `SKIPPED`, `TERMINATED`, `TERMINATING`, `WAITING_FOR_RETRY`: + case `BLOCKED`, `INTERNAL_ERROR`, `PENDING`, `QUEUED`, `RUNNING`, `RUN_LIFE_CYCLE_STATE_UNSPECIFIED`, `SKIPPED`, `TERMINATED`, `TERMINATING`, `WAITING_FOR_RETRY`: *f = CleanRoomTaskRunLifeCycleState(v) return nil default: - return fmt.Errorf(`value "%s" is not one of "BLOCKED", "INTERNAL_ERROR", "PENDING", "QUEUED", "RUNNING", "SKIPPED", 
"TERMINATED", "TERMINATING", "WAITING_FOR_RETRY"`, v) + return fmt.Errorf(`value "%s" is not one of "BLOCKED", "INTERNAL_ERROR", "PENDING", "QUEUED", "RUNNING", "RUN_LIFE_CYCLE_STATE_UNSPECIFIED", "SKIPPED", "TERMINATED", "TERMINATING", "WAITING_FOR_RETRY"`, v) } } @@ -285,6 +287,8 @@ const CleanRoomTaskRunResultStateFailed CleanRoomTaskRunResultState = `FAILED` const CleanRoomTaskRunResultStateMaximumConcurrentRunsReached CleanRoomTaskRunResultState = `MAXIMUM_CONCURRENT_RUNS_REACHED` +const CleanRoomTaskRunResultStateRunResultStateUnspecified CleanRoomTaskRunResultState = `RUN_RESULT_STATE_UNSPECIFIED` + const CleanRoomTaskRunResultStateSuccess CleanRoomTaskRunResultState = `SUCCESS` const CleanRoomTaskRunResultStateSuccessWithFailures CleanRoomTaskRunResultState = `SUCCESS_WITH_FAILURES` @@ -305,11 +309,11 @@ func (f *CleanRoomTaskRunResultState) String() string { // Set raw string value and validate it against allowed values func (f *CleanRoomTaskRunResultState) Set(v string) error { switch v { - case `CANCELED`, `DISABLED`, `EVICTED`, `EXCLUDED`, `FAILED`, `MAXIMUM_CONCURRENT_RUNS_REACHED`, `SUCCESS`, `SUCCESS_WITH_FAILURES`, `TIMEDOUT`, `UPSTREAM_CANCELED`, `UPSTREAM_EVICTED`, `UPSTREAM_FAILED`: + case `CANCELED`, `DISABLED`, `EVICTED`, `EXCLUDED`, `FAILED`, `MAXIMUM_CONCURRENT_RUNS_REACHED`, `RUN_RESULT_STATE_UNSPECIFIED`, `SUCCESS`, `SUCCESS_WITH_FAILURES`, `TIMEDOUT`, `UPSTREAM_CANCELED`, `UPSTREAM_EVICTED`, `UPSTREAM_FAILED`: *f = CleanRoomTaskRunResultState(v) return nil default: - return fmt.Errorf(`value "%s" is not one of "CANCELED", "DISABLED", "EVICTED", "EXCLUDED", "FAILED", "MAXIMUM_CONCURRENT_RUNS_REACHED", "SUCCESS", "SUCCESS_WITH_FAILURES", "TIMEDOUT", "UPSTREAM_CANCELED", "UPSTREAM_EVICTED", "UPSTREAM_FAILED"`, v) + return fmt.Errorf(`value "%s" is not one of "CANCELED", "DISABLED", "EVICTED", "EXCLUDED", "FAILED", "MAXIMUM_CONCURRENT_RUNS_REACHED", "RUN_RESULT_STATE_UNSPECIFIED", "SUCCESS", "SUCCESS_WITH_FAILURES", "TIMEDOUT", 
"UPSTREAM_CANCELED", "UPSTREAM_EVICTED", "UPSTREAM_FAILED"`, v) } } @@ -351,6 +355,15 @@ func (s CleanRoomsNotebookTask) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +type CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput struct { + // The run state of the clean rooms notebook task. + CleanRoomJobRunState *CleanRoomTaskRunState `json:"clean_room_job_run_state,omitempty"` + // The notebook output for the clean room run + NotebookOutput *NotebookOutput `json:"notebook_output,omitempty"` + // Information on how to access the output schema for the clean room run + OutputSchemaInfo *OutputSchemaInfo `json:"output_schema_info,omitempty"` +} + type ClusterInstance struct { // The canonical identifier for the cluster used by a run. This field is // always available for runs on existing clusters. For runs on new clusters, @@ -2110,6 +2123,27 @@ func (s NotebookTask) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +// Stores the catalog name, schema name, and the output schema expiration time +// for the clean room run. +type OutputSchemaInfo struct { + CatalogName string `json:"catalog_name,omitempty"` + // The expiration time for the output schema as a Unix timestamp in + // milliseconds. + ExpirationTime int64 `json:"expiration_time,omitempty"` + + SchemaName string `json:"schema_name,omitempty"` + + ForceSendFields []string `json:"-"` +} + +func (s *OutputSchemaInfo) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s OutputSchemaInfo) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + type PauseStatus string const PauseStatusPaused PauseStatus = `PAUSED` @@ -3175,6 +3209,8 @@ func (s RunNowResponse) MarshalJSON() ([]byte, error) { // Run output was retrieved successfully. 
type RunOutput struct { + // The output of a clean rooms notebook task, if available + CleanRoomsNotebookOutput *CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput `json:"clean_rooms_notebook_output,omitempty"` // The output of a dbt task, if available. DbtOutput *DbtOutput `json:"dbt_output,omitempty"` // An error message indicating why a task failed or why output is not @@ -3690,6 +3726,8 @@ type SparkJarTask struct { // // [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables Parameters []string `json:"parameters,omitempty"` + // Deprecated. A value of `false` is no longer supported. + RunAsRepl bool `json:"run_as_repl,omitempty"` ForceSendFields []string `json:"-"` } diff --git a/service/oauth2/model.go b/service/oauth2/model.go index a448d1690..80e7d7255 100755 --- a/service/oauth2/model.go +++ b/service/oauth2/model.go @@ -649,6 +649,10 @@ type UpdateCustomAppIntegration struct { // List of OAuth redirect urls to be updated in the custom OAuth app // integration RedirectUrls []string `json:"redirect_urls,omitempty"` + // List of OAuth scopes to be updated in the custom OAuth app integration, + // similar to redirect URIs this will fully replace the existing values + // instead of appending + Scopes []string `json:"scopes,omitempty"` // Token access policy to be updated in the custom OAuth app integration TokenAccessPolicy *TokenAccessPolicy `json:"token_access_policy,omitempty"` } diff --git a/service/pkg.go b/service/pkg.go index bd49b7d6b..6eb732385 100644 --- a/service/pkg.go +++ b/service/pkg.go @@ -1,5 +1,7 @@ // Databricks SDK for Go APIs // +// - [iam.AccessControlAPI]: Rule based Access Control for Databricks Resources. +// // - [iam.AccountAccessControlAPI]: These APIs manage access rules on resources in an account. // // - [iam.AccountAccessControlProxyAPI]: These APIs manage access rules on resources in an account. 
@@ -299,6 +301,7 @@ import ( // https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service // See: https://pkg.go.dev/golang.org/x/tools/internal/imports#ImportPathToAssumedName var ( + _ *iam.AccessControlAPI = nil _ *iam.AccountAccessControlAPI = nil _ *iam.AccountAccessControlProxyAPI = nil _ *settings.AibiDashboardEmbeddingAccessPolicyAPI = nil @@ -323,8 +326,8 @@ var ( _ *marketplace.ConsumerListingsAPI = nil _ *marketplace.ConsumerPersonalizationRequestsAPI = nil _ *marketplace.ConsumerProvidersAPI = nil - _ *catalog.CredentialsAPI = nil _ *provisioning.CredentialsAPI = nil + _ *catalog.CredentialsAPI = nil _ *settings.CredentialsManagerAPI = nil _ *settings.CspEnablementAccountAPI = nil _ *iam.CurrentUserAPI = nil diff --git a/service/serving/api.go b/service/serving/api.go index 5a2606d6b..3e7182827 100755 --- a/service/serving/api.go +++ b/service/serving/api.go @@ -74,14 +74,14 @@ type ServingEndpointsInterface interface { // Get the query schema of the serving endpoint in OpenAPI format. The schema // contains information for the supported paths, input and output format and // datatypes. - GetOpenApi(ctx context.Context, request GetOpenApiRequest) error + GetOpenApi(ctx context.Context, request GetOpenApiRequest) (*GetOpenApiResponse, error) // Get the schema for a serving endpoint. // // Get the query schema of the serving endpoint in OpenAPI format. The schema // contains information for the supported paths, input and output format and // datatypes. - GetOpenApiByName(ctx context.Context, name string) error + GetOpenApiByName(ctx context.Context, name string) (*GetOpenApiResponse, error) // Get serving endpoint permission levels. // @@ -105,6 +105,9 @@ type ServingEndpointsInterface interface { // permissions from their root object. GetPermissionsByServingEndpointId(ctx context.Context, servingEndpointId string) (*ServingEndpointPermissions, error) + // Make external services call using the credentials stored in UC Connection. 
+ HttpRequest(ctx context.Context, request ExternalFunctionRequest) (*ExternalFunctionResponse, error) + // Get all serving endpoints. // // This method is generated by Databricks SDK Code Generator. @@ -129,7 +132,7 @@ type ServingEndpointsInterface interface { // // Used to batch add and delete tags from a serving endpoint with a single API // call. - Patch(ctx context.Context, request PatchServingEndpointTags) ([]EndpointTag, error) + Patch(ctx context.Context, request PatchServingEndpointTags) (*EndpointTags, error) // Update rate limits of a serving endpoint. // @@ -141,7 +144,7 @@ type ServingEndpointsInterface interface { // Update AI Gateway of a serving endpoint. // // Used to update the AI Gateway of a serving endpoint. NOTE: Only external - // model endpoints are currently supported. + // model and provisioned throughput endpoints are currently supported. PutAiGateway(ctx context.Context, request PutAiGatewayRequest) (*PutAiGatewayResponse, error) // Query a serving endpoint. @@ -341,7 +344,7 @@ func (a *ServingEndpointsAPI) GetByName(ctx context.Context, name string) (*Serv // Get the query schema of the serving endpoint in OpenAPI format. The schema // contains information for the supported paths, input and output format and // datatypes. 
-func (a *ServingEndpointsAPI) GetOpenApiByName(ctx context.Context, name string) error { +func (a *ServingEndpointsAPI) GetOpenApiByName(ctx context.Context, name string) (*GetOpenApiResponse, error) { return a.servingEndpointsImpl.GetOpenApi(ctx, GetOpenApiRequest{ Name: name, }) diff --git a/service/serving/impl.go b/service/serving/impl.go index d31d545f8..220d1f3b4 100755 --- a/service/serving/impl.go +++ b/service/serving/impl.go @@ -69,14 +69,14 @@ func (a *servingEndpointsImpl) Get(ctx context.Context, request GetServingEndpoi return &servingEndpointDetailed, err } -func (a *servingEndpointsImpl) GetOpenApi(ctx context.Context, request GetOpenApiRequest) error { +func (a *servingEndpointsImpl) GetOpenApi(ctx context.Context, request GetOpenApiRequest) (*GetOpenApiResponse, error) { var getOpenApiResponse GetOpenApiResponse path := fmt.Sprintf("/api/2.0/serving-endpoints/%v/openapi", request.Name) queryParams := make(map[string]any) headers := make(map[string]string) - headers["Accept"] = "application/json" + headers["Accept"] = "text/plain" err := a.client.Do(ctx, http.MethodGet, path, headers, queryParams, request, &getOpenApiResponse) - return err + return &getOpenApiResponse, err } func (a *servingEndpointsImpl) GetPermissionLevels(ctx context.Context, request GetServingEndpointPermissionLevelsRequest) (*GetServingEndpointPermissionLevelsResponse, error) { @@ -99,6 +99,17 @@ func (a *servingEndpointsImpl) GetPermissions(ctx context.Context, request GetSe return &servingEndpointPermissions, err } +func (a *servingEndpointsImpl) HttpRequest(ctx context.Context, request ExternalFunctionRequest) (*ExternalFunctionResponse, error) { + var externalFunctionResponse ExternalFunctionResponse + path := "/api/2.0/external-function" + queryParams := make(map[string]any) + headers := make(map[string]string) + headers["Accept"] = "application/json" + headers["Content-Type"] = "application/json" + err := a.client.Do(ctx, http.MethodPost, path, headers, queryParams, 
request, &externalFunctionResponse) + return &externalFunctionResponse, err +} + func (a *servingEndpointsImpl) List(ctx context.Context) (*ListEndpointsResponse, error) { var listEndpointsResponse ListEndpointsResponse path := "/api/2.0/serving-endpoints" @@ -119,15 +130,15 @@ func (a *servingEndpointsImpl) Logs(ctx context.Context, request LogsRequest) (* return &serverLogsResponse, err } -func (a *servingEndpointsImpl) Patch(ctx context.Context, request PatchServingEndpointTags) ([]EndpointTag, error) { - var endpointTagList []EndpointTag +func (a *servingEndpointsImpl) Patch(ctx context.Context, request PatchServingEndpointTags) (*EndpointTags, error) { + var endpointTags EndpointTags path := fmt.Sprintf("/api/2.0/serving-endpoints/%v/tags", request.Name) queryParams := make(map[string]any) headers := make(map[string]string) headers["Accept"] = "application/json" headers["Content-Type"] = "application/json" - err := a.client.Do(ctx, http.MethodPatch, path, headers, queryParams, request, &endpointTagList) - return endpointTagList, err + err := a.client.Do(ctx, http.MethodPatch, path, headers, queryParams, request, &endpointTags) + return &endpointTags, err } func (a *servingEndpointsImpl) Put(ctx context.Context, request PutRequest) (*PutResponse, error) { diff --git a/service/serving/interface.go b/service/serving/interface.go index 0a38190a9..806e0f49f 100755 --- a/service/serving/interface.go +++ b/service/serving/interface.go @@ -48,7 +48,7 @@ type ServingEndpointsService interface { // Get the query schema of the serving endpoint in OpenAPI format. The // schema contains information for the supported paths, input and output // format and datatypes. - GetOpenApi(ctx context.Context, request GetOpenApiRequest) error + GetOpenApi(ctx context.Context, request GetOpenApiRequest) (*GetOpenApiResponse, error) // Get serving endpoint permission levels. // @@ -61,6 +61,10 @@ type ServingEndpointsService interface { // permissions from their root object. 
GetPermissions(ctx context.Context, request GetServingEndpointPermissionsRequest) (*ServingEndpointPermissions, error) + // Make external services call using the credentials stored in UC + // Connection. + HttpRequest(ctx context.Context, request ExternalFunctionRequest) (*ExternalFunctionResponse, error) + // Get all serving endpoints. // // Use ListAll() to get all ServingEndpoint instances @@ -75,7 +79,7 @@ type ServingEndpointsService interface { // // Used to batch add and delete tags from a serving endpoint with a single // API call. - Patch(ctx context.Context, request PatchServingEndpointTags) ([]EndpointTag, error) + Patch(ctx context.Context, request PatchServingEndpointTags) (*EndpointTags, error) // Update rate limits of a serving endpoint. // @@ -87,7 +91,7 @@ type ServingEndpointsService interface { // Update AI Gateway of a serving endpoint. // // Used to update the AI Gateway of a serving endpoint. NOTE: Only external - // model endpoints are currently supported. + // model and provisioned throughput endpoints are currently supported. PutAiGateway(ctx context.Context, request PutAiGatewayRequest) (*PutAiGatewayResponse, error) // Query a serving endpoint. diff --git a/service/serving/model.go b/service/serving/model.go index ade652782..6d22239da 100755 --- a/service/serving/model.go +++ b/service/serving/model.go @@ -72,20 +72,10 @@ func (s AiGatewayGuardrailParameters) MarshalJSON() ([]byte, error) { } type AiGatewayGuardrailPiiBehavior struct { - // Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' - // is set for the input guardrail and the request contains PII, the request - // is not sent to the model server and 400 status code is returned; if - // 'BLOCK' is set for the output guardrail and the model response contains - // PII, the PII info in the response is redacted and 400 status code is - // returned. - Behavior AiGatewayGuardrailPiiBehaviorBehavior `json:"behavior"` -} - -// Behavior for PII filter. 
Currently only 'BLOCK' is supported. If 'BLOCK' is -// set for the input guardrail and the request contains PII, the request is not -// sent to the model server and 400 status code is returned; if 'BLOCK' is set -// for the output guardrail and the model response contains PII, the PII info in -// the response is redacted and 400 status code is returned. + // Configuration for input guardrail filters. + Behavior AiGatewayGuardrailPiiBehaviorBehavior `json:"behavior,omitempty"` +} + type AiGatewayGuardrailPiiBehaviorBehavior string const AiGatewayGuardrailPiiBehaviorBehaviorBlock AiGatewayGuardrailPiiBehaviorBehavior = `BLOCK` @@ -149,7 +139,7 @@ func (s AiGatewayInferenceTableConfig) MarshalJSON() ([]byte, error) { type AiGatewayRateLimit struct { // Used to specify how many calls are allowed for a key within the // renewal_period. - Calls int `json:"calls"` + Calls int64 `json:"calls"` // Key field for a rate limit. Currently, only 'user' and 'endpoint' are // supported, with 'endpoint' being the default if not specified. Key AiGatewayRateLimitKey `json:"key,omitempty"` @@ -158,8 +148,6 @@ type AiGatewayRateLimit struct { RenewalPeriod AiGatewayRateLimitRenewalPeriod `json:"renewal_period"` } -// Key field for a rate limit. Currently, only 'user' and 'endpoint' are -// supported, with 'endpoint' being the default if not specified. type AiGatewayRateLimitKey string const AiGatewayRateLimitKeyEndpoint AiGatewayRateLimitKey = `endpoint` @@ -187,7 +175,6 @@ func (f *AiGatewayRateLimitKey) Type() string { return "AiGatewayRateLimitKey" } -// Renewal period field for a rate limit. Currently, only 'minute' is supported. 
type AiGatewayRateLimitRenewalPeriod string const AiGatewayRateLimitRenewalPeriodMinute AiGatewayRateLimitRenewalPeriod = `minute` @@ -231,9 +218,9 @@ func (s AiGatewayUsageTrackingConfig) MarshalJSON() ([]byte, error) { type AmazonBedrockConfig struct { // The Databricks secret key reference for an AWS access key ID with // permissions to interact with Bedrock services. If you prefer to paste - // your API key directly, see `aws_access_key_id`. You must provide an API - // key using one of the following fields: `aws_access_key_id` or - // `aws_access_key_id_plaintext`. + // your API key directly, see `aws_access_key_id_plaintext`. You must + // provide an API key using one of the following fields: `aws_access_key_id` + // or `aws_access_key_id_plaintext`. AwsAccessKeyId string `json:"aws_access_key_id,omitempty"` // An AWS access key ID with permissions to interact with Bedrock services // provided as a plaintext string. If you prefer to reference your key using @@ -272,8 +259,6 @@ func (s AmazonBedrockConfig) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -// The underlying provider in Amazon Bedrock. Supported values (case -// insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. type AmazonBedrockConfigBedrockProvider string const AmazonBedrockConfigBedrockProviderAi21labs AmazonBedrockConfigBedrockProvider = `ai21labs` @@ -353,15 +338,18 @@ func (s AutoCaptureConfigInput) MarshalJSON() ([]byte, error) { } type AutoCaptureConfigOutput struct { - // The name of the catalog in Unity Catalog. + // The name of the catalog in Unity Catalog. NOTE: On update, you cannot + // change the catalog name if the inference table is already enabled. CatalogName string `json:"catalog_name,omitempty"` // Indicates whether the inference table is enabled. Enabled bool `json:"enabled,omitempty"` - // The name of the schema in Unity Catalog. + // The name of the schema in Unity Catalog. 
NOTE: On update, you cannot + // change the schema name if the inference table is already enabled. SchemaName string `json:"schema_name,omitempty"` State *AutoCaptureState `json:"state,omitempty"` - // The prefix of the table in Unity Catalog. + // The prefix of the table in Unity Catalog. NOTE: On update, you cannot + // change the prefix name if the inference table is already enabled. TableNamePrefix string `json:"table_name_prefix,omitempty"` ForceSendFields []string `json:"-"` @@ -468,11 +456,12 @@ func (s CohereConfig) MarshalJSON() ([]byte, error) { } type CreateServingEndpoint struct { - // The AI Gateway configuration for the serving endpoint. NOTE: only - // external model endpoints are supported as of now. + // The AI Gateway configuration for the serving endpoint. NOTE: Only + // external model and provisioned throughput endpoints are currently + // supported. AiGateway *AiGatewayConfig `json:"ai_gateway,omitempty"` // The core config of the serving endpoint. - Config EndpointCoreConfigInput `json:"config"` + Config *EndpointCoreConfigInput `json:"config,omitempty"` // The name of the serving endpoint. This field is required and must be // unique across a Databricks workspace. An endpoint name can consist of // alphanumeric characters, dashes, and underscores. @@ -497,6 +486,7 @@ func (s CreateServingEndpoint) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +// Details necessary to query this object's API through the DataPlane APIs. type DataPlaneInfo struct { // Authorization details as a string. AuthorizationDetails string `json:"authorization_details,omitempty"` @@ -557,7 +547,6 @@ type DeleteResponse struct { // Delete a serving endpoint type DeleteServingEndpointRequest struct { - // The name of the serving endpoint. This field is required. 
Name string `json:"-" url:"-"` } @@ -607,27 +596,31 @@ func (f *EmbeddingsV1ResponseEmbeddingElementObject) Type() string { type EndpointCoreConfigInput struct { // Configuration for Inference Tables which automatically logs requests and - // responses to Unity Catalog. + // responses to Unity Catalog. Note: this field is deprecated for creating + // new provisioned throughput endpoints, or updating existing provisioned + // throughput endpoints that never have inference table configured; in these + // cases please use AI Gateway to manage inference tables. AutoCaptureConfig *AutoCaptureConfigInput `json:"auto_capture_config,omitempty"` // The name of the serving endpoint to update. This field is required. Name string `json:"-" url:"-"` - // A list of served entities for the endpoint to serve. A serving endpoint - // can have up to 15 served entities. + // The list of served entities under the serving endpoint config. ServedEntities []ServedEntityInput `json:"served_entities,omitempty"` - // (Deprecated, use served_entities instead) A list of served models for the - // endpoint to serve. A serving endpoint can have up to 15 served models. + // (Deprecated, use served_entities instead) The list of served models under + // the serving endpoint config. ServedModels []ServedModelInput `json:"served_models,omitempty"` - // The traffic config defining how invocations to the serving endpoint - // should be routed. + // The traffic configuration associated with the serving endpoint config. TrafficConfig *TrafficConfig `json:"traffic_config,omitempty"` } type EndpointCoreConfigOutput struct { // Configuration for Inference Tables which automatically logs requests and - // responses to Unity Catalog. + // responses to Unity Catalog. 
Note: this field is deprecated for creating + // new provisioned throughput endpoints, or updating existing provisioned + // throughput endpoints that never have inference table configured; in these + // cases please use AI Gateway to manage inference tables. AutoCaptureConfig *AutoCaptureConfigOutput `json:"auto_capture_config,omitempty"` // The config version that the serving endpoint is currently serving. - ConfigVersion int `json:"config_version,omitempty"` + ConfigVersion int64 `json:"config_version,omitempty"` // The list of served entities under the serving endpoint config. ServedEntities []ServedEntityOutput `json:"served_entities,omitempty"` // (Deprecated, use served_entities instead) The list of served models under @@ -657,7 +650,10 @@ type EndpointCoreConfigSummary struct { type EndpointPendingConfig struct { // Configuration for Inference Tables which automatically logs requests and - // responses to Unity Catalog. + // responses to Unity Catalog. Note: this field is deprecated for creating + // new provisioned throughput endpoints, or updating existing provisioned + // throughput endpoints that never have inference table configured; in these + // cases please use AI Gateway to manage inference tables. AutoCaptureConfig *AutoCaptureConfigOutput `json:"auto_capture_config,omitempty"` // The config version that the serving endpoint is currently serving. ConfigVersion int `json:"config_version,omitempty"` @@ -689,7 +685,7 @@ type EndpointState struct { // pending_config is in progress, if the update failed, or if there is no // update in progress. Note that if the endpoint's config_update state value // is IN_PROGRESS, another update can not be made until the update completes - // or fails." + // or fails. ConfigUpdate EndpointStateConfigUpdate `json:"config_update,omitempty"` // The state of an endpoint, indicating whether or not the endpoint is // queryable. 
An endpoint is READY if all of the served entities in its @@ -698,11 +694,6 @@ type EndpointState struct { Ready EndpointStateReady `json:"ready,omitempty"` } -// The state of an endpoint's config update. This informs the user if the -// pending_config is in progress, if the update failed, or if there is no update -// in progress. Note that if the endpoint's config_update state value is -// IN_PROGRESS, another update can not be made until the update completes or -// fails." type EndpointStateConfigUpdate string const EndpointStateConfigUpdateInProgress EndpointStateConfigUpdate = `IN_PROGRESS` @@ -734,10 +725,6 @@ func (f *EndpointStateConfigUpdate) Type() string { return "EndpointStateConfigUpdate" } -// The state of an endpoint, indicating whether or not the endpoint is -// queryable. An endpoint is READY if all of the served entities in its active -// configuration are ready. If any of the actively served entities are in a -// non-ready state, the endpoint state will be NOT_READY. type EndpointStateReady string const EndpointStateReadyNotReady EndpointStateReady = `NOT_READY` @@ -782,6 +769,10 @@ func (s EndpointTag) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +type EndpointTags struct { + Tags []EndpointTag `json:"tags,omitempty"` +} + // Get metrics of a serving endpoint type ExportMetricsRequest struct { // The name of the serving endpoint to retrieve metrics for. This field is @@ -793,6 +784,84 @@ type ExportMetricsResponse struct { Contents io.ReadCloser `json:"-"` } +// Request to make an external services call using the credentials stored in a UC Connection. +type ExternalFunctionRequest struct { + // The connection name to use. This is required to identify the external + // connection. + ConnectionName string `json:"connection_name"` + // Additional headers for the request. If not provided, only auth headers + // from connections would be passed. + Headers string `json:"headers,omitempty"` + // The JSON payload to send in the request body. 
+ Json string `json:"json,omitempty"` + // The HTTP method to use (e.g., 'GET', 'POST'). + Method ExternalFunctionRequestHttpMethod `json:"method"` + // Query parameters for the request. + Params string `json:"params,omitempty"` + // The relative path for the API endpoint. This is required. + Path string `json:"path"` + + ForceSendFields []string `json:"-"` +} + +func (s *ExternalFunctionRequest) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s ExternalFunctionRequest) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + +type ExternalFunctionRequestHttpMethod string + +const ExternalFunctionRequestHttpMethodDelete ExternalFunctionRequestHttpMethod = `DELETE` + +const ExternalFunctionRequestHttpMethodGet ExternalFunctionRequestHttpMethod = `GET` + +const ExternalFunctionRequestHttpMethodPatch ExternalFunctionRequestHttpMethod = `PATCH` + +const ExternalFunctionRequestHttpMethodPost ExternalFunctionRequestHttpMethod = `POST` + +const ExternalFunctionRequestHttpMethodPut ExternalFunctionRequestHttpMethod = `PUT` + +// String representation for [fmt.Print] +func (f *ExternalFunctionRequestHttpMethod) String() string { + return string(*f) +} + +// Set raw string value and validate it against allowed values +func (f *ExternalFunctionRequestHttpMethod) Set(v string) error { + switch v { + case `DELETE`, `GET`, `PATCH`, `POST`, `PUT`: + *f = ExternalFunctionRequestHttpMethod(v) + return nil + default: + return fmt.Errorf(`value "%s" is not one of "DELETE", "GET", "PATCH", "POST", "PUT"`, v) + } +} + +// Type always returns ExternalFunctionRequestHttpMethod to satisfy [pflag.Value] interface +func (f *ExternalFunctionRequestHttpMethod) Type() string { + return "ExternalFunctionRequestHttpMethod" +} + +type ExternalFunctionResponse struct { + // The HTTP status code of the response + StatusCode int `json:"status_code,omitempty"` + // The content of the response + Text string `json:"text,omitempty"` + + ForceSendFields []string 
`json:"-"` +} + +func (s *ExternalFunctionResponse) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s ExternalFunctionResponse) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + type ExternalModel struct { // AI21Labs Config. Only required if the provider is 'ai21labs'. Ai21labsConfig *Ai21LabsConfig `json:"ai21labs_config,omitempty"` @@ -817,15 +886,12 @@ type ExternalModel struct { // The name of the provider for the external model. Currently, the supported // providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', // 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and - // 'palm'.", + // 'palm'. Provider ExternalModelProvider `json:"provider"` // The task type of the external model. Task string `json:"task"` } -// The name of the provider for the external model. Currently, the supported -// providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', -// 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", type ExternalModelProvider string const ExternalModelProviderAi21labs ExternalModelProvider = `ai21labs` @@ -884,14 +950,15 @@ func (s ExternalModelUsageElement) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } +// All fields are not sensitive as they are hard-coded in the system and made +// available to customers. type FoundationModel struct { - // The description of the foundation model. Description string `json:"description,omitempty"` - // The display name of the foundation model. + DisplayName string `json:"display_name,omitempty"` - // The URL to the documentation of the foundation model. + Docs string `json:"docs,omitempty"` - // The name of the foundation model. 
+ Name string `json:"name,omitempty"` ForceSendFields []string `json:"-"` @@ -912,9 +979,8 @@ type GetOpenApiRequest struct { Name string `json:"-" url:"-"` } -// The response is an OpenAPI spec in JSON format that typically includes fields -// like openapi, info, servers and paths, etc. type GetOpenApiResponse struct { + Contents io.ReadCloser `json:"-"` } // Get serving endpoint permission levels @@ -948,7 +1014,8 @@ type GoogleCloudVertexAiConfig struct { // key using one of the following fields: `private_key` or // `private_key_plaintext` // - // [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys + // [Best practices for managing service account keys]: + // https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys PrivateKey string `json:"private_key,omitempty"` // The private key for the service account which has access to the Google // Cloud Vertex AI Service provided as a plaintext secret. See [Best @@ -957,17 +1024,19 @@ type GoogleCloudVertexAiConfig struct { // API key using one of the following fields: `private_key` or // `private_key_plaintext`. // - // [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys + // [Best practices for managing service account keys]: + // https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"` // This is the Google Cloud project id that the service account is // associated with. - ProjectId string `json:"project_id,omitempty"` + ProjectId string `json:"project_id"` // This is the region for the Google Cloud Vertex AI Service. See [supported // regions] for more details. Some models are only available in specific // regions. 
// - // [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations - Region string `json:"region,omitempty"` + // [supported regions]: + // https://cloud.google.com/vertex-ai/docs/general/locations + Region string `json:"region"` ForceSendFields []string `json:"-"` } @@ -995,11 +1064,14 @@ type LogsRequest struct { ServedModelName string `json:"-" url:"-"` } +// A representation of all DataPlaneInfo for operations that can be done on a +// model through Data Plane APIs. type ModelDataPlaneInfo struct { // Information required to query DataPlane API 'query' endpoint. QueryInfo *DataPlaneInfo `json:"query_info,omitempty"` } +// Configs needed to create an OpenAI model route. type OpenAiConfig struct { // This field is only required for Azure AD OpenAI and is the Microsoft // Entra Client ID. @@ -1098,11 +1170,10 @@ type PatchServingEndpointTags struct { } type PayloadTable struct { - // The name of the payload table. Name string `json:"name,omitempty"` - // The status of the payload table. + Status string `json:"status,omitempty"` - // The status message of the payload table. + StatusMessage string `json:"status_message,omitempty"` ForceSendFields []string `json:"-"` @@ -1116,7 +1187,6 @@ func (s PayloadTable) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -// Update AI Gateway of a serving endpoint type PutAiGatewayRequest struct { // Configuration for AI Guardrails to prevent unwanted data and unsafe data // in requests and responses. @@ -1142,7 +1212,7 @@ type PutAiGatewayResponse struct { Guardrails *AiGatewayGuardrails `json:"guardrails,omitempty"` // Configuration for payload logging using inference tables. Use these // tables to monitor and audit data being sent to and received from model - // APIs and to improve model quality . + // APIs and to improve model quality. 
InferenceTableConfig *AiGatewayInferenceTableConfig `json:"inference_table_config,omitempty"` // Configuration for rate limits which can be set to limit endpoint traffic. RateLimits []AiGatewayRateLimit `json:"rate_limits,omitempty"` @@ -1152,7 +1222,6 @@ type PutAiGatewayResponse struct { UsageTrackingConfig *AiGatewayUsageTrackingConfig `json:"usage_tracking_config,omitempty"` } -// Update rate limits of a serving endpoint type PutRequest struct { // The name of the serving endpoint whose rate limits are being updated. // This field is required. @@ -1303,7 +1372,7 @@ func (f *QueryEndpointResponseObject) Type() string { type RateLimit struct { // Used to specify how many calls are allowed for a key within the // renewal_period. - Calls int `json:"calls"` + Calls int64 `json:"calls"` // Key field for a serving endpoint rate limit. Currently, only 'user' and // 'endpoint' are supported, with 'endpoint' being the default if not // specified. @@ -1313,8 +1382,6 @@ type RateLimit struct { RenewalPeriod RateLimitRenewalPeriod `json:"renewal_period"` } -// Key field for a serving endpoint rate limit. Currently, only 'user' and -// 'endpoint' are supported, with 'endpoint' being the default if not specified. type RateLimitKey string const RateLimitKeyEndpoint RateLimitKey = `endpoint` @@ -1342,8 +1409,6 @@ func (f *RateLimitKey) Type() string { return "RateLimitKey" } -// Renewal period field for a serving endpoint rate limit. Currently, only -// 'minute' is supported. type RateLimitRenewalPeriod string const RateLimitRenewalPeriodMinute RateLimitRenewalPeriod = `minute` @@ -1382,10 +1447,9 @@ type ServedEntityInput struct { // Databricks Model Registry, a model in the Unity Catalog (UC), or a // function of type FEATURE_SPEC in the UC. If it is a UC object, the full // name of the object should be given in the form of - // __catalog_name__.__schema_name__.__model_name__. + // **catalog_name.schema_name.model_name**. 
EntityName string `json:"entity_name,omitempty"` - // The version of the model in Databricks Model Registry to be served or - // empty if the entity is a FEATURE_SPEC. + EntityVersion string `json:"entity_version,omitempty"` // An object containing a set of optional, user-specified environment // variable key-value pairs used for serving this entity. Note: this is an @@ -1414,8 +1478,7 @@ type ServedEntityInput struct { // served entity name can consist of alphanumeric characters, dashes, and // underscores. If not specified for an external model, this field defaults // to external_model.name, with '.' and ':' replaced with '-', and if not - // specified for other entities, it defaults to - // -. + // specified for other entities, it defaults to entity_name-entity_version. Name string `json:"name,omitempty"` // Whether the compute resources for the served entity should scale down to // zero. @@ -1434,8 +1497,8 @@ type ServedEntityInput struct { // available by selecting workload types like GPU_SMALL and others. See the // available [GPU types]. // - // [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types - WorkloadType string `json:"workload_type,omitempty"` + // [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + WorkloadType ServingModelWorkloadType `json:"workload_type,omitempty"` ForceSendFields []string `json:"-"` } @@ -1449,18 +1512,16 @@ func (s ServedEntityInput) MarshalJSON() ([]byte, error) { } type ServedEntityOutput struct { - // The creation timestamp of the served entity in Unix time. CreationTimestamp int64 `json:"creation_timestamp,omitempty"` - // The email of the user who created the served entity. + Creator string `json:"creator,omitempty"` - // The name of the entity served. The entity may be a model in the + // The name of the entity to be served. 
The entity may be a model in the // Databricks Model Registry, a model in the Unity Catalog (UC), or a // function of type FEATURE_SPEC in the UC. If it is a UC object, the full - // name of the object is given in the form of - // __catalog_name__.__schema_name__.__model_name__. + // name of the object should be given in the form of + // **catalog_name.schema_name.model_name**. EntityName string `json:"entity_name,omitempty"` - // The version of the served entity in Databricks Model Registry or empty if - // the entity is a FEATURE_SPEC. + EntityVersion string `json:"entity_version,omitempty"` // An object containing a set of optional, user-specified environment // variable key-value pairs used for serving this entity. Note: this is an @@ -1469,15 +1530,17 @@ type ServedEntityOutput struct { // "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": // "{{secrets/my_scope2/my_key2}}"}` EnvironmentVars map[string]string `json:"environment_vars,omitempty"` - // The external model that is served. NOTE: Only one of external_model, - // foundation_model, and (entity_name, entity_version, workload_size, - // workload_type, and scale_to_zero_enabled) is returned based on the - // endpoint type. + // The external model to be served. NOTE: Only one of external_model and + // (entity_name, entity_version, workload_size, workload_type, and + // scale_to_zero_enabled) can be specified with the latter set being used + // for custom model serving for a Databricks registered model. For an + // existing endpoint with external_model, it cannot be updated to an + // endpoint without external_model. If the endpoint is created without + // external_model, users cannot update it to add external_model later. The + // task type of all external models within an endpoint must be the same. ExternalModel *ExternalModel `json:"external_model,omitempty"` - // The foundation model that is served. 
NOTE: Only one of foundation_model, - // external_model, and (entity_name, entity_version, workload_size, - // workload_type, and scale_to_zero_enabled) is returned based on the - // endpoint type. + // All fields are not sensitive as they are hard-coded in the system and + // made available to customers. FoundationModel *FoundationModel `json:"foundation_model,omitempty"` // ARN of the instance profile that the served entity uses to access AWS // resources. @@ -1486,12 +1549,16 @@ type ServedEntityOutput struct { MaxProvisionedThroughput int `json:"max_provisioned_throughput,omitempty"` // The minimum tokens per second that the endpoint can scale down to. MinProvisionedThroughput int `json:"min_provisioned_throughput,omitempty"` - // The name of the served entity. + // The name of a served entity. It must be unique across an endpoint. A + // served entity name can consist of alphanumeric characters, dashes, and + // underscores. If not specified for an external model, this field defaults + // to external_model.name, with '.' and ':' replaced with '-', and if not + // specified for other entities, it defaults to entity_name-entity_version. Name string `json:"name,omitempty"` // Whether the compute resources for the served entity should scale down to // zero. ScaleToZeroEnabled bool `json:"scale_to_zero_enabled,omitempty"` - // Information corresponding to the state of the served entity. + State *ServedModelState `json:"state,omitempty"` // The workload size of the served entity. The workload size corresponds to // a range of provisioned concurrency that the compute autoscales between. A @@ -1499,7 +1566,7 @@ type ServedEntityOutput struct { // Valid workload sizes are "Small" (4 - 4 provisioned concurrency), // "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 // provisioned concurrency). If scale-to-zero is enabled, the lower bound of - // the provisioned concurrency for each workload size will be 0. 
+ // the provisioned concurrency for each workload size is 0. WorkloadSize string `json:"workload_size,omitempty"` // The workload type of the served entity. The workload type selects which // type of compute to use in the endpoint. The default value for this @@ -1507,8 +1574,8 @@ type ServedEntityOutput struct { // available by selecting workload types like GPU_SMALL and others. See the // available [GPU types]. // - // [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types - WorkloadType string `json:"workload_type,omitempty"` + // [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + WorkloadType ServingModelWorkloadType `json:"workload_type,omitempty"` ForceSendFields []string `json:"-"` } @@ -1522,24 +1589,15 @@ func (s ServedEntityOutput) MarshalJSON() ([]byte, error) { } type ServedEntitySpec struct { - // The name of the entity served. The entity may be a model in the - // Databricks Model Registry, a model in the Unity Catalog (UC), or a - // function of type FEATURE_SPEC in the UC. If it is a UC object, the full - // name of the object is given in the form of - // __catalog_name__.__schema_name__.__model_name__. EntityName string `json:"entity_name,omitempty"` - // The version of the served entity in Databricks Model Registry or empty if - // the entity is a FEATURE_SPEC. + EntityVersion string `json:"entity_version,omitempty"` - // The external model that is served. NOTE: Only one of external_model, - // foundation_model, and (entity_name, entity_version) is returned based on - // the endpoint type. + ExternalModel *ExternalModel `json:"external_model,omitempty"` - // The foundation model that is served. NOTE: Only one of foundation_model, - // external_model, and (entity_name, entity_version) is returned based on - // the endpoint type. 
+ // All fields are not sensitive as they are hard-coded in the system and + // made available to customers. FoundationModel *FoundationModel `json:"foundation_model,omitempty"` - // The name of the served entity. + Name string `json:"name,omitempty"` ForceSendFields []string `json:"-"` @@ -1555,49 +1613,47 @@ func (s ServedEntitySpec) MarshalJSON() ([]byte, error) { type ServedModelInput struct { // An object containing a set of optional, user-specified environment - // variable key-value pairs used for serving this model. Note: this is an - // experimental feature and subject to change. Example model environment + // variable key-value pairs used for serving this entity. Note: this is an + // experimental feature and subject to change. Example entity environment // variables that refer to Databricks secrets: `{"OPENAI_API_KEY": // "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": // "{{secrets/my_scope2/my_key2}}"}` EnvironmentVars map[string]string `json:"environment_vars,omitempty"` - // ARN of the instance profile that the served model will use to access AWS + // ARN of the instance profile that the served entity uses to access AWS // resources. InstanceProfileArn string `json:"instance_profile_arn,omitempty"` // The maximum tokens per second that the endpoint can scale up to. MaxProvisionedThroughput int `json:"max_provisioned_throughput,omitempty"` // The minimum tokens per second that the endpoint can scale down to. MinProvisionedThroughput int `json:"min_provisioned_throughput,omitempty"` - // The name of the model in Databricks Model Registry to be served or if the - // model resides in Unity Catalog, the full name of model, in the form of - // __catalog_name__.__schema_name__.__model_name__. + ModelName string `json:"model_name"` - // The version of the model in Databricks Model Registry or Unity Catalog to - // be served. + ModelVersion string `json:"model_version"` - // The name of a served model. It must be unique across an endpoint. 
If not - // specified, this field will default to -. A - // served model name can consist of alphanumeric characters, dashes, and - // underscores. + // The name of a served entity. It must be unique across an endpoint. A + // served entity name can consist of alphanumeric characters, dashes, and + // underscores. If not specified for an external model, this field defaults + // to external_model.name, with '.' and ':' replaced with '-', and if not + // specified for other entities, it defaults to entity_name-entity_version. Name string `json:"name,omitempty"` - // Whether the compute resources for the served model should scale down to + // Whether the compute resources for the served entity should scale down to // zero. ScaleToZeroEnabled bool `json:"scale_to_zero_enabled"` - // The workload size of the served model. The workload size corresponds to a - // range of provisioned concurrency that the compute will autoscale between. - // A single unit of provisioned concurrency can process one request at a - // time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), + // The workload size of the served entity. The workload size corresponds to + // a range of provisioned concurrency that the compute autoscales between. A + // single unit of provisioned concurrency can process one request at a time. + // Valid workload sizes are "Small" (4 - 4 provisioned concurrency), // "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 // provisioned concurrency). If scale-to-zero is enabled, the lower bound of - // the provisioned concurrency for each workload size will be 0. + // the provisioned concurrency for each workload size is 0. WorkloadSize ServedModelInputWorkloadSize `json:"workload_size,omitempty"` - // The workload type of the served model. The workload type selects which + // The workload type of the served entity. The workload type selects which // type of compute to use in the endpoint. The default value for this // parameter is "CPU". 
For deep learning workloads, GPU acceleration is // available by selecting workload types like GPU_SMALL and others. See the // available [GPU types]. // - // [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + // [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types WorkloadType ServedModelInputWorkloadType `json:"workload_type,omitempty"` ForceSendFields []string `json:"-"` @@ -1611,13 +1667,6 @@ func (s ServedModelInput) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -// The workload size of the served model. The workload size corresponds to a -// range of provisioned concurrency that the compute will autoscale between. A -// single unit of provisioned concurrency can process one request at a time. -// Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 -// - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). -// If scale-to-zero is enabled, the lower bound of the provisioned concurrency -// for each workload size will be 0. type ServedModelInputWorkloadSize string const ServedModelInputWorkloadSizeLarge ServedModelInputWorkloadSize = `Large` @@ -1647,13 +1696,6 @@ func (f *ServedModelInputWorkloadSize) Type() string { return "ServedModelInputWorkloadSize" } -// The workload type of the served model. The workload type selects which type -// of compute to use in the endpoint. The default value for this parameter is -// "CPU". For deep learning workloads, GPU acceleration is available by -// selecting workload types like GPU_SMALL and others. See the available [GPU -// types]. 
-// -// [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types type ServedModelInputWorkloadType string const ServedModelInputWorkloadTypeCpu ServedModelInputWorkloadType = `CPU` @@ -1688,49 +1730,50 @@ func (f *ServedModelInputWorkloadType) Type() string { } type ServedModelOutput struct { - // The creation timestamp of the served model in Unix time. CreationTimestamp int64 `json:"creation_timestamp,omitempty"` - // The email of the user who created the served model. + Creator string `json:"creator,omitempty"` // An object containing a set of optional, user-specified environment - // variable key-value pairs used for serving this model. Note: this is an - // experimental feature and subject to change. Example model environment + // variable key-value pairs used for serving this entity. Note: this is an + // experimental feature and subject to change. Example entity environment // variables that refer to Databricks secrets: `{"OPENAI_API_KEY": // "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": // "{{secrets/my_scope2/my_key2}}"}` EnvironmentVars map[string]string `json:"environment_vars,omitempty"` - // ARN of the instance profile that the served model will use to access AWS + // ARN of the instance profile that the served entity uses to access AWS // resources. InstanceProfileArn string `json:"instance_profile_arn,omitempty"` - // The name of the model in Databricks Model Registry or the full name of - // the model in Unity Catalog. + ModelName string `json:"model_name,omitempty"` - // The version of the model in Databricks Model Registry or Unity Catalog to - // be served. + ModelVersion string `json:"model_version,omitempty"` - // The name of the served model. + // The name of a served entity. It must be unique across an endpoint. A + // served entity name can consist of alphanumeric characters, dashes, and + // underscores. 
If not specified for an external model, this field defaults + // to external_model.name, with '.' and ':' replaced with '-', and if not + // specified for other entities, it defaults to entity_name-entity_version. Name string `json:"name,omitempty"` - // Whether the compute resources for the Served Model should scale down to + // Whether the compute resources for the served entity should scale down to // zero. ScaleToZeroEnabled bool `json:"scale_to_zero_enabled,omitempty"` - // Information corresponding to the state of the Served Model. + State *ServedModelState `json:"state,omitempty"` - // The workload size of the served model. The workload size corresponds to a - // range of provisioned concurrency that the compute will autoscale between. - // A single unit of provisioned concurrency can process one request at a - // time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), + // The workload size of the served entity. The workload size corresponds to + // a range of provisioned concurrency that the compute autoscales between. A + // single unit of provisioned concurrency can process one request at a time. + // Valid workload sizes are "Small" (4 - 4 provisioned concurrency), // "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 // provisioned concurrency). If scale-to-zero is enabled, the lower bound of - // the provisioned concurrency for each workload size will be 0. + // the provisioned concurrency for each workload size is 0. WorkloadSize string `json:"workload_size,omitempty"` - // The workload type of the served model. The workload type selects which + // The workload type of the served entity. The workload type selects which // type of compute to use in the endpoint. The default value for this // parameter is "CPU". For deep learning workloads, GPU acceleration is // available by selecting workload types like GPU_SMALL and others. See the // available [GPU types]. 
// - // [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types - WorkloadType string `json:"workload_type,omitempty"` + // [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types + WorkloadType ServingModelWorkloadType `json:"workload_type,omitempty"` ForceSendFields []string `json:"-"` } @@ -1744,13 +1787,11 @@ func (s ServedModelOutput) MarshalJSON() ([]byte, error) { } type ServedModelSpec struct { - // The name of the model in Databricks Model Registry or the full name of - // the model in Unity Catalog. + // Only one of model_name and entity_name should be populated ModelName string `json:"model_name,omitempty"` - // The version of the model in Databricks Model Registry or Unity Catalog to - // be served. + // Only one of model_version and entity_version should be populated ModelVersion string `json:"model_version,omitempty"` - // The name of the served model. + Name string `json:"name,omitempty"` ForceSendFields []string `json:"-"` @@ -1765,20 +1806,8 @@ func (s ServedModelSpec) MarshalJSON() ([]byte, error) { } type ServedModelState struct { - // The state of the served entity deployment. DEPLOYMENT_CREATING indicates - // that the served entity is not ready yet because the deployment is still - // being created (i.e container image is building, model server is deploying - // for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - // served entity was previously in a ready state but no longer is and is - // attempting to recover. DEPLOYMENT_READY indicates that the served entity - // is ready to receive traffic. DEPLOYMENT_FAILED indicates that there was - // an error trying to bring up the served entity (e.g container image build - // failed, the model server failed to start due to a model loading error, - // etc.) 
DEPLOYMENT_ABORTED indicates that the deployment was terminated - // likely due to a failure in bringing up another served entity under the - // same endpoint and config version. Deployment ServedModelStateDeployment `json:"deployment,omitempty"` - // More information about the state of the served entity, if available. + DeploymentStateMessage string `json:"deployment_state_message,omitempty"` ForceSendFields []string `json:"-"` @@ -1792,17 +1821,6 @@ func (s ServedModelState) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -// The state of the served entity deployment. DEPLOYMENT_CREATING indicates that -// the served entity is not ready yet because the deployment is still being -// created (i.e container image is building, model server is deploying for the -// first time, etc.). DEPLOYMENT_RECOVERING indicates that the served entity was -// previously in a ready state but no longer is and is attempting to recover. -// DEPLOYMENT_READY indicates that the served entity is ready to receive -// traffic. DEPLOYMENT_FAILED indicates that there was an error trying to bring -// up the served entity (e.g container image build failed, the model server -// failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED -// indicates that the deployment was terminated likely due to a failure in -// bringing up another served entity under the same endpoint and config version. type ServedModelStateDeployment string const ServedModelStateDeploymentAborted ServedModelStateDeployment = `DEPLOYMENT_ABORTED` @@ -1844,7 +1862,8 @@ type ServerLogsResponse struct { type ServingEndpoint struct { // The AI Gateway configuration for the serving endpoint. NOTE: Only - // external model endpoints are currently supported. + // external model and provisioned throughput endpoints are currently + // supported. AiGateway *AiGatewayConfig `json:"ai_gateway,omitempty"` // The config that is currently being served by the endpoint. 
Config *EndpointCoreConfigSummary `json:"config,omitempty"` @@ -1852,8 +1871,8 @@ type ServingEndpoint struct { CreationTimestamp int64 `json:"creation_timestamp,omitempty"` // The email of the user who created the serving endpoint. Creator string `json:"creator,omitempty"` - // System-generated ID of the endpoint. This is used to refer to the - // endpoint in the Permissions API + // System-generated ID of the endpoint, included to be used by the + // Permissions API. Id string `json:"id,omitempty"` // The timestamp when the endpoint was last updated by a user in Unix time. LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty"` @@ -1923,7 +1942,8 @@ func (s ServingEndpointAccessControlResponse) MarshalJSON() ([]byte, error) { type ServingEndpointDetailed struct { // The AI Gateway configuration for the serving endpoint. NOTE: Only - // external model endpoints are currently supported. + // external model and provisioned throughput endpoints are currently + // supported. AiGateway *AiGatewayConfig `json:"ai_gateway,omitempty"` // The config that is currently being served by the endpoint. Config *EndpointCoreConfigOutput `json:"config,omitempty"` @@ -1967,7 +1987,6 @@ func (s ServingEndpointDetailed) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -// The permission level of the principal making the request. 
type ServingEndpointDetailedPermissionLevel string const ServingEndpointDetailedPermissionLevelCanManage ServingEndpointDetailedPermissionLevel = `CAN_MANAGE` @@ -2085,6 +2104,39 @@ type ServingEndpointPermissionsRequest struct { ServingEndpointId string `json:"-" url:"-"` } +type ServingModelWorkloadType string + +const ServingModelWorkloadTypeCpu ServingModelWorkloadType = `CPU` + +const ServingModelWorkloadTypeGpuLarge ServingModelWorkloadType = `GPU_LARGE` + +const ServingModelWorkloadTypeGpuMedium ServingModelWorkloadType = `GPU_MEDIUM` + +const ServingModelWorkloadTypeGpuSmall ServingModelWorkloadType = `GPU_SMALL` + +const ServingModelWorkloadTypeMultigpuMedium ServingModelWorkloadType = `MULTIGPU_MEDIUM` + +// String representation for [fmt.Print] +func (f *ServingModelWorkloadType) String() string { + return string(*f) +} + +// Set raw string value and validate it against allowed values +func (f *ServingModelWorkloadType) Set(v string) error { + switch v { + case `CPU`, `GPU_LARGE`, `GPU_MEDIUM`, `GPU_SMALL`, `MULTIGPU_MEDIUM`: + *f = ServingModelWorkloadType(v) + return nil + default: + return fmt.Errorf(`value "%s" is not one of "CPU", "GPU_LARGE", "GPU_MEDIUM", "GPU_SMALL", "MULTIGPU_MEDIUM"`, v) + } +} + +// Type always returns ServingModelWorkloadType to satisfy [pflag.Value] interface +func (f *ServingModelWorkloadType) Type() string { + return "ServingModelWorkloadType" +} + type TrafficConfig struct { // The list of routes that define traffic to each served entity. Routes []Route `json:"routes,omitempty"` diff --git a/service/sharing/api.go b/service/sharing/api.go index 941f6ae3f..57d1f45e2 100755 --- a/service/sharing/api.go +++ b/service/sharing/api.go @@ -335,7 +335,7 @@ type RecipientsInterface interface { // Create a share recipient. // // Creates a new recipient with the delta sharing authentication type in the - // metastore. The caller must be a metastore admin or has the + // metastore. 
The caller must be a metastore admin or have the // **CREATE_RECIPIENT** privilege on the metastore. Create(ctx context.Context, request CreateRecipient) (*RecipientInfo, error) @@ -385,15 +385,6 @@ type RecipientsInterface interface { // This method is generated by Databricks SDK Code Generator. ListAll(ctx context.Context, request ListRecipientsRequest) ([]RecipientInfo, error) - // RecipientInfoNameToMetastoreIdMap calls [RecipientsAPI.ListAll] and creates a map of results with [RecipientInfo].Name as key and [RecipientInfo].MetastoreId as value. - // - // Returns an error if there's more than one [RecipientInfo] with the same .Name. - // - // Note: All [RecipientInfo] instances are loaded into memory before creating a map. - // - // This method is generated by Databricks SDK Code Generator. - RecipientInfoNameToMetastoreIdMap(ctx context.Context, request ListRecipientsRequest) (map[string]string, error) - // Rotate a token. // // Refreshes the specified recipient's delta sharing authentication token with @@ -418,7 +409,7 @@ type RecipientsInterface interface { // metastore admin or the owner of the recipient. If the recipient name will be // updated, the user must be both a metastore admin and the owner of the // recipient. - Update(ctx context.Context, request UpdateRecipient) error + Update(ctx context.Context, request UpdateRecipient) (*RecipientInfo, error) } func NewRecipients(client *client.DatabricksClient) *RecipientsAPI { @@ -516,31 +507,6 @@ func (a *RecipientsAPI) ListAll(ctx context.Context, request ListRecipientsReque return listing.ToSlice[RecipientInfo](ctx, iterator) } -// RecipientInfoNameToMetastoreIdMap calls [RecipientsAPI.ListAll] and creates a map of results with [RecipientInfo].Name as key and [RecipientInfo].MetastoreId as value. -// -// Returns an error if there's more than one [RecipientInfo] with the same .Name. -// -// Note: All [RecipientInfo] instances are loaded into memory before creating a map. 
-// -// This method is generated by Databricks SDK Code Generator. -func (a *RecipientsAPI) RecipientInfoNameToMetastoreIdMap(ctx context.Context, request ListRecipientsRequest) (map[string]string, error) { - ctx = useragent.InContext(ctx, "sdk-feature", "name-to-id") - mapping := map[string]string{} - result, err := a.ListAll(ctx, request) - if err != nil { - return nil, err - } - for _, v := range result { - key := v.Name - _, duplicate := mapping[key] - if duplicate { - return nil, fmt.Errorf("duplicate .Name: %s", key) - } - mapping[key] = v.MetastoreId - } - return mapping, nil -} - // Get recipient share permissions. // // Gets the share permissions for the specified Recipient. The caller must be a diff --git a/service/sharing/impl.go b/service/sharing/impl.go index c6e2bccb6..da8223ee2 100755 --- a/service/sharing/impl.go +++ b/service/sharing/impl.go @@ -171,15 +171,15 @@ func (a *recipientsImpl) SharePermissions(ctx context.Context, request SharePerm return &getRecipientSharePermissionsResponse, err } -func (a *recipientsImpl) Update(ctx context.Context, request UpdateRecipient) error { - var updateResponse UpdateResponse +func (a *recipientsImpl) Update(ctx context.Context, request UpdateRecipient) (*RecipientInfo, error) { + var recipientInfo RecipientInfo path := fmt.Sprintf("/api/2.1/unity-catalog/recipients/%v", request.Name) queryParams := make(map[string]any) headers := make(map[string]string) headers["Accept"] = "application/json" headers["Content-Type"] = "application/json" - err := a.client.Do(ctx, http.MethodPatch, path, headers, queryParams, request, &updateResponse) - return err + err := a.client.Do(ctx, http.MethodPatch, path, headers, queryParams, request, &recipientInfo) + return &recipientInfo, err } // unexported type that holds implementations of just Shares API methods diff --git a/service/sharing/interface.go b/service/sharing/interface.go index 4f7e44ca9..ffbf87b16 100755 --- a/service/sharing/interface.go +++ 
b/service/sharing/interface.go @@ -107,7 +107,7 @@ type RecipientsService interface { // Create a share recipient. // // Creates a new recipient with the delta sharing authentication type in the - // metastore. The caller must be a metastore admin or has the + // metastore. The caller must be a metastore admin or have the // **CREATE_RECIPIENT** privilege on the metastore. Create(ctx context.Context, request CreateRecipient) (*RecipientInfo, error) @@ -154,7 +154,7 @@ type RecipientsService interface { // metastore admin or the owner of the recipient. If the recipient name will // be updated, the user must be both a metastore admin and the owner of the // recipient. - Update(ctx context.Context, request UpdateRecipient) error + Update(ctx context.Context, request UpdateRecipient) (*RecipientInfo, error) } // A share is a container instantiated with :method:shares/create. Once created diff --git a/service/sharing/model.go b/service/sharing/model.go index 4da345b00..e77d94b6c 100755 --- a/service/sharing/model.go +++ b/service/sharing/model.go @@ -44,8 +44,8 @@ type CreateProvider struct { Comment string `json:"comment,omitempty"` // The name of the Provider. Name string `json:"name"` - // This field is required when the __authentication_type__ is **TOKEN** or - // not provided. + // This field is required when the __authentication_type__ is **TOKEN**, + // **OAUTH_CLIENT_CREDENTIALS** or not provided. RecipientProfileStr string `json:"recipient_profile_str,omitempty"` ForceSendFields []string `json:"-"` @@ -65,7 +65,7 @@ type CreateRecipient struct { // Description about the recipient. Comment string `json:"comment,omitempty"` // The global Unity Catalog metastore id provided by the data recipient. - // This field is required when the __authentication_type__ is + // This field is only present when the __authentication_type__ is // **DATABRICKS**. The identifier is of format // __cloud__:__region__:__metastore-uuid__. 
DataRecipientGlobalMetastoreId string `json:"data_recipient_global_metastore_id,omitempty"` @@ -77,10 +77,13 @@ type CreateRecipient struct { Name string `json:"name"` // Username of the recipient owner. Owner string `json:"owner,omitempty"` - // Recipient properties as map of string key-value pairs. + // Recipient properties as map of string key-value pairs. When provided in + // update request, the specified properties will override the existing + // properties. To add and remove properties, one would need to perform a + // read-modify-write. PropertiesKvpairs *SecurablePropertiesKvPairs `json:"properties_kvpairs,omitempty"` // The one-time sharing code provided by the data recipient. This field is - // required when the __authentication_type__ is **DATABRICKS**. + // only present when the __authentication_type__ is **DATABRICKS**. SharingCode string `json:"sharing_code,omitempty"` ForceSendFields []string `json:"-"` @@ -565,7 +568,7 @@ type ProviderInfo struct { CreatedBy string `json:"created_by,omitempty"` // The global UC metastore id of the data provider. This field is only // present when the __authentication_type__ is **DATABRICKS**. The - // identifier is of format ::. + // identifier is of format __cloud__:__region__:__metastore-uuid__. DataProviderGlobalMetastoreId string `json:"data_provider_global_metastore_id,omitempty"` // UUID of the provider's UC metastore. This field is only present when the // __authentication_type__ is **DATABRICKS**. @@ -575,17 +578,17 @@ type ProviderInfo struct { // Username of Provider owner. Owner string `json:"owner,omitempty"` // The recipient profile. This field is only present when the - // authentication_type is `TOKEN`. + // authentication_type is `TOKEN` or `OAUTH_CLIENT_CREDENTIALS`. RecipientProfile *RecipientProfile `json:"recipient_profile,omitempty"` - // This field is only present when the authentication_type is `TOKEN` or not - // provided. 
+ // This field is required when the __authentication_type__ is **TOKEN**, + // **OAUTH_CLIENT_CREDENTIALS** or not provided. RecipientProfileStr string `json:"recipient_profile_str,omitempty"` // Cloud region of the provider's UC metastore. This field is only present // when the __authentication_type__ is **DATABRICKS**. Region string `json:"region,omitempty"` // Time at which this Provider was created, in epoch milliseconds. UpdatedAt int64 `json:"updated_at,omitempty"` - // Username of user who last modified Share. + // Username of user who last modified Provider. UpdatedBy string `json:"updated_by,omitempty"` ForceSendFields []string `json:"-"` @@ -623,8 +626,8 @@ type RecipientInfo struct { ActivationUrl string `json:"activation_url,omitempty"` // The delta sharing authentication type. AuthenticationType AuthenticationType `json:"authentication_type,omitempty"` - // Cloud vendor of the recipient's Unity Catalog Metstore. This field is - // only present when the __authentication_type__ is **DATABRICKS**`. + // Cloud vendor of the recipient's Unity Catalog Metastore. This field is + // only present when the __authentication_type__ is **DATABRICKS**. Cloud string `json:"cloud,omitempty"` // Description about the recipient. Comment string `json:"comment,omitempty"` @@ -637,18 +640,23 @@ type RecipientInfo struct { // **DATABRICKS**. The identifier is of format // __cloud__:__region__:__metastore-uuid__. DataRecipientGlobalMetastoreId string `json:"data_recipient_global_metastore_id,omitempty"` + // Expiration timestamp of the token, in epoch milliseconds. + ExpirationTime int64 `json:"expiration_time,omitempty"` // IP Access List IpAccessList *IpAccessList `json:"ip_access_list,omitempty"` - // Unique identifier of recipient's Unity Catalog metastore. This field is - // only present when the __authentication_type__ is **DATABRICKS** + // Unique identifier of recipient's Unity Catalog Metastore. 
This field is + // only present when the __authentication_type__ is **DATABRICKS**. MetastoreId string `json:"metastore_id,omitempty"` // Name of Recipient. Name string `json:"name,omitempty"` // Username of the recipient owner. Owner string `json:"owner,omitempty"` - // Recipient properties as map of string key-value pairs. + // Recipient properties as map of string key-value pairs. When provided in + // update request, the specified properties will override the existing + // properties. To add and remove properties, one would need to perform a + // read-modify-write. PropertiesKvpairs *SecurablePropertiesKvPairs `json:"properties_kvpairs,omitempty"` - // Cloud region of the recipient's Unity Catalog Metstore. This field is + // Cloud region of the recipient's Unity Catalog Metastore. This field is // only present when the __authentication_type__ is **DATABRICKS**. Region string `json:"region,omitempty"` // The one-time sharing code provided by the data recipient. This field is @@ -695,7 +703,7 @@ type RecipientTokenInfo struct { // Full activation URL to retrieve the access token. It will be empty if the // token is already retrieved. ActivationUrl string `json:"activation_url,omitempty"` - // Time at which this recipient Token was created, in epoch milliseconds. + // Time at which this recipient token was created, in epoch milliseconds. CreatedAt int64 `json:"created_at,omitempty"` // Username of recipient token creator. CreatedBy string `json:"created_by,omitempty"` @@ -703,9 +711,9 @@ type RecipientTokenInfo struct { ExpirationTime int64 `json:"expiration_time,omitempty"` // Unique ID of the recipient token. Id string `json:"id,omitempty"` - // Time at which this recipient Token was updated, in epoch milliseconds. + // Time at which this recipient token was updated, in epoch milliseconds. UpdatedAt int64 `json:"updated_at,omitempty"` - // Username of recipient Token updater. + // Username of recipient token updater. 
UpdatedBy string `json:"updated_by,omitempty"` ForceSendFields []string `json:"-"` @@ -752,7 +760,7 @@ type RotateRecipientToken struct { // cannot extend the expiration_time. Use 0 to expire the existing token // immediately, negative number will return an error. ExistingTokenExpireInSeconds int64 `json:"existing_token_expire_in_seconds"` - // The name of the recipient. + // The name of the Recipient. Name string `json:"-" url:"-"` } @@ -763,9 +771,6 @@ type SecurablePropertiesKvPairs struct { Properties map[string]string `json:"properties"` } -// A map of key-value properties attached to the securable. -type SecurablePropertiesMap map[string]string - type ShareInfo struct { // User-provided free-form text description. Comment string `json:"comment,omitempty"` @@ -1052,8 +1057,8 @@ type UpdateProvider struct { NewName string `json:"new_name,omitempty"` // Username of Provider owner. Owner string `json:"owner,omitempty"` - // This field is required when the __authentication_type__ is **TOKEN** or - // not provided. + // This field is required when the __authentication_type__ is **TOKEN**, + // **OAUTH_CLIENT_CREDENTIALS** or not provided. RecipientProfileStr string `json:"recipient_profile_str,omitempty"` ForceSendFields []string `json:"-"` @@ -1076,7 +1081,7 @@ type UpdateRecipient struct { IpAccessList *IpAccessList `json:"ip_access_list,omitempty"` // Name of the recipient. Name string `json:"-" url:"-"` - // New name for the recipient. + // New name for the recipient. NewName string `json:"new_name,omitempty"` // Username of the recipient owner. Owner string `json:"owner,omitempty"` @@ -1097,9 +1102,6 @@ func (s UpdateRecipient) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -type UpdateResponse struct { -} - type UpdateShare struct { // User-provided free-form text description. 
Comment string `json:"comment,omitempty"` diff --git a/version/version.go b/version/version.go index 142d27dac..00cf8e860 100644 --- a/version/version.go +++ b/version/version.go @@ -1,4 +1,4 @@ package version // Version of the SDK, updated manually before every tag -const Version = "0.55.0" +const Version = "0.56.0" diff --git a/workspace_client.go b/workspace_client.go index f0bc03167..b129ce30d 100755 --- a/workspace_client.go +++ b/workspace_client.go @@ -32,6 +32,9 @@ type WorkspaceClient struct { Config *config.Config apiClient *httpclient.ApiClient + // Rule based Access Control for Databricks Resources. + AccessControl iam.AccessControlInterface + // These APIs manage access rules on resources in an account. Currently, // only grant rules are supported. A grant rule specifies a role assigned to // a set of principals. A list of rules attached to a resource is called a @@ -293,10 +296,15 @@ type WorkspaceClient struct { // // The Files API has two distinct endpoints, one for working with files // (`/fs/files`) and another one for working with directories - // (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, + // (`/fs/directories`). Both endpoints use the standard HTTP methods GET, // HEAD, PUT, and DELETE to manage files and directories specified using // their URI path. The path is always absolute. // + // Some Files API client features are currently experimental. To enable + // them, set `enable_experimental_files_api_client = True` in your + // configuration profile or use the environment variable + // `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. 
+ // // [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html Files files.FilesInterface @@ -1150,6 +1158,7 @@ func NewWorkspaceClient(c ...*Config) (*WorkspaceClient, error) { Config: cfg, apiClient: apiClient, + AccessControl: iam.NewAccessControl(databricksClient), AccountAccessControlProxy: iam.NewAccountAccessControlProxy(databricksClient), Alerts: sql.NewAlerts(databricksClient), AlertsLegacy: sql.NewAlertsLegacy(databricksClient), From 4b3a22f34104dc7291d28faecb8e96abdee668cb Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Wed, 22 Jan 2025 15:10:22 +0100 Subject: [PATCH 2/3] fix changelog --- CHANGELOG.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a73d7f324..61a339db8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,13 +25,17 @@ * Added `Contents` field for [serving.GetOpenApiResponse](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GetOpenApiResponse). * Added `Activated`, `ActivationUrl`, `AuthenticationType`, `Cloud`, `Comment`, `CreatedAt`, `CreatedBy`, `DataRecipientGlobalMetastoreId`, `IpAccessList`, `MetastoreId`, `Name`, `Owner`, `PropertiesKvpairs`, `Region`, `SharingCode`, `Tokens`, `UpdatedAt` and `UpdatedBy` fields for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Added `ExpirationTime` field for [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). + * Added `Pending` enum value for [cleanrooms.CleanRoomAssetStatusEnum](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetStatusEnum). + * Added `AddNodesFailed`, `AutomaticClusterUpdate`, `AutoscalingBackoff` and `AutoscalingFailed` enum values for [compute.EventType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/compute#EventType). 
+ * Added `PendingWarehouse` enum value for [dashboards.MessageStatus](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/dashboards#MessageStatus). + * Added `Cpu`, `GpuLarge`, `GpuMedium`, `GpuSmall` and `MultigpuMedium` enum values for [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). + * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service to return [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service return type to become non-empty. * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service to type `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service. - * Changed `Update` method for [w.Recipients](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI) workspace-level service to return [sharing.RecipientInfo](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo). * Changed `Create` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service with new required argument order. * Changed `GetOpenApi` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service return type to become non-empty. 
- * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service to return [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service to type `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service. + * Changed `Patch` method for [w.ServingEndpoints](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI) workspace-level service to return [serving.EndpointTags](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags). * Changed [serving.EndpointTagList](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTagList) to. * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. * Changed `CollaboratorAlias` field for [cleanrooms.CleanRoomCollaborator](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator) to be required. @@ -43,6 +47,9 @@ * Changed `WorkloadType` field for [serving.ServedEntityInput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityInput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). 
* Changed `WorkloadType` field for [serving.ServedEntityOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). * Changed `WorkloadType` field for [serving.ServedModelOutput](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedModelOutput) to type [serving.ServingModelWorkloadType](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType). + * Changed waiter for [ServingEndpointsAPI.Create](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI.Create). + * Changed waiter for [ServingEndpointsAPI.UpdateConfig](https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI.UpdateConfig). + OpenAPI SHA: 0be1b914249781b5e903b7676fd02255755bc851, Date: 2025-01-22 From aba07daec1cae5e523338062c9a4241d4282be4d Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Wed, 22 Jan 2025 15:12:39 +0100 Subject: [PATCH 3/3] work --- .gitattributes | 1 + internal/sharing_test.go | 2 +- service/pkg.go | 2 +- service/sharing/recipients_usage_test.go | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.gitattributes b/.gitattributes index 91ba6180a..1f04b9a10 100644 --- a/.gitattributes +++ b/.gitattributes @@ -49,6 +49,7 @@ experimental/mocks/service/dashboards/mock_genie_interface.go linguist-generated experimental/mocks/service/dashboards/mock_lakeview_interface.go linguist-generated=true experimental/mocks/service/files/mock_dbfs_interface.go linguist-generated=true experimental/mocks/service/files/mock_files_interface.go linguist-generated=true +experimental/mocks/service/iam/mock_access_control_interface.go linguist-generated=true experimental/mocks/service/iam/mock_account_access_control_interface.go linguist-generated=true 
experimental/mocks/service/iam/mock_account_access_control_proxy_interface.go linguist-generated=true experimental/mocks/service/iam/mock_account_groups_interface.go linguist-generated=true diff --git a/internal/sharing_test.go b/internal/sharing_test.go index 93cfa2425..50c91ab08 100644 --- a/internal/sharing_test.go +++ b/internal/sharing_test.go @@ -104,7 +104,7 @@ func TestUcAccRecipients(t *testing.T) { err := w.Recipients.DeleteByName(ctx, created.Name) require.NoError(t, err) }) - err = w.Recipients.Update(ctx, sharing.UpdateRecipient{ + _, err = w.Recipients.Update(ctx, sharing.UpdateRecipient{ Name: created.Name, Comment: RandomName("comment "), }) diff --git a/service/pkg.go b/service/pkg.go index 6eb732385..a1e015563 100644 --- a/service/pkg.go +++ b/service/pkg.go @@ -326,8 +326,8 @@ var ( _ *marketplace.ConsumerListingsAPI = nil _ *marketplace.ConsumerPersonalizationRequestsAPI = nil _ *marketplace.ConsumerProvidersAPI = nil - _ *provisioning.CredentialsAPI = nil _ *catalog.CredentialsAPI = nil + _ *provisioning.CredentialsAPI = nil _ *settings.CredentialsManagerAPI = nil _ *settings.CspEnablementAccountAPI = nil _ *iam.CurrentUserAPI = nil diff --git a/service/sharing/recipients_usage_test.go b/service/sharing/recipients_usage_test.go index ee78c5abe..0d312454c 100755 --- a/service/sharing/recipients_usage_test.go +++ b/service/sharing/recipients_usage_test.go @@ -159,7 +159,7 @@ func ExampleRecipientsAPI_Update_recipients() { } logger.Infof(ctx, "found %v", created) - err = w.Recipients.Update(ctx, sharing.UpdateRecipient{ + _, err = w.Recipients.Update(ctx, sharing.UpdateRecipient{ Name: created.Name, Comment: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), })