diff --git a/pyproject.toml b/pyproject.toml
index f18873e2..51cb87da 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "uipath"
-version = "2.0.0.dev2"
+version = "2.0.0.dev3"
 description = "Python SDK and CLI for UiPath Platform, enabling programmatic interaction with automation services, process management, and deployment tools."
 readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.9"
diff --git a/src/uipath/_models/__init__.py b/src/uipath/_models/__init__.py
index 9d8af80f..17a1c8bd 100644
--- a/src/uipath/_models/__init__.py
+++ b/src/uipath/_models/__init__.py
@@ -3,6 +3,7 @@
 from .assets import UserAsset
 from .connections import Connection, ConnectionToken
 from .context_grounding import ContextGroundingQueryResponse
+from .exceptions import IngestionInProgressException
 from .interrupt_models import CreateAction, InvokeProcess, WaitAction, WaitJob
 from .job import Job
 from .processes import Process
@@ -32,4 +33,5 @@
     "WaitJob",
     "WaitAction",
     "CreateAction",
+    "IngestionInProgressException",
 ]
diff --git a/src/uipath/_models/context_grounding_index.py b/src/uipath/_models/context_grounding_index.py
new file mode 100644
index 00000000..fc4f180d
--- /dev/null
+++ b/src/uipath/_models/context_grounding_index.py
@@ -0,0 +1,60 @@
+from datetime import datetime
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class ContextGroundingField(BaseModel):
+    id: Optional[str] = Field(default=None, alias="id")
+    name: Optional[str] = Field(default=None, alias="name")
+    description: Optional[str] = Field(default=None, alias="description")
+    type: Optional[str] = Field(default=None, alias="type")
+    is_filterable: Optional[bool] = Field(default=None, alias="isFilterable")
+    searchable_type: Optional[str] = Field(default=None, alias="searchableType")
+    is_user_defined: Optional[bool] = Field(default=None, alias="isUserDefined")
+
+
+class ContextGroundingDataSource(BaseModel):
+    model_config = ConfigDict(
+        validate_by_name=True,
+        validate_by_alias=True,
+        use_enum_values=True,
+        arbitrary_types_allowed=True,
+        extra="allow",
+        json_encoders={datetime: lambda v: v.isoformat() if v else None},
+    )
+    id: Optional[str] = Field(default=None, alias="id")
+    folder: Optional[str] = Field(default=None, alias="folder")
+
+
+class ContextGroundingIndex(BaseModel):
+    model_config = ConfigDict(
+        validate_by_name=True,
+        validate_by_alias=True,
+        use_enum_values=True,
+        arbitrary_types_allowed=True,
+        extra="allow",
+        json_encoders={datetime: lambda v: v.isoformat() if v else None},
+    )
+    id: Optional[str] = Field(default=None, alias="id")
+    name: Optional[str] = Field(default=None, alias="name")
+    description: Optional[str] = Field(default=None, alias="description")
+    memory_usage: Optional[int] = Field(default=None, alias="memoryUsage")
+    disk_usage: Optional[int] = Field(default=None, alias="diskUsage")
+    data_source: Optional[ContextGroundingDataSource] = Field(
+        default=None, alias="dataSource"
+    )
+    pre_processing: Any = Field(default=None, alias="preProcessing")
+    fields: Optional[List[ContextGroundingField]] = Field(default=None, alias="fields")
+    last_ingestion_status: Optional[str] = Field(
+        default=None, alias="lastIngestionStatus"
+    )
+    last_ingested: Optional[datetime] = Field(default=None, alias="lastIngested")
+    last_queried: Optional[datetime] = Field(default=None, alias="lastQueried")
+    folder_key: Optional[str] = Field(default=None, alias="folderKey")
+
+    def in_progress_ingestion(self):
+        return (
+            self.last_ingestion_status == "Queued"
+            or self.last_ingestion_status == "In Progress"
+        )
diff --git a/src/uipath/_models/exceptions.py b/src/uipath/_models/exceptions.py
new file mode 100644
index 00000000..b5cb0fb3
--- /dev/null
+++ b/src/uipath/_models/exceptions.py
@@ -0,0 +1,6 @@
+class IngestionInProgressException(Exception):
+    """Raised when a search is attempted on an index that is currently undergoing ingestion."""
+
+    def __init__(self, index_name):
+        self.message = f"index {index_name} cannot be searched during ingestion"
+        super().__init__(self.message)
diff --git a/src/uipath/_services/__init__.py b/src/uipath/_services/__init__.py
index 6c1760dd..233f32ac 100644
--- a/src/uipath/_services/__init__.py
+++ b/src/uipath/_services/__init__.py
@@ -4,6 +4,7 @@
 from .buckets_service import BucketsService
 from .connections_service import ConnectionsService
 from .context_grounding_service import ContextGroundingService
+from .folder_service import FolderService
 from .jobs_service import JobsService
 from .llm_gateway_service import UiPathLlmChatService, UiPathOpenAIService
 from .processes_service import ProcessesService
@@ -21,4 +22,5 @@
     "JobsService",
     "UiPathOpenAIService",
     "UiPathLlmChatService",
+    "FolderService",
 ]
diff --git a/src/uipath/_services/buckets_service.py b/src/uipath/_services/buckets_service.py
index 2e44fa3d..bca6f2a9 100644
--- a/src/uipath/_services/buckets_service.py
+++ b/src/uipath/_services/buckets_service.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict
+from typing import Any, Dict, Optional, Union
 
 from httpx import request
 
@@ -59,7 +59,9 @@ def download(
 
     def upload(
         self,
-        bucket_key: str,
+        *,
+        bucket_key: Optional[str] = None,
+        bucket_name: Optional[str] = None,
         blob_file_path: str,
         content_type: str,
         source_path: str,
@@ -68,11 +70,18 @@
 
         Args:
             bucket_key: The key of the bucket
+            bucket_name: The name of the bucket
             blob_file_path: The path where the file will be stored in the bucket
             content_type: The MIME type of the file
             source_path: The local path of the file to upload
         """
-        bucket = self.retrieve_by_key(bucket_key)
+        if bucket_key:
+            bucket = self.retrieve_by_key(bucket_key)
+        elif bucket_name:
+            bucket = self.retrieve(bucket_name)
+        else:
+            raise ValueError("Must specify a bucket name or bucket key")
+
         bucket_id = bucket["Id"]
 
         endpoint = Endpoint(
@@ -99,6 +108,60 @@
         else:
             request("PUT", write_uri, headers=headers, files={"file": file})
 
+    def upload_from_memory(
+        self,
+        *,
+        bucket_key: Optional[str] = None,
+        bucket_name: Optional[str] = None,
+        blob_file_path: str,
+        content_type: str,
+        content: Union[str, bytes],
+    ) -> None:
+        """Upload content from memory to a bucket.
+
+        Args:
+            bucket_key: The key of the bucket
+            bucket_name: The name of the bucket
+            blob_file_path: The path where the content will be stored in the bucket
+            content_type: The MIME type of the content
+            content: The content to upload (string or bytes)
+        """
+        if bucket_key:
+            bucket = self.retrieve_by_key(bucket_key)
+        elif bucket_name:
+            bucket = self.retrieve(bucket_name)
+        else:
+            raise ValueError("Must specify a bucket name or bucket key")
+
+        bucket_id = bucket["Id"]
+
+        endpoint = Endpoint(
+            f"/orchestrator_/odata/Buckets({bucket_id})/UiPath.Server.Configuration.OData.GetWriteUri"
+        )
+
+        result = self.request(
+            "GET",
+            endpoint,
+            params={"path": blob_file_path, "contentType": content_type},
+        ).json()
+        write_uri = result["Uri"]
+
+        headers = {
+            key: value
+            for key, value in zip(
+                result["Headers"]["Keys"], result["Headers"]["Values"]
+            )
+        }
+
+        # Convert string to bytes if needed
+        if isinstance(content, str):
+            content = content.encode("utf-8")
+
+        if result["RequiresAuth"]:
+            self.request("PUT", write_uri, headers=headers, content=content)
+        else:
+            request("PUT", write_uri, headers=headers, content=content)
+
     @infer_bindings()
     def retrieve(self, name: str) -> Any:
         """Retrieve bucket information by its name.
@@ -192,7 +255,7 @@ def custom_headers(self) -> Dict[str, str]:
 
     def _retrieve_spec(self, name: str) -> RequestSpec:
         return RequestSpec(
             method="GET",
-            endpoint=Endpoint("/odata/Buckets"),
+            endpoint=Endpoint("/orchestrator_/odata/Buckets"),
             params={"$filter": f"Name eq '{name}'", "$top": 1},
         )
 
@@ -200,6 +263,6 @@ def _retrieve_by_key_spec(self, key: str) -> RequestSpec:
         return RequestSpec(
             method="GET",
             endpoint=Endpoint(
-                f"/odata/Buckets/UiPath.Server.Configuration.OData.GetByKey(identifier={key})"
+                f"/orchestrator_/odata/Buckets/UiPath.Server.Configuration.OData.GetByKey(identifier={key})"
             ),
         )
diff --git a/src/uipath/_services/context_grounding_service.py b/src/uipath/_services/context_grounding_service.py
index 2de87ac1..9f36308a 100644
--- a/src/uipath/_services/context_grounding_service.py
+++ b/src/uipath/_services/context_grounding_service.py
@@ -1,14 +1,21 @@
 import json
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional
 
 from pydantic import TypeAdapter
 
 from .._config import Config
 from .._execution_context import ExecutionContext
 from .._folder_context import FolderContext
+from .._models import IngestionInProgressException
 from .._models.context_grounding import ContextGroundingQueryResponse
+from .._models.context_grounding_index import ContextGroundingIndex
 from .._utils import Endpoint, RequestSpec
+from .._utils.constants import (
+    HEADER_FOLDER_KEY,
+    ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE,
+)
 from ._base_service import BaseService
+from .folder_service import FolderService
 
 
 class ContextGroundingService(FolderContext, BaseService):
@@ -24,10 +31,16 @@ class ContextGroundingService(FolderContext, BaseService):
     context.
     """
 
-    def __init__(self, config: Config, execution_context: ExecutionContext) -> None:
+    def __init__(
+        self,
+        config: Config,
+        execution_context: ExecutionContext,
+        folders_service: FolderService,
+    ) -> None:
+        self._folders_service = folders_service
         super().__init__(config=config, execution_context=execution_context)
 
-    def retrieve(self, name: str) -> Any:
+    def retrieve(self, name: str) -> Optional[ContextGroundingIndex]:
         """Retrieve context grounding index information by its name.
 
         This method fetches details about a specific context index, which can be
@@ -38,17 +51,25 @@
             name (str): The name of the context index to retrieve.
 
         Returns:
-            Any: The index information, including its configuration and metadata.
+            Optional[ContextGroundingIndex]: The index information, including its configuration and metadata if found, otherwise None.
         """
         spec = self._retrieve_spec(name)
 
-        return self.request(
+        response = self.request(
             spec.method,
             spec.endpoint,
             params=spec.params,
         ).json()
+        return next(
+            (
+                ContextGroundingIndex.model_validate(item)
+                for item in response["value"]
+                if item["name"] == name
+            ),
+            None,
+        )
 
-    async def retrieve_async(self, name: str) -> Any:
+    async def retrieve_async(self, name: str) -> Optional[ContextGroundingIndex]:
         """Retrieve asynchronously context grounding index information by its name.
 
         This method fetches details about a specific context index, which can be
@@ -59,19 +80,27 @@
             name (str): The name of the context index to retrieve.
 
         Returns:
-            Any: The index information, including its configuration and metadata.
+            Optional[ContextGroundingIndex]: The index information, including its configuration and metadata if found, otherwise None.
         """
         spec = self._retrieve_spec(name)
 
-        response = await self.request_async(
-            spec.method,
-            spec.endpoint,
-            params=spec.params,
+        response = (
+            await self.request_async(
+                spec.method,
+                spec.endpoint,
+                params=spec.params,
+            )
+        ).json()
+        return next(
+            (
+                ContextGroundingIndex.model_validate(item)
+                for item in response["value"]
+                if item["name"] == name
+            ),
+            None,
         )
-        return response.json()
 
     def retrieve_by_id(self, id: str) -> Any:
         """Retrieve context grounding index information by its ID.
@@ -137,6 +166,10 @@
             List[ContextGroundingQueryResponse]: A list of search results, each
                 containing relevant contextual information and metadata.
         """
+        index = self.retrieve(name)
+        if index and index.in_progress_ingestion():
+            raise IngestionInProgressException(index_name=name)
+
         spec = self._search_spec(name, query, number_of_results)
 
         response = self.request(
@@ -171,6 +204,9 @@
             List[ContextGroundingQueryResponse]: A list of search results, each
                 containing relevant contextual information and metadata.
         """
+        index = await self.retrieve_async(name)
+        if index and index.in_progress_ingestion():
+            raise IngestionInProgressException(index_name=name)
         spec = self._search_spec(name, query, number_of_results)
 
         response = await self.request_async(
@@ -183,13 +219,122 @@
             response.json()
         )
 
+    def get_or_create_index(
+        self,
+        name: str,
+        *,
+        description: Optional[str] = None,
+        storage_bucket_name: str,
+        file_name_glob: Optional[str] = None,
+        storage_bucket_folder_path: Optional[str] = None,
+    ) -> ContextGroundingIndex:
+        spec = self._create_spec(
+            name,
+            description,
+            storage_bucket_name,
+            file_name_glob,
+            storage_bucket_folder_path,
+        )
+        index = self.retrieve(name=name)
+        if index:
+            return index
+
+        response = self.request(
+            spec.method,
+            spec.endpoint,
+            content=spec.content,
+            headers=spec.headers,
+        ).json()
+        return ContextGroundingIndex.model_validate(response)
+
+    async def get_or_create_index_async(
+        self,
+        name: str,
+        *,
+        description: Optional[str] = None,
+        storage_bucket_name: str,
+        file_name_glob: Optional[str] = None,
+        storage_bucket_folder_path: Optional[str] = None,
+    ) -> ContextGroundingIndex:
+        index = await self.retrieve_async(name=name)
+        if index:
+            return index
+
+        spec = self._create_spec(
+            name,
+            description,
+            storage_bucket_name,
+            file_name_glob,
+            storage_bucket_folder_path,
+        )
+        response = (
+            await self.request_async(
+                spec.method,
+                spec.endpoint,
+                content=spec.content,
+                headers=spec.headers,
+            )
+        ).json()
+        return ContextGroundingIndex.model_validate(response)
+
+    def ingest_data(self, index: ContextGroundingIndex) -> None:
+        if not index.id:
+            return
+        spec = self._ingest_spec(index.id)
+        self.request(
+            spec.method,
+            spec.endpoint,
+            headers=spec.headers,
+        )
+
+    async def ingest_data_async(self, index: ContextGroundingIndex) -> None:
+        if not index.id:
+            return
+        spec = self._ingest_spec(index.id)
+        await self.request_async(
+            spec.method,
+            spec.endpoint,
+            headers=spec.headers,
+        )
+
+    def delete_index(self, index: ContextGroundingIndex) -> None:
+        if not index.id:
+            return
+        spec = self._delete_by_id_spec(index.id)
+        self.request(
+            spec.method,
+            spec.endpoint,
+            headers=spec.headers,
+        )
+
+    async def delete_index_async(self, index: ContextGroundingIndex) -> None:
+        if not index.id:
+            return
+        spec = self._delete_by_id_spec(index.id)
+        await self.request_async(
+            spec.method,
+            spec.endpoint,
+            headers=spec.headers,
+        )
+
     @property
     def custom_headers(self) -> Dict[str, str]:
-        if self.folder_headers["x-uipath-folderkey"] is None:
-            raise ValueError("Folder key is not set (UIPATH_FOLDER_KEY)")
+        self._folder_key = self._folder_key or (
+            self._folders_service.retrieve_key_by_folder_path(self._folder_path)
+            if self._folder_path
+            else None
+        )
+
+        if self._folder_key is None:
+            raise ValueError(f"Folder key is not set ({HEADER_FOLDER_KEY})")
 
         return self.folder_headers
 
+    def _ingest_spec(self, key: str) -> RequestSpec:
+        return RequestSpec(
+            method="POST", endpoint=Endpoint(f"/ecs_/v2/indexes/{key}/ingest")
+        )
+
     def _retrieve_spec(self, name: str) -> RequestSpec:
         return RequestSpec(
             method="GET",
@@ -197,12 +342,51 @@
             params={"$filter": f"Name eq '{name}'"},
         )
 
+    def _create_spec(
+        self,
+        name: str,
+        description: Optional[str],
+        storage_bucket_name: Optional[str],
+        file_name_glob: Optional[str],
+        storage_bucket_folder_path: Optional[str],
+    ) -> RequestSpec:
+        storage_bucket_folder_path = (
+            storage_bucket_folder_path
+            if storage_bucket_folder_path
+            else self._folder_path
+        )
+        return RequestSpec(
+            method="POST",
+            endpoint=Endpoint("/ecs_/v2/indexes/create"),
+            content=json.dumps(
+                {
+                    "name": name,
+                    "description": description,
+                    "dataSource": {
+                        "@odata.type": ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE,
+                        "folder": storage_bucket_folder_path,
+                        "bucketName": storage_bucket_name,
+                        "fileNameGlob": file_name_glob
+                        if file_name_glob is not None
+                        else "*",
+                        "directoryPath": "/",
+                    },
+                }
+            ),
+        )
+
     def _retrieve_by_id_spec(self, id: str) -> RequestSpec:
         return RequestSpec(
             method="GET",
             endpoint=Endpoint(f"/ecs_/v2/indexes/{id}"),
         )
 
+    def _delete_by_id_spec(self, id: str) -> RequestSpec:
+        return RequestSpec(
+            method="DELETE",
+            endpoint=Endpoint(f"/ecs_/v2/indexes/{id}"),
+        )
+
     def _search_spec(
         self, name: str, query: str, number_of_results: int = 10
     ) -> RequestSpec:
diff --git a/src/uipath/_services/folder_service.py b/src/uipath/_services/folder_service.py
new file mode 100644
index 00000000..c1379256
--- /dev/null
+++ b/src/uipath/_services/folder_service.py
@@ -0,0 +1,49 @@
+from typing import Optional
+
+from .._config import Config
+from .._execution_context import ExecutionContext
+from .._utils import Endpoint, RequestSpec
+from ._base_service import BaseService
+
+
+def _retrieve_spec(folder_path: str) -> RequestSpec:
+    folder_name = folder_path.split("/")[-1]
+    return RequestSpec(
+        method="GET",
+        endpoint=Endpoint(
+            "orchestrator_/api/FoldersNavigation/GetFoldersForCurrentUser"
+        ),
+        params={
+            "searchText": folder_name,
+            "take": 1,
+        },
+    )
+
+
+class FolderService(BaseService):
+    """Service for managing UiPath Folders.
+
+    A folder represents a single area for data organization and access control.
+    It is created when you need to categorize, manage, and enforce authorization
+    rules for a group of UiPath resources (e.g. processes, assets, connections, storage buckets) or other folders.
+    """
+
+    def __init__(self, config: Config, execution_context: ExecutionContext) -> None:
+        super().__init__(config=config, execution_context=execution_context)
+
+    def retrieve_key_by_folder_path(self, folder_path: str) -> Optional[str]:
+        spec = _retrieve_spec(folder_path)
+        response = self.request(
+            spec.method,
+            url=spec.endpoint,
+            params=spec.params,
+        ).json()
+
+        return next(
+            (
+                item["Key"]
+                for item in response["PageItems"]
+                if item["FullyQualifiedName"] == folder_path
+            ),
+            None,
+        )
diff --git a/src/uipath/_uipath.py b/src/uipath/_uipath.py
index 9e8c193f..96005b8f 100644
--- a/src/uipath/_uipath.py
+++ b/src/uipath/_uipath.py
@@ -12,6 +12,7 @@
     BucketsService,
     ConnectionsService,
     ContextGroundingService,
+    FolderService,
     JobsService,
     ProcessesService,
     QueuesService,
@@ -45,6 +46,7 @@
             base_url=base_url_value,  # type: ignore
             secret=secret_value,  # type: ignore
         )
+        self._folders_service: Optional[FolderService] = None
 
         setup_logging(debug)
         self._execution_context = ExecutionContext()
@@ -75,7 +77,11 @@ def connections(self) -> ConnectionsService:
 
     @property
     def context_grounding(self) -> ContextGroundingService:
-        return ContextGroundingService(self._config, self._execution_context)
+        if not self._folders_service:
+            self._folders_service = FolderService(self._config, self._execution_context)
+        return ContextGroundingService(
+            self._config, self._execution_context, self._folders_service
+        )
 
     @property
     def queues(self) -> QueuesService:
@@ -84,3 +90,9 @@
     def jobs(self) -> JobsService:
         return JobsService(self._config, self._execution_context)
+
+    @property
+    def folders(self) -> FolderService:
+        if not self._folders_service:
+            self._folders_service = FolderService(self._config, self._execution_context)
+        return self._folders_service
diff --git a/src/uipath/_utils/constants.py b/src/uipath/_utils/constants.py
index 9df1cf77..1d694ce6 100644
--- a/src/uipath/_utils/constants.py
+++ b/src/uipath/_utils/constants.py
@@ -18,3 +18,8 @@
 
 # Entrypoint for plugins
 ENTRYPOINT = "uipath.connectors"
+
+# Data sources
+ORCHESTRATOR_STORAGE_BUCKET_DATA_SOURCE = (
+    "#UiPath.Vdbs.Domain.Api.V20Models.StorageBucketDataSourceRequest"
+)