From 711a20ca6cc6aacc30917b935a302145f7bf98d2 Mon Sep 17 00:00:00 2001
From: ionmincu
Date: Fri, 13 Jun 2025 17:14:47 +0300
Subject: [PATCH 1/3] feat(llm): add uipath openai llm

---
 pyproject.toml                                |  4 +-
 src/uipath_llamaindex/embeddings/__init__.py  |  9 +++
 .../embeddings/_openai_embeddings.py          | 48 ++++++++++++++++
 src/uipath_llamaindex/llms/__init__.py        |  9 +++
 src/uipath_llamaindex/llms/_openai_llms.py    | 55 +++++++++++++++++++
 uv.lock                                       | 35 +++++++++++-
 6 files changed, 158 insertions(+), 2 deletions(-)
 create mode 100644 src/uipath_llamaindex/embeddings/__init__.py
 create mode 100644 src/uipath_llamaindex/embeddings/_openai_embeddings.py
 create mode 100644 src/uipath_llamaindex/llms/__init__.py
 create mode 100644 src/uipath_llamaindex/llms/_openai_llms.py

diff --git a/pyproject.toml b/pyproject.toml
index 0e0af74..1220515 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,13 @@
 [project]
 name = "uipath-llamaindex"
-version = "0.0.23"
+version = "0.0.24"
 description = "UiPath LlamaIndex SDK"
 readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.10"
 dependencies = [
     "llama-index>=0.12.38",
+    "llama-index-embeddings-azure-openai>=0.3.8",
+    "llama-index-llms-azure-openai>=0.3.2",
     "openinference-instrumentation-llama-index>=4.3.0",
     "uipath>=2.0.64",
 ]
diff --git a/src/uipath_llamaindex/embeddings/__init__.py b/src/uipath_llamaindex/embeddings/__init__.py
new file mode 100644
index 0000000..a8b7e36
--- /dev/null
+++ b/src/uipath_llamaindex/embeddings/__init__.py
@@ -0,0 +1,9 @@
+from ._openai_embeddings import (
+    EmbeddingModelName,
+    UiPathOpenAIEmbedding,
+)
+
+__all__ = [
+    "UiPathOpenAIEmbedding",
+    "EmbeddingModelName",
+]
diff --git a/src/uipath_llamaindex/embeddings/_openai_embeddings.py b/src/uipath_llamaindex/embeddings/_openai_embeddings.py
new file mode 100644
index 0000000..e025c84
--- /dev/null
+++ b/src/uipath_llamaindex/embeddings/_openai_embeddings.py
@@ -0,0 +1,48 @@
+import os
+from enum import Enum
+from typing import Any, Union
+
+from llama_index.embeddings.azure_openai import AzureOpenAIEmbedding
+
+
+class EmbeddingModelName(Enum):
+    TEXT_EMBEDDING_3_LARGE = "text-embedding-3-large"
+    TEXT_EMBEDDING_ADA_002 = "text-embedding-ada-002"
+
+
+class UiPathOpenAIEmbedding(AzureOpenAIEmbedding):
+    def __init__(
+        self,
+        model: Union[
+            str, EmbeddingModelName
+        ] = EmbeddingModelName.TEXT_EMBEDDING_ADA_002,
+        api_version: str = "2024-10-21",
+        **kwargs: Any,
+    ):
+        uipath_access_token = os.environ.get("UIPATH_ACCESS_TOKEN")
+        auth_header_value = (
+            "Bearer " + str(uipath_access_token) if uipath_access_token else None
+        )
+        default_headers_dict = {
+            "X-UIPATH-STREAMING-ENABLED": "false",
+            "X-UiPath-LlmGateway-RequestingProduct": "uipath-python-sdk",
+            "X-UiPath-LlmGateway-RequestingFeature": "llama-index-agent",
+        }
+        if auth_header_value:
+            default_headers_dict["Authorization"] = auth_header_value
+        model_value = model.value if isinstance(model, EmbeddingModelName) else model
+
+        base_url = os.environ.get(
+            "UIPATH_URL", "https://cloud.uipath.com/account/tenant"
+        ).rstrip("/")
+
+        defaults = {
+            "model": model_value,
+            "deployment_name": model_value,
+            "azure_endpoint": f"{base_url}/llmgateway_/",
+            "api_key": uipath_access_token,
+            "api_version": api_version,
+            "default_headers": default_headers_dict,
+        }
+        final_kwargs = {**defaults, **kwargs}
+        super().__init__(**final_kwargs)
diff --git a/src/uipath_llamaindex/llms/__init__.py b/src/uipath_llamaindex/llms/__init__.py
new file mode 100644
index 0000000..c1804eb
--- /dev/null
+++ b/src/uipath_llamaindex/llms/__init__.py
@@ -0,0 +1,9 @@
+from ._openai_llms import (
+    ModelName,
+    UiPathOpenAI,
+)
+
+__all__ = [
+    "UiPathOpenAI",
+    "ModelName",
+]
diff --git a/src/uipath_llamaindex/llms/_openai_llms.py b/src/uipath_llamaindex/llms/_openai_llms.py
new file mode 100644
index 0000000..0d9b72a
--- /dev/null
+++ b/src/uipath_llamaindex/llms/_openai_llms.py
@@ -0,0 +1,55 @@
+import os
+from enum import Enum
+from typing import Any, Union
+
+from llama_index.llms.azure_openai import AzureOpenAI
+
+
+class ModelName(Enum):
+    GPT_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
+    GPT_4_1_MINI_2025_04_14 = "gpt-4.1-mini-2025-04-14"
+    GPT_4_1_NANO_2025_04_14 = "gpt-4.1-nano-2025-04-14"
+    GPT_4O_2024_05_13 = "gpt-4o-2024-05-13"
+    GPT_4O_2024_08_06 = "gpt-4o-2024-08-06"
+    GPT_4O_2024_11_20 = "gpt-4o-2024-11-20"
+    GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18"
+    O3_MINI_2025_01_31 = "o3-mini-2025-01-31"
+    TEXT_DAVINCI_003 = "text-davinci-003"
+
+
+# Define your custom AzureOpenAI class with default settings
+class UiPathOpenAI(AzureOpenAI):
+    def __init__(
+        self,
+        model: Union[str, ModelName] = ModelName.GPT_4O_MINI_2024_07_18,
+        api_version: str = "2024-10-21",
+        **kwargs: Any,
+    ):
+        uipath_access_token = os.environ.get("UIPATH_ACCESS_TOKEN")
+        auth_header_value = (
+            "Bearer " + str(uipath_access_token) if uipath_access_token else None
+        )
+        default_headers_dict = {
+            "X-UIPATH-STREAMING-ENABLED": "false",
+            "X-UiPath-LlmGateway-RequestingProduct": "uipath-python-sdk",
+            "X-UiPath-LlmGateway-RequestingFeature": "llama-index-agent",
+        }
+        if auth_header_value:
+            default_headers_dict["Authorization"] = auth_header_value
+        model_value = model.value if isinstance(model, ModelName) else model
+
+        base_url = os.environ.get(
+            "UIPATH_URL", "https://cloud.uipath.com/account/tenant"
+        ).rstrip("/")
+
+        defaults = {
+            "model": model_value,
+            "deployment_name": model_value,
+            "azure_endpoint": f"{base_url}/llmgateway_/",
+            "api_key": uipath_access_token,
+            "api_version": api_version,
+            "is_chat_model": True,
+            "default_headers": default_headers_dict,
+        }
+        final_kwargs = {**defaults, **kwargs}
+        super().__init__(**final_kwargs)
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index 91ca33f..3ff3dd5 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1111,6 +1111,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/1b/5b/5044a538ea9081dded02d4a50e12e1a803c19468234f936139ba368ca08e/llama_index_core-0.12.38-py3-none-any.whl", hash = "sha256:1cb852c37a52b7297a4c24dd3e7a53a45913f2a1f37e66af405958bb4669ce90", size = 7663883 },
 ]
 
+[[package]]
+name = "llama-index-embeddings-azure-openai"
+version = "0.3.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "llama-index-core" },
+    { name = "llama-index-embeddings-openai" },
+    { name = "llama-index-llms-azure-openai" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d8/f9/7027f5984e690d6b171d44e944aba0d6fedcedd49c08962ea67dce8a463f/llama_index_embeddings_azure_openai-0.3.8.tar.gz", hash = "sha256:24cff674364cffef4798f5faf220b557115ff9ea9a4f5e643785bd89c595928c", size = 4760 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4d/d0/c13abece3456e7de7d359e70bff8b9d8a53f4908b9f3783c8177f1be9a78/llama_index_embeddings_azure_openai-0.3.8-py3-none-any.whl", hash = "sha256:e5dc9c9103b914c4435a59ed0d7965b3a3eeeafb9868a5876c46315bb49f1e54", size = 4400 },
+]
+
 [[package]]
 name = "llama-index-embeddings-openai"
 version = "0.3.1"
@@ -1137,6 +1151,21 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/7e/f4/5decd79fd7f2f0e44c5689af62497447e86832e876b7dad11903259de5f9/llama_index_indices_managed_llama_cloud-0.6.11-py3-none-any.whl", hash = "sha256:64e82e2ac178cd3721b76c0817edd57e05a3bd877c412b4148d3abbdeea62d59", size = 14272 },
 ]
 
+[[package]]
+name = "llama-index-llms-azure-openai"
+version = "0.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "azure-identity" },
+    { name = "httpx" },
+    { name = "llama-index-core" },
+    { name = "llama-index-llms-openai" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0c/cf/23c516c5a61c9b7a481c383862ebd99cc5e6a35f820dab871bb12b453b71/llama_index_llms_azure_openai-0.3.2.tar.gz", hash = "sha256:c6ae4e6d896abc784a1d60e02a537c91e019317de69d02256424eab80c988646", size = 6287 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ce/2f/efab9bd63f7f3dd9ec83c945c9516beffcff14ffd4a8187150b46e69124b/llama_index_llms_azure_openai-0.3.2-py3-none-any.whl", hash = "sha256:1a831035129042327f50d243a17918c481dfae39fd5a7ddaaaa0a712fb18ab8e", size = 7283 },
+]
+
 [[package]]
 name = "llama-index-llms-openai"
 version = "0.3.42"
@@ -2840,10 +2869,12 @@ wheels = [
 
 [[package]]
 name = "uipath-llamaindex"
-version = "0.0.22"
+version = "0.0.23"
 source = { editable = "." }
 dependencies = [
     { name = "llama-index" },
+    { name = "llama-index-embeddings-azure-openai" },
+    { name = "llama-index-llms-azure-openai" },
     { name = "openinference-instrumentation-llama-index" },
     { name = "uipath" },
 ]
@@ -2862,6 +2893,8 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "llama-index", specifier = ">=0.12.38" },
+    { name = "llama-index-embeddings-azure-openai", specifier = ">=0.3.8" },
+    { name = "llama-index-llms-azure-openai", specifier = ">=0.3.2" },
     { name = "openinference-instrumentation-llama-index", specifier = ">=4.3.0" },
     { name = "uipath", specifier = ">=2.0.64" },
 ]

From 2c6f01c8efcca4f4b6194edd4b650d7d09d556f4 Mon Sep 17 00:00:00 2001
From: ionmincu
Date: Fri, 13 Jun 2025 17:27:18 +0300
Subject: [PATCH 2/3] test

---
 src/uipath_llamaindex/llms/_openai_llms.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/src/uipath_llamaindex/llms/_openai_llms.py b/src/uipath_llamaindex/llms/_openai_llms.py
index 0d9b72a..a365d9d 100644
--- a/src/uipath_llamaindex/llms/_openai_llms.py
+++ b/src/uipath_llamaindex/llms/_openai_llms.py
@@ -26,16 +26,12 @@ def __init__(
         **kwargs: Any,
     ):
         uipath_access_token = os.environ.get("UIPATH_ACCESS_TOKEN")
-        auth_header_value = (
-            "Bearer " + str(uipath_access_token) if uipath_access_token else None
-        )
+
         default_headers_dict = {
             "X-UIPATH-STREAMING-ENABLED": "false",
             "X-UiPath-LlmGateway-RequestingProduct": "uipath-python-sdk",
             "X-UiPath-LlmGateway-RequestingFeature": "llama-index-agent",
         }
-        if auth_header_value:
-            default_headers_dict["Authorization"] = auth_header_value
         model_value = model.value if isinstance(model, ModelName) else model
 
         base_url = os.environ.get(

From 005791160418b48de67b3a8d5853938804533de5 Mon Sep 17 00:00:00 2001
From: ionmincu
Date: Fri, 13 Jun 2025 17:30:08 +0300
Subject: [PATCH 3/3] xyz

---
 src/uipath_llamaindex/embeddings/_openai_embeddings.py | 9 ++-------
 src/uipath_llamaindex/llms/_openai_llms.py              | 4 +---
 2 files changed, 3 insertions(+), 10 deletions(-)

diff --git a/src/uipath_llamaindex/embeddings/_openai_embeddings.py b/src/uipath_llamaindex/embeddings/_openai_embeddings.py
index e025c84..cfb8919 100644
--- a/src/uipath_llamaindex/embeddings/_openai_embeddings.py
+++ b/src/uipath_llamaindex/embeddings/_openai_embeddings.py
@@ -19,17 +19,12 @@ def __init__(
         api_version: str = "2024-10-21",
         **kwargs: Any,
     ):
-        uipath_access_token = os.environ.get("UIPATH_ACCESS_TOKEN")
-        auth_header_value = (
-            "Bearer " + str(uipath_access_token) if uipath_access_token else None
-        )
         default_headers_dict = {
             "X-UIPATH-STREAMING-ENABLED": "false",
             "X-UiPath-LlmGateway-RequestingProduct": "uipath-python-sdk",
             "X-UiPath-LlmGateway-RequestingFeature": "llama-index-agent",
         }
-        if auth_header_value:
-            default_headers_dict["Authorization"] = auth_header_value
+
         model_value = model.value if isinstance(model, EmbeddingModelName) else model
 
         base_url = os.environ.get(
@@ -40,7 +35,7 @@ def __init__(
             "model": model_value,
             "deployment_name": model_value,
             "azure_endpoint": f"{base_url}/llmgateway_/",
-            "api_key": uipath_access_token,
+            "api_key": os.environ.get("UIPATH_ACCESS_TOKEN"),
             "api_version": api_version,
             "default_headers": default_headers_dict,
         }
diff --git a/src/uipath_llamaindex/llms/_openai_llms.py b/src/uipath_llamaindex/llms/_openai_llms.py
index a365d9d..f99da3f 100644
--- a/src/uipath_llamaindex/llms/_openai_llms.py
+++ b/src/uipath_llamaindex/llms/_openai_llms.py
@@ -25,8 +25,6 @@ def __init__(
         api_version: str = "2024-10-21",
         **kwargs: Any,
     ):
-        uipath_access_token = os.environ.get("UIPATH_ACCESS_TOKEN")
-
         default_headers_dict = {
             "X-UIPATH-STREAMING-ENABLED": "false",
             "X-UiPath-LlmGateway-RequestingProduct": "uipath-python-sdk",
@@ -42,7 +40,7 @@ def __init__(
             "model": model_value,
             "deployment_name": model_value,
             "azure_endpoint": f"{base_url}/llmgateway_/",
-            "api_key": uipath_access_token,
+            "api_key": os.environ.get("UIPATH_ACCESS_TOKEN"),
             "api_version": api_version,
             "is_chat_model": True,
             "default_headers": default_headers_dict,
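
Usage sketch (illustrative only, not part of the commits above): assuming UIPATH_URL and
UIPATH_ACCESS_TOKEN are already set in the environment, the wrappers added in PATCH 1/3
can be driven through the standard LlamaIndex completion and embedding APIs. The model
choices, prompt, and sample text below are arbitrary examples.

    import os

    from uipath_llamaindex.embeddings import EmbeddingModelName, UiPathOpenAIEmbedding
    from uipath_llamaindex.llms import ModelName, UiPathOpenAI

    # Both constructors read UIPATH_URL and UIPATH_ACCESS_TOKEN from the environment,
    # so no explicit azure_endpoint or api_key needs to be passed here.
    assert "UIPATH_URL" in os.environ and "UIPATH_ACCESS_TOKEN" in os.environ

    # LLM wrapper: extra kwargs (e.g. temperature) override the built-in defaults,
    # because **kwargs wins in the final {**defaults, **kwargs} merge.
    llm = UiPathOpenAI(model=ModelName.GPT_4O_MINI_2024_07_18, temperature=0.0)
    print(llm.complete("Say hello in one word."))

    # Embedding wrapper: same pattern, selecting a model via the embedding enum.
    embed_model = UiPathOpenAIEmbedding(model=EmbeddingModelName.TEXT_EMBEDDING_3_LARGE)
    vector = embed_model.get_text_embedding("hello world")
    print(len(vector))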