From 929493177a498b54beacb26caa81c8134bc7feeb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Hugues=20de=20Saxc=C3=A9?=
Date: Fri, 16 Aug 2024 08:54:45 +0200
Subject: [PATCH 1/2] fix: add requirement to LlamaIndex instrumentation import

---
 literalai/client.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/literalai/client.py b/literalai/client.py
index 73baaf4..96a755f 100644
--- a/literalai/client.py
+++ b/literalai/client.py
@@ -10,7 +10,6 @@
     ExperimentItemRunContextManager,
     experiment_item_run_decorator,
 )
-from literalai.instrumentation.llamaindex import instrument_llamaindex
 from literalai.instrumentation.mistralai import instrument_mistralai
 from literalai.instrumentation.openai import instrument_openai
 from literalai.observability.message import Message
@@ -25,6 +24,12 @@
 )
 from literalai.observability.thread import ThreadContextManager, thread_decorator
 
+from literalai.requirements import check_all_requirements
+
+LLAMA_INDEX_REQUIREMENT = ["llama-index>=0.10.58"]
+if check_all_requirements(LLAMA_INDEX_REQUIREMENT):
+    from literalai.instrumentation.llamaindex import instrument_llamaindex
+
 
 class BaseLiteralClient:
     api: Union[LiteralAPI, AsyncLiteralAPI]

From bb385dfb6463fe4ba329a249801dae87b47db467 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Hugues=20de=20Saxc=C3=A9?=
Date: Fri, 16 Aug 2024 09:33:36 +0200
Subject: [PATCH 2/2] fix: move import to instrument method

---
 literalai/client.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/literalai/client.py b/literalai/client.py
index 96a755f..468dcb5 100644
--- a/literalai/client.py
+++ b/literalai/client.py
@@ -26,10 +26,6 @@
 
 from literalai.requirements import check_all_requirements
 
-LLAMA_INDEX_REQUIREMENT = ["llama-index>=0.10.58"]
-if check_all_requirements(LLAMA_INDEX_REQUIREMENT):
-    from literalai.instrumentation.llamaindex import instrument_llamaindex
-
 
 class BaseLiteralClient:
     api: Union[LiteralAPI, AsyncLiteralAPI]
@@ -97,6 +93,15 @@ def instrument_llamaindex(self):
         """
         Instruments the Llama Index framework so that all RAG & LLM calls are logged to Literal AI.
         """
+
+        LLAMA_INDEX_REQUIREMENT = ["llama-index>=0.10.58"]
+
+        if not check_all_requirements(LLAMA_INDEX_REQUIREMENT):
+            raise Exception(
+                f"LlamaIndex instrumentation requirements not satisfied: {LLAMA_INDEX_REQUIREMENT}"
+            )
+        from literalai.instrumentation.llamaindex import instrument_llamaindex
+
         instrument_llamaindex(self.to_sync())
 
     def langchain_callback(
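
Usage sketch of the behavior after PATCH 2/2, for reference: the LlamaIndex import is deferred to the instrument_llamaindex() method, so importing literalai no longer requires llama-index, and the method raises an explicit requirements error when llama-index>=0.10.58 is missing. The LiteralClient import path and api_key argument below are assumptions about the SDK's public surface, not part of the diff.

# Minimal sketch, assuming `from literalai import LiteralClient` is the public
# entry point and that it accepts an `api_key` keyword; only
# instrument_llamaindex() and the raised Exception come from the patch above.
from literalai import LiteralClient

client = LiteralClient(api_key="...")

try:
    # With llama-index>=0.10.58 installed, the instrumentation module is
    # imported lazily and LlamaIndex calls get logged to Literal AI.
    client.instrument_llamaindex()
except Exception as err:
    # Without the optional dependency, the method now fails here with a clear
    # message instead of breaking `import literalai` at module load time.
    print(f"LlamaIndex instrumentation unavailable: {err}")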