From 054881a6fd482e787332b127a183e7f02f7a5d71 Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Fri, 13 Dec 2024 15:15:44 +0100
Subject: [PATCH 1/7] feat: add warning messages when getting deleted prompt

---
 literalai/api/helpers/gql.py            |  2 ++
 literalai/api/helpers/prompt_helpers.py | 18 +++++++++++++++++-
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/literalai/api/helpers/gql.py b/literalai/api/helpers/gql.py
index f2f6227..05e1b24 100644
--- a/literalai/api/helpers/gql.py
+++ b/literalai/api/helpers/gql.py
@@ -999,6 +999,7 @@
         name: $name
     ) {
         id
+        deletedAt
     }
 }"""

@@ -1083,6 +1084,7 @@
         version
         lineage {
             name
+            deletedAt
         }
     }
 }
diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index fe5cb23..1c8640f 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -1,4 +1,5 @@
-from typing import TYPE_CHECKING, Optional, TypedDict, Callable
+import logging
+from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict, Callable

 from literalai.observability.generation import GenerationMessage
 from literalai.prompt_engineering.prompt import Prompt, ProviderSettings
@@ -12,11 +13,16 @@
     from literalai.api.helpers import gql


+logger = logging.getLogger(__name__)
+
+
 def create_prompt_lineage_helper(name: str, description: Optional[str] = None):
     variables = {"name": name, "description": description}

     def process_response(response):
         prompt = response["data"]["createPromptLineage"]
+        if prompt and prompt.get("deletedAt"):
+            logger.warn("This prompt lineage is part of a deleted lineage")
         return prompt

     description = "create prompt lineage"
@@ -29,6 +35,8 @@ def get_prompt_lineage_helper(name: str):

     def process_response(response):
         prompt = response["data"]["promptLineage"]
+        if prompt and prompt.get("deletedAt"):
+            logger.warn("This prompt lineage is part of a deleted lineage")
         return prompt

     description = "get prompt lineage"
@@ -52,6 +60,10 @@ def create_prompt_helper(

     def process_response(response):
         prompt = response["data"]["createPromptVersion"]
+        prompt_lineage = prompt.get("lineage")
+
+        if prompt_lineage and prompt_lineage.get("deletedAt"):
+            logger.warn("This prompt version is part of a deleted lineage")
         return Prompt.from_dict(api, prompt) if prompt else None

     description = "create prompt version"
@@ -90,6 +102,10 @@ def get_prompt_helper(

     def process_response(response):
         prompt_version = response["data"]["promptVersion"]
+        prompt_lineage = prompt_version.get("lineage")
+
+        if prompt_lineage and prompt_lineage.get("deletedAt"):
+            logger.warn("This prompt version is part of a deleted lineage")
         prompt = Prompt.from_dict(api, prompt_version) if prompt_version else None
         if cache and prompt:
             put_prompt(cache, prompt)

From 23abedfe79daf397c2e639dc665d3f1821183daf Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Fri, 13 Dec 2024 15:30:23 +0100
Subject: [PATCH 2/7] refactor: remove unused imports

---
 literalai/api/helpers/prompt_helpers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index 1c8640f..818cfe3 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -1,5 +1,5 @@
 import logging
-from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict, Callable
+from typing import TYPE_CHECKING, Optional, TypedDict, Callable

 from literalai.observability.generation import GenerationMessage
 from literalai.prompt_engineering.prompt import Prompt, ProviderSettings

From ffa6053585c54a6ad6c6719bfde07149ece95930 Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Fri, 13 Dec 2024 15:45:19 +0100
Subject: [PATCH 3/7] fix: update gql request

---
 literalai/api/helpers/gql.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/literalai/api/helpers/gql.py b/literalai/api/helpers/gql.py
index 05e1b24..dbe1f20 100644
--- a/literalai/api/helpers/gql.py
+++ b/literalai/api/helpers/gql.py
@@ -989,6 +989,7 @@
     ) {
         id
         name
+        deletedAt
     }
 }"""

@@ -1024,6 +1025,7 @@
         id
         lineage {
             name
+            deletedAt
         }
         version
         createdAt

From accecf0817273eebd8d7a17f881ab9cc46e283f6 Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Mon, 16 Dec 2024 12:24:48 +0100
Subject: [PATCH 4/7] feat: improve wording for deleted prompt lineage

---
 literalai/api/helpers/prompt_helpers.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index 818cfe3..380e12b 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -22,7 +22,7 @@ def create_prompt_lineage_helper(name: str, description: Optional[str] = None):
     def process_response(response):
         prompt = response["data"]["createPromptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn("This prompt lineage is part of a deleted lineage")
+            logger.warn(f"Prompt lineage {name} was deleted")
         return prompt

     description = "create prompt lineage"
@@ -36,7 +36,7 @@ def get_prompt_lineage_helper(name: str):
     def process_response(response):
         prompt = response["data"]["promptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn("This prompt lineage is part of a deleted lineage")
+            logger.warn(f"Prompt lineage {name} was deleted")
         return prompt

     description = "get prompt lineage"
@@ -63,7 +63,7 @@ def process_response(response):
         prompt_lineage = prompt.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn("This prompt version is part of a deleted lineage")
+            logger.warn(f"Prompt version is part of a deleted lineage {prompt_lineage.get('name')}")
         return Prompt.from_dict(api, prompt) if prompt else None

     description = "create prompt version"
@@ -105,7 +105,7 @@ def process_response(response):
         prompt_lineage = prompt_version.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn("This prompt version is part of a deleted lineage")
+            logger.warn(f"Prompt version is part of a deleted lineage {prompt_lineage.get('name')}")
         prompt = Prompt.from_dict(api, prompt_version) if prompt_version else None
         if cache and prompt:
             put_prompt(cache, prompt)

From 79dd258fa64c739731a6ae66b20030017de548cc Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Mon, 16 Dec 2024 16:26:34 +0100
Subject: [PATCH 5/7] feat: change wording again

---
 literalai/api/helpers/prompt_helpers.py | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index 380e12b..8f58016 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -22,7 +22,9 @@ def create_prompt_lineage_helper(name: str, description: Optional[str] = None):
     def process_response(response):
         prompt = response["data"]["createPromptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn(f"Prompt lineage {name} was deleted")
+            logger.warn(
+                f"Prompt {name} was deleted - please update any references to use an active prompt in production"
+            )
         return prompt

     description = "create prompt lineage"
@@ -36,7 +38,9 @@ def get_prompt_lineage_helper(name: str):
     def process_response(response):
         prompt = response["data"]["promptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn(f"Prompt lineage {name} was deleted")
+            logger.warn(
+                f"Prompt {name} was deleted - please update any references to use an active prompt in production"
+            )
         return prompt

     description = "get prompt lineage"
@@ -63,7 +67,9 @@ def process_response(response):
         prompt_lineage = prompt.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn(f"Prompt version is part of a deleted lineage {prompt_lineage.get('name')}")
+            logger.warn(
+                f"Prompt {name} was deleted - please update any references to use an active prompt in production"
+            )
         return Prompt.from_dict(api, prompt) if prompt else None

     description = "create prompt version"
@@ -105,7 +111,9 @@ def process_response(response):
         prompt_lineage = prompt_version.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn(f"Prompt version is part of a deleted lineage {prompt_lineage.get('name')}")
+            logger.warn(
+                f"Prompt {name} was deleted - please update any references to use an active prompt in production"
+            )
         prompt = Prompt.from_dict(api, prompt_version) if prompt_version else None
         if cache and prompt:
             put_prompt(cache, prompt)

From 95061a7f24867d585f9bd00e3aca1d28fd81daa0 Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Thu, 19 Dec 2024 15:09:07 +0100
Subject: [PATCH 6/7] feat: change warn to warning + fix ci

---
 literalai/api/helpers/prompt_helpers.py | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index 8f58016..f2b9cf8 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -22,7 +22,7 @@ def create_prompt_lineage_helper(name: str, description: Optional[str] = None):
     def process_response(response):
         prompt = response["data"]["createPromptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn(
+            logger.warning(
                 f"Prompt {name} was deleted - please update any references to use an active prompt in production"
             )
         return prompt
@@ -38,7 +38,7 @@ def get_prompt_lineage_helper(name: str):
     def process_response(response):
         prompt = response["data"]["promptLineage"]
         if prompt and prompt.get("deletedAt"):
-            logger.warn(
+            logger.warning(
                 f"Prompt {name} was deleted - please update any references to use an active prompt in production"
             )
         return prompt
@@ -67,8 +67,8 @@ def process_response(response):
         prompt_lineage = prompt.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn(
-                f"Prompt {name} was deleted - please update any references to use an active prompt in production"
+            logger.warning(
+                f"Prompt {prompt_lineage.name} was deleted - please update any references to use an active prompt in production"
             )
         return Prompt.from_dict(api, prompt) if prompt else None

@@ -77,8 +77,10 @@ def process_response(response):
     return gql.CREATE_PROMPT_VERSION, description, variables, process_response


-def get_prompt_cache_key(id: Optional[str], name: Optional[str], version: Optional[int]) -> str:
-    if id:
+def get_prompt_cache_key(
+    id: Optional[str], name: Optional[str], version: Optional[int]
+) -> str:
+    if id:
         return id
     elif name and version:
         return f"{name}-{version}"
@@ -111,7 +113,7 @@ def process_response(response):
         prompt_version = response["data"]["promptVersion"]
         prompt_lineage = prompt_version.get("lineage")

         if prompt_lineage and prompt_lineage.get("deletedAt"):
-            logger.warn(
+            logger.warning(
                 f"Prompt {name} was deleted - please update any references to use an active prompt in production"
             )
         prompt = Prompt.from_dict(api, prompt_version) if prompt_version else None
@@ -121,7 +123,14 @@ def process_response(response):

     description = "get prompt"

-    return gql.GET_PROMPT_VERSION, description, variables, process_response, timeout, cached_prompt
+    return (
+        gql.GET_PROMPT_VERSION,
+        description,
+        variables,
+        process_response,
+        timeout,
+        cached_prompt,
+    )


 def create_prompt_variant_helper(

From c9f751402771b434ef5c9abad66624ea91bf6d0f Mon Sep 17 00:00:00 2001
From: Matthieu Olenga
Date: Thu, 19 Dec 2024 18:43:56 +0100
Subject: [PATCH 7/7] fix: prompt_lineage print name

---
 literalai/api/helpers/prompt_helpers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/literalai/api/helpers/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
index f2b9cf8..00a9881 100644
--- a/literalai/api/helpers/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -68,7 +68,7 @@ def process_response(response):

         if prompt_lineage and prompt_lineage.get("deletedAt"):
             logger.warning(
-                f"Prompt {prompt_lineage.name} was deleted - please update any references to use an active prompt in production"
+                f"Prompt {prompt_lineage.get('name')} was deleted - please update any references to use an active prompt in production"
             )
         return Prompt.from_dict(api, prompt) if prompt else None
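
Reviewer note (not part of the patch series): a minimal, self-contained sketch of the log line these changes emit once a deleted lineage is fetched. The payload below is a hypothetical stand-in shaped like the promptVersion query extended above (the id, name, and timestamp values are made up); the check mirrors the final form of the process_response callbacks after patches 6 and 7.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("literalai.api.helpers.prompt_helpers")

# Hypothetical GraphQL payload with the newly selected lineage.deletedAt
# field populated, as it would be for a prompt whose lineage was deleted.
response = {
    "data": {
        "promptVersion": {
            "id": "version-123",
            "lineage": {"name": "my-prompt", "deletedAt": "2024-12-13T14:00:00Z"},
        }
    }
}

# Same deletedAt check the patched process_response callbacks perform.
prompt_version = response["data"]["promptVersion"]
prompt_lineage = prompt_version.get("lineage")
if prompt_lineage and prompt_lineage.get("deletedAt"):
    logger.warning(
        f"Prompt {prompt_lineage.get('name')} was deleted - please update any references to use an active prompt in production"
    )

# Expected output:
# WARNING:literalai.api.helpers.prompt_helpers:Prompt my-prompt was deleted - please update any references to use an active prompt in production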