feat: serialize chat messages #124

Merged 4 commits on Sep 23, 2024

58 changes: 32 additions & 26 deletions literalai/callback/langchain_callback.py
@@ -1,16 +1,6 @@
 import time
 from importlib.metadata import version
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    List,
-    Optional,
-    Tuple,
-    TypedDict,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict, Union, cast
 
 from literalai.helper import ensure_values_serializable
 from literalai.observability.generation import (
@@ -28,17 +18,6 @@
 from literalai.observability.step import TrueStepType
 
 
-def process_content(content: Any) -> Tuple[Dict, Optional[str]]:
-    if content is None:
-        return {}, None
-    if isinstance(content, dict):
-        return content, "json"
-    elif isinstance(content, str):
-        return {"content": content}, "text"
-    else:
-        return {"content": str(content)}, "text"
-
-
 def process_variable_value(value: Any) -> str:
     return str(value) if value is not None else ""
@@ -151,6 +130,35 @@ def _convert_message(
 
         return msg
 
+    def _is_message(self, to_check: Any) -> bool:
+        return isinstance(to_check, BaseMessage)
+
+    def _is_message_array(self, to_check: Any) -> bool:
+        return isinstance(to_check, list) and all(
+            self._is_message(item) for item in to_check
+        )
+
+    def process_content(self, content: Any, root=True):
+        if content is None:
+            return {}
+        if self._is_message_array(content):
+            if root:
+                return {"messages": [self._convert_message(m) for m in content]}
+            else:
+                return [self._convert_message(m) for m in content]
+        elif self._is_message(content):
+            return self._convert_message(content)
+        elif isinstance(content, dict):
+            processed_dict = {}
+            for key, value in content.items():
+                processed_value = self.process_content(value, root=False)
+                processed_dict[key] = processed_value
+            return processed_dict
+        elif isinstance(content, str):
+            return {"content": content}
+        else:
+            return {"content": str(content)}
+
     def _build_llm_settings(
         self,
         serialized: Dict,
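
A hedged sketch of what the new method produces (handler stands in for an instance of the callback handler; its construction is outside the hunks shown). A top-level message list is wrapped in a "messages" envelope, while the same list nested inside a dict is converted in place through the root=False recursion:

from langchain_core.messages import AIMessage, HumanMessage  # assumed import path

history = [HumanMessage(content="hi"), AIMessage(content="hello")]

# Top-level message list: wrapped in a "messages" envelope (root=True).
handler.process_content(history)
# -> {"messages": [<converted HumanMessage>, <converted AIMessage>]}

# Nested occurrences keep their surrounding structure (root=False),
# and plain strings are wrapped as {"content": ...}.
handler.process_content({"chat_history": history, "question": "why?"})
# -> {"chat_history": [<converted messages>], "question": {"content": "why?"}}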
@@ -366,7 +374,6 @@ def _start_trace(self, run: Run) -> None:
             self.generation_inputs[str(run.id)] = ensure_values_serializable(
                 run.inputs
             )
-
         if ignore:
             return
 
@@ -393,7 +400,7 @@ def _start_trace(self, run: Run) -> None:
         )
         step.tags = run.tags
         step.metadata = run.metadata
-        step.input, _ = process_content(run.inputs)
+        step.input = self.process_content(run.inputs)
 
         self.steps[str(run.id)] = step
 
@@ -406,7 +413,6 @@ def _on_run_update(self, run: Run) -> None:
             return
 
         current_step = self.steps.get(str(run.id), None)
-
        if run.run_type == "llm" and current_step:
             provider, model, tools, llm_settings = self._build_llm_settings(
                 (run.serialized or {}), (run.extra or {}).get("invocation_params")
@@ -508,7 +514,7 @@ def _on_run_update(self, run: Run) -> None:
             output = outputs.get(output_keys[0], outputs)
 
         if current_step:
-            current_step.output, _ = process_content(output)
+            current_step.output = self.process_content(output)
             current_step.end()
 
     def _on_error(self, error: BaseException, *, run_id: "UUID", **kwargs: Any):
7 changes: 3 additions & 4 deletions literalai/event_processor.py
@@ -35,16 +35,15 @@ def __init__(self, api: "LiteralAPI", batch_size: int = 1, disabled: bool = False):
         self.batch_size = batch_size
         self.api = api
         self.event_queue = queue.Queue()
-        self.processing_thread = threading.Thread(
-            target=self._process_events, daemon=True
-        )
         self.disabled = disabled
         self.processing_counter = 0
         self.counter_lock = threading.Lock()
         self.last_batch_time = time.time()
+        self.processing_thread = threading.Thread(
+            target=self._process_events, daemon=True
+        )
         if not self.disabled:
             self.processing_thread.start()
-
         self.stop_event = threading.Event()
 
     def add_event(self, event: "StepDict"):
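
A plausible reading of this reorder (the diff does not state the motivation): a threading.Thread runs nothing until start() is called, so constructing the worker after every attribute it may read, and next to its guarded start, keeps the startup sequence in one block and makes the ordering constraint explicit. A reduced, runnable sketch of the pattern with illustrative names:

import queue
import threading
import time

class MiniProcessor:
    """Illustrative reduction of the EventProcessor startup sequence."""

    def __init__(self, disabled: bool = False):
        self.event_queue = queue.Queue()
        self.disabled = disabled
        self.counter_lock = threading.Lock()
        self.last_batch_time = time.time()
        # Construct the worker only after everything it reads exists;
        # Thread(...) runs nothing until start().
        self.processing_thread = threading.Thread(
            target=self._process_events, daemon=True
        )
        if not self.disabled:
            self.processing_thread.start()

    def _process_events(self):
        with self.counter_lock:  # safe: assigned before start()
            pass

MiniProcessor(disabled=True)  # construct without spawning the worker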
2 changes: 1 addition & 1 deletion requirements-dev.txt
@@ -1,11 +1,11 @@
 pytest
 pytest-asyncio
 pytest-timeout
+pytest_httpx==0.30.0
 pre-commit
 python-dotenv
 ruff
 mypy
 langchain
 llama-index
-pytest_httpx
 mistralai