diff --git a/literalai/api/__init__.py b/literalai/api/__init__.py
index 480a185..d271ba4 100644
--- a/literalai/api/__init__.py
+++ b/literalai/api/__init__.py
@@ -144,6 +144,7 @@ def handle_bytes(item):
 
 
 class BaseLiteralAPI:
+
     def __init__(
         self,
         api_key: Optional[str] = None,
@@ -185,25 +186,21 @@ def headers(self):
 
 
 class LiteralAPI(BaseLiteralAPI):
+    """
+    ```python
+    from literalai import LiteralClient
+    # Initialize the client
+    literalai_client = LiteralClient(api_key="your_api_key_here")
+    # Access the API's methods
+    print(literalai_client.api)
+    ```
+    """
+
     R = TypeVar("R")
 
     def make_gql_call(
         self, description: str, query: str, variables: Dict[str, Any]
     ) -> Dict:
-        """
-        Executes a GraphQL call with the provided query and variables.
-
-        Args:
-            description (str): Description of the GraphQL operation for logging purposes.
-            query (str): The GraphQL query to be executed.
-            variables (Dict[str, Any]): Variables required for the GraphQL query.
-
-        Returns:
-            Dict: The JSON response from the GraphQL endpoint.
-
-        Raises:
-            Exception: If the GraphQL call fails or returns errors.
-        """
         def raise_error(error):
             logger.error(f"Failed to {description}: {error}")
@@ -250,16 +247,6 @@ def raise_error(error):
         raise Exception("Unknown error")
 
     def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict:
-        """
-        Executes a REST API call to the specified subpath with the given body.
-
-        Args:
-            subpath (str): The subpath of the REST API endpoint.
-            body (Dict[str, Any]): The JSON body to send with the POST request.
-
-        Returns:
-            Dict: The JSON response from the REST API endpoint.
-        """
         with httpx.Client(follow_redirects=True) as client:
             response = client.post(
                 self.rest_endpoint + subpath,
@@ -290,18 +277,6 @@ def gql_helper(
         variables: Dict,
         process_response: Callable[..., R],
     ) -> R:
-        """
-        Helper function to make a GraphQL call and process the response.
-
-        Args:
-            query (str): The GraphQL query to execute.
-            description (str): Description of the GraphQL operation for logging purposes.
-            variables (Dict): Variables required for the GraphQL query.
-            process_response (Callable[..., R]): A function to process the response.
-
-        Returns:
-            R: The result of processing the response.
- """ response = self.make_gql_call(description, query, variables) return process_response(response) @@ -698,15 +673,14 @@ def upload_file( fields: Dict = request_dict.get("fields", {}) object_key: Optional[str] = fields.get("key") upload_type: Literal["raw", "multipart"] = cast( - Literal["raw", "multipart"], request_dict.get( - "uploadType", "multipart") + Literal["raw", "multipart"], request_dict.get("uploadType", "multipart") ) signed_url: Optional[str] = json_res.get("signedUrl") # Prepare form data form_data = ( {} - ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]] + ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]] for field_name, field_value in fields.items(): form_data[field_name] = (None, field_value) @@ -776,8 +750,7 @@ def create_attachment( if active_steps := active_steps_var.get([]): step_id = active_steps[-1].id else: - raise Exception( - "No step_id provided and no active step found.") + raise Exception("No step_id provided and no active step found.") ( query, @@ -799,8 +772,7 @@ def create_attachment( ) if content: - uploaded = self.upload_file( - content=content, thread_id=thread_id, mime=mime) + uploaded = self.upload_file(content=content, thread_id=thread_id, mime=mime) if uploaded["object_key"] is None or uploaded["url"] is None: raise Exception("Failed to upload file") @@ -1398,7 +1370,6 @@ def update_prompt_ab_testing( ) # Misc API - def get_my_project_id(self): """ Retrieves the projectId associated to the API key. @@ -1411,26 +1382,21 @@ def get_my_project_id(self): class AsyncLiteralAPI(BaseLiteralAPI): + """ + ```python + from literalai import AsyncLiteralClient + # Initialize the client + async_literalai_client = AsyncLiteralClient(api_key="your_api_key_here") + # Access the API's methods + print(async_literalai_client.api) + ``` + """ + R = TypeVar("R") async def make_gql_call( self, description: str, query: str, variables: Dict[str, Any] ) -> Dict: - """ - Asynchronously makes a GraphQL call using the provided query and variables. - - Args: - description (str): Description of the GraphQL operation for logging purposes. - query (str): The GraphQL query to be executed. - variables (Dict[str, Any]): Variables required for the GraphQL query. - - Returns: - Dict: The JSON response from the GraphQL endpoint. - - Raises: - Exception: If the GraphQL call fails or returns errors. - """ - def raise_error(error): logger.error(f"Failed to {description}: {error}") raise Exception(error) @@ -1477,16 +1443,6 @@ def raise_error(error): raise Exception("Unkown error") async def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict: - """ - Asynchronously makes a REST API call to a specified subpath with the provided body. - - Args: - subpath (str): The endpoint subpath to which the POST request is made. - body (Dict[str, Any]): The JSON body of the POST request. - - Returns: - Dict: The JSON response from the REST API endpoint. - """ async with httpx.AsyncClient(follow_redirects=True) as client: response = await client.post( self.rest_endpoint + subpath, @@ -1517,18 +1473,6 @@ async def gql_helper( variables: Dict, process_response: Callable[..., R], ) -> R: - """ - Helper function to process a GraphQL query by making an asynchronous call and processing the response. - - Args: - query (str): The GraphQL query to be executed. - description (str): Description of the GraphQL operation for logging purposes. - variables (Dict): Variables required for the GraphQL query. 
-            process_response (Callable[..., R]): The function to process the response.
-
-        Returns:
-            R: The result of processing the response.
-        """
         response = await self.make_gql_call(description, query, variables)
         return process_response(response)
 
@@ -1952,15 +1896,14 @@ async def upload_file(
         fields: Dict = request_dict.get("fields", {})
         object_key: Optional[str] = fields.get("key")
         upload_type: Literal["raw", "multipart"] = cast(
-            Literal["raw", "multipart"], request_dict.get(
-                "uploadType", "multipart")
+            Literal["raw", "multipart"], request_dict.get("uploadType", "multipart")
        )
         signed_url: Optional[str] = json_res.get("signedUrl")
 
         # Prepare form data
         form_data = (
-            {}
-        )  # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
+            {}
+        )  # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
         for field_name, field_value in fields.items():
             form_data[field_name] = (None, field_value)
 
diff --git a/literalai/client.py b/literalai/client.py
index 468dcb5..7628739 100644
--- a/literalai/client.py
+++ b/literalai/client.py
@@ -28,6 +28,23 @@
 
 
 class BaseLiteralClient:
+    """
+    Base class for LiteralClient and AsyncLiteralClient.
+    Example:
+    ```python
+    from literalai import LiteralClient, AsyncLiteralClient
+
+    # Initialize the client
+    client = LiteralClient(api_key="your_api_key_here")
+    async_client = AsyncLiteralClient(api_key="your_api_key_here")
+    ```
+    Attributes:
+        api (Union[LiteralAPI, AsyncLiteralAPI]): The API client used for communication with Literal AI.
+        disabled (bool): Flag indicating whether the client is disabled.
+        event_processor (EventProcessor): Processor for handling events.
+
+    """
+
     api: Union[LiteralAPI, AsyncLiteralAPI]
 
     def __init__(
@@ -61,12 +78,6 @@ def __init__(
         )
 
     def to_sync(self) -> "LiteralClient":
-        """
-        Converts the current client to its synchronous version.
-
-        Returns:
-            LiteralClient: The current client's synchronous version.
-        """
         if isinstance(self.api, AsyncLiteralAPI):
             return LiteralClient(
                 batch_size=self.event_processor.batch_size,
@@ -136,18 +147,6 @@ def thread(
         name: Optional[str] = None,
         **kwargs,
     ):
-        """
-        Creates a thread where all the subsequents steps will be logged.
-        Works as a decorator or a ContextManager.
-
-        Args:
-            original_function: The function to execute in the thread's context.
-            thread_id (Optional[str]): The id of the thread to create.
-            name (Optional[str]): The name of the thread to create.
-
-        Returns:
-            The wrapper for the thread's context.
-        """
         if original_function:
             return thread_decorator(
                 self, func=original_function, thread_id=thread_id, name=name, **kwargs
             )
@@ -167,23 +166,6 @@ def step(
         root_run_id: Optional[str] = None,
         **kwargs,
     ):
-        """
-        Creates a step where all the subsequents steps will be logged. Works as a decorator or a ContextManager.
-        This is used to create Agent steps. For conversational messages use `message` instead.
-
-        Args:
-            original_function: The function to execute in the step's context.
-            name (Optional[str]): The name of the step to create.
-            type (TrueStepType): The type of the step. Must be one of the following :
-                "run", "tool", "llm", "embedding", "retrieval","rerank", "undefined".
-            id (Optional[str]): The id of the step to create.
-            parent_id (Optional[str]): The id of the parent step.
-            thread_id (Optional[str]): The id of the parent thread.
-            root_run_id (Optional[str]): The id of the root run.
-
-        Returns:
-            The wrapper for the step's context.
- """ if original_function: return step_decorator( self, @@ -218,20 +200,6 @@ def run( thread_id: Optional[str] = None, root_run_id: Optional[str] = None, ): - """ - Creates a run where all the subsequents steps will be logged. Works as a decorator or a ContextManager. - - Args: - original_function: The function to execute in the step's context. - name (Optional[str]): The name of the step to create. - id (Optional[str]): The id of the step to create. - parent_id (Optional[str]): The id of the parent step. - thread_id (Optional[str]): The id of the parent thread. - root_run_id (Optional[str]): The id of the root run. - - Returns: - The wrapper for the step's context. - """ return self.step( original_function=original_function, name=name, @@ -255,26 +223,6 @@ def message( metadata: Dict = {}, root_run_id: Optional[str] = None, ): - """ - Creates a conversational message step and sends it to Literal AI. - For agentic steps or runs use `step` or `run` respectively instead. - - Args: - content (str): The text content of the message. - id (Optional[str]): The id of the step to create. - parent_id (Optional[str]): The id of the parent step. - type (TrueStepType): The type of the step. Must be one of the following : - "user_message", "assistant_message", "system_message". - name (Optional[str]): The name of the step to create. - thread_id (Optional[str]): The id of the parent thread. - attachments (List[Attachment]): A list of attachments to append to the message. - tags (Optional[List[str]]): A list of tags to add to the message. - metadata (Dict): Metadata to add to the message, in key-value pairs. - root_run_id (Optional[str]): The id of the root run. - - Returns: - Message: the created message. - """ step = Message( name=name, id=id, @@ -298,17 +246,6 @@ def environment( env: Environment = "prod", **kwargs, ): - """ - Sets the environment to add to all subsequent threads and steps. Works as a decorator or a ContextManager. - Entities logged in the "experiment" environment are filtered out of the Literal AI UI. - - Args: - original_function: The function to execute in the step's context. - env (Environment): The environment to add to logged entities. - - Returns: - The wrapper for the context. - """ if original_function: return env_decorator( self, @@ -328,15 +265,6 @@ def experiment_item_run( original_function=None, **kwargs, ): - """ - Creates an experiment run. Works as a decorator or a ContextManager. - - Args: - original_function: The function to execute in the step's context. - - Returns: - The wrapper for the context. - """ if original_function: return experiment_item_run_decorator( self, @@ -426,6 +354,16 @@ def flush_and_stop(self): class LiteralClient(BaseLiteralClient): + """ + Synchronous client for interacting with the Literal AI API. + Example: + ```python + from literalai import LiteralClient + # Initialize the client + client = LiteralClient(api_key="your_api_key_here") + ``` + """ + api: LiteralAPI def __init__( @@ -450,6 +388,16 @@ def flush(self): class AsyncLiteralClient(BaseLiteralClient): + """ + Asynchronous client for interacting with the Literal AI API. 
+    Example:
+    ```python
+    from literalai import AsyncLiteralClient
+    # Initialize the client
+    async_client = AsyncLiteralClient(api_key="your_api_key_here")
+    ```
+    """
+
     api: AsyncLiteralAPI
 
     def __init__(
diff --git a/literalai/instrumentation/mistralai.py b/literalai/instrumentation/mistralai.py
index ea905a6..8ceccb5 100644
--- a/literalai/instrumentation/mistralai.py
+++ b/literalai/instrumentation/mistralai.py
@@ -311,7 +311,7 @@ def streaming_response(
                     time.time() - context["start"]
                 ) * 1000
             token_count += 1
-            completion += chunk.data.choices[0].delta.content
+            completion += str(chunk.data.choices[0].delta.content)
 
             if (
                 generation
diff --git a/tests/e2e/test_e2e.py b/tests/e2e/test_e2e.py
index 1063352..1137878 100644
--- a/tests/e2e/test_e2e.py
+++ b/tests/e2e/test_e2e.py
@@ -658,6 +658,7 @@ async def test_prompt_ab_testing(self, client: LiteralClient):
         )
 
         ab_testing = client.api.get_prompt_ab_testing(name=prompt_v1.name)
+        ab_testing = sorted(ab_testing, key=lambda x: x["version"])
 
         assert len(ab_testing) == 2
         assert ab_testing[0]["version"] == 0