Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions python/mirascope/llm/clients/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from .azure_openai.completions import AzureOpenAICompletionsClient
from .azure_openai.responses import AzureOpenAIResponsesClient
from .base import BaseClient, ClientT, Params
from .cache import clear_all_client_caches
from .google import GoogleClient, GoogleModelId
from .openai import (
OpenAICompletionsClient,
Expand All @@ -33,6 +34,7 @@
"OpenAIResponsesModelId",
"Params",
"Provider",
"clear_all_client_caches",
"client",
"get_client",
]
3 changes: 2 additions & 1 deletion python/mirascope/llm/clients/anthropic/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
"""Anthropic client implementation."""

from .clients import AnthropicClient, client, get_client
from .clients import AnthropicClient, clear_cache, client, get_client
from .model_ids import AnthropicModelId

__all__ = [
"AnthropicClient",
"AnthropicModelId",
"clear_cache",
"client",
"get_client",
]
5 changes: 5 additions & 0 deletions python/mirascope/llm/clients/anthropic/clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,11 @@ def client(
return _anthropic_singleton(api_key, base_url)


def clear_cache() -> None:
    """Drop every memoized Anthropic client singleton.

    After this call, `client()` constructs a fresh client for each
    (api_key, base_url) pair instead of returning a cached instance.
    """
    # _anthropic_singleton is a cached factory; cache_clear() empties it.
    _anthropic_singleton.cache_clear()


def get_client() -> "AnthropicClient":
"""Retrieve the current Anthropic client from context, or a global default.

Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
"""Azure OpenAI Completions API client."""

from .clients import (
AzureOpenAICompletionsClient,
client,
get_client,
)
from .clients import AzureOpenAICompletionsClient, clear_cache, client, get_client

__all__ = [
"AzureOpenAICompletionsClient",
"clear_cache",
"client",
"get_client",
]
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,11 @@ def client(
)


def clear_cache() -> None:
    """Drop every memoized Azure OpenAI Completions client singleton.

    Subsequent `client()` calls will build fresh clients rather than
    reuse previously cached instances.
    """
    # The singleton factory is cached; cache_clear() discards all entries.
    _azure_completions_singleton.cache_clear()


def get_client() -> "AzureOpenAICompletionsClient":
"""Get the current `AzureOpenAICompletionsClient` from context."""
current_client = AZURE_OPENAI_COMPLETIONS_CLIENT_CONTEXT.get()
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
"""Azure OpenAI Responses API client."""

from .clients import (
AzureOpenAIResponsesClient,
client,
get_client,
)
from .clients import AzureOpenAIResponsesClient, clear_cache, client, get_client

__all__ = [
"AzureOpenAIResponsesClient",
"clear_cache",
"client",
"get_client",
]
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,11 @@ def client(
)


def clear_cache() -> None:
    """Drop every memoized Azure OpenAI Responses client singleton.

    Subsequent `client()` calls will build fresh clients rather than
    reuse previously cached instances.
    """
    # The singleton factory is cached; cache_clear() discards all entries.
    _azure_responses_singleton.cache_clear()


def get_client() -> "AzureOpenAIResponsesClient":
"""Get the current `AzureOpenAIResponsesClient` from context."""
current_client = AZURE_OPENAI_RESPONSES_CLIENT_CONTEXT.get()
Expand Down
23 changes: 23 additions & 0 deletions python/mirascope/llm/clients/cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
"""Utilities for managing cached LLM client singletons."""

from .anthropic import clear_cache as clear_anthropic_cache
from .azure_openai.completions import clear_cache as clear_azure_completions_cache
from .azure_openai.responses import clear_cache as clear_azure_responses_cache
from .google import clear_cache as clear_google_cache
from .openai import (
clear_completions_cache as clear_openai_completions_cache,
clear_responses_cache as clear_openai_responses_cache,
)

__all__ = ["clear_all_client_caches"]


def clear_all_client_caches() -> None:
    """Clear caches for every registered LLM client implementation.

    Invokes each provider-specific `clear_cache` in turn so no cached
    client singleton survives the call.
    """
    # Keeping the clear functions in a single tuple means a new provider
    # only needs one line added here.
    provider_cache_clearers = (
        clear_anthropic_cache,
        clear_azure_completions_cache,
        clear_azure_responses_cache,
        clear_google_cache,
        clear_openai_completions_cache,
        clear_openai_responses_cache,
    )
    for clear in provider_cache_clearers:
        clear()
10 changes: 8 additions & 2 deletions python/mirascope/llm/clients/google/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
"""Google client implementation."""

from .clients import GoogleClient, client, get_client
from .clients import GoogleClient, clear_cache, client, get_client
from .model_ids import GoogleModelId

__all__ = ["GoogleClient", "GoogleModelId", "client", "get_client"]
__all__ = [
"GoogleClient",
"GoogleModelId",
"clear_cache",
"client",
"get_client",
]
5 changes: 5 additions & 0 deletions python/mirascope/llm/clients/google/clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,11 @@ def client(
return _google_singleton(api_key, base_url)


def clear_cache() -> None:
    """Drop every memoized Google client singleton.

    After this call, `client()` constructs a fresh client for each
    (api_key, base_url) pair instead of returning a cached instance.
    """
    # _google_singleton is a cached factory; cache_clear() empties it.
    _google_singleton.cache_clear()


def get_client() -> "GoogleClient":
"""Retrieve the current Google client from context, or a global default.

Expand Down
4 changes: 4 additions & 0 deletions python/mirascope/llm/clients/openai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
from .completions import (
OpenAICompletionsClient,
OpenAICompletionsModelId,
clear_cache as clear_completions_cache,
client as completions_client,
get_client as get_completions_client,
)
from .responses import (
OpenAIResponsesClient,
OpenAIResponsesModelId,
clear_cache as clear_responses_cache,
client as responses_client,
get_client as get_responses_client,
)
Expand All @@ -18,6 +20,8 @@
"OpenAICompletionsModelId",
"OpenAIResponsesClient",
"OpenAIResponsesModelId",
"clear_completions_cache",
"clear_responses_cache",
"completions_client",
"get_completions_client",
"get_responses_client",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
from .clients import OpenAICompletionsClient, client, get_client
from .clients import OpenAICompletionsClient, clear_cache, client, get_client
from .model_ids import OpenAICompletionsModelId

__all__ = [
"OpenAICompletionsClient",
"OpenAICompletionsModelId",
"clear_cache",
"client",
"get_client",
]
5 changes: 5 additions & 0 deletions python/mirascope/llm/clients/openai/completions/clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,11 @@ def client(
return _openai_singleton(api_key, base_url)


def clear_cache() -> None:
    """Drop every memoized OpenAI Completions client singleton.

    After this call, `client()` constructs a fresh client for each
    (api_key, base_url) pair instead of returning a cached instance.
    """
    # _openai_singleton is a cached factory; cache_clear() empties it.
    _openai_singleton.cache_clear()


def get_client() -> "OpenAICompletionsClient":
"""Retrieve the current OpenAI client from context, or a global default.

Expand Down
3 changes: 2 additions & 1 deletion python/mirascope/llm/clients/openai/responses/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
from .clients import OpenAIResponsesClient, client, get_client
from .clients import OpenAIResponsesClient, clear_cache, client, get_client
from .model_ids import OpenAIResponsesModelId

__all__ = [
"OpenAIResponsesClient",
"OpenAIResponsesModelId",
"clear_cache",
"client",
"get_client",
]
5 changes: 5 additions & 0 deletions python/mirascope/llm/clients/openai/responses/clients.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@ def client(
return _openai_responses_singleton(api_key, base_url)


def clear_cache() -> None:
    """Drop every memoized OpenAI Responses client singleton.

    After this call, `client()` constructs a fresh client for each
    (api_key, base_url) pair instead of returning a cached instance.
    """
    # _openai_responses_singleton is a cached factory; cache_clear() empties it.
    _openai_responses_singleton.cache_clear()


def get_client() -> "OpenAIResponsesClient":
"""Get the current `OpenAIResponsesClient` from context."""
current_client = OPENAI_RESPONSES_CLIENT_CONTEXT.get()
Expand Down
13 changes: 12 additions & 1 deletion python/tests/e2e/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,14 @@

from __future__ import annotations

from collections.abc import Callable
from collections.abc import Callable, Generator
from copy import deepcopy
from typing import Any, TypedDict, get_args

import pytest

from mirascope import llm
from mirascope.llm.clients import clear_all_client_caches

SENSITIVE_HEADERS = [
# Common API authentication headers
Expand Down Expand Up @@ -93,6 +94,16 @@ def sanitize_request(request: Any) -> Any: # noqa: ANN401
return request


@pytest.fixture(autouse=True)
def _clear_client_caches() -> Generator[None, None, None]:
    """Ensure cached LLM client singletons do not bleed across e2e tests.

    Autouse, so it wraps every test in this suite: all provider client
    caches are cleared before the test body runs and again afterwards,
    preventing one test's cached client from leaking into the next.
    """
    clear_all_client_caches()
    try:
        yield
    finally:
        # finally guarantees the post-test clear runs even when the test
        # fails or raises, keeping subsequent tests isolated.
        clear_all_client_caches()


@pytest.fixture(scope="session")
def vcr_config() -> VCRConfig:
"""VCR configuration for all API tests.
Expand Down
Loading