From 9220e2951d331dcce76d168c7e5313f483a82edc Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Fri, 20 Jun 2025 08:50:08 +0100 Subject: [PATCH 01/13] feat: AG-UI adapter AG-UI adapter enabling integration with the AG-UI protocol via the Agent.to_ag_ui() method. This includes a full example for all features in the AG-UI dojo. Fixes: ag-ui-protocol/ag-ui/issues/5 --- docs/ag-ui.md | 313 +++++++ docs/api/pydantic_ai_ag_ui.md | 3 + docs/install.md | 1 + examples/pydantic_ai_ag_ui_examples/README.md | 153 ++++ .../pydantic_ai_ag_ui_examples/__init__.py | 1 + .../api/__init__.py | 19 + .../pydantic_ai_ag_ui_examples/api/agent.py | 43 + .../api/agentic_chat.py | 54 ++ .../api/agentic_generative_ui.py | 139 +++ .../api/human_in_the_loop.py | 48 + .../api/predictive_state_updates.py | 107 +++ .../api/shared_state.py | 158 ++++ .../api/tool_based_generative_ui.py | 41 + examples/pydantic_ai_ag_ui_examples/basic.py | 58 ++ .../cli/__init__.py | 8 + .../pydantic_ai_ag_ui_examples/cli/args.py | 74 ++ .../pydantic_ai_ag_ui_examples/dojo_server.py | 48 + examples/pydantic_ai_ag_ui_examples/py.typed | 0 examples/pyproject.toml | 7 +- fasta2a/pyproject.toml | 2 +- mkdocs.yml | 2 + pydantic_ai_ag_ui/LICENSE | 21 + pydantic_ai_ag_ui/README.md | 13 + .../pydantic_ai_ag_ui/__init__.py | 19 + pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py | 16 + .../pydantic_ai_ag_ui/_exceptions.py | 51 ++ .../pydantic_ai_ag_ui/adapter.py | 678 +++++++++++++++ pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py | 8 + pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py | 51 ++ .../pydantic_ai_ag_ui/protocols.py | 26 + pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed | 0 pydantic_ai_ag_ui/pyproject.toml | 62 ++ pydantic_ai_slim/pydantic_ai/agent.py | 36 +- pydantic_ai_slim/pydantic_ai/models/test.py | 124 ++- pydantic_ai_slim/pyproject.toml | 2 + pyproject.toml | 7 +- tests/pydantic_ai_ag_ui/__init__.py | 1 + tests/pydantic_ai_ag_ui/test_adapter.py | 821 ++++++++++++++++++ tests/test_ag_ui.py | 94 ++ uv.lock | 43 +- 40 
files changed, 3333 insertions(+), 19 deletions(-) create mode 100644 docs/ag-ui.md create mode 100644 docs/api/pydantic_ai_ag_ui.md create mode 100644 examples/pydantic_ai_ag_ui_examples/README.md create mode 100644 examples/pydantic_ai_ag_ui_examples/__init__.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/__init__.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/agent.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/shared_state.py create mode 100644 examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py create mode 100644 examples/pydantic_ai_ag_ui_examples/basic.py create mode 100644 examples/pydantic_ai_ag_ui_examples/cli/__init__.py create mode 100644 examples/pydantic_ai_ag_ui_examples/cli/args.py create mode 100644 examples/pydantic_ai_ag_ui_examples/dojo_server.py create mode 100644 examples/pydantic_ai_ag_ui_examples/py.typed create mode 100644 pydantic_ai_ag_ui/LICENSE create mode 100644 pydantic_ai_ag_ui/README.md create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py create mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed create mode 100644 pydantic_ai_ag_ui/pyproject.toml create mode 100644 tests/pydantic_ai_ag_ui/__init__.py create mode 100644 
tests/pydantic_ai_ag_ui/test_adapter.py create mode 100644 tests/test_ag_ui.py diff --git a/docs/ag-ui.md b/docs/ag-ui.md new file mode 100644 index 000000000..5fbed1802 --- /dev/null +++ b/docs/ag-ui.md @@ -0,0 +1,313 @@ +# Agent User Interaction (AG-UI) Protocol + +The [Agent User Interaction (AG-UI) Protocol](https://docs.ag-ui.com/introduction) +is an open standard introduced by the +[CopilotKit](https://webflow.copilotkit.ai/blog/introducing-ag-ui-the-protocol-where-agents-meet-users) +team that standardises how frontend applications connect to AI agents through +an open protocol. Think of it as a universal translator for AI-driven systems +no matter what language an agent speaks: AG-UI ensures fluent communication. + +The team at [Rocket Science](https://www.rocketscience.gg/), contributed the +[pydantic-ai-ag-ui](#ag-ui-adapter) package to make it easy to implement the +AG-UI protocol with PydanticAI agents. + +This also includes an [`Agent.to_ag_ui`][pydantic_ai.Agent.to_ag_ui] convenience +method which simplifies the creation of [`Adapter`][pydantic_ai_ag_ui.Adapter] +for PydanticAI agents, which can then be used by as part of a +[fastapi](https://fastapi.tiangolo.com/) app. + +## AG-UI Adapter + +The [Adapter][pydantic_ai_ag_ui.Adapter] class is an adapter between +PydanticAI agents and the AG-UI protocol written in Python. 
It provides support +for all aspects of spec including: + +- [Events](https://docs.ag-ui.com/concepts/events) +- [Messages](https://docs.ag-ui.com/concepts/messages) +- [State Management](https://docs.ag-ui.com/concepts/state) +- [Tools](https://docs.ag-ui.com/concepts/tools) + +Let's have a quick look at how to use it: + +### Installation + +[Adapter][pydantic_ai_ag_ui.Adapter] is available on PyPI as +[`pydantic-ai-ag-ui`](https://pypi.org/project/pydantic-ai-ag-ui/) so installation is as +simple as: + +```bash +pip/uv-add pydantic-ai-ag-ui +``` + +The only dependencies are: + +- [ag-ui-protocol](https://docs.ag-ui.com/introduction): to provide the AG-UI + types and encoder. +- [pydantic](https://pydantic.dev): to validate the request/response messages +- [pydantic-ai](https://ai.pydantic.dev/): to provide the agent framework + +To run the examples you'll also need: + +- [fastapi](https://fastapi.tiangolo.com/): to provide ASGI compatible server + +```bash +pip/uv-add 'fastapi' +``` + +You can install PydanticAI with the `ag-ui` extra to include **Adapter**: + +```bash +pip/uv-add 'pydantic-ai-slim[ag-ui]' +``` + +### Quick start + +```py {title="agent_to_ag_ui.py" py="3.10" hl_lines="17-28"} +"""Basic example for AG-UI with FastAPI and Pydantic AI.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi import FastAPI, Header +from fastapi.responses import StreamingResponse +from pydantic_ai_ag_ui import SSE_CONTENT_TYPE + +from pydantic_ai import Agent + +if TYPE_CHECKING: + from ag_ui.core import RunAgentInput + +agent = Agent('openai:gpt-4.1', instructions='Be fun!') +adapter = agent.to_ag_ui() +app = FastAPI(title='AG-UI Endpoint') + + +@app.post('/') +async def root( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + return StreamingResponse( + adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) +``` + +You can run the example with: + 
+```shell +uvicorn agent_to_ag_ui:app --host 0.0.0.0 --port 8000 +``` + +This will expose the agent as an AG-UI server, and you can start sending +requests to it. + +### Design + +The adapter receives messages in the form of a +[`RunAgentInput`](https://docs.ag-ui.com/sdk/js/core/types#runagentinput) +which describes the details of a request being passed to the agent including +messages and state. These are then converted to PydanticAI types, passed to the +agent which then processes the request. + +Results from the agent are converted from PydanticAI types to AG-UI events and +streamed back to the caller as Server-Sent Events (SSE). + +A user request may require multiple round trips between client UI and PydanticAI +server, depending on the tools and events needed. + +[Adapter][pydantic_ai_ag_ui.Adapter] can be used with any ASGI server. + +### Features + +To expose a PydanticAI agent as an AG-UI server including state support, you can +use the [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] method in combination +with [fastapi](https://fastapi.tiangolo.com/). + +In the example below we have document state which is shared between the UI and +server using the [`StateDeps`][pydantic_ai_ag_ui.StateDeps] which implements the +[`StateHandler`][pydantic_ai_ag_ui.StateHandler] that can be used to automatically +decode state contained in [`RunAgentInput.state`](https://docs.ag-ui.com/sdk/js/core/types#runagentinput) +when processing requests. + +#### State management + +The adapter provides full support for +[AG-UI state management](https://docs.ag-ui.com/concepts/state), which enables +real-time synchronization between agents and frontend applications. 
+ +```python {title="ag_ui_state.py" py="3.10" hl_lines="18-40"} +"""State example for AG-UI with FastAPI and Pydantic AI.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi import FastAPI, Header +from fastapi.responses import StreamingResponse +from pydantic import BaseModel +from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, StateDeps + +from pydantic_ai import Agent + +if TYPE_CHECKING: + from ag_ui.core import RunAgentInput + + +class DocumentState(BaseModel): + """State for the document being written.""" + + document: str + + +agent = Agent( + 'openai:gpt-4.1', + instructions='Be fun!', + deps_type=StateDeps[DocumentState], +) +adapter = agent.to_ag_ui() +app = FastAPI(title='AG-UI Endpoint') + + +@app.post('/') +async def root( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + return StreamingResponse( + adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), + media_type=SSE_CONTENT_TYPE, + ) +``` + +Since `app` is an ASGI application, it can be used with any ASGI server. + +```bash +uvicorn agent_to_ag_ui:app --host 0.0.0.0 --port 8000 +``` + +Since the goal of [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] is to be a +convenience method, it accepts the same arguments as the +[`Adapter`][pydantic_ai_ag_ui.Adapter] constructor. + +#### Tools + +AG-UI tools are seamlessly provided to the PydanticAI agent, enabling rich +use experiences with frontend user interfaces. + +#### Events + +The adapter provides the ability for PydanticAI tools to send +[AG-UI events](https://docs.ag-ui.com/concepts/events) simply by defining a tool +which returns a type based off +[`BaseEvent`](https://docs.ag-ui.com/sdk/js/core/events#baseevent) this allows +for custom events and state updates. 
+ +```python {title="ag_ui_tool_events.py" py="3.10" hl_lines="34-55"} +"""Tool events example for AG-UI with FastAPI and Pydantic AI.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from ag_ui.core import CustomEvent, EventType, StateSnapshotEvent +from fastapi import FastAPI, Header +from fastapi.responses import StreamingResponse +from pydantic import BaseModel +from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, StateDeps + +from pydantic_ai import Agent, RunContext + +if TYPE_CHECKING: + from ag_ui.core import RunAgentInput + + +class DocumentState(BaseModel): + """State for the document being written.""" + + document: str + + +agent = Agent( + 'openai:gpt-4.1', + instructions='Be fun!', + deps_type=StateDeps[DocumentState], +) +adapter = agent.to_ag_ui() +app = FastAPI(title='AG-UI Endpoint') + + +@agent.tool +def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEvent: + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot=ctx.deps.state, + ) + + +@agent.tool_plain +def custom_events() -> list[CustomEvent]: + return [ + CustomEvent( + type=EventType.CUSTOM, + name='count', + value=1, + ), + CustomEvent( + type=EventType.CUSTOM, + name='count', + value=2, + ), + ] + + +@app.post('/') +async def root( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + return StreamingResponse( + adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), + media_type=SSE_CONTENT_TYPE, + ) +``` + +### Examples + +For more examples of how to use [`Adapter`][pydantic_ai_ag_ui.Adapter] see +[`pydantic_ai_ag_ui_examples`](https://github.com/pydantic/pydantic-ai/tree/main/examples/pydantic_ai_ag_ui_examples), +which includes working server for the with the +[AG-UI Dojo](https://docs.ag-ui.com/tutorials/debugging#the-ag-ui-dojo) which +can be run from a clone of the repo or with the `pydantic-ai-examples` package +installed with either 
of the following: + +```bash +pip/uv-add pydantic-ai-examples +``` + +Direct, which supports command line flags: + +```shell +python -m pydantic_ai_ag_ui_examples.dojo_server --help +usage: dojo_server.py [-h] [--port PORT] [--reload] [--no-reload] [--log-level {critical,error,warning,info,debug,trace}] + +PydanticAI AG-UI Dojo server + +options: + -h, --help show this help message and exit + --port PORT, -p PORT Port to run the server on (default: 9000) + --reload Enable auto-reload (default: True) + --no-reload Disable auto-reload + --log-level {critical,error,warning,info,debug,trace} + Agent log level (default: info) +``` + +Run with adapter debug logging: + +```shell +python -m pydantic_ai_ag_ui_examples.dojo_server --log-level debug +``` + +Using uvicorn: + +```shell +uvicorn pydantic_ai_ag_ui_examples.dojo_server:app --port 9000 +``` diff --git a/docs/api/pydantic_ai_ag_ui.md b/docs/api/pydantic_ai_ag_ui.md new file mode 100644 index 000000000..ab5d48247 --- /dev/null +++ b/docs/api/pydantic_ai_ag_ui.md @@ -0,0 +1,3 @@ +# `pydantic_ai_ag_ui` + +::: pydantic_ai_ag_ui diff --git a/docs/install.md b/docs/install.md index 6d621ada5..5469dae8b 100644 --- a/docs/install.md +++ b/docs/install.md @@ -56,6 +56,7 @@ pip/uv-add "pydantic-ai-slim[openai]" * `cohere` - installs `cohere` [PyPI ↗](https://pypi.org/project/cohere){:target="_blank"} * `duckduckgo` - installs `duckduckgo-search` [PyPI ↗](https://pypi.org/project/duckduckgo-search){:target="_blank"} * `tavily` - installs `tavily-python` [PyPI ↗](https://pypi.org/project/tavily-python){:target="_blank"} +* `ag-ui` - installs `pydantic-ai-ag-ui` [PyPI ↗](https://pypi.org/project/pydantic-ai-ag-ui){:target="_blank"} See the [models](models/index.md) documentation for information on which optional dependencies are required for each model. 
diff --git a/examples/pydantic_ai_ag_ui_examples/README.md b/examples/pydantic_ai_ag_ui_examples/README.md new file mode 100644 index 000000000..52d472475 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/README.md @@ -0,0 +1,153 @@ +# PydanticAI + +Implementation of the AG-UI protocol for PydanticAI. + +## Prerequisites + +This example uses a PydanticAI agent using an OpenAI model and the AG-UI dojo. + +1. An [OpenAI API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key) +2. A clone of this repository +3. A clone of the [AG-UI protocol repository](https://github.com/ag-ui-protocol/ag-ui) + +## Running + +To run this integration you need to: + +1. Make a copy of `jobs-agent/.env.local-example` as `.env` +2. Open it in your editor and set `OPENAI_API_KEY` to a valid OpenAI key +3. Open terminal in the root directory of this repository clone +4. Install the required modules and run the server + + ```shell + cd jobs-agent + just install-deps + source .venv/bin/activate + python -m examples.pydantic_ai_ag_ui_examples.dojo_server + ``` + +5. Open another terminal in root directory of the `ag-ui` repository clone +6. Start the integration ag-ui dojo: + + ```shell + cd typescript-sdk + pnpm install && pnpm run dev + ``` + +7. Finally visit [http://localhost:3000/pydantic-ai](http://localhost:3000/pydantic-ai) + +## Feature Demos + +### [Agentic Chat](http://localhost:3000/pydantic-ai/feature/agentic_chat) + +This demonstrates a basic agent interaction including PydanticAI server side +tools and AG-UI client side tools. + +#### Agent Tools + +- `time` - PydanticAI tool to check the current time for a time zone +- `background` - AG-UI tool to set the background color of the client window + +#### Agent Prompts + +```text +What is the time in New York? 
+``` + +```text +Change the background to blue +``` + +A complex example which mixes both AG-UI and PydanticAI tools: + +```text +Perform the following steps, waiting for the response of each step before continuing: +1. Get the time +2. Set the background to red +3. Get the time +4. Report how long the background set took by diffing the two times +``` + +### [Agentic Generative UI](http://localhost:3000/pydantic-ai/feature/agentic_generative_ui) + +Demonstrates a long running task where the agent sends updates to the frontend +to let the user know what's happening. + +#### Plan Prompts + +```text +Create a plan for breakfast and execute it +``` + +### [Human in the Loop](http://localhost:3000/pydantic-ai/feature/human_in_the_loop) + +Demonstrates simple human in the loop workflow where the agent comes up with a +plan and the user can approve it using checkboxes. + +#### Task Planning Tools + +- `generate_task_steps` - AG-UI tool to generate and confirm steps + +#### Task Planning Prompt + +```text +Generate a list of steps for cleaning a car for me to review +``` + +### [Predictive State Updates](http://localhost:3000/pydantic-ai/feature/predictive_state_updates) + +Demonstrates how to use the predictive state updates feature to update the state +of the UI based on agent responses, including user interaction via user confirmation. + +#### Story Tools + +- `write_document` - AG-UI tool to write the document to a window +- `document_predict_state` - PydanticAI tool that enables document state + prediction for the `write_document` tool + +This also shows how to use custom instructions based on shared state information. + +#### Story Example + +Starting document text + +```markdown +Bruce was a good dog, +``` + +Agent prompt + +```text +Help me complete my story about bruce the dog, it should be no longer than a sentence. 
+``` + +### [Shared State](http://localhost:3000/pydantic-ai/feature/shared_state) + +Demonstrates how to use the shared state between the UI and the agent. + +State sent to the agent is detected by a function based instruction. This then +validates the data using a custom pydantic model before using it to create the +instructions for the agent to follow and send to the client using an AG-UI tool. + +#### Recipe Tools + +- `display_recipe` - AG-UI tool to display the recipe in a graphical format + +#### Recipe Example + +1. Customise the basic settings of your recipe +2. Click `Improve with AI` + +### [Tool Based Generative UI](http://localhost:3000/pydantic-ai/feature/tool_based_generative_ui) + +Demonstrates customised rendering for tool output with user confirmation. + +#### Haiku Tools + +- `generate_haiku` - AG-UI tool to display a haiku in English and Japanese + +#### Haiku Prompt + +```text +Generate a haiku about formula 1 +``` diff --git a/examples/pydantic_ai_ag_ui_examples/__init__.py b/examples/pydantic_ai_ag_ui_examples/__init__.py new file mode 100644 index 000000000..2652b3500 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/__init__.py @@ -0,0 +1 @@ +"""Example API Server for a AG-UI compatible Pydantic AI Agent UI.""" diff --git a/examples/pydantic_ai_ag_ui_examples/api/__init__.py b/examples/pydantic_ai_ag_ui_examples/api/__init__.py new file mode 100644 index 000000000..c595aac5a --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/__init__.py @@ -0,0 +1,19 @@ +"""Example API for a AG-UI compatible Pydantic AI Agent UI.""" + +from __future__ import annotations + +from .agentic_chat import router as agentic_chat_router +from .agentic_generative_ui import router as agentic_generative_ui_router +from .human_in_the_loop import router as human_in_the_loop_router +from .predictive_state_updates import router as predictive_state_updates_router +from .shared_state import router as shared_state_router +from .tool_based_generative_ui import 
router as tool_based_generative_ui_router + +__all__: list[str] = [ + 'agentic_chat_router', + 'agentic_generative_ui_router', + 'human_in_the_loop_router', + 'predictive_state_updates_router', + 'shared_state_router', + 'tool_based_generative_ui_router', +] diff --git a/examples/pydantic_ai_ag_ui_examples/api/agent.py b/examples/pydantic_ai_ag_ui_examples/api/agent.py new file mode 100644 index 000000000..a994b4871 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/agent.py @@ -0,0 +1,43 @@ +"""Create a Pydantic AI agent and AG-UI adapter.""" + +from __future__ import annotations + +from dataclasses import dataclass +from types import NoneType +from typing import Generic + +from dotenv import load_dotenv +from pydantic_ai_ag_ui import Adapter + +from pydantic_ai import Agent +from pydantic_ai.result import OutputDataT +from pydantic_ai.tools import AgentDepsT + + +@dataclass(init=False, repr=False) +class AGUIAgent(Generic[AgentDepsT, OutputDataT]): + """Pydantic AI agent with AG-UI adapter.""" + + agent: Agent[AgentDepsT, str] + adapter: Adapter[AgentDepsT, str] + instructions: str | None + + def __init__( + self, deps_type: type[AgentDepsT] = NoneType, instructions: str | None = None + ) -> None: + """Initialize the API agent with AG-UI adapter. + + Args: + deps_type: Type annotation for the agent dependencies. + instructions: Optional instructions for the agent. + """ + # Ensure environment variables are loaded. 
+ load_dotenv() + + self.agent = Agent( + 'openai:gpt-4o-mini', + output_type=str, + instructions=instructions, + deps_type=deps_type, + ) + self.adapter = self.agent.to_ag_ui() diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py new file mode 100644 index 000000000..18243d560 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py @@ -0,0 +1,54 @@ +"""Agentic Chat feature.""" + +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING, Annotated +from zoneinfo import ZoneInfo + +from ag_ui.core import RunAgentInput +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from .agent import AGUIAgent + +if TYPE_CHECKING: # pragma: no cover + from ag_ui.core import RunAgentInput + + +router: APIRouter = APIRouter(prefix='/agentic_chat') +agui: AGUIAgent = AGUIAgent() + + +@agui.agent.tool_plain +async def current_time(timezone: str = 'UTC') -> str: + """Get the current time in ISO format. + + Args: + timezone: The timezone to use. + + Returns: + The current time in ISO format string. + """ + tz: ZoneInfo = ZoneInfo(timezone) + return datetime.now(tz=tz).isoformat() + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. 
+ """ + return StreamingResponse( + agui.adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py new file mode 100644 index 000000000..97abe43bc --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py @@ -0,0 +1,139 @@ +"""Agentic Generative UI feature.""" + +from enum import StrEnum +from typing import Annotated, Any, Literal + +from ag_ui.core import EventType, RunAgentInput, StateDeltaEvent, StateSnapshotEvent +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic import BaseModel, Field +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from .agent import AGUIAgent + +router: APIRouter = APIRouter(prefix='/agentic_generative_ui') +instructions: str = """When planning use tools only, without any other messages. +IMPORTANT: +- Use the `create_plan` tool to set the initial state of the steps +- Use the `update_plan_step` tool to update the status of each step +- Do NOT repeat the plan or summarise it in a message +- Do NOT confirm the creation or updates in a message +- Do NOT ask the user for additional information or next steps + +Only one plan can be active at a time, so do not call the `create_plan` tool +again until all the steps in current plan are completed. 
+""" +agui: AGUIAgent = AGUIAgent(instructions=instructions) + + +class StepStatus(StrEnum): + """The status of a step in a plan.""" + + PENDING = 'pending' + COMPLETED = 'completed' + + +class Step(BaseModel): + """Represents a step in a plan.""" + + description: str = Field(description='The description of the step') + status: StepStatus = Field( + default=StepStatus.PENDING, + description='The status of the step (e.g., pending, completed)', + ) + + +class Plan(BaseModel): + """Represents a plan with multiple steps.""" + + steps: list[Step] = Field( + default_factory=lambda: list[Step](), description='The steps in the plan' + ) + + +class JSONPatchOp(BaseModel): + """A class representing a JSON Patch operation (RFC 6902).""" + + op: Literal['add', 'remove', 'replace', 'move', 'copy', 'test'] = Field( + ..., + description='The operation to perform: add, remove, replace, move, copy, or test', + ) + path: str = Field(..., description='JSON Pointer (RFC 6901) to the target location') + value: Any = Field( + default=None, + description='The value to apply (for add, replace operations)', + ) + from_: str | None = Field( + default=None, + alias='from', + description='Source path (for move, copy operations)', + ) + + +@agui.agent.tool_plain +def create_plan(steps: list[str]) -> StateSnapshotEvent: + """Create a plan with multiple steps. + + Args: + steps: List of step descriptions to create the plan. + + Returns: + StateSnapshotEvent containing the initial state of the steps. + """ + plan: Plan = Plan( + steps=[Step(description=step) for step in steps], + ) + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot=plan.model_dump(), + ) + + +@agui.agent.tool_plain +def update_plan_step( + index: int, description: str | None = None, status: StepStatus | None = None +) -> StateDeltaEvent: + """Update the plan with new steps or changes. + + Args: + index: The index of the step to update. + description: The new description for the step. 
+ status: The new status for the step. + + Returns: + StateDeltaEvent containing the changes made to the plan. + """ + changes: list[JSONPatchOp] = [] + if description is not None: + changes.append( + JSONPatchOp( + op='replace', path=f'/steps/{index}/description', value=description + ) + ) + if status is not None: + changes.append( + JSONPatchOp(op='replace', path=f'/steps/{index}/status', value=status.value) + ) + return StateDeltaEvent( + type=EventType.STATE_DELTA, + delta=changes, + ) + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + agui.adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py new file mode 100644 index 000000000..3b1119949 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py @@ -0,0 +1,48 @@ +"""Human in the Loop Feature. + +No special handling is required for this feature. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from ag_ui.core import RunAgentInput +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from .agent import AGUIAgent + +if TYPE_CHECKING: # pragma: no cover + from ag_ui.core import RunAgentInput + + +instructions: str = """When planning tasks use tools only, without any other messages. 
+IMPORTANT: +- Use the `generate_task_steps` tool to display the suggested steps to the user +- Never repeat the plan, or send a message detailing steps +- If accepted, confirm the creation of the plan and the number of selected (enabled) steps only +- If not accepted, ask the user for more information, DO NOT use the `generate_task_steps` tool again +""" +router: APIRouter = APIRouter(prefix='/human_in_the_loop') +agui: AGUIAgent = AGUIAgent(instructions=instructions) + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + agui.adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py new file mode 100644 index 000000000..c4eef70f2 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py @@ -0,0 +1,107 @@ +"""Predictive State feature.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Annotated + +from ag_ui.core import CustomEvent, EventType, RunAgentInput +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic import BaseModel +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE +from pydantic_ai_ag_ui.deps import StateDeps + +from .agent import AGUIAgent + +if TYPE_CHECKING: # pragma: no cover + from pydantic_ai import RunContext + + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +class DocumentState(BaseModel): + """State for the document being written.""" + + document: str + + +router: APIRouter = 
APIRouter(prefix='/predictive_state_updates') +agui: AGUIAgent[StateDeps[DocumentState]] = AGUIAgent( + deps_type=StateDeps[DocumentState] +) + + +# Tools which return AG-UI events will be sent to the client as part of the +# event stream, single events and iterables of events are supported. +@agui.agent.tool_plain +def document_predict_state() -> list[CustomEvent]: + """Enable document state prediction. + + Returns: + CustomEvent containing the event to enable state prediction. + """ + _LOGGER.info('enabling document state state prediction') + return [ + CustomEvent( + type=EventType.CUSTOM, + name='PredictState', + value=[ + { + 'state_key': 'document', + 'tool': 'write_document', + 'tool_argument': 'document', + }, + ], + ), + ] + + +@agui.agent.instructions() +def story_instructions(ctx: RunContext[StateDeps[DocumentState]]) -> str: + """Provide instructions for writing document if present. + + Args: + ctx: The run context containing document state information. + + Returns: + Instructions string for the document writing agent. + """ + _LOGGER.info('story instructions document=%s', ctx.deps.state.document) + + return f"""You are a helpful assistant for writing documents. + +Before you start writing, you MUST call the `document_predict_state` +tool to enable state prediction. + +To present the document to the user for review, you MUST use the +`write_document` tool. + +When you have written the document, DO NOT repeat it as a message. +If accepted briefly summarize the changes you made, 2 sentences +max, otherwise ask the user to clarify what they want to change. + +This is the current document: + +{ctx.deps.state.document} +""" + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. 
+ + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + agui.adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/shared_state.py b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py new file mode 100644 index 000000000..a81e6477b --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py @@ -0,0 +1,158 @@ +"""Shared State feature.""" + +from __future__ import annotations + +import json +import logging +from enum import StrEnum +from typing import TYPE_CHECKING, Annotated + +from ag_ui.core import EventType, RunAgentInput, StateSnapshotEvent +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic import BaseModel, Field +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE +from pydantic_ai_ag_ui.deps import StateDeps + +from .agent import AGUIAgent + +if TYPE_CHECKING: # pragma: no cover + from pydantic_ai import RunContext + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +class SkillLevel(StrEnum): + """The level of skill required for the recipe.""" + + BEGINNER = 'Beginner' + INTERMEDIATE = 'Intermediate' + ADVANCED = 'Advanced' + + +class SpecialPreferences(StrEnum): + """Special preferences for the recipe.""" + + HIGH_PROTEIN = 'High Protein' + LOW_CARB = 'Low Carb' + SPICY = 'Spicy' + BUDGET_FRIENDLY = 'Budget-Friendly' + ONE_POT_MEAL = 'One-Pot Meal' + VEGETARIAN = 'Vegetarian' + VEGAN = 'Vegan' + + +class CookingTime(StrEnum): + """The cooking time of the recipe.""" + + FIVE_MIN = '5 min' + FIFTEEN_MIN = '15 min' + THIRTY_MIN = '30 min' + FORTY_FIVE_MIN = '45 min' + SIXTY_PLUS_MIN = '60+ min' + + +class Ingredient(BaseModel): + """A class representing an ingredient in a recipe.""" + + icon: str = Field( + default='ingredient', + description="The icon emoji (not emoji code like '\x1f35e', but the actual emoji like 🥕) of the 
ingredient", + ) + name: str + amount: str + + +class Recipe(BaseModel): + """A class representing a recipe.""" + + skill_level: SkillLevel = Field( + description='The skill level required for the recipe' + ) + special_preferences: list[SpecialPreferences] = Field( + description='Any special preferences for the recipe' + ) + cooking_time: CookingTime = Field(description='The cooking time of the recipe') + ingredients: list[Ingredient] = Field(description='Ingredients for the recipe') + instructions: list[str] = Field(description='Instructions for the recipe') + + +class RecipeSnapshot(BaseModel): + """A class representing the state of the recipe.""" + + recipe: Recipe = Field(description='The current state of the recipe') + + +router: APIRouter = APIRouter(prefix='/shared_state') +agui: AGUIAgent[StateDeps[RecipeSnapshot]] = AGUIAgent( + deps_type=StateDeps[RecipeSnapshot] +) + + +@agui.agent.tool_plain +def display_recipe(recipe: Recipe) -> StateSnapshotEvent: + """Display the recipe to the user. + + Args: + recipe: The recipe to display. + + Returns: + StateSnapshotEvent containing the recipe snapshot. + """ + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot={'recipe': recipe}, + ) + + +@agui.agent.instructions +def recipe_instructions(ctx: RunContext[StateDeps[RecipeSnapshot]]) -> str: + """Instructions for the recipe generation agent. + + Args: + ctx: The run context containing recipe state information. + + Returns: + Instructions string for the recipe generation agent. + """ + _LOGGER.info('recipe instructions recipe=%s', ctx.deps.state.recipe) + + return f"""You are a helpful assistant for creating recipes. 
+ +IMPORTANT: +- Create a complete recipe using the existing ingredients +- Append new ingredients to the existing ones +- Use the `display_recipe` tool to present the recipe to the user +- Do NOT repeat the recipe in the message, use the tool instead + +Once you have created the updated recipe and displayed it to the user, +summarise the changes in one sentence, don't describe the recipe in +detail or send it as a message to the user. + +The structure of a recipe is as follows: + +{json.dumps(Recipe.model_json_schema(), indent=2)} + +The current state of the recipe is: + +{ctx.deps.state.recipe.model_dump_json(indent=2)} +""" + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + agui.adapter.run(input_data, accept, deps=StateDeps(state_type=RecipeSnapshot)), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py new file mode 100644 index 000000000..366df2388 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py @@ -0,0 +1,41 @@ +"""Tool Based Generative UI feature. + +No special handling is required for this feature. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from ag_ui.core import RunAgentInput +from fastapi import APIRouter, Header +from fastapi.responses import StreamingResponse +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from .agent import AGUIAgent + +if TYPE_CHECKING: # pragma: no cover + from ag_ui.core import RunAgentInput + + +router: APIRouter = APIRouter(prefix='/tool_based_generative_ui') +agui: AGUIAgent = AGUIAgent() + + +@router.post('') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: + """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + agui.adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) diff --git a/examples/pydantic_ai_ag_ui_examples/basic.py b/examples/pydantic_ai_ag_ui_examples/basic.py new file mode 100644 index 000000000..27fda6064 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/basic.py @@ -0,0 +1,58 @@ +"""Basic example of using pydantic_ai_ag_ui with FastAPI.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Annotated + +from fastapi import FastAPI, Header +from fastapi.responses import StreamingResponse +from pydantic_ai_ag_ui.adapter import Adapter +from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from pydantic_ai import Agent + +if TYPE_CHECKING: + from ag_ui.core import RunAgentInput + +app = FastAPI(title='AG-UI Endpoint') + +agent: Agent[None, str] = Agent( + 'openai:gpt-4o-mini', + instructions='You are a helpful assistant.', +) +adapter: Adapter[None, str] = agent.to_ag_ui() + + +@app.post('/agent') +async def handler( + input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE +) -> StreamingResponse: 
+ """Endpoint to handle AG-UI protocol requests and stream responses. + + Args: + input_data: The AG-UI run input. + accept: The Accept header to specify the response format. + + Returns: + A streaming response with event-stream media type. + """ + return StreamingResponse( + adapter.run(input_data, accept), + media_type=SSE_CONTENT_TYPE, + ) + + +if __name__ == '__main__': + import uvicorn + + from .cli import Args, parse_args + + args: Args = parse_args() + + uvicorn.run( + 'pydantic_ai_ag_ui_examples.dojo_server:app', + port=args.port, + reload=args.reload, + log_level=args.log_level, + log_config=args.log_config(), + ) diff --git a/examples/pydantic_ai_ag_ui_examples/cli/__init__.py b/examples/pydantic_ai_ag_ui_examples/cli/__init__.py new file mode 100644 index 000000000..dc71f4f1e --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/cli/__init__.py @@ -0,0 +1,8 @@ +"""Command line interface for the PydanticAI AG-UI servers.""" + +from .args import Args, parse_args + +__all__ = [ + 'Args', + 'parse_args', +] diff --git a/examples/pydantic_ai_ag_ui_examples/cli/args.py b/examples/pydantic_ai_ag_ui_examples/cli/args.py new file mode 100644 index 000000000..db281e429 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/cli/args.py @@ -0,0 +1,74 @@ +"""CLI argument parser for the PydanticAI AG-UI servers.""" + +import argparse +from typing import Any + +from uvicorn.config import LOGGING_CONFIG + +from pydantic_ai.models import dataclass + + +@dataclass +class Args: + """Custom namespace for command line arguments.""" + + port: int + reload: bool + log_level: str + loggers: list[str] + + def log_config(self) -> dict[str, Any]: + """Return the logging configuration based on the log level.""" + log_config: dict[str, Any] = LOGGING_CONFIG.copy() + for logger in self.loggers: + log_config['loggers'][logger] = { + 'handlers': ['default'], + 'level': self.log_level.upper(), + 'propagate': False, + } + + return log_config + + +def parse_args() -> Args: + """Parse 
command line arguments for the PydanticAI AG-UI servers. + + Returns: + Args: A dataclass containing the parsed command line arguments. + """ + parser: argparse.ArgumentParser = argparse.ArgumentParser( + description='PydanticAI AG-UI Dojo server' + ) + parser.add_argument( + '--port', + '-p', + type=int, + default=9000, + help='Port to run the server on (default: 9000)', + ) + parser.add_argument( + '--reload', + action='store_true', + default=True, + help='Enable auto-reload (default: True)', + ) + parser.add_argument( + '--no-reload', dest='reload', action='store_false', help='Disable auto-reload' + ) + parser.add_argument( + '--log-level', + choices=['critical', 'error', 'warning', 'info', 'debug', 'trace'], + default='info', + help='Adapter log level (default: info)', + ) + parser.add_argument( + '--loggers', + nargs='*', + default=[ + 'pydantic_ai_ag_ui.adapter', + ], + help='Logger names to configure (default: adapter and model loggers)', + ) + + args: argparse.Namespace = parser.parse_args() + return Args(**vars(args)) diff --git a/examples/pydantic_ai_ag_ui_examples/dojo_server.py b/examples/pydantic_ai_ag_ui_examples/dojo_server.py new file mode 100644 index 000000000..6ce7156f2 --- /dev/null +++ b/examples/pydantic_ai_ag_ui_examples/dojo_server.py @@ -0,0 +1,48 @@ +"""Example usage of the AG-UI adapter for PydanticAI. + +This provides a FastAPI application that demonstrates how to use the +PydanticAI agent with the AG-UI protocol. 
It includes examples for +each of the AG-UI dojo features: +- Agentic Chat +- Human in the Loop +- Agentic Generative UI +- Tool Based Generative UI +- Shared State +- Predictive State Updates +""" + +from __future__ import annotations + +from fastapi import FastAPI + +from .api import ( + agentic_chat_router, + agentic_generative_ui_router, + human_in_the_loop_router, + predictive_state_updates_router, + shared_state_router, + tool_based_generative_ui_router, +) + +app = FastAPI(title='PydanticAI AG-UI server') +app.include_router(agentic_chat_router, tags=['Agentic Chat']) +app.include_router(agentic_generative_ui_router, tags=['Agentic Generative UI']) +app.include_router(human_in_the_loop_router, tags=['Human in the Loop']) +app.include_router(predictive_state_updates_router, tags=['Predictive State Updates']) +app.include_router(shared_state_router, tags=['Shared State']) +app.include_router(tool_based_generative_ui_router, tags=['Tool Based Generative UI']) + + +if __name__ == '__main__': + import uvicorn + + from .cli import Args, parse_args + + args: Args = parse_args() + + uvicorn.run( + 'pydantic_ai_ag_ui_examples.dojo_server:app', + port=args.port, + reload=args.reload, + log_config=args.log_config(), + ) diff --git a/examples/pydantic_ai_ag_ui_examples/py.typed b/examples/pydantic_ai_ag_ui_examples/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/examples/pyproject.toml b/examples/pyproject.toml index bb5dcd9ef..04d6240bf 100644 --- a/examples/pyproject.toml +++ b/examples/pyproject.toml @@ -42,7 +42,7 @@ requires-python = ">=3.9" [tool.hatch.metadata.hooks.uv-dynamic-versioning] dependencies = [ - "pydantic-ai-slim[openai,vertexai,groq,anthropic]=={{ version }}", + "pydantic-ai-slim[openai,vertexai,groq,anthropic,ag-ui]=={{ version }}", "pydantic-evals=={{ version }}", "asyncpg>=0.30.0", "fastapi>=0.115.4", @@ -57,7 +57,10 @@ dependencies = [ ] [tool.hatch.build.targets.wheel] -packages = ["pydantic_ai_examples"] +packages = [ + 
"pydantic_ai_ag_ui_examples", + "pydantic_ai_examples", +] [tool.uv.sources] pydantic-ai-slim = { workspace = true } diff --git a/fasta2a/pyproject.toml b/fasta2a/pyproject.toml index 2abe809aa..93495d93f 100644 --- a/fasta2a/pyproject.toml +++ b/fasta2a/pyproject.toml @@ -54,7 +54,7 @@ logfire = ["logfire>=2.3"] [project.urls] Homepage = "https://ai.pydantic.dev/a2a/fasta2a" -Source = "https://github.com/pydantic/fasta2a" +Source = "https://github.com/pydantic/pydantic-ai" Documentation = "https://ai.pydantic.dev/a2a" Changelog = "https://github.com/pydantic/pydantic-ai/releases" diff --git a/mkdocs.yml b/mkdocs.yml index 44b1548f1..2b9f90849 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -47,6 +47,7 @@ nav: - mcp/server.md - mcp/run-python.md - A2A: a2a.md + - AG-UI: ag-ui.md - cli.md - Examples: - examples/index.md @@ -103,6 +104,7 @@ nav: - api/pydantic_evals/otel.md - api/pydantic_evals/generation.md - api/fasta2a.md + - api/pydantic_ai_ag_ui.md extra: # hide the "Made with Material for MkDocs" message diff --git a/pydantic_ai_ag_ui/LICENSE b/pydantic_ai_ag_ui/LICENSE new file mode 100644 index 000000000..1bf1f55e6 --- /dev/null +++ b/pydantic_ai_ag_ui/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Pydantic Services Inc. 2024 to present + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pydantic_ai_ag_ui/README.md b/pydantic_ai_ag_ui/README.md new file mode 100644 index 000000000..8fce4c2e3 --- /dev/null +++ b/pydantic_ai_ag_ui/README.md @@ -0,0 +1,13 @@ +# PydanticAI AG-UI Adapter + +[![CI](https://github.com/pydantic/pydantic-ai/actions/workflows/ci.yml/badge.svg?event=push)](https://github.com/pydantic/pydantic-ai/actions/workflows/ci.yml?query=branch%3Amain) +[![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic-ai.svg)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic-ai) +[![PyPI](https://img.shields.io/pypi/v/pydantic-ai-ag-ui.svg)](https://pypi.python.org/pypi/pydantic-ai-ag-ui) +[![python versions](https://img.shields.io/pypi/pyversions/pydantic-ai-ag-ui.svg)](https://github.com/pydantic/pydantic-ai) +[![license](https://img.shields.io/github/license/pydantic/pydantic-ai.svg)](https://github.com/pydantic/pydantic-ai/blob/main/LICENSE) + +To make it easier to use AG-UI with PydanticAI agents we've +implemented an adapter which handles the translation between PydanticAI +and AG-UI. + +See [the docs](https://ai.pydantic.dev/ag-ui/) for more information. diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py new file mode 100644 index 000000000..734afbf84 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py @@ -0,0 +1,19 @@ +"""Pydantic AI integration for ag-ui protocol.
+ +This package provides seamless integration between pydantic-ai agents and ag-ui +for building interactive AI applications with streaming event-based communication. +""" + +from __future__ import annotations + +from .adapter import Adapter +from .consts import SSE_CONTENT_TYPE +from .deps import StateDeps +from .protocols import StateHandler + +__all__ = [ + 'Adapter', + 'SSE_CONTENT_TYPE', + 'StateDeps', + 'StateHandler', +] diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py new file mode 100644 index 000000000..76fbf5951 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py @@ -0,0 +1,16 @@ +"""Enums for AG-UI protocol.""" + +from __future__ import annotations + +from enum import Enum + + +# TODO(steve): Remove this and all uses once https://github.com/ag-ui-protocol/ag-ui/pull/49 is merged. +class Role(str, Enum): + """Enum for message roles in AG-UI protocol.""" + + ASSISTANT = 'assistant' + USER = 'user' + DEVELOPER = 'developer' + SYSTEM = 'system' + TOOL = 'tool' diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py new file mode 100644 index 000000000..8bd78cb31 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py @@ -0,0 +1,51 @@ +"""Exceptions for the AI Agent UI module.""" + +from __future__ import annotations + +from dataclasses import InitVar, dataclass + +from pydantic import ValidationError as PydanticValidationError + + +@dataclass +class RunError(Exception): + """Exception raised for errors during agent runs.""" + + message: str + code: str + + def __str__(self) -> str: + return self.message + + +@dataclass(kw_only=True) +class UnexpectedToolCallError(RunError): + """Exception raised when an unexpected tool call is encountered.""" + + tool_name: InitVar[str] + message: str = '' + code: str = 'unexpected_tool_call' + + def __post_init__(self, tool_name: str) -> None: + """Set the message for the 
unexpected tool call. + + Args: + tool_name: The name of the tool that was unexpectedly called. + """ + self.message = f'unexpected tool call name={tool_name}' # pragma: no cover + + +@dataclass +class NoMessagesError(RunError): + """Exception raised when no messages are found in the input.""" + + message: str = 'no messages found in the input' + code: str = 'no_messages' + + +@dataclass +class InvalidStateError(RunError, PydanticValidationError): + """Exception raised when an invalid state is provided.""" + + message: str = 'invalid state provided' + code: str = 'invalid_state' diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py new file mode 100644 index 000000000..faec292bd --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py @@ -0,0 +1,678 @@ +"""Provides an AG-UI protocol adapter for the PydanticAI agent.""" + +from __future__ import annotations + +import json +import logging +import uuid +from collections.abc import Iterable, Sequence +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Generic, cast + +from ag_ui.core import ( + AssistantMessage, + BaseEvent, + DeveloperMessage, + EventType, + FunctionCall, + Message, + MessagesSnapshotEvent, + RunAgentInput, + RunErrorEvent, + RunFinishedEvent, + RunStartedEvent, + SystemMessage, + TextMessageContentEvent, + TextMessageEndEvent, + TextMessageStartEvent, + Tool as ToolAGUI, + ToolCall, + ToolCallArgsEvent, + ToolCallEndEvent, + ToolCallStartEvent, + ToolMessage, + UserMessage, +) +from ag_ui.encoder import EventEncoder + +from pydantic_ai import Agent, ModelRequestNode, models +from pydantic_ai._output import OutputType +from pydantic_ai._parts_manager import ModelResponsePartsManager +from pydantic_ai.agent import RunOutputDataT +from pydantic_ai.mcp import ToolResult +from pydantic_ai.messages import ( + AgentStreamEvent, + FinalResultEvent, + ModelMessage, + ModelRequest, + ModelRequestPart, + 
ModelResponse, + ModelResponsePart, + PartDeltaEvent, + PartStartEvent, + SystemPromptPart, + TextPart, + TextPartDelta, + ThinkingPart, + ThinkingPartDelta, + ToolCallPart, + ToolCallPartDelta, + ToolReturnPart, + UserPromptPart, +) +from pydantic_ai.result import AgentStream, OutputDataT +from pydantic_ai.settings import ModelSettings +from pydantic_ai.tools import AgentDepsT, Tool +from pydantic_ai.usage import Usage, UsageLimits + +from ._enums import Role +from ._exceptions import NoMessagesError, RunError, UnexpectedToolCallError +from .consts import SSE_CONTENT_TYPE +from .protocols import StateHandler + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + from ag_ui.encoder import EventEncoder + + from pydantic_ai._agent_graph import AgentNode + from pydantic_ai.agent import AgentRun + from pydantic_ai.result import FinalResult + from pydantic_graph.nodes import End + + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +@dataclass(repr=False) +class _RequestStreamContext: + """Data class to hold request stream context.""" + + message_id: str = '' + last_tool_call_id: str | None = None + part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[BaseEvent | None]()) + local_tool_calls: set[str] = field(default_factory=set) + + def new_message_id(self) -> str: + """Generate a new message ID for the request stream. + + Assigns a new UUID to the `message_id` and returns it. + + Returns: + A new message ID. + """ + self.message_id = str(uuid.uuid4()) + return self.message_id + + +@dataclass(kw_only=True, repr=False) +class Adapter(Generic[AgentDepsT, OutputDataT]): + """An agent adapter providing AG-UI protocol support for PydanticAI agents. + + This class manages the agent runs, tool calls, state storage and providing + an adapter for running agents with Server-Sent Event (SSE) streaming + responses using the AG-UI protocol. + + Examples: + This is an example of base usage with FastAPI. + .. 
code-block:: python + from __future__ import annotations + + from typing import TYPE_CHECKING, Annotated + + from fastapi import FastAPI, Header + from fastapi.responses import StreamingResponse + from pydantic_ai import Agent + + from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, Adapter + + if TYPE_CHECKING: + from ag_ui.core import RunAgentInput + + app = FastAPI(title="AG-UI Endpoint") + agent = Agent( + "openai:gpt-4o-mini", + deps_type=int, + instructions="You are a helpful assistant.", + ) + adapter = agent.to_ag_ui() + + @app.post("/") + async def root(input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE) -> StreamingResponse: + return StreamingResponse( + adapter.run(input_data, accept, deps=42), + media_type=SSE_CONTENT_TYPE, + ) + + PydanticAI tools which return AG-UI events will be sent to the client + as part of the event stream, single events and event iterables are + supported. + .. code-block:: python + @agent.tool + def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEvent: + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot=ctx.deps.state, + ) + + @agent.tool_plain + def custom_events() -> list[CustomEvent]: + return [ + CustomEvent( + type=EventType.CUSTOM, + name="count", + value=1, + ), + CustomEvent( + type=EventType.CUSTOM, + name="count", + value=2, + ), + ] + + Args: + agent: The PydanticAI `Agent` to adapt. + tool_prefix: Optional prefix to add to tool names. + logger: The logger to use for logging. 
+ """ + + agent: Agent[AgentDepsT, OutputDataT] = field(repr=False) + tool_prefix: str = field(default='', repr=False) + logger: logging.Logger = field(default=_LOGGER, repr=False) + + async def run( + self, + run_input: RunAgentInput, + accept: str = SSE_CONTENT_TYPE, + *, + output_type: OutputType[RunOutputDataT] | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: UsageLimits | None = None, + usage: Usage | None = None, + infer_name: bool = True, + additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + ) -> AsyncGenerator[str, None]: + """Run the agent with streaming response using AG-UI protocol events. + + The first two arguments are specific to `Adapter` the rest map directly to the `Agent.iter` method. + + Args: + run_input: The AG-UI run input containing thread_id, run_id, messages, etc. + accept: The accept header value for the run. + + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + additional_tools: Additional tools to use for this run. + + Yields: + Streaming SSE-formatted event chunks. 
+ """ + self.logger.debug('starting run: %s', json.dumps(run_input.model_dump(), indent=2)) + + tool_names: dict[str, str] = {self.tool_prefix + tool.name: tool.name for tool in run_input.tools} + encoder: EventEncoder = EventEncoder(accept=accept) + run_tools: list[Tool[AgentDepsT]] = list(additional_tools) if additional_tools else [] + run_tools.extend(self._convert_tools(run_input.tools)) + + try: + yield encoder.encode( + RunStartedEvent( + type=EventType.RUN_STARTED, + thread_id=run_input.thread_id, + run_id=run_input.run_id, + ), + ) + + if not run_input.messages: + raise NoMessagesError + + if isinstance(deps, StateHandler): + deps.set_state(run_input.state) + + run: AgentRun[AgentDepsT, Any] + async with self.agent.iter( + user_prompt=None, + output_type=output_type, + message_history=_convert_history(run_input.messages), + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + additional_tools=run_tools, + ) as run: + parts_manager: ModelResponsePartsManager = ModelResponsePartsManager() + async for event in self._agent_stream(tool_names, run, parts_manager): + if event is None: + # Tool call signals early return, so we stop processing. + self.logger.debug('tool call early return') + + # TODO(steve): Remove this workaround, it's only needed as AG-UI doesn't + # currently have a way to add server side tool calls to the message history + # via events. To workaround this we create a full snapshot of the messages + # and send that. 
+ snapshot: MessagesSnapshotEvent | None = self._message_snapshot( + run, run_input.messages, parts_manager + ) + if snapshot is not None: + yield encoder.encode(snapshot) + break + + yield encoder.encode(event) + except RunError as e: + self.logger.exception('agent run') + yield encoder.encode( + RunErrorEvent(type=EventType.RUN_ERROR, message=e.message, code=e.code), + ) + except Exception as e: # pragma: no cover + self.logger.exception('unexpected error in agent run') + yield encoder.encode( + RunErrorEvent(type=EventType.RUN_ERROR, message=str(e), code='run_error'), + ) + else: + yield encoder.encode( + RunFinishedEvent( + type=EventType.RUN_FINISHED, + thread_id=run_input.thread_id, + run_id=run_input.run_id, + ), + ) + + self.logger.info('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) + + def _message_snapshot( + self, run: AgentRun[AgentDepsT, Any], messages: list[Message], parts_manager: ModelResponsePartsManager + ) -> MessagesSnapshotEvent | None: + """Create a message snapshot to replicate the current state of the run. + + This method collects all messages from the run's state and the parts + manager, converting them into AG-UI messages. + + Args: + run: The agent run instance. + messages: The initial messages from the run input. + parts_manager: The parts manager containing the response parts. + + Returns: + A full snapshot of the messages so far in the run if local tool + calls were made, otherwise `None`. + """ + new_messages: list[ModelMessage] = run.ctx.state.message_history[len(messages) :] + if not any( + isinstance(request_part, ToolReturnPart) + for msg in new_messages + if isinstance(msg, ModelRequest) + for request_part in msg.parts + ): + # No tool calls were made, so we don't need a snapshot. + return None + + # Tool calls were made, so we need to create a snapshot. 
+ for msg in new_messages: + match msg: + case ModelRequest(): + for request_part in msg.parts: + if isinstance(request_part, ToolReturnPart): + messages.append( + ToolMessage( + id='result-' + request_part.tool_call_id, + role=Role.TOOL, + content=request_part.content, + tool_call_id=request_part.tool_call_id, + ) + ) + case ModelResponse(): + self._convert_response_parts(msg.parts, messages) + + self._convert_response_parts(parts_manager.get_parts(), messages) + + return MessagesSnapshotEvent( + type=EventType.MESSAGES_SNAPSHOT, + messages=messages, + ) + + def _convert_response_parts(self, parts: list[ModelResponsePart], messages: list[Message]) -> None: + """Convert model response parts to AG-UI messages. + + Args: + parts: The list of model response parts to convert. + messages: The list of messages to append the converted parts to. + """ + response_part: ModelResponsePart + for response_part in parts: + match response_part: + case TextPart(): # pragma: no cover + # This is not expected, but we handle it gracefully. + messages.append( + AssistantMessage( + id=uuid.uuid4().hex, + role=Role.ASSISTANT, + content=response_part.content, + ) + ) + case ToolCallPart(): + args: str = ( + json.dumps(response_part.args) + if isinstance(response_part.args, dict) + else response_part.args or '{}' + ) + messages.append( + AssistantMessage( + id=uuid.uuid4().hex, + role=Role.ASSISTANT, + tool_calls=[ + ToolCall( + id=response_part.tool_call_id, + type='function', + function=FunctionCall( + name=response_part.tool_name, + arguments=args, + ), + ) + ], + ), + ) + case ThinkingPart(): # pragma: no cover + # No AG-UI equivalent for thinking parts, so we skip them. + pass + + async def _tool_events(self, parts: list[ModelRequestPart]) -> AsyncGenerator[BaseEvent | None, None]: + """Check for tool call results that are AG-UI events. + + Args: + encoder: The event encoder to use for encoding events. + parts: The list of request parts to check for tool event returns. 
+ + Yields: + AG-UI Server-Sent Events (SSE). + """ + # TODO(steve): Determine how to handle multiple parts. Currently + # AG-UI only supports a single tool call per request, but that + # may change in the future. + part: ModelRequestPart + for part in parts: + if not isinstance(part, ToolReturnPart): + continue + + iter: Iterable[Any] + match part.content: + case BaseEvent(): + self.logger.debug('ag-ui event: %s', part.content) + yield part.content + case str() | bytes(): + # Avoid strings and bytes being checked as iterable. + pass + case Iterable() as iter: + for item in iter: + if isinstance(item, BaseEvent): # pragma: no branch + self.logger.debug('ag-ui event: %s', item) + yield item + case _: # pragma: no cover + # Not currently interested in other types. + pass + + def _convert_tools(self, run_tools: list[ToolAGUI]) -> list[Tool[AgentDepsT]]: + """Convert AG-UI tools to PydanticAI tools. + + Creates `Tool` objects from AG-UI tool definitions. These tools don't + actually execute anything, that is done by AG-UI client - they just + provide the necessary tool definitions to PydanticAI agent. + + Args: + run_tools: List of AG-UI tool definitions to convert. + + Returns: + List of PydanticAI Tool objects that call the AG-UI tools. + """ + return [self._tool_call(tool) for tool in run_tools] + + def _tool_call(self, tool: ToolAGUI) -> Tool[AgentDepsT]: + """Create a PydanticAI tool from an AG-UI tool definition. + + Args: + tool: The AG-UI tool definition to convert. + + Returns: + A PydanticAI `Tool` object that calls the AG-UI tool. + """ + + def _tool_stub(*args: Any, **kwargs: Any) -> ToolResult: + """Stub function which is never called. + + Returns: + Never returns as it always raises an exception. + + Raises: + UnexpectedToolCallError: Always raised since this should never be called. + """ + raise UnexpectedToolCallError(tool_name=tool.name) # pragma: no cover + + # TODO(steve): See it we can avoid the cast here. 
+ return cast( + 'Tool[AgentDepsT]', + Tool.from_schema( + function=_tool_stub, + name=tool.name, + description=tool.description, + json_schema=tool.parameters, + ), + ) + + async def _agent_stream( + self, + tool_names: dict[str, str], + run: AgentRun[AgentDepsT, Any], + parts_manager: ModelResponsePartsManager, + ) -> AsyncGenerator[BaseEvent | None, None]: + """Run the agent streaming responses using AG-UI protocol events. + + Args: + tool_names: A mapping of tool names to their AG-UI names. + run: The agent run to process. + parts_manager: The parts manager to handle tool call parts. + + Yields: + AG-UI Server-Sent Events (SSE). + """ + node: AgentNode[AgentDepsT, Any] | End[FinalResult[Any]] + msg: BaseEvent | None + async for node in run: + self.logger.debug('processing node=%r', node) + if not isinstance(node, ModelRequestNode): + # Not interested in UserPromptNode, CallToolsNode or End. + continue + + # Check for state updates. + snapshot: BaseEvent | None + async for snapshot in self._tool_events(node.request.parts): + yield snapshot + + stream_ctx: _RequestStreamContext = _RequestStreamContext() + request_stream: AgentStream[AgentDepsT] + async with node.stream(run.ctx) as request_stream: + agent_event: AgentStreamEvent + async for agent_event in request_stream: + async for msg in self._handle_agent_event(tool_names, stream_ctx, agent_event, parts_manager): + yield msg + + for part_end in stream_ctx.part_ends: + yield part_end + + async def _handle_agent_event( + self, + tool_names: dict[str, str], + stream_ctx: _RequestStreamContext, + agent_event: AgentStreamEvent, + parts_manager: ModelResponsePartsManager, + ) -> AsyncGenerator[BaseEvent | None, None]: + """Handle an agent event and yield AG-UI protocol events. + + Args: + tool_names: A mapping of tool names to their AG-UI names. + stream_ctx: The request stream context used to manage + streaming state across events. + agent_event: The agent event to process. 
+ parts_manager: The parts manager to handle tool call parts. + + Yields: + AG-UI Server-Sent Events (SSE) based on the agent event. + """ + self.logger.debug('agent_event: %s', agent_event) + match agent_event: + case PartStartEvent(): + # If we have a previous part end it. + part_end: BaseEvent | None + for part_end in stream_ctx.part_ends: + yield part_end + stream_ctx.part_ends.clear() + + match agent_event.part: + case TextPart(): + message_id: str = stream_ctx.new_message_id() + yield TextMessageStartEvent( + type=EventType.TEXT_MESSAGE_START, + message_id=message_id, + role=Role.ASSISTANT.value, + ) + stream_ctx.part_ends = [ + TextMessageEndEvent( + type=EventType.TEXT_MESSAGE_END, + message_id=message_id, + ), + ] + if agent_event.part.content: + yield TextMessageContentEvent( # pragma: no cover + type=EventType.TEXT_MESSAGE_CONTENT, + message_id=message_id, + delta=agent_event.part.content, + ) + case ToolCallPart(): # pragma: no branch + tool_name: str | None = tool_names.get(agent_event.part.tool_name) + if not tool_name: + # Local tool calls are not sent as events to the UI. + stream_ctx.local_tool_calls.add(agent_event.part.tool_call_id) + return + + parts_manager.handle_tool_call_part( + vendor_part_id=None, + tool_name=agent_event.part.tool_name, + args=agent_event.part.args, + tool_call_id=agent_event.part.tool_call_id, + ) + stream_ctx.last_tool_call_id = agent_event.part.tool_call_id + yield ToolCallStartEvent( + type=EventType.TOOL_CALL_START, + tool_call_id=agent_event.part.tool_call_id, + tool_call_name=tool_name or agent_event.part.tool_name, + ) + stream_ctx.part_ends = [ + ToolCallEndEvent( + type=EventType.TOOL_CALL_END, + tool_call_id=agent_event.part.tool_call_id, + ), + None, # Signal continuation of the stream. + ] + case ThinkingPart(): # pragma: no branch + # No equivalent AG-UI event yet. 
+ pass + case PartDeltaEvent(): + match agent_event.delta: + case TextPartDelta(): + yield TextMessageContentEvent( + type=EventType.TEXT_MESSAGE_CONTENT, + message_id=stream_ctx.message_id, + delta=agent_event.delta.content_delta, + ) + case ToolCallPartDelta(): # pragma: no branch + if agent_event.delta.tool_call_id in stream_ctx.local_tool_calls: + # Local tool calls are not sent as events to the UI. + return + + parts_manager.handle_tool_call_delta( + vendor_part_id=None, + tool_name=None, + args=agent_event.delta.args_delta, + tool_call_id=agent_event.delta.tool_call_id, + ) + yield ToolCallArgsEvent( + type=EventType.TOOL_CALL_ARGS, + tool_call_id=agent_event.delta.tool_call_id + or stream_ctx.last_tool_call_id + or 'unknown', # Should never be unknown, but just in case. + delta=agent_event.delta.args_delta + if isinstance(agent_event.delta.args_delta, str) + else json.dumps(agent_event.delta.args_delta), + ) + case ThinkingPartDelta(): # pragma: no branch + # No equivalent AG-UI event yet. + pass + case FinalResultEvent(): + # No equivalent AG-UI event yet. + pass + + +def _convert_history(messages: list[Message]) -> list[ModelMessage]: + """Convert an AG-UI history to a PydanticAI one. + + Args: + messages: List of AG-UI messages to convert. + + Returns: + List of PydanticAI model messages. 
+ """ + msg: Message + result: list[ModelMessage] = [] + tool_calls: dict[str, str] = {} + for msg in messages: + match msg: + case UserMessage(): + result.append(ModelRequest(parts=[UserPromptPart(content=msg.content)])) + case AssistantMessage(): + if msg.tool_calls: + for tool_call in msg.tool_calls: + tool_calls[tool_call.id] = tool_call.function.name + + result.append( + ModelResponse( + parts=[ + ToolCallPart( + tool_name=tool_call.function.name, + tool_call_id=tool_call.id, + args=tool_call.function.arguments, + ) + for tool_call in msg.tool_calls + ] + ) + ) + + if msg.content: + result.append(ModelResponse(parts=[TextPart(content=msg.content)])) + case SystemMessage(): + # TODO(steve): Should we handle as instructions instead of system prompt? + result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) + case ToolMessage(): + result.append( + ModelRequest( + parts=[ + ToolReturnPart( + tool_name=tool_calls.get(msg.tool_call_id, 'unknown'), + content=msg.content, + tool_call_id=msg.tool_call_id, + ) + ] + ) + ) + case DeveloperMessage(): # pragma: no branch + # TODO(steve): Should these be handled differently? 
+ result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) + + return result diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py new file mode 100644 index 000000000..93dc92ad5 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py @@ -0,0 +1,8 @@ +"""Constants for the AI Agent UI module.""" + +from __future__ import annotations + +from typing import Final + +SSE_CONTENT_TYPE: Final[str] = 'text/event-stream' +"""Content type header value for Server-Sent Events (SSE).""" diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py new file mode 100644 index 000000000..b3dce4149 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import Generic, TypeVar + +from ag_ui.core import State +from pydantic import BaseModel, ValidationError + +from ._exceptions import InvalidStateError + +StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) +"""Type variable for the state type, which must be a subclass of `BaseModel`.""" + + +@dataclass(kw_only=True) +class StateDeps(Generic[StateT]): + """Provides AG-UI state management. + + This class is used to manage the state of an agent run. It allows setting + the state of the agent run with a specific type of state model, which must + be a subclass of `BaseModel`. + + The state is set using the `set_state` when the run starts by the `Adapter`. + + Implements the `StateHandler` protocol. + """ + + state_type: type[StateT] + state: StateT = field(init=False) + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. + + Implements the `StateHandler` protocol. + + Args: + state: The run state, which should match the expected model type or be `None`. 
+ + Raises: + InvalidStateError: If `state` does not match the expected model and is not `None`. + """ + if state is None: + return + + try: + self.state = self.state_type.model_validate(state) + except ValidationError as e: # pragma: no cover + raise InvalidStateError from e diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py new file mode 100644 index 000000000..4a3af8102 --- /dev/null +++ b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py @@ -0,0 +1,26 @@ +"""Protocols for the AG-UI to PydanticAI agent adapter.""" + +from __future__ import annotations + +from typing import Protocol, runtime_checkable + +from ag_ui.core import State + + +@runtime_checkable +class StateHandler(Protocol): + """Protocol for state handlers in agent runs.""" + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. + + Args: + state: The run state. + + Raises: + ValidationError: If `state` does not match the expected model. + """ + ... diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/pydantic_ai_ag_ui/pyproject.toml b/pydantic_ai_ag_ui/pyproject.toml new file mode 100644 index 000000000..a568c4dc1 --- /dev/null +++ b/pydantic_ai_ag_ui/pyproject.toml @@ -0,0 +1,62 @@ +[build-system] +requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"] +build-backend = "hatchling.build" + +[tool.hatch.version] +source = "uv-dynamic-versioning" + +[tool.uv-dynamic-versioning] +vcs = "git" +style = "pep440" +bump = true + +[project] +name = "pydantic-ai-ag-ui" +dynamic = ["version", "dependencies"] +description = "Convert an AI Agent to speak AG UI! 
✨" +authors = [ + { name = "Steven Hartland", email = "steve@rocketscience.gg" }, +] +license = "MIT" +readme = "README.md" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: MIT License", + "Operating System :: Unix", + "Operating System :: POSIX :: Linux", + "Environment :: Console", + "Environment :: MacOS X", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Internet", +] +requires-python = ">=3.9" + +[tool.hatch.metadata.hooks.uv-dynamic-versioning] +dependencies = [ + "ag-ui-protocol>=0.1.5", + "pydantic>=2.10", + "pydantic-ai>=0.3.2", +] + +[project.urls] +Homepage = "https://ai.pydantic.dev/ag-ui/pydantic_ai_ag_ui" +Source = "https://github.com/pydantic/pydantic-ai/" +Documentation = "https://ai.pydantic.dev/ag-ui" +Changelog = "https://github.com/pydantic/pydantic-ai/releases" + +[tool.hatch.build.targets.wheel] +packages = ["pydantic_ai_ag_ui"] + +[tool.uv.sources] +pydantic-ai = { workspace = true } diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index c46699fd0..f0c9b58ff 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -3,6 +3,7 @@ import dataclasses import inspect import json +import logging import warnings from asyncio import Lock from collections.abc import AsyncIterator, Awaitable, Iterator, Sequence @@ -72,7 +73,7 @@ from fasta2a.schema import Provider, Skill from fasta2a.storage import Storage from pydantic_ai.mcp import 
MCPServer - + from pydantic_ai_ag_ui import Adapter __all__ = ( 'Agent', @@ -1853,6 +1854,39 @@ async def run_mcp_servers( async with self: yield + def to_ag_ui( + self, + *, + logger: logging.Logger | None = None, + tool_prefix: str | None = None, + ) -> Adapter[AgentDepsT, OutputDataT]: + """Convert the agent to an Adapter instance. + + This allows you to use the agent with a compatible AG-UI frontend. + + Args: + logger: Optional logger to use for the adapter. + tool_prefix: Optional prefix to add to tool names in the AG-UI. + + Returns: + An adapter that converts between AG-UI protocol and PydanticAI. + """ + try: + from pydantic_ai_ag_ui.adapter import Adapter + except ImportError as _import_error: + raise ImportError( + 'Please install the `pydantic-ai-ag-ui` package to use `Agent.to_ag_ui()` method, ' + 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag_ui]"`' + ) from _import_error + + kwargs: dict[str, Any] = {} + if tool_prefix is not None: + kwargs['tool_prefix'] = tool_prefix + if logger is not None: + kwargs['logger'] = logger + + return Adapter(agent=self, **kwargs) + def to_a2a( self, *, diff --git a/pydantic_ai_slim/pydantic_ai/models/test.py b/pydantic_ai_slim/pydantic_ai/models/test.py index 87a0c79c0..b67131744 100644 --- a/pydantic_ai_slim/pydantic_ai/models/test.py +++ b/pydantic_ai_slim/pydantic_ai/models/test.py @@ -45,6 +45,33 @@ class _WrappedToolOutput: value: Any | None +@dataclass +class TestToolCallPart: + """Represents a tool call in the test model.""" + + call_tools: list[str] | Literal['all'] = 'all' + deltas: bool = False + + +@dataclass +class TestTextPart: + """Represents a text part in the test model.""" + + text: str + + +TestPart = TestTextPart | TestToolCallPart +"""A part of the test model response.""" + + +@dataclass +class TestNode: + """A node in the test model.""" + + parts: list[TestPart] + id: str = field(default_factory=_utils.generate_tool_call_id) + + @dataclass class TestModel(Model): """A 
model specifically for testing purposes. @@ -63,6 +90,10 @@ class TestModel(Model): call_tools: list[str] | Literal['all'] = 'all' """List of tools to call. If `'all'`, all tools will be called.""" + tool_call_deltas: set[str] = field(default_factory=set) + """A set of tool call names which should result in tool call part deltas.""" + custom_response_nodes: list[TestNode] | None = None + """A list of nodes which defines a custom model response.""" custom_output_text: str | None = None """If set, this text is returned as the final output.""" custom_output_args: Any | None = None @@ -102,7 +133,10 @@ async def request_stream( model_response = self._request(messages, model_settings, model_request_parameters) yield TestStreamedResponse( - _model_name=self._model_name, _structured_response=model_response, _messages=messages + _model_name=self._model_name, + _structured_response=model_response, + _messages=messages, + _tool_call_deltas=self.tool_call_deltas, ) @property @@ -150,23 +184,71 @@ def _get_output(self, model_request_parameters: ModelRequestParameters) -> _Wrap else: return _WrappedTextOutput(None) + def _node_response( + self, + messages: list[ModelMessage], + model_request_parameters: ModelRequestParameters, + ) -> ModelResponse | None: + """Returns a ModelResponse based on configured nodes. + + Args: + messages: The messages sent to the model. + model_request_parameters: The parameters for the model request. + + Returns: + The response from the model, or `None` if no nodes configured or + all nodes have already been processed. + """ + if not self.custom_response_nodes: + # No nodes configured, follow the default behaviour. + return None + + # Pick up where we left off by counting the number of ModelResponse messages in the stream. + # This allows us to stream the response in chunks, simulating a real model response. 
+ node: TestNode + count: int = sum(isinstance(m, ModelResponse) for m in messages) + if count < len(self.custom_response_nodes): + node: TestNode = self.custom_response_nodes[count] + assert node.parts, 'Node parts should not be empty.' + + parts: list[ModelResponsePart] = [] + part: TestPart + for part in node.parts: + if isinstance(part, TestTextPart): + assert model_request_parameters.allow_text_output, ( + 'Plain response not allowed, but `part` is a `TestText`.' + ) + parts.append(TextPart(part.text)) + elif isinstance(part, TestToolCallPart): + tool_calls = self._get_tool_calls(model_request_parameters) + if part.call_tools == 'all': + parts.extend(ToolCallPart(name, self.gen_tool_args(args)) for name, args in tool_calls) + else: + parts.extend( + ToolCallPart(name, self.gen_tool_args(args)) + for name, args in tool_calls + if name in part.call_tools + ) + return ModelResponse(vendor_id=node.id, parts=parts, model_name=self._model_name) + def _request( self, messages: list[ModelMessage], model_settings: ModelSettings | None, model_request_parameters: ModelRequestParameters, ) -> ModelResponse: - tool_calls = self._get_tool_calls(model_request_parameters) - output_wrapper = self._get_output(model_request_parameters) - output_tools = model_request_parameters.output_tools + if (response := self._node_response(messages, model_request_parameters)) is not None: + return response - # if there are tools, the first thing we want to do is call all of them + tool_calls = self._get_tool_calls(model_request_parameters) if tool_calls and not any(isinstance(m, ModelResponse) for m in messages): return ModelResponse( parts=[ToolCallPart(name, self.gen_tool_args(args)) for name, args in tool_calls], model_name=self._model_name, ) + output_wrapper = self._get_output(model_request_parameters) + output_tools = model_request_parameters.output_tools if messages: # pragma: no branch last_message = messages[-1] assert isinstance(last_message, ModelRequest), 'Expected last message 
to be a `ModelRequest`.' @@ -218,7 +300,8 @@ def _request( output_tool = output_tools[self.seed % len(output_tools)] if custom_output_args is not None: return ModelResponse( - parts=[ToolCallPart(output_tool.name, custom_output_args)], model_name=self._model_name + parts=[ToolCallPart(output_tool.name, custom_output_args)], + model_name=self._model_name, ) else: response_args = self.gen_tool_args(output_tool) @@ -232,6 +315,7 @@ class TestStreamedResponse(StreamedResponse): _model_name: str _structured_response: ModelResponse _messages: InitVar[Iterable[ModelMessage]] + _tool_call_deltas: set[str] _timestamp: datetime = field(default_factory=_utils.now_utc, init=False) def __post_init__(self, _messages: Iterable[ModelMessage]): @@ -253,9 +337,31 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: self._usage += _get_string_usage(word) yield self._parts_manager.handle_text_delta(vendor_part_id=i, content=word) elif isinstance(part, ToolCallPart): - yield self._parts_manager.handle_tool_call_part( - vendor_part_id=i, tool_name=part.tool_name, args=part.args, tool_call_id=part.tool_call_id - ) + if part.tool_name in self._tool_call_deltas: + # Start with empty tool call delta. + event = self._parts_manager.handle_tool_call_delta( + vendor_part_id=i, tool_name=part.tool_name, args='', tool_call_id=part.tool_call_id + ) + if event is not None: # pragma: no branch + yield event + + # Stream the args as JSON string in chunks. 
+ args_json = pydantic_core.to_json(part.args).decode() + *chunks, last_chunk = args_json.split(',') if ',' in args_json else [args_json] + chunks = [f'{chunk},' for chunk in chunks] if chunks else [] + if last_chunk: # pragma: no branch + chunks.append(last_chunk) + + for chunk in chunks: + event = self._parts_manager.handle_tool_call_delta( + vendor_part_id=i, tool_name=None, args=chunk, tool_call_id=part.tool_call_id + ) + if event is not None: # pragma: no branch + yield event + else: + yield self._parts_manager.handle_tool_call_part( + vendor_part_id=i, tool_name=part.tool_name, args=part.args, tool_call_id=part.tool_call_id + ) elif isinstance(part, ThinkingPart): # pragma: no cover # NOTE: There's no way to reach this part of the code, since we don't generate ThinkingPart on TestModel. assert False, "This should be unreachable — we don't generate ThinkingPart on TestModel." diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 99706c2c6..2c13974aa 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -80,6 +80,8 @@ mcp = ["mcp>=1.9.4; python_version >= '3.10'"] evals = ["pydantic-evals=={{ version }}"] # A2A a2a = ["fasta2a=={{ version }}"] +# AG UI Adapter +ag-ui = ["pydantic-ai-ag-ui=={{ version }}"] [dependency-groups] dev = [ diff --git a/pyproject.toml b/pyproject.toml index ea96212e3..64967de63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ requires-python = ">=3.9" [tool.hatch.metadata.hooks.uv-dynamic-versioning] dependencies = [ - "pydantic-ai-slim[openai,vertexai,google,groq,anthropic,mistral,cohere,bedrock,cli,mcp,evals,a2a]=={{ version }}", + "pydantic-ai-slim[openai,vertexai,google,groq,anthropic,mistral,cohere,bedrock,cli,mcp,evals,a2a,ag-ui]=={{ version }}", ] [tool.hatch.metadata.hooks.uv-dynamic-versioning.optional-dependencies] @@ -68,9 +68,11 @@ pydantic-evals = { workspace = true } pydantic-graph = { workspace = true } pydantic-ai-examples = { workspace = 
true } fasta2a = { workspace = true } +pydantic-ai-ag-ui = { workspace = true } [tool.uv.workspace] members = [ + "pydantic_ai_ag_ui", "pydantic_ai_slim", "pydantic_evals", "pydantic_graph", @@ -103,6 +105,7 @@ include = ["/README.md", "/Makefile", "/tests"] line-length = 120 target-version = "py39" include = [ + "pydantic_ai_ag_ui/**/*.py", "pydantic_ai_slim/**/*.py", "pydantic_evals/**/*.py", "pydantic_graph/**/*.py", @@ -161,6 +164,7 @@ reportUnnecessaryIsInstance = false reportUnnecessaryTypeIgnoreComment = true reportMissingModuleSource = false include = [ + "pydantic_ai_ag_ui", "pydantic_ai_slim", "pydantic_evals", "pydantic_graph", @@ -213,6 +217,7 @@ filterwarnings = [ [tool.coverage.run] # required to avoid warnings about files created by create_module fixture include = [ + "pydantic_ai_ag_ui/**/*.py", "pydantic_ai_slim/**/*.py", "pydantic_evals/**/*.py", "pydantic_graph/**/*.py", diff --git a/tests/pydantic_ai_ag_ui/__init__.py b/tests/pydantic_ai_ag_ui/__init__.py new file mode 100644 index 000000000..8918e6b0f --- /dev/null +++ b/tests/pydantic_ai_ag_ui/__init__.py @@ -0,0 +1 @@ +"""Tests for pydantic_ai_ag_ui module.""" diff --git a/tests/pydantic_ai_ag_ui/test_adapter.py b/tests/pydantic_ai_ag_ui/test_adapter.py new file mode 100644 index 000000000..167904a03 --- /dev/null +++ b/tests/pydantic_ai_ag_ui/test_adapter.py @@ -0,0 +1,821 @@ +"""Comprehensive tests for Adapter.run method.""" + +# pyright: reportPossiblyUnboundVariable=none +from __future__ import annotations + +import asyncio +import contextlib +import re +import sys +import uuid +from collections.abc import Callable +from dataclasses import dataclass, field +from itertools import count +from typing import Any, Final, Literal, cast + +import pytest +from pydantic import BaseModel + +from pydantic_ai import Agent +from pydantic_ai.models.test import TestModel, TestNode, TestToolCallPart + +has_required_python: bool = sys.version_info >= (3, 10) +has_ag_ui: bool = False +if 
has_required_python: + with contextlib.suppress(ImportError): + from ag_ui.core import ( + AssistantMessage, + CustomEvent, + DeveloperMessage, + EventType, + FunctionCall, + Message, + RunAgentInput, + StateSnapshotEvent, + SystemMessage, + Tool, + ToolCall, + ToolMessage, + UserMessage, + ) + + from pydantic_ai_ag_ui._enums import Role + from pydantic_ai_ag_ui.adapter import Adapter + from pydantic_ai_ag_ui.deps import StateDeps + + has_ag_ui = True + + +pytestmark = [ + pytest.mark.anyio, + pytest.mark.skipif(not has_required_python, reason='requires Python 3.10 or higher'), + pytest.mark.skipif(has_required_python and not has_ag_ui, reason='adapter-ag-ui not installed'), +] + +# Type aliases. +_MockUUID = Callable[[], str] + +# Constants. +THREAD_ID_PREFIX: Final[str] = 'thread_' +RUN_ID_PREFIX: Final[str] = 'run_' +EXPECTED_EVENTS: Final[list[str]] = [ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000003","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"success "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"(no "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"calls)"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', +] +UUID_PATTERN: Final[re.Pattern[str]] = re.compile(r'\d{8}-\d{4}-\d{4}-\d{4}-\d{12}') + + +class StateInt(BaseModel): + """Example state class for testing purposes.""" + + value: int = 0 + + +def get_weather(name: str = 'get_weather') -> Tool: + 
return Tool( + name=name, + description='Get the weather for a given location', + parameters={ + 'type': 'object', + 'properties': { + 'location': { + 'type': 'string', + 'description': 'The location to get the weather for', + }, + }, + 'required': ['location'], + }, + ) + + +@pytest.fixture +async def adapter() -> Adapter[StateDeps[StateInt], str]: + """Fixture to create an Adapter instance for testing. + + Returns: + An Adapter instance configured for testing. + """ + return await create_adapter([]) + + +async def create_adapter( + call_tools: list[str] | Literal['all'], +) -> Adapter[StateDeps[StateInt], str]: + """Create an Adapter instance for testing. + + Args: + call_tools: List of tool names to enable, or 'all' for all tools. + + Returns: + An Adapter instance configured with the specified tools. + """ + return Agent( + model=TestModel( + call_tools=call_tools, + tool_call_deltas={'get_weather_parts', 'current_time'}, + ), + deps_type=cast(type[StateDeps[StateInt]], StateDeps[StateInt]), + tools=[send_snapshot, send_custom, current_time], + ).to_ag_ui() + + +@pytest.fixture +def mock_uuid(monkeypatch: pytest.MonkeyPatch) -> _MockUUID: + """Mock UUID generation for consistent test results. + + This fixture replaces the uuid.uuid4 function with a mock that generates + sequential UUIDs for testing purposes. This ensures that UUIDs are + predictable and consistent across test runs. + + Args: + monkeypatch: The pytest monkeypatch fixture to modify uuid.uuid4. + + Returns: + A function that generates a mock UUID. + """ + counter = count(1) + + def _fake_uuid() -> str: + """Generate a fake UUID string with sequential numbering. + + Returns: + A fake UUID string in the format '00000000-0000-0000-0000-{counter:012d}'. + """ + return f'00000000-0000-0000-0000-{next(counter):012d}' + + def _fake_uuid4() -> uuid.UUID: + """Generate a fake UUID object using the fake UUID string. + + Returns: + A UUID object created from the fake UUID string. 
+ """ + return uuid.UUID(_fake_uuid()) + + # Due to how ToolCallPart uses generate_tool_call_id with field default_factory, + # we have to patch uuid.uuid4 directly instead of the generate function. This + # also covers how we generate messages IDs. + monkeypatch.setattr('uuid.uuid4', _fake_uuid4) + + return _fake_uuid + + +def assert_events(events: list[str], expected_events: list[str], *, loose: bool = False) -> None: + expected: str + event: str + for event, expected in zip(events, expected_events, strict=True): + if loose: + expected = normalize_uuids(expected) + event = normalize_uuids(event) + assert event == f'data: {expected}\n\n' + + +def normalize_uuids(text: str) -> str: + """Normalize UUIDs in the given text to a fixed format. + + Args: + text: The input text containing UUIDs. + + Returns: + The text with UUIDs replaced by a fixed UUID. + """ + return UUID_PATTERN.sub('00000000-0000-0000-0000-000000000001', text) + + +def current_time() -> str: + """Get the current time in ISO format. + + Returns: + The current UTC time in ISO format string. + """ + return '21T12:08:45.485981+00:00' + + +async def send_snapshot() -> StateSnapshotEvent: + """Display the recipe to the user. + + Returns: + StateSnapshotEvent. + """ + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot={'key': 'value'}, + ) + + +async def send_custom() -> list[CustomEvent]: + """Display the recipe to the user. + + Returns: + StateSnapshotEvent. + """ + return [ + CustomEvent( + type=EventType.CUSTOM, + name='custom_event1', + value={'key1': 'value1'}, + ), + CustomEvent( + type=EventType.CUSTOM, + name='custom_event2', + value={'key2': 'value2'}, + ), + ] + + +@dataclass(frozen=True) +class Run: + """Test parameter class for Adapter.run method tests. + + Args: + messages: List of messages for the run input. + state: State object for the run input. + context: Context list for the run input. + tools: List of tools for the run input. 
+ forwarded_props: Forwarded properties for the run input. + nodes: List of TestNode instances for the run input. + """ + + messages: list[Message] + state: Any = None + context: list[Any] = field(default_factory=lambda: list[Any]()) + tools: list[Tool] = field(default_factory=lambda: list[Tool]()) + nodes: list[TestNode] | None = None + forwarded_props: Any = None + + def run_input(self, *, thread_id: str, run_id: str) -> RunAgentInput: + """Create a RunAgentInput instance for the test case. + + Args: + thread_id: The thread ID for the run. + run_id: The run ID for the run. + + Returns: + A RunAgentInput instance with the test case parameters. + """ + return RunAgentInput( + thread_id=thread_id, + run_id=run_id, + messages=self.messages, + state=self.state, + context=self.context, + tools=self.tools, + forwarded_props=self.forwarded_props, + ) + + +@dataclass(frozen=True) +class AdapterRunTest: + """Test parameter class for Adapter.run method tests. + + Args: + id: Name of the test case. + runs: List of Run instances for the test case. 
+ """ + + id: str + runs: list[Run] + call_tools: list[str] = field(default_factory=lambda: list[str]()) + expected_events: list[str] = field(default_factory=lambda: list(EXPECTED_EVENTS)) + expected_state: int | None = None + + +# Test parameter data +def tc_parameters() -> list[AdapterRunTest]: + if not has_ag_ui: + return [AdapterRunTest(id='skipped', runs=[])] + + return [ + AdapterRunTest( + id='basic_user_message', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Hello, how are you?', + ), + ], + ), + ], + ), + AdapterRunTest( + id='empty_messages', + runs=[ + Run(messages=[]), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_ERROR","message":"no messages found in the input","code":"no_messages"}', + ], + ), + AdapterRunTest( + id='multiple_messages', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='First message', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + content='Assistant response', + ), + SystemMessage( + id='msg_3', + role=Role.SYSTEM.value, + content='System message', + ), + DeveloperMessage( + id='msg_4', + role=Role.DEVELOPER.value, + content='Developer note', + ), + UserMessage( + id='msg_5', + role=Role.USER.value, + content='Second message', + ), + ], + ), + ], + ), + AdapterRunTest( + id='messages_with_history', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='First message', + ), + UserMessage( + id='msg_2', + role=Role.USER.value, + content='Second message', + ), + ], + ), + ], + ), + AdapterRunTest( + id='tool_ag_ui', + call_tools=['get_weather'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + 
role=Role.USER.value, + content='Please call get_weather for Paris', + ), + ], + tools=[get_weather()], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather()], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + ], + ), + AdapterRunTest( + id='tool_ag_ui_multiple', + call_tools=['get_weather', 
'get_weather_parts'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather and get_weather_parts for Paris', + ), + ], + tools=[get_weather(), get_weather('get_weather_parts')], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + AssistantMessage( + id='msg_4', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather_parts', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_5', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather(), get_weather('get_weather_parts')], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + 
'{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"get_weather\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather_parts\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000006"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + ], + ), + AdapterRunTest( + id='tool_ag_ui_parts', + call_tools=['get_weather_parts'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather_parts for Paris', + ), + ], + tools=[get_weather('get_weather_parts')], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather_parts for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather_parts', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather('get_weather_parts')], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather_parts"}', + 
'{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000003","delta":"{\\"location\\":\\"a\\"}"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather_parts\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + ], + ), + AdapterRunTest( + id='tool_local_single_event', + call_tools=['send_snapshot'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call send_snapshot', + ), + ], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"STATE_SNAPSHOT","snapshot":{"key":"value"}}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_snapshot\\":{\\"type\\":\\"STATE_SNAPSHOT\\",\\"timestam"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"p\\":null,\\"rawEvent\\":null,\\"snapshot\\":{\\"key\\":\\"value\\"}}}"}', + 
'{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_multiple_events', + call_tools=['send_custom'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call send_custom', + ), + ], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"CUSTOM","name":"custom_event1","value":{"key1":"value1"}}', + '{"type":"CUSTOM","name":"custom_event2","value":{"key2":"value2"}}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_custom\\":[{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event1\\",\\"value\\":{\\"key1\\":\\"va"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"lue1\\"}},{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event2\\",\\"value\\":{\\"key2\\":\\"value2\\"}}]}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_parts', + call_tools=['current_time'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call current_time', + ), + ], + ), + ], + expected_events=[ + 
'{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"current_time\\":\\"21T1"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"2:08:45.485981+00:00\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_then_ag_ui', + call_tools=['current_time', 'get_weather'], + runs=[ + Run( + nodes=[ + TestNode( + parts=[TestToolCallPart(call_tools=['current_time'])], + ), + TestNode( + parts=[TestToolCallPart(call_tools=['get_weather'])], + ), + ], + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please tell me the time and then call get_weather for Paris', + ), + ], + tools=[get_weather()], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='current_time', + arguments='{}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + AssistantMessage( + id='msg_4', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000004', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + 
), + ), + ], + ), + ToolMessage( + id='msg_5', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000004', + ), + ], + tools=[get_weather()], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000004","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', + '{"type":"MESSAGES_SNAPSHOT","messages":[{"id":"msg_1","role":"user","content":"Please tell me the time and then call get_weather for Paris"},' + + '{"id":"00000000000000000000000000000005","role":"assistant","toolCalls":[{"id":"pyd_ai_00000000000000000000000000000003","type":"function",' + + '"function":{"name":"current_time","arguments":"{}"}}]},{"id":"result-pyd_ai_00000000000000000000000000000003","role":"tool","content":' + + '"21T12:08:45.485981+00:00","toolCallId":"pyd_ai_00000000000000000000000000000003"},{"id":"00000000000000000000000000000006","role":"assistant",' + + '"toolCalls":[{"id":"pyd_ai_00000000000000000000000000000004","type":"function","function":{"name":"get_weather","arguments":"{\\"location\\": \\"a\\"}"}}]}]}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000008","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"{\\"current_time\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\",\\"get_weather\\":\\"Tool "}', + 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000008"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', + ], + ), + AdapterRunTest( + id='request_with_state', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Hello, how are you?', + ), + ], + state={'value': 42}, + ), + ], + expected_state=42, + ), + ] + + +@pytest.mark.parametrize('tc', tc_parameters(), ids=lambda tc: tc.id) +async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: + """Test the Adapter.run method with various scenarios. + + Args: + mock_uuid: The mock UUID generator fixture. + tc: The test case parameters. + """ + + run: Run + events: list[str] = [] + thread_id: str = f'{THREAD_ID_PREFIX}{mock_uuid()}' + adapter: Adapter[StateDeps[StateInt], str] = await create_adapter(tc.call_tools) + deps: StateDeps[StateInt] = cast(StateDeps[StateInt], StateDeps[StateInt](state_type=StateInt)) + for run in tc.runs: + if run.nodes is not None: + assert isinstance(adapter.agent.model, TestModel), 'Agent model is not TestModel' + adapter.agent.model.custom_response_nodes = run.nodes + + run_input: RunAgentInput = run.run_input( + thread_id=thread_id, + run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', + ) + + events.extend([event async for event in adapter.run(run_input, deps=deps)]) + + assert_events(events, tc.expected_events) + if tc.expected_state is not None: + assert deps.state.value == tc.expected_state + + +async def test_concurrent_runs(mock_uuid: _MockUUID, adapter: Adapter[None, str]) -> None: + """Test concurrent execution of multiple runs.""" + + async def collect_events(run_input: RunAgentInput) -> list[str]: + """Collect all events from an adapter run. 
+ + Args: + run_input: The input configuration for the adapter run. + + Returns: + List of all events generated by the adapter run. + """ + return [event async for event in adapter.run(run_input)] + + concurrent_tasks: list[asyncio.Task[list[str]]] = [] + + for i in range(20): + run_input: RunAgentInput = RunAgentInput( + thread_id=f'{THREAD_ID_PREFIX}{mock_uuid()}', + run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id=f'msg_{i}', + role=Role.USER.value, + content=f'Message {i}', + ), + ], + state=None, + context=[], + tools=[], + forwarded_props=None, + ) + + task = asyncio.create_task(collect_events(run_input)) + concurrent_tasks.append(task) + + results = await asyncio.gather(*concurrent_tasks) + + for events in results: + assert_events(events, EXPECTED_EVENTS, loose=True) + assert len(events) == len(EXPECTED_EVENTS) diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py new file mode 100644 index 000000000..f63f4889a --- /dev/null +++ b/tests/test_ag_ui.py @@ -0,0 +1,94 @@ +"""Tests for Agent.to_ag_ui method.""" + +from __future__ import annotations + +import contextlib +import logging +import sys +from dataclasses import dataclass, field +from typing import Final + +import pytest + +from pydantic_ai import Agent +from pydantic_ai.models.test import TestModel + +has_required_python: bool = sys.version_info >= (3, 10) +has_ag_ui: bool = False +if has_required_python: + with contextlib.suppress(ImportError): + from pydantic_ai_ag_ui.adapter import _LOGGER as adapter_logger, Adapter # type: ignore[reportPrivateUsage] + + has_ag_ui = True + + +pytestmark = [ + pytest.mark.anyio, + pytest.mark.skipif(not has_required_python, reason='requires Python 3.10 or higher'), + pytest.mark.skipif(has_required_python and not has_ag_ui, reason='pydantic-ai-ag-ui not installed'), +] + +# Constants. 
+CUSTOM_LOGGER: Final[logging.Logger] = logging.getLogger('test_logger') + + +@pytest.fixture +async def agent() -> Agent[None, str]: + """Create an Adapter instance for testing.""" + return Agent(model=TestModel()) + + +@dataclass +class ToAGUITest: + id: str + logger: logging.Logger | None = None + tool_prefix: str | None = None + expected_logger: logging.Logger = field( + default_factory=lambda: adapter_logger if has_ag_ui else logging.getLogger(__name__) # type: ignore[reportPossiblyUnboundVariable] + ) + expected_tool_prefix: str = '' + + +TEST_PARAMETERS = [ + ToAGUITest( + id='defaults', + ), + ToAGUITest( + id='custom_logger', + logger=CUSTOM_LOGGER, + expected_logger=CUSTOM_LOGGER, + ), + ToAGUITest( + id='custom_tool_prefix', + tool_prefix='test_prefix', + expected_tool_prefix='test_prefix', + ), + ToAGUITest( + id='custom_tool_timeout', + ), + ToAGUITest( + id='custom_all', + logger=CUSTOM_LOGGER, + tool_prefix='test_prefix', + expected_logger=CUSTOM_LOGGER, + expected_tool_prefix='test_prefix', + ), +] + + +@pytest.mark.parametrize('tc', TEST_PARAMETERS, ids=lambda tc: tc.id) +@pytest.mark.anyio +async def test_to_ag_ui(agent: Agent[None, str], tc: ToAGUITest) -> None: + """Test the agent.to_ag_ui method. + + Args: + agent: The agent instance to test. + tc: Test case parameters including logger, tool prefix, and timeout. 
+ """ + + adapter: Adapter[None, str] = agent.to_ag_ui( + logger=tc.logger, + tool_prefix=tc.tool_prefix, + ) + assert adapter.logger == tc.expected_logger + assert adapter.tool_prefix == tc.expected_tool_prefix diff --git a/uv.lock b/uv.lock index f715314a2..34c328578 100644 --- a/uv.lock +++ b/uv.lock @@ -22,12 +22,25 @@ members = [ "fasta2a", "mcp-run-python", "pydantic-ai", + "pydantic-ai-ag-ui", "pydantic-ai-examples", "pydantic-ai-slim", "pydantic-evals", "pydantic-graph", ] +[[package]] +name = "ag-ui-protocol" +version = "0.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/26/1d5530e3fa84da37a8b58300f7a4352f763be43b2c393b0fad4d119f8653/ag_ui_protocol-0.1.5.tar.gz", hash = "sha256:48757afe82a4ee88eb078f31ef9672e09df624573d82045054f5a5b5dc021832", size = 4175, upload-time = "2025-05-20T11:37:06.835Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/39/c488044d3195f82e35102c190f92b605a8af1ad63f26b9166e9be460e1c1/ag_ui_protocol-0.1.5-py3-none-any.whl", hash = "sha256:d51a0ad9635059b629b4cb57a9a2ec425b4cc8220e91d50a8f9d559571737ae9", size = 5819, upload-time = "2025-05-20T11:37:05.521Z" }, +] + [[package]] name = "aiofiles" version = "23.2.1" @@ -2962,7 +2975,7 @@ wheels = [ name = "pydantic-ai" source = { editable = "." 
} dependencies = [ - { name = "pydantic-ai-slim", extra = ["a2a", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "mcp", "mistral", "openai", "vertexai"] }, + { name = "pydantic-ai-slim", extra = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "mcp", "mistral", "openai", "vertexai"] }, ] [package.optional-dependencies] @@ -2996,7 +3009,7 @@ lint = [ requires-dist = [ { name = "logfire", marker = "extra == 'logfire'", specifier = ">=3.11.0" }, { name = "pydantic-ai-examples", marker = "extra == 'examples'", editable = "examples" }, - { name = "pydantic-ai-slim", extras = ["a2a", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "mcp", "mistral", "openai", "vertexai"], editable = "pydantic_ai_slim" }, + { name = "pydantic-ai-slim", extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "mcp", "mistral", "openai", "vertexai"], editable = "pydantic_ai_slim" }, ] provides-extras = ["examples", "logfire"] @@ -3019,6 +3032,22 @@ lint = [ { name = "ruff", specifier = ">=0.6.9" }, ] +[[package]] +name = "pydantic-ai-ag-ui" +source = { editable = "pydantic_ai_ag_ui" } +dependencies = [ + { name = "ag-ui-protocol" }, + { name = "pydantic" }, + { name = "pydantic-ai" }, +] + +[package.metadata] +requires-dist = [ + { name = "ag-ui-protocol", specifier = ">=0.1.5" }, + { name = "pydantic", specifier = ">=2.10" }, + { name = "pydantic-ai", editable = "." 
}, +] + [[package]] name = "pydantic-ai-examples" source = { editable = "examples" } @@ -3030,7 +3059,7 @@ dependencies = [ { name = "logfire", extra = ["asyncpg", "fastapi", "httpx", "sqlite3"] }, { name = "mcp", extra = ["cli"], marker = "python_full_version >= '3.10'" }, { name = "modal" }, - { name = "pydantic-ai-slim", extra = ["anthropic", "groq", "openai", "vertexai"] }, + { name = "pydantic-ai-slim", extra = ["ag-ui", "anthropic", "groq", "openai", "vertexai"] }, { name = "pydantic-evals" }, { name = "python-multipart" }, { name = "rich" }, @@ -3046,7 +3075,7 @@ requires-dist = [ { name = "logfire", extras = ["asyncpg", "fastapi", "httpx", "sqlite3"], specifier = ">=2.6" }, { name = "mcp", extras = ["cli"], marker = "python_full_version >= '3.10'", specifier = ">=1.4.1" }, { name = "modal", specifier = ">=1.0.4" }, - { name = "pydantic-ai-slim", extras = ["anthropic", "groq", "openai", "vertexai"], editable = "pydantic_ai_slim" }, + { name = "pydantic-ai-slim", extras = ["ag-ui", "anthropic", "groq", "openai", "vertexai"], editable = "pydantic_ai_slim" }, { name = "pydantic-evals", editable = "pydantic_evals" }, { name = "python-multipart", specifier = ">=0.0.17" }, { name = "rich", specifier = ">=13.9.2" }, @@ -3071,6 +3100,9 @@ dependencies = [ a2a = [ { name = "fasta2a" }, ] +ag-ui = [ + { name = "pydantic-ai-ag-ui" }, +] anthropic = [ { name = "anthropic" }, ] @@ -3159,6 +3191,7 @@ requires-dist = [ { name = "opentelemetry-api", specifier = ">=1.28.0" }, { name = "prompt-toolkit", marker = "extra == 'cli'", specifier = ">=3" }, { name = "pydantic", specifier = ">=2.10" }, + { name = "pydantic-ai-ag-ui", marker = "extra == 'ag-ui'", editable = "pydantic_ai_ag_ui" }, { name = "pydantic-evals", marker = "extra == 'evals'", editable = "pydantic_evals" }, { name = "pydantic-graph", editable = "pydantic_graph" }, { name = "requests", marker = "extra == 'vertexai'", specifier = ">=2.32.2" }, @@ -3166,7 +3199,7 @@ requires-dist = [ { name = "tavily-python", 
marker = "extra == 'tavily'", specifier = ">=0.5.0" }, { name = "typing-inspection", specifier = ">=0.4.0" }, ] -provides-extras = ["a2a", "anthropic", "bedrock", "cli", "cohere", "duckduckgo", "evals", "google", "groq", "logfire", "mcp", "mistral", "openai", "tavily", "vertexai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "duckduckgo", "evals", "google", "groq", "logfire", "mcp", "mistral", "openai", "tavily", "vertexai"] [package.metadata.requires-dev] dev = [ From f50f0ece0abeb5b0376fd86cf93ac27169308151 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Mon, 23 Jun 2025 20:03:20 +0100 Subject: [PATCH 02/13] refactor(ag-ui): move AG-UI to pydantic_ai_slim Move pydantic_ai_ag_ui to pydantic_ai_slim as ag_ui, updating references and documentation accordingly. --- docs/ag-ui.md | 43 +- docs/api/ag_ui.md | 3 + docs/api/pydantic_ai_ag_ui.md | 3 - docs/install.md | 2 +- .../pydantic_ai_ag_ui_examples/api/agent.py | 2 +- .../api/agentic_chat.py | 3 +- .../api/agentic_generative_ui.py | 3 +- .../api/human_in_the_loop.py | 3 +- .../api/predictive_state_updates.py | 4 +- .../api/shared_state.py | 4 +- .../api/tool_based_generative_ui.py | 3 +- examples/pydantic_ai_ag_ui_examples/basic.py | 7 +- .../pydantic_ai_ag_ui_examples/cli/args.py | 2 +- .../pydantic_ai_ag_ui_examples/dojo_server.py | 2 +- mkdocs.yml | 2 +- pydantic_ai_ag_ui/LICENSE | 21 - pydantic_ai_ag_ui/README.md | 13 - .../pydantic_ai_ag_ui/__init__.py | 19 - pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py | 16 - .../pydantic_ai_ag_ui/_exceptions.py | 51 -- pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py | 8 - pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py | 51 -- .../pydantic_ai_ag_ui/protocols.py | 26 - pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed | 0 pydantic_ai_ag_ui/pyproject.toml | 62 -- .../pydantic_ai/ag_ui.py | 338 +++++-- pydantic_ai_slim/pydantic_ai/agent.py | 7 +- pydantic_ai_slim/pyproject.toml | 2 +- pyproject.toml | 5 - tests/pydantic_ai_ag_ui/__init__.py | 1 - 
tests/pydantic_ai_ag_ui/test_adapter.py | 821 ------------------ tests/test_ag_ui.py | 815 ++++++++++++++++- uv.lock | 218 +++-- 33 files changed, 1202 insertions(+), 1358 deletions(-) create mode 100644 docs/api/ag_ui.md delete mode 100644 docs/api/pydantic_ai_ag_ui.md delete mode 100644 pydantic_ai_ag_ui/LICENSE delete mode 100644 pydantic_ai_ag_ui/README.md delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py delete mode 100644 pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed delete mode 100644 pydantic_ai_ag_ui/pyproject.toml rename pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py => pydantic_ai_slim/pydantic_ai/ag_ui.py (77%) delete mode 100644 tests/pydantic_ai_ag_ui/__init__.py delete mode 100644 tests/pydantic_ai_ag_ui/test_adapter.py diff --git a/docs/ag-ui.md b/docs/ag-ui.md index 5fbed1802..0646587b9 100644 --- a/docs/ag-ui.md +++ b/docs/ag-ui.md @@ -8,17 +8,17 @@ an open protocol. Think of it as a universal translator for AI-driven systems no matter what language an agent speaks: AG-UI ensures fluent communication. The team at [Rocket Science](https://www.rocketscience.gg/), contributed the -[pydantic-ai-ag-ui](#ag-ui-adapter) package to make it easy to implement the -AG-UI protocol with PydanticAI agents. +[AG-UI integration](#ag-ui-adapter) to make it easy to implement the AG-UI +protocol with PydanticAI agents. 
This also includes an [`Agent.to_ag_ui`][pydantic_ai.Agent.to_ag_ui] convenience -method which simplifies the creation of [`Adapter`][pydantic_ai_ag_ui.Adapter] +method which simplifies the creation of [`Adapter`][pydantic_ai.ag_ui.Adapter] for PydanticAI agents, which can then be used by as part of a [fastapi](https://fastapi.tiangolo.com/) app. ## AG-UI Adapter -The [Adapter][pydantic_ai_ag_ui.Adapter] class is an adapter between +The [Adapter][pydantic_ai.ag_ui.Adapter] class is an adapter between PydanticAI agents and the AG-UI protocol written in Python. It provides support for all aspects of spec including: @@ -31,14 +31,6 @@ Let's have a quick look at how to use it: ### Installation -[Adapter][pydantic_ai_ag_ui.Adapter] is available on PyPI as -[`pydantic-ai-ag-ui`](https://pypi.org/project/pydantic-ai-ag-ui/) so installation is as -simple as: - -```bash -pip/uv-add pydantic-ai-ag-ui -``` - The only dependencies are: - [ag-ui-protocol](https://docs.ag-ui.com/introduction): to provide the AG-UI @@ -54,7 +46,8 @@ To run the examples you'll also need: pip/uv-add 'fastapi' ``` -You can install PydanticAI with the `ag-ui` extra to include **Adapter**: +You can install PydanticAI with the `ag-ui` extra to include +[Adapter][pydantic_ai.ag_ui.Adapter] run: ```bash pip/uv-add 'pydantic-ai-slim[ag-ui]' @@ -71,7 +64,7 @@ from typing import TYPE_CHECKING, Annotated from fastapi import FastAPI, Header from fastapi.responses import StreamingResponse -from pydantic_ai_ag_ui import SSE_CONTENT_TYPE +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE from pydantic_ai import Agent @@ -116,7 +109,7 @@ streamed back to the caller as Server-Sent Events (SSE). A user request may require multiple round trips between client UI and PydanticAI server, depending on the tools and events needed. -[Adapter][pydantic_ai_ag_ui.Adapter] can be used with any ASGI server. +[Adapter][pydantic_ai.ag_ui.Adapter] can be used with any ASGI server. 
### Features @@ -125,8 +118,8 @@ use the [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] method in combination with [fastapi](https://fastapi.tiangolo.com/). In the example below we have document state which is shared between the UI and -server using the [`StateDeps`][pydantic_ai_ag_ui.StateDeps] which implements the -[`StateHandler`][pydantic_ai_ag_ui.StateHandler] that can be used to automatically +server using the [`StateDeps`][pydantic_ai.ag_ui.StateDeps] which implements the +[`StateHandler`][pydantic_ai.ag_ui.StateHandler] that can be used to automatically decode state contained in [`RunAgentInput.state`](https://docs.ag-ui.com/sdk/js/core/types#runagentinput) when processing requests. @@ -146,7 +139,7 @@ from typing import TYPE_CHECKING, Annotated from fastapi import FastAPI, Header from fastapi.responses import StreamingResponse from pydantic import BaseModel -from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, StateDeps +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from pydantic_ai import Agent @@ -187,7 +180,7 @@ uvicorn agent_to_ag_ui:app --host 0.0.0.0 --port 8000 Since the goal of [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] is to be a convenience method, it accepts the same arguments as the -[`Adapter`][pydantic_ai_ag_ui.Adapter] constructor. +[`Adapter`][pydantic_ai.ag_ui.Adapter] constructor. 
#### Tools @@ -213,7 +206,7 @@ from ag_ui.core import CustomEvent, EventType, StateSnapshotEvent from fastapi import FastAPI, Header from fastapi.responses import StreamingResponse from pydantic import BaseModel -from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, StateDeps +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from pydantic_ai import Agent, RunContext @@ -272,8 +265,8 @@ async def root( ### Examples -For more examples of how to use [`Adapter`][pydantic_ai_ag_ui.Adapter] see -[`pydantic_ai_ag_ui_examples`](https://github.com/pydantic/pydantic-ai/tree/main/examples/pydantic_ai_ag_ui_examples), +For more examples of how to use [`Adapter`][pydantic_ai.ag_ui.Adapter] see +[`pydantic_ai.ag_ui_examples`](https://github.com/pydantic/pydantic-ai/tree/main/examples/pydantic_ai.ag_ui_examples), which includes working server for the with the [AG-UI Dojo](https://docs.ag-ui.com/tutorials/debugging#the-ag-ui-dojo) which can be run from a clone of the repo or with the `pydantic-ai-examples` package @@ -286,7 +279,7 @@ pip/uv-add pydantic-ai-examples Direct, which supports command line flags: ```shell -python -m pydantic_ai_ag_ui_examples.dojo_server --help +python -m pydantic_ai.ag_ui_examples.dojo_server --help usage: dojo_server.py [-h] [--port PORT] [--reload] [--no-reload] [--log-level {critical,error,warning,info,debug,trace}] PydanticAI AG-UI Dojo server @@ -303,11 +296,11 @@ options: Run with adapter debug logging: ```shell -python -m pydantic_ai_ag_ui_examples.dojo_server --log-level debug +python -m pydantic_ai.ag_ui_examples.dojo_server --log-level debug ``` Using uvicorn: ```shell -uvicorn pydantic_ai_ag_ui_examples.dojo_server:app --port 9000 +uvicorn pydantic_ai.ag_ui_examples.dojo_server:app --port 9000 ``` diff --git a/docs/api/ag_ui.md b/docs/api/ag_ui.md new file mode 100644 index 000000000..bb0ffd429 --- /dev/null +++ b/docs/api/ag_ui.md @@ -0,0 +1,3 @@ +# `pydantic_ai.ag_ui` + +::: pydantic_ai.ag_ui diff --git a/docs/api/pydantic_ai_ag_ui.md 
b/docs/api/pydantic_ai_ag_ui.md deleted file mode 100644 index ab5d48247..000000000 --- a/docs/api/pydantic_ai_ag_ui.md +++ /dev/null @@ -1,3 +0,0 @@ -# `pydantic_ai_ag_ui` - -::: pydantic_ai_ag_ui diff --git a/docs/install.md b/docs/install.md index 5469dae8b..3d803c729 100644 --- a/docs/install.md +++ b/docs/install.md @@ -56,7 +56,7 @@ pip/uv-add "pydantic-ai-slim[openai]" * `cohere` - installs `cohere` [PyPI ↗](https://pypi.org/project/cohere){:target="_blank"} * `duckduckgo` - installs `duckduckgo-search` [PyPI ↗](https://pypi.org/project/duckduckgo-search){:target="_blank"} * `tavily` - installs `tavily-python` [PyPI ↗](https://pypi.org/project/tavily-python){:target="_blank"} -* `ag-ui` - installs `pydantic-ai-ag-ui` [PyPI ↗](https://pypi.org/project/pydantic-ai-ag-ui){:target="_blank"} +* `ag-ui` - installs `ag-ui-protocol` [PyPI ↗](https://pypi.org/project/ag-ui-protocol){:target="_blank"} See the [models](models/index.md) documentation for information on which optional dependencies are required for each model. 
diff --git a/examples/pydantic_ai_ag_ui_examples/api/agent.py b/examples/pydantic_ai_ag_ui_examples/api/agent.py index a994b4871..18841506d 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agent.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agent.py @@ -7,9 +7,9 @@ from typing import Generic from dotenv import load_dotenv -from pydantic_ai_ag_ui import Adapter from pydantic_ai import Agent +from pydantic_ai.ag_ui import Adapter from pydantic_ai.result import OutputDataT from pydantic_ai.tools import AgentDepsT diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py index 18243d560..d490c0451 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py @@ -9,7 +9,8 @@ from ag_ui.core import RunAgentInput from fastapi import APIRouter, Header from fastapi.responses import StreamingResponse -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py index 97abe43bc..5df1308fc 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py @@ -7,7 +7,8 @@ from fastapi import APIRouter, Header from fastapi.responses import StreamingResponse from pydantic import BaseModel, Field -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py index 3b1119949..51d72bdd7 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py +++ b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py @@ -10,7 +10,8 @@ 
from ag_ui.core import RunAgentInput from fastapi import APIRouter, Header from fastapi.responses import StreamingResponse -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py index c4eef70f2..9cfb7ce05 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py +++ b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py @@ -9,8 +9,8 @@ from fastapi import APIRouter, Header from fastapi.responses import StreamingResponse from pydantic import BaseModel -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE -from pydantic_ai_ag_ui.deps import StateDeps + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/api/shared_state.py b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py index a81e6477b..6b01399b3 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/shared_state.py +++ b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py @@ -11,8 +11,8 @@ from fastapi import APIRouter, Header from fastapi.responses import StreamingResponse from pydantic import BaseModel, Field -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE -from pydantic_ai_ag_ui.deps import StateDeps + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py index 366df2388..f9f5394f3 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py +++ b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py @@ -10,7 +10,8 @@ from ag_ui.core import RunAgentInput from fastapi import APIRouter, Header from fastapi.responses import 
StreamingResponse -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE + +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE from .agent import AGUIAgent diff --git a/examples/pydantic_ai_ag_ui_examples/basic.py b/examples/pydantic_ai_ag_ui_examples/basic.py index 27fda6064..e448e2ec1 100644 --- a/examples/pydantic_ai_ag_ui_examples/basic.py +++ b/examples/pydantic_ai_ag_ui_examples/basic.py @@ -1,4 +1,4 @@ -"""Basic example of using pydantic_ai_ag_ui with FastAPI.""" +"""Basic example of using pydantic_ai.ag_ui with FastAPI.""" from __future__ import annotations @@ -6,10 +6,9 @@ from fastapi import FastAPI, Header from fastapi.responses import StreamingResponse -from pydantic_ai_ag_ui.adapter import Adapter -from pydantic_ai_ag_ui.consts import SSE_CONTENT_TYPE from pydantic_ai import Agent +from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, Adapter if TYPE_CHECKING: from ag_ui.core import RunAgentInput @@ -50,7 +49,7 @@ async def handler( args: Args = parse_args() uvicorn.run( - 'pydantic_ai_ag_ui_examples.dojo_server:app', + 'pydantic_ai_ag_ui_examples.dojo_server:app', port=args.port, reload=args.reload, log_level=args.log_level, diff --git a/examples/pydantic_ai_ag_ui_examples/cli/args.py b/examples/pydantic_ai_ag_ui_examples/cli/args.py index db281e429..ee7485bef 100644 --- a/examples/pydantic_ai_ag_ui_examples/cli/args.py +++ b/examples/pydantic_ai_ag_ui_examples/cli/args.py @@ -65,7 +65,7 @@ def parse_args() -> Args: '--loggers', nargs='*', default=[ - 'pydantic_ai_ag_ui.adapter', + 'pydantic_ai.ag_ui', ], help='Logger names to configure (default: adapter and model loggers)', ) diff --git a/examples/pydantic_ai_ag_ui_examples/dojo_server.py b/examples/pydantic_ai_ag_ui_examples/dojo_server.py index 6ce7156f2..42db92dba 100644 --- a/examples/pydantic_ai_ag_ui_examples/dojo_server.py +++ b/examples/pydantic_ai_ag_ui_examples/dojo_server.py @@ -41,7 +41,7 @@ args: Args = parse_args() uvicorn.run( - 'pydantic_ai_ag_ui_examples.dojo_server:app', + 
'pydantic_ai_ag_ui_examples.dojo_server:app', port=args.port, reload=args.reload, log_config=args.log_config(), diff --git a/mkdocs.yml b/mkdocs.yml index 2b9f90849..39e344763 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -104,7 +104,7 @@ nav: - api/pydantic_evals/otel.md - api/pydantic_evals/generation.md - api/fasta2a.md - - api/pydantic_ai_ag_ui.md + - api/ag_ui.md extra: # hide the "Made with Material for MkDocs" message diff --git a/pydantic_ai_ag_ui/LICENSE b/pydantic_ai_ag_ui/LICENSE deleted file mode 100644 index 1bf1f55e6..000000000 --- a/pydantic_ai_ag_ui/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Pydantic Services Inc. 2024 to present - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/pydantic_ai_ag_ui/README.md b/pydantic_ai_ag_ui/README.md deleted file mode 100644 index 8fce4c2e3..000000000 --- a/pydantic_ai_ag_ui/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# PydanticAI AG-UI Adapter - -[![CI](https://github.com/pydantic/pydantic-ai/actions/workflows/ci.yml/badge.svg?event=push)](https://github.com/pydantic/pydantic-ai/actions/workflows/ci.yml?query=branch%3Amain) -[![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic-ai.svg)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic-ai) -[![PyPI](https://img.shields.io/pypi/v/pydantic-ai-ag-ui.svg)](https://pypi.python.org/pypi/pydantic-ai-ag-ui) -[![python versions](https://img.shields.io/pypi/pyversions/pydantic-ai-ag-ui.svg)](https://github.com/pydantic/pydantic-ai) -[![license](https://img.shields.io/github/license/pydantic/pydantic-ai.svg)](https://github.com/pydantic/pydantic-ai/blob/main/LICENSE) - -To make it easier to implement use AG-UI with PydanticAI agents we've -implemented an adapter which handles the translation between PydanticAI -and AG-UI. - -See [the docs](https://ai.pydantic.dev/ag_ui/) for more information. diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py deleted file mode 100644 index 734afbf84..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Pydantic AI integration for ag-ui protocol. - -This package provides seamless integration between pydantic-ai agents and ag-ui -for building interactive AI applications with streaming event-based communication. 
-""" - -from __future__ import annotations - -from .adapter import Adapter -from .consts import SSE_CONTENT_TYPE -from .deps import StateDeps -from .protocols import StateHandler - -__all__ = [ - 'Adapter', - 'SSE_CONTENT_TYPE', - 'StateDeps', - 'StateHandler', -] diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py deleted file mode 100644 index 76fbf5951..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_enums.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Enums for AG-UI protocol.""" - -from __future__ import annotations - -from enum import Enum - - -# TODO(steve): Remove this and all uses once https://github.com/ag-ui-protocol/ag-ui/pull/49 is merged. -class Role(str, Enum): - """Enum for message roles in AG-UI protocol.""" - - ASSISTANT = 'assistant' - USER = 'user' - DEVELOPER = 'developer' - SYSTEM = 'system' - TOOL = 'tool' diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py deleted file mode 100644 index 8bd78cb31..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/_exceptions.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Exceptions for the AI Agent UI module.""" - -from __future__ import annotations - -from dataclasses import InitVar, dataclass - -from pydantic import ValidationError as PydanticValidationError - - -@dataclass -class RunError(Exception): - """Exception raised for errors during agent runs.""" - - message: str - code: str - - def __str__(self) -> str: - return self.message - - -@dataclass(kw_only=True) -class UnexpectedToolCallError(RunError): - """Exception raised when an unexpected tool call is encountered.""" - - tool_name: InitVar[str] - message: str = '' - code: str = 'unexpected_tool_call' - - def __post_init__(self, tool_name: str) -> None: - """Set the message for the unexpected tool call. - - Args: - tool_name: The name of the tool that was unexpectedly called. 
- """ - self.message = f'unexpected tool call name={tool_name}' # pragma: no cover - - -@dataclass -class NoMessagesError(RunError): - """Exception raised when no messages are found in the input.""" - - message: str = 'no messages found in the input' - code: str = 'no_messages' - - -@dataclass -class InvalidStateError(RunError, PydanticValidationError): - """Exception raised when an invalid state is provided.""" - - message: str = 'invalid state provided' - code: str = 'invalid_state' diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py deleted file mode 100644 index 93dc92ad5..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/consts.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Constants for the AI Agent UI module.""" - -from __future__ import annotations - -from typing import Final - -SSE_CONTENT_TYPE: Final[str] = 'text/event-stream' -"""Content type header value for Server-Sent Events (SSE).""" diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py deleted file mode 100644 index b3dce4149..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/deps.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import Generic, TypeVar - -from ag_ui.core import State -from pydantic import BaseModel, ValidationError - -from ._exceptions import InvalidStateError - -StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) -"""Type variable for the state type, which must be a subclass of `BaseModel`.""" - - -@dataclass(kw_only=True) -class StateDeps(Generic[StateT]): - """Provides AG-UI state management. - - This class is used to manage the state of an agent run. It allows setting - the state of the agent run with a specific type of state model, which must - be a subclass of `BaseModel`. - - The state is set using the `set_state` when the run starts by the `Adapter`. 
- - Implements the `StateHandler` protocol. - """ - - state_type: type[StateT] - state: StateT = field(init=False) - - def set_state(self, state: State) -> None: - """Set the state of the agent run. - - This method is called to update the state of the agent run with the - provided state. - - Implements the `StateHandler` protocol. - - Args: - state: The run state, which should match the expected model type or be `None`. - - Raises: - InvalidStateError: If `state` does not match the expected model and is not `None`. - """ - if state is None: - return - - try: - self.state = self.state_type.model_validate(state) - except ValidationError as e: # pragma: no cover - raise InvalidStateError from e diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py deleted file mode 100644 index 4a3af8102..000000000 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/protocols.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Protocols for the AG-UI to PydanticAI agent adapter.""" - -from __future__ import annotations - -from typing import Protocol, runtime_checkable - -from ag_ui.core import State - - -@runtime_checkable -class StateHandler(Protocol): - """Protocol for state handlers in agent runs.""" - - def set_state(self, state: State) -> None: - """Set the state of the agent run. - - This method is called to update the state of the agent run with the - provided state. - - Args: - state: The run state. - - Raises: - ValidationError: If `state` does not match the expected model. - """ - ... 
diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed b/pydantic_ai_ag_ui/pydantic_ai_ag_ui/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/pydantic_ai_ag_ui/pyproject.toml b/pydantic_ai_ag_ui/pyproject.toml deleted file mode 100644 index a568c4dc1..000000000 --- a/pydantic_ai_ag_ui/pyproject.toml +++ /dev/null @@ -1,62 +0,0 @@ -[build-system] -requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"] -build-backend = "hatchling.build" - -[tool.hatch.version] -source = "uv-dynamic-versioning" - -[tool.uv-dynamic-versioning] -vcs = "git" -style = "pep440" -bump = true - -[project] -name = "pydantic-ai-ag-ui" -dynamic = ["version", "dependencies"] -description = "Convert an AI Agent to speak AG UI! ✨" -authors = [ - { name = "Steven Hartland", email = "steve@rocketscience.gg" }, -] -license = "MIT" -readme = "README.md" -classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Intended Audience :: Developers", - "Intended Audience :: Information Technology", - "Intended Audience :: System Administrators", - "License :: OSI Approved :: MIT License", - "Operating System :: Unix", - "Operating System :: POSIX :: Linux", - "Environment :: Console", - "Environment :: MacOS X", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: Internet", -] -requires-python = ">=3.9" - -[tool.hatch.metadata.hooks.uv-dynamic-versioning] -dependencies = [ - "ag-ui-protocol>=0.1.5", - "pydantic>=2.10", - "pydantic-ai>=0.3.2", -] - -[project.urls] -Homepage = "https://ai.pydantic.dev/ag-ui/pydantic_ai_ag_ui" -Source = "https://github.com/pydantic/pydantic-ai/" -Documentation = 
"https://ai.pydantic.dev/ag-ui" -Changelog = "https://github.com/pydantic/pydantic-ai/releases" - -[tool.hatch.build.targets.wheel] -packages = ["pydantic_ai_ag_ui"] - -[tool.uv.sources] -pydantic-ai = { workspace = true } diff --git a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py similarity index 77% rename from pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py rename to pydantic_ai_slim/pydantic_ai/ag_ui.py index faec292bd..e675f0a23 100644 --- a/pydantic_ai_ag_ui/pydantic_ai_ag_ui/adapter.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -1,4 +1,8 @@ -"""Provides an AG-UI protocol adapter for the PydanticAI agent.""" +"""Provides an AG-UI protocol adapter for the PydanticAI agent. + +This package provides seamless integration between pydantic-ai agents and ag-ui +for building interactive AI applications with streaming event-based communication. +""" from __future__ import annotations @@ -6,41 +10,52 @@ import logging import uuid from collections.abc import Iterable, Sequence -from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any, Generic, cast - -from ag_ui.core import ( - AssistantMessage, - BaseEvent, - DeveloperMessage, - EventType, - FunctionCall, - Message, - MessagesSnapshotEvent, - RunAgentInput, - RunErrorEvent, - RunFinishedEvent, - RunStartedEvent, - SystemMessage, - TextMessageContentEvent, - TextMessageEndEvent, - TextMessageStartEvent, - Tool as ToolAGUI, - ToolCall, - ToolCallArgsEvent, - ToolCallEndEvent, - ToolCallStartEvent, - ToolMessage, - UserMessage, -) -from ag_ui.encoder import EventEncoder - -from pydantic_ai import Agent, ModelRequestNode, models -from pydantic_ai._output import OutputType -from pydantic_ai._parts_manager import ModelResponsePartsManager -from pydantic_ai.agent import RunOutputDataT -from pydantic_ai.mcp import ToolResult -from pydantic_ai.messages import ( +from dataclasses import InitVar, dataclass, field +from enum import Enum +from typing import 
TYPE_CHECKING, Any, Final, Generic, Protocol, TypeVar, cast, runtime_checkable + +try: + from ag_ui.core import ( + AssistantMessage, + BaseEvent, + DeveloperMessage, + EventType, + FunctionCall, + Message, + MessagesSnapshotEvent, + RunAgentInput, + RunErrorEvent, + RunFinishedEvent, + RunStartedEvent, + State, + SystemMessage, + TextMessageContentEvent, + TextMessageEndEvent, + TextMessageStartEvent, + Tool as ToolAGUI, + ToolCall, + ToolCallArgsEvent, + ToolCallEndEvent, + ToolCallStartEvent, + ToolMessage, + UserMessage, + ) + from ag_ui.encoder import EventEncoder +except ImportError as e: + raise ImportError( + 'Please install the `ag-ui-protocol` package to use `Agent.to_ag_ui()` method, ' + 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' + ) from e + +from pydantic import BaseModel, ValidationError + +from . import Agent, models +from ._agent_graph import ModelRequestNode +from ._output import OutputType +from ._parts_manager import ModelResponsePartsManager +from .agent import RunOutputDataT +from .mcp import ToolResult +from .messages import ( AgentStreamEvent, FinalResultEvent, ModelMessage, @@ -60,30 +75,151 @@ ToolReturnPart, UserPromptPart, ) -from pydantic_ai.result import AgentStream, OutputDataT -from pydantic_ai.settings import ModelSettings -from pydantic_ai.tools import AgentDepsT, Tool -from pydantic_ai.usage import Usage, UsageLimits - -from ._enums import Role -from ._exceptions import NoMessagesError, RunError, UnexpectedToolCallError -from .consts import SSE_CONTENT_TYPE -from .protocols import StateHandler +from .result import AgentStream, OutputDataT +from .settings import ModelSettings +from .tools import AgentDepsT, Tool +from .usage import Usage, UsageLimits if TYPE_CHECKING: from collections.abc import AsyncGenerator from ag_ui.encoder import EventEncoder - from pydantic_ai._agent_graph import AgentNode - from pydantic_ai.agent import AgentRun - from pydantic_ai.result import FinalResult from 
pydantic_graph.nodes import End + from ._agent_graph import AgentNode + from .agent import AgentRun + from .result import FinalResult + _LOGGER: logging.Logger = logging.getLogger(__name__) +# Constants. +SSE_CONTENT_TYPE: Final[str] = 'text/event-stream' +"""Content type header value for Server-Sent Events (SSE).""" + + +# Enums. +# TODO(steve): Remove this and all uses once https://github.com/ag-ui-protocol/ag-ui/pull/49 is merged. +class Role(str, Enum): + """Enum for message roles in AG-UI protocol.""" + + ASSISTANT = 'assistant' + USER = 'user' + DEVELOPER = 'developer' + SYSTEM = 'system' + TOOL = 'tool' + + +# Exceptions. +@dataclass +class RunError(Exception): + """Exception raised for errors during agent runs.""" + + message: str + code: str + + def __str__(self) -> str: + return self.message + + +@dataclass(kw_only=True) +class UnexpectedToolCallError(RunError): + """Exception raised when an unexpected tool call is encountered.""" + + tool_name: InitVar[str] + message: str = '' + code: str = 'unexpected_tool_call' + + def __post_init__(self, tool_name: str) -> None: + """Set the message for the unexpected tool call. + + Args: + tool_name: The name of the tool that was unexpectedly called. + """ + self.message = f'unexpected tool call name={tool_name}' # pragma: no cover + + +@dataclass +class NoMessagesError(RunError): + """Exception raised when no messages are found in the input.""" + + message: str = 'no messages found in the input' + code: str = 'no_messages' + + +@dataclass +class InvalidStateError(RunError, ValidationError): + """Exception raised when an invalid state is provided.""" + + message: str = 'invalid state provided' + code: str = 'invalid_state' + + +# Protocols. +@runtime_checkable +class StateHandler(Protocol): + """Protocol for state handlers in agent runs.""" + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. 
+ + Args: + state: The run state. + + Raises: + ValidationError: If `state` does not match the expected model. + """ + ... + + +StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) +"""Type variable for the state type, which must be a subclass of `BaseModel`.""" + + +@dataclass(kw_only=True) +class StateDeps(Generic[StateT]): + """Provides AG-UI state management. + + This class is used to manage the state of an agent run. It allows setting + the state of the agent run with a specific type of state model, which must + be a subclass of `BaseModel`. + + The state is set using the `set_state` when the run starts by the `Adapter`. + + Implements the `StateHandler` protocol. + """ + + state_type: type[StateT] + state: StateT = field(init=False) + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. + + Implements the `StateHandler` protocol. + + Args: + state: The run state, which should match the expected model type or be `None`. + + Raises: + InvalidStateError: If `state` does not match the expected model and is not `None`. + """ + if state is None: + return + + try: + self.state = self.state_type.model_validate(state) + except ValidationError as e: # pragma: no cover + raise InvalidStateError from e + + @dataclass(repr=False) class _RequestStreamContext: """Data class to hold request stream context.""" @@ -115,61 +251,62 @@ class Adapter(Generic[AgentDepsT, OutputDataT]): Examples: This is an example of base usage with FastAPI. - .. 
code-block:: python - from __future__ import annotations + ```python + from __future__ import annotations - from typing import TYPE_CHECKING, Annotated + from typing import TYPE_CHECKING, Annotated - from fastapi import FastAPI, Header - from fastapi.responses import StreamingResponse - from pydantic_ai import Agent + from fastapi import FastAPI, Header + from fastapi.responses import StreamingResponse + from pydantic_ai import Agent - from pydantic_ai_ag_ui import SSE_CONTENT_TYPE, Adapter + from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, Adapter - if TYPE_CHECKING: - from ag_ui.core import RunAgentInput + if TYPE_CHECKING: + from ag_ui.core import RunAgentInput - app = FastAPI(title="AG-UI Endpoint") - agent = Agent( - "openai:gpt-4o-mini", - deps_type=int, - instructions="You are a helpful assistant.", - ) - adapter = agent.to_ag_ui() + app = FastAPI(title="AG-UI Endpoint") + agent = Agent( + "openai:gpt-4o-mini", + deps_type=int, + instructions="You are a helpful assistant.", + ) + adapter = agent.to_ag_ui() - @app.post("/") - async def root(input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE) -> StreamingResponse: - return StreamingResponse( - adapter.run(input_data, accept, deps=42), - media_type=SSE_CONTENT_TYPE, - ) + @app.post("/") + async def root(input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE) -> StreamingResponse: + return StreamingResponse( + adapter.run(input_data, accept, deps=42), + media_type=SSE_CONTENT_TYPE, + ) + ``` PydanticAI tools which return AG-UI events will be sent to the client as part of the event stream, single events and event iterables are supported. - .. 
code-block:: python - @agent.tool - def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEvent: - return StateSnapshotEvent( - type=EventType.STATE_SNAPSHOT, - snapshot=ctx.deps.state, - ) - - @agent.tool_plain - def custom_events() -> list[CustomEvent]: - return [ - CustomEvent( - type=EventType.CUSTOM, - name="count", - value=1, - ), - CustomEvent( - type=EventType.CUSTOM, - name="count", - value=2, - ), - ] + ```python + @agent.tool + def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEvent: + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot=ctx.deps.state, + ) + @agent.tool_plain + def custom_events() -> list[CustomEvent]: + return [ + CustomEvent( + type=EventType.CUSTOM, + name="count", + value=1, + ), + CustomEvent( + type=EventType.CUSTOM, + name="count", + value=2, + ), + ] + ``` Args: agent: The PydanticAI `Agent` to adapt. tool_prefix: Optional prefix to add to tool names. @@ -676,3 +813,20 @@ def _convert_history(messages: list[Message]) -> list[ModelMessage]: result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) return result + + +# ===================================================================================== +# Exports +# ===================================================================================== + +__all__ = [ + 'Adapter', + 'SSE_CONTENT_TYPE', + 'StateDeps', + 'StateHandler', + 'Role', + 'RunError', + 'UnexpectedToolCallError', + 'NoMessagesError', + 'InvalidStateError', +] diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index f0c9b58ff..9c1026d35 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -73,7 +73,8 @@ from fasta2a.schema import Provider, Skill from fasta2a.storage import Storage from pydantic_ai.mcp import MCPServer - from pydantic_ai_ag_ui import Adapter + + from .ag_ui import Adapter __all__ = ( 'Agent', @@ -1872,10 +1873,10 @@ def to_ag_ui( 
An adapter that converts between AG-UI protocol and PydanticAI. """ try: - from pydantic_ai_ag_ui.adapter import Adapter + from .ag_ui import Adapter except ImportError as _import_error: raise ImportError( - 'Please install the `pydantic-ai-ag-ui` package to use `Agent.to_ag_ui()` method, ' + 'Please install the `ag-ui` dependencies to use `Agent.to_ag_ui()` method, ' 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag_ui]"`' ) from _import_error diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 2c13974aa..e918e895c 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -81,7 +81,7 @@ evals = ["pydantic-evals=={{ version }}"] # A2A a2a = ["fasta2a=={{ version }}"] # AG UI Adapter -ag-ui = ["pydantic-ai-ag-ui=={{ version }}"] +ag-ui = ["ag-ui-protocol>=0.1.5"] [dependency-groups] dev = [ diff --git a/pyproject.toml b/pyproject.toml index 64967de63..901ebb869 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,11 +68,9 @@ pydantic-evals = { workspace = true } pydantic-graph = { workspace = true } pydantic-ai-examples = { workspace = true } fasta2a = { workspace = true } -pydantic-ai-ag-ui = { workspace = true } [tool.uv.workspace] members = [ - "pydantic_ai_ag_ui", "pydantic_ai_slim", "pydantic_evals", "pydantic_graph", @@ -105,7 +103,6 @@ include = ["/README.md", "/Makefile", "/tests"] line-length = 120 target-version = "py39" include = [ - "pydantic_ai_ag_ui/**/*.py", "pydantic_ai_slim/**/*.py", "pydantic_evals/**/*.py", "pydantic_graph/**/*.py", @@ -164,7 +161,6 @@ reportUnnecessaryIsInstance = false reportUnnecessaryTypeIgnoreComment = true reportMissingModuleSource = false include = [ - "pydantic_ai_ag_ui", "pydantic_ai_slim", "pydantic_evals", "pydantic_graph", @@ -217,7 +213,6 @@ filterwarnings = [ [tool.coverage.run] # required to avoid warnings about files created by create_module fixture include = [ - "pydantic_ai_ag_ui/**/*.py", "pydantic_ai_slim/**/*.py", 
"pydantic_evals/**/*.py", "pydantic_graph/**/*.py", diff --git a/tests/pydantic_ai_ag_ui/__init__.py b/tests/pydantic_ai_ag_ui/__init__.py deleted file mode 100644 index 8918e6b0f..000000000 --- a/tests/pydantic_ai_ag_ui/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for pydantic_ai_ag_ui module.""" diff --git a/tests/pydantic_ai_ag_ui/test_adapter.py b/tests/pydantic_ai_ag_ui/test_adapter.py deleted file mode 100644 index 167904a03..000000000 --- a/tests/pydantic_ai_ag_ui/test_adapter.py +++ /dev/null @@ -1,821 +0,0 @@ -"""Comprehensive tests for Adapter.run method.""" - -# pyright: reportPossiblyUnboundVariable=none -from __future__ import annotations - -import asyncio -import contextlib -import re -import sys -import uuid -from collections.abc import Callable -from dataclasses import dataclass, field -from itertools import count -from typing import Any, Final, Literal, cast - -import pytest -from pydantic import BaseModel - -from pydantic_ai import Agent -from pydantic_ai.models.test import TestModel, TestNode, TestToolCallPart - -has_required_python: bool = sys.version_info >= (3, 10) -has_ag_ui: bool = False -if has_required_python: - with contextlib.suppress(ImportError): - from ag_ui.core import ( - AssistantMessage, - CustomEvent, - DeveloperMessage, - EventType, - FunctionCall, - Message, - RunAgentInput, - StateSnapshotEvent, - SystemMessage, - Tool, - ToolCall, - ToolMessage, - UserMessage, - ) - - from pydantic_ai_ag_ui._enums import Role - from pydantic_ai_ag_ui.adapter import Adapter - from pydantic_ai_ag_ui.deps import StateDeps - - has_ag_ui = True - - -pytestmark = [ - pytest.mark.anyio, - pytest.mark.skipif(not has_required_python, reason='requires Python 3.10 or higher'), - pytest.mark.skipif(has_required_python and not has_ag_ui, reason='adapter-ag-ui not installed'), -] - -# Type aliases. -_MockUUID = Callable[[], str] - -# Constants. 
-THREAD_ID_PREFIX: Final[str] = 'thread_' -RUN_ID_PREFIX: Final[str] = 'run_' -EXPECTED_EVENTS: Final[list[str]] = [ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000003","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"success "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"(no "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"calls)"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000003"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', -] -UUID_PATTERN: Final[re.Pattern[str]] = re.compile(r'\d{8}-\d{4}-\d{4}-\d{4}-\d{12}') - - -class StateInt(BaseModel): - """Example state class for testing purposes.""" - - value: int = 0 - - -def get_weather(name: str = 'get_weather') -> Tool: - return Tool( - name=name, - description='Get the weather for a given location', - parameters={ - 'type': 'object', - 'properties': { - 'location': { - 'type': 'string', - 'description': 'The location to get the weather for', - }, - }, - 'required': ['location'], - }, - ) - - -@pytest.fixture -async def adapter() -> Adapter[StateDeps[StateInt], str]: - """Fixture to create an Adapter instance for testing. - - Returns: - An Adapter instance configured for testing. - """ - return await create_adapter([]) - - -async def create_adapter( - call_tools: list[str] | Literal['all'], -) -> Adapter[StateDeps[StateInt], str]: - """Create an Adapter instance for testing. - - Args: - call_tools: List of tool names to enable, or 'all' for all tools. 
- - Returns: - An Adapter instance configured with the specified tools. - """ - return Agent( - model=TestModel( - call_tools=call_tools, - tool_call_deltas={'get_weather_parts', 'current_time'}, - ), - deps_type=cast(type[StateDeps[StateInt]], StateDeps[StateInt]), - tools=[send_snapshot, send_custom, current_time], - ).to_ag_ui() - - -@pytest.fixture -def mock_uuid(monkeypatch: pytest.MonkeyPatch) -> _MockUUID: - """Mock UUID generation for consistent test results. - - This fixture replaces the uuid.uuid4 function with a mock that generates - sequential UUIDs for testing purposes. This ensures that UUIDs are - predictable and consistent across test runs. - - Args: - monkeypatch: The pytest monkeypatch fixture to modify uuid.uuid4. - - Returns: - A function that generates a mock UUID. - """ - counter = count(1) - - def _fake_uuid() -> str: - """Generate a fake UUID string with sequential numbering. - - Returns: - A fake UUID string in the format '00000000-0000-0000-0000-{counter:012d}'. - """ - return f'00000000-0000-0000-0000-{next(counter):012d}' - - def _fake_uuid4() -> uuid.UUID: - """Generate a fake UUID object using the fake UUID string. - - Returns: - A UUID object created from the fake UUID string. - """ - return uuid.UUID(_fake_uuid()) - - # Due to how ToolCallPart uses generate_tool_call_id with field default_factory, - # we have to patch uuid.uuid4 directly instead of the generate function. This - # also covers how we generate messages IDs. - monkeypatch.setattr('uuid.uuid4', _fake_uuid4) - - return _fake_uuid - - -def assert_events(events: list[str], expected_events: list[str], *, loose: bool = False) -> None: - expected: str - event: str - for event, expected in zip(events, expected_events, strict=True): - if loose: - expected = normalize_uuids(expected) - event = normalize_uuids(event) - assert event == f'data: {expected}\n\n' - - -def normalize_uuids(text: str) -> str: - """Normalize UUIDs in the given text to a fixed format. 
- - Args: - text: The input text containing UUIDs. - - Returns: - The text with UUIDs replaced by a fixed UUID. - """ - return UUID_PATTERN.sub('00000000-0000-0000-0000-000000000001', text) - - -def current_time() -> str: - """Get the current time in ISO format. - - Returns: - The current UTC time in ISO format string. - """ - return '21T12:08:45.485981+00:00' - - -async def send_snapshot() -> StateSnapshotEvent: - """Display the recipe to the user. - - Returns: - StateSnapshotEvent. - """ - return StateSnapshotEvent( - type=EventType.STATE_SNAPSHOT, - snapshot={'key': 'value'}, - ) - - -async def send_custom() -> list[CustomEvent]: - """Display the recipe to the user. - - Returns: - StateSnapshotEvent. - """ - return [ - CustomEvent( - type=EventType.CUSTOM, - name='custom_event1', - value={'key1': 'value1'}, - ), - CustomEvent( - type=EventType.CUSTOM, - name='custom_event2', - value={'key2': 'value2'}, - ), - ] - - -@dataclass(frozen=True) -class Run: - """Test parameter class for Adapter.run method tests. - - Args: - messages: List of messages for the run input. - state: State object for the run input. - context: Context list for the run input. - tools: List of tools for the run input. - forwarded_props: Forwarded properties for the run input. - nodes: List of TestNode instances for the run input. - """ - - messages: list[Message] - state: Any = None - context: list[Any] = field(default_factory=lambda: list[Any]()) - tools: list[Tool] = field(default_factory=lambda: list[Tool]()) - nodes: list[TestNode] | None = None - forwarded_props: Any = None - - def run_input(self, *, thread_id: str, run_id: str) -> RunAgentInput: - """Create a RunAgentInput instance for the test case. - - Args: - thread_id: The thread ID for the run. - run_id: The run ID for the run. - - Returns: - A RunAgentInput instance with the test case parameters. 
- """ - return RunAgentInput( - thread_id=thread_id, - run_id=run_id, - messages=self.messages, - state=self.state, - context=self.context, - tools=self.tools, - forwarded_props=self.forwarded_props, - ) - - -@dataclass(frozen=True) -class AdapterRunTest: - """Test parameter class for Adapter.run method tests. - - Args: - id: Name of the test case. - runs: List of Run instances for the test case. - """ - - id: str - runs: list[Run] - call_tools: list[str] = field(default_factory=lambda: list[str]()) - expected_events: list[str] = field(default_factory=lambda: list(EXPECTED_EVENTS)) - expected_state: int | None = None - - -# Test parameter data -def tc_parameters() -> list[AdapterRunTest]: - if not has_ag_ui: - return [AdapterRunTest(id='skipped', runs=[])] - - return [ - AdapterRunTest( - id='basic_user_message', - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Hello, how are you?', - ), - ], - ), - ], - ), - AdapterRunTest( - id='empty_messages', - runs=[ - Run(messages=[]), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_ERROR","message":"no messages found in the input","code":"no_messages"}', - ], - ), - AdapterRunTest( - id='multiple_messages', - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='First message', - ), - AssistantMessage( - id='msg_2', - role=Role.ASSISTANT.value, - content='Assistant response', - ), - SystemMessage( - id='msg_3', - role=Role.SYSTEM.value, - content='System message', - ), - DeveloperMessage( - id='msg_4', - role=Role.DEVELOPER.value, - content='Developer note', - ), - UserMessage( - id='msg_5', - role=Role.USER.value, - content='Second message', - ), - ], - ), - ], - ), - AdapterRunTest( - id='messages_with_history', - runs=[ - Run( - messages=[ 
# pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='First message', - ), - UserMessage( - id='msg_2', - role=Role.USER.value, - content='Second message', - ), - ], - ), - ], - ), - AdapterRunTest( - id='tool_ag_ui', - call_tools=['get_weather'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather for Paris', - ), - ], - tools=[get_weather()], - ), - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather for Paris', - ), - AssistantMessage( - id='msg_2', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000003', - type='function', - function=FunctionCall( - name='get_weather', - arguments='{"location": "Paris"}', - ), - ), - ], - ), - ToolMessage( - id='msg_3', - role=Role.TOOL.value, - content='Tool result', - tool_call_id='pyd_ai_00000000000000000000000000000003', - ), - ], - tools=[get_weather()], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', - '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather\\":\\"Tool "}', - 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - ], - ), - AdapterRunTest( - id='tool_ag_ui_multiple', - call_tools=['get_weather', 'get_weather_parts'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather and get_weather_parts for Paris', - ), - ], - tools=[get_weather(), get_weather('get_weather_parts')], - ), - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather for Paris', - ), - AssistantMessage( - id='msg_2', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000003', - type='function', - function=FunctionCall( - name='get_weather', - arguments='{"location": "Paris"}', - ), - ), - ], - ), - ToolMessage( - id='msg_3', - role=Role.TOOL.value, - content='Tool result', - tool_call_id='pyd_ai_00000000000000000000000000000003', - ), - AssistantMessage( - id='msg_4', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000003', - type='function', - function=FunctionCall( - name='get_weather_parts', - arguments='{"location": "Paris"}', - ), - ), - ], - ), - ToolMessage( - id='msg_5', - role=Role.TOOL.value, - content='Tool result', - tool_call_id='pyd_ai_00000000000000000000000000000003', - ), - ], - tools=[get_weather(), get_weather('get_weather_parts')], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - 
'{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', - '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"get_weather\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather_parts\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000006"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', - ], - ), - AdapterRunTest( - id='tool_ag_ui_parts', - call_tools=['get_weather_parts'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather_parts for Paris', - ), - ], - tools=[get_weather('get_weather_parts')], - ), - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather_parts for Paris', - ), - AssistantMessage( - id='msg_2', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000003', - type='function', - function=FunctionCall( - name='get_weather_parts', - arguments='{"location": "Paris"}', - ), - ), - ], - ), - ToolMessage( - id='msg_3', - role=Role.TOOL.value, - content='Tool 
result', - tool_call_id='pyd_ai_00000000000000000000000000000003', - ), - ], - tools=[get_weather('get_weather_parts')], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather_parts"}', - '{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000003","delta":"{\\"location\\":\\"a\\"}"}', - '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather_parts\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - ], - ), - AdapterRunTest( - id='tool_local_single_event', - call_tools=['send_snapshot'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call send_snapshot', - ), - ], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"STATE_SNAPSHOT","snapshot":{"key":"value"}}', - 
'{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_snapshot\\":{\\"type\\":\\"STATE_SNAPSHOT\\",\\"timestam"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"p\\":null,\\"rawEvent\\":null,\\"snapshot\\":{\\"key\\":\\"value\\"}}}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - ], - ), - AdapterRunTest( - id='tool_local_multiple_events', - call_tools=['send_custom'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call send_custom', - ), - ], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"CUSTOM","name":"custom_event1","value":{"key1":"value1"}}', - '{"type":"CUSTOM","name":"custom_event2","value":{"key2":"value2"}}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_custom\\":[{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event1\\",\\"value\\":{\\"key1\\":\\"va"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"lue1\\"}},{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event2\\",\\"value\\":{\\"key2\\":\\"value2\\"}}]}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', - 
'{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - ], - ), - AdapterRunTest( - id='tool_local_parts', - call_tools=['current_time'], - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call current_time', - ), - ], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"current_time\\":\\"21T1"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"2:08:45.485981+00:00\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - ], - ), - AdapterRunTest( - id='tool_local_then_ag_ui', - call_tools=['current_time', 'get_weather'], - runs=[ - Run( - nodes=[ - TestNode( - parts=[TestToolCallPart(call_tools=['current_time'])], - ), - TestNode( - parts=[TestToolCallPart(call_tools=['get_weather'])], - ), - ], - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please tell me the time and then call get_weather for Paris', - ), - ], - tools=[get_weather()], - ), - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Please call get_weather for Paris', - ), - AssistantMessage( - id='msg_2', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000003', - type='function', - function=FunctionCall( - name='current_time', - 
arguments='{}', - ), - ), - ], - ), - ToolMessage( - id='msg_3', - role=Role.TOOL.value, - content='Tool result', - tool_call_id='pyd_ai_00000000000000000000000000000003', - ), - AssistantMessage( - id='msg_4', - role=Role.ASSISTANT.value, - tool_calls=[ - ToolCall( - id='pyd_ai_00000000000000000000000000000004', - type='function', - function=FunctionCall( - name='get_weather', - arguments='{"location": "Paris"}', - ), - ), - ], - ), - ToolMessage( - id='msg_5', - role=Role.TOOL.value, - content='Tool result', - tool_call_id='pyd_ai_00000000000000000000000000000004', - ), - ], - tools=[get_weather()], - ), - ], - expected_events=[ - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000004","toolCallName":"get_weather"}', - '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', - '{"type":"MESSAGES_SNAPSHOT","messages":[{"id":"msg_1","role":"user","content":"Please tell me the time and then call get_weather for Paris"},' - + '{"id":"00000000000000000000000000000005","role":"assistant","toolCalls":[{"id":"pyd_ai_00000000000000000000000000000003","type":"function",' - + '"function":{"name":"current_time","arguments":"{}"}}]},{"id":"result-pyd_ai_00000000000000000000000000000003","role":"tool","content":' - + '"21T12:08:45.485981+00:00","toolCallId":"pyd_ai_00000000000000000000000000000003"},{"id":"00000000000000000000000000000006","role":"assistant",' - + '"toolCalls":[{"id":"pyd_ai_00000000000000000000000000000004","type":"function","function":{"name":"get_weather","arguments":"{\\"location\\": \\"a\\"}"}}]}]}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', - 
'{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000008","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"{\\"current_time\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\",\\"get_weather\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000008"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', - ], - ), - AdapterRunTest( - id='request_with_state', - runs=[ - Run( - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id='msg_1', - role=Role.USER.value, - content='Hello, how are you?', - ), - ], - state={'value': 42}, - ), - ], - expected_state=42, - ), - ] - - -@pytest.mark.parametrize('tc', tc_parameters(), ids=lambda tc: tc.id) -async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: - """Test the Adapter.run method with various scenarios. - - Args: - mock_uuid: The mock UUID generator fixture. - tc: The test case parameters. 
- """ - - run: Run - events: list[str] = [] - thread_id: str = f'{THREAD_ID_PREFIX}{mock_uuid()}' - adapter: Adapter[StateDeps[StateInt], str] = await create_adapter(tc.call_tools) - deps: StateDeps[StateInt] = cast(StateDeps[StateInt], StateDeps[StateInt](state_type=StateInt)) - for run in tc.runs: - if run.nodes is not None: - assert isinstance(adapter.agent.model, TestModel), 'Agent model is not TestModel' - adapter.agent.model.custom_response_nodes = run.nodes - - run_input: RunAgentInput = run.run_input( - thread_id=thread_id, - run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', - ) - - events.extend([event async for event in adapter.run(run_input, deps=deps)]) - - assert_events(events, tc.expected_events) - if tc.expected_state is not None: - assert deps.state.value == tc.expected_state - - -async def test_concurrent_runs(mock_uuid: _MockUUID, adapter: Adapter[None, str]) -> None: - """Test concurrent execution of multiple runs.""" - - async def collect_events(run_input: RunAgentInput) -> list[str]: - """Collect all events from an adapter run. - - Args: - run_input: The input configuration for the adapter run. - - Returns: - List of all events generated by the adapter run. 
- """ - return [event async for event in adapter.run(run_input)] - - concurrent_tasks: list[asyncio.Task[list[str]]] = [] - - for i in range(20): - run_input: RunAgentInput = RunAgentInput( - thread_id=f'{THREAD_ID_PREFIX}{mock_uuid()}', - run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', - messages=[ # pyright: ignore[reportArgumentType] - UserMessage( - id=f'msg_{i}', - role=Role.USER.value, - content=f'Message {i}', - ), - ], - state=None, - context=[], - tools=[], - forwarded_props=None, - ) - - task = asyncio.create_task(collect_events(run_input)) - concurrent_tasks.append(task) - - results = await asyncio.gather(*concurrent_tasks) - - for events in results: - assert_events(events, EXPECTED_EVENTS, loose=True) - assert len(events) == len(EXPECTED_EVENTS) diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index f63f4889a..e58422f15 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -1,36 +1,827 @@ -"""Tests for Agent.to_ag_ui method.""" +"""Tests for AG-UI implementation.""" +# pyright: reportPossiblyUnboundVariable=none from __future__ import annotations +import asyncio import contextlib import logging -import sys +import re +import uuid +from collections.abc import Callable from dataclasses import dataclass, field -from typing import Final +from itertools import count +from typing import Any, Final, Literal import pytest +from pydantic import BaseModel from pydantic_ai import Agent -from pydantic_ai.models.test import TestModel +from pydantic_ai.ag_ui import ( + _LOGGER as adapter_logger, # type: ignore[reportPrivateUsage] + Adapter, + Role, + StateDeps, +) +from pydantic_ai.models.test import TestModel, TestNode, TestToolCallPart -has_required_python: bool = sys.version_info >= (3, 10) has_ag_ui: bool = False -if has_required_python: - with contextlib.suppress(ImportError): - from pydantic_ai_ag_ui.adapter import _LOGGER as adapter_logger, Adapter # type: ignore[reportPrivateUsage] +with contextlib.suppress(ImportError): + from ag_ui.core import ( + 
AssistantMessage, + CustomEvent, + DeveloperMessage, + EventType, + FunctionCall, + Message, + RunAgentInput, + StateSnapshotEvent, + SystemMessage, + Tool, + ToolCall, + ToolMessage, + UserMessage, + ) - has_ag_ui = True + has_ag_ui = True pytestmark = [ pytest.mark.anyio, - pytest.mark.skipif(not has_required_python, reason='requires Python 3.10 or higher'), - pytest.mark.skipif(has_required_python and not has_ag_ui, reason='pydantic-ai-ag-ui not installed'), + pytest.mark.skipif(not has_ag_ui, reason='ag-ui-protocol not installed'), ] # Constants. CUSTOM_LOGGER: Final[logging.Logger] = logging.getLogger('test_logger') +# Type aliases. +_MockUUID = Callable[[], str] + +# Constants. +THREAD_ID_PREFIX: Final[str] = 'thread_' +RUN_ID_PREFIX: Final[str] = 'run_' +EXPECTED_EVENTS: Final[list[str]] = [ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000003","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"success "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"(no "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000003","delta":"calls)"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', +] +UUID_PATTERN: Final[re.Pattern[str]] = re.compile(r'\d{8}-\d{4}-\d{4}-\d{4}-\d{12}') + + +class StateInt(BaseModel): + """Example state class for testing purposes.""" + + value: int = 0 + + +def get_weather(name: str = 'get_weather') -> Tool: + return Tool( + name=name, + description='Get the weather 
for a given location', + parameters={ + 'type': 'object', + 'properties': { + 'location': { + 'type': 'string', + 'description': 'The location to get the weather for', + }, + }, + 'required': ['location'], + }, + ) + + +@pytest.fixture +async def adapter() -> Adapter[StateDeps[StateInt], str]: + """Fixture to create an Adapter instance for testing. + + Returns: + An Adapter instance configured for testing. + """ + return await create_adapter([]) + + +async def create_adapter( + call_tools: list[str] | Literal['all'], +) -> Adapter[StateDeps[StateInt], str]: + """Create an Adapter instance for testing. + + Args: + call_tools: List of tool names to enable, or 'all' for all tools. + + Returns: + An Adapter instance configured with the specified tools. + """ + return Agent( + model=TestModel( + call_tools=call_tools, + tool_call_deltas={'get_weather_parts', 'current_time'}, + ), + deps_type=StateDeps[StateInt], + tools=[send_snapshot, send_custom, current_time], + ).to_ag_ui() + + +@pytest.fixture +def mock_uuid(monkeypatch: pytest.MonkeyPatch) -> _MockUUID: + """Mock UUID generation for consistent test results. + + This fixture replaces the uuid.uuid4 function with a mock that generates + sequential UUIDs for testing purposes. This ensures that UUIDs are + predictable and consistent across test runs. + + Args: + monkeypatch: The pytest monkeypatch fixture to modify uuid.uuid4. + + Returns: + A function that generates a mock UUID. + """ + counter = count(1) + + def _fake_uuid() -> str: + """Generate a fake UUID string with sequential numbering. + + Returns: + A fake UUID string in the format '00000000-0000-0000-0000-{counter:012d}'. + """ + return f'00000000-0000-0000-0000-{next(counter):012d}' + + def _fake_uuid4() -> uuid.UUID: + """Generate a fake UUID object using the fake UUID string. + + Returns: + A UUID object created from the fake UUID string. 
+ """ + return uuid.UUID(_fake_uuid()) + + # Due to how ToolCallPart uses generate_tool_call_id with field default_factory, + # we have to patch uuid.uuid4 directly instead of the generate function. This + # also covers how we generate messages IDs. + monkeypatch.setattr('uuid.uuid4', _fake_uuid4) + + return _fake_uuid + + +def assert_events(events: list[str], expected_events: list[str], *, loose: bool = False) -> None: + expected: str + event: str + for event, expected in zip(events, expected_events, strict=True): + if loose: + expected = normalize_uuids(expected) + event = normalize_uuids(event) + assert event == f'data: {expected}\n\n' + + +def normalize_uuids(text: str) -> str: + """Normalize UUIDs in the given text to a fixed format. + + Args: + text: The input text containing UUIDs. + + Returns: + The text with UUIDs replaced by a fixed UUID. + """ + return UUID_PATTERN.sub('00000000-0000-0000-0000-000000000001', text) + + +def current_time() -> str: + """Get the current time in ISO format. + + Returns: + The current UTC time in ISO format string. + """ + return '21T12:08:45.485981+00:00' + + +async def send_snapshot() -> StateSnapshotEvent: + """Display the recipe to the user. + + Returns: + StateSnapshotEvent. + """ + return StateSnapshotEvent( + type=EventType.STATE_SNAPSHOT, + snapshot={'key': 'value'}, + ) + + +async def send_custom() -> list[CustomEvent]: + """Display the recipe to the user. + + Returns: + StateSnapshotEvent. + """ + return [ + CustomEvent( + type=EventType.CUSTOM, + name='custom_event1', + value={'key1': 'value1'}, + ), + CustomEvent( + type=EventType.CUSTOM, + name='custom_event2', + value={'key2': 'value2'}, + ), + ] + + +@dataclass(frozen=True) +class Run: + """Test parameter class for Adapter.run method tests. + + Args: + messages: List of messages for the run input. + state: State object for the run input. + context: Context list for the run input. + tools: List of tools for the run input. 
+ forwarded_props: Forwarded properties for the run input. + nodes: List of TestNode instances for the run input. + """ + + messages: list[Message] + state: Any = None + context: list[Any] = field(default_factory=lambda: list[Any]()) + tools: list[Tool] = field(default_factory=lambda: list[Tool]()) + nodes: list[TestNode] | None = None + forwarded_props: Any = None + + def run_input(self, *, thread_id: str, run_id: str) -> RunAgentInput: + """Create a RunAgentInput instance for the test case. + + Args: + thread_id: The thread ID for the run. + run_id: The run ID for the run. + + Returns: + A RunAgentInput instance with the test case parameters. + """ + return RunAgentInput( + thread_id=thread_id, + run_id=run_id, + messages=self.messages, + state=self.state, + context=self.context, + tools=self.tools, + forwarded_props=self.forwarded_props, + ) + + +@dataclass(frozen=True) +class AdapterRunTest: + """Test parameter class for Adapter.run method tests. + + Args: + id: Name of the test case. + runs: List of Run instances for the test case. 
+ """ + + id: str + runs: list[Run] + call_tools: list[str] = field(default_factory=lambda: list[str]()) + expected_events: list[str] = field(default_factory=lambda: list(EXPECTED_EVENTS)) + expected_state: int | None = None + + +# Test parameter data +def tc_parameters() -> list[AdapterRunTest]: + if not has_ag_ui: + return [AdapterRunTest(id='skipped', runs=[])] + + return [ + AdapterRunTest( + id='basic_user_message', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Hello, how are you?', + ), + ], + ), + ], + ), + AdapterRunTest( + id='empty_messages', + runs=[ + Run(messages=[]), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_ERROR","message":"no messages found in the input","code":"no_messages"}', + ], + ), + AdapterRunTest( + id='multiple_messages', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='First message', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + content='Assistant response', + ), + SystemMessage( + id='msg_3', + role=Role.SYSTEM.value, + content='System message', + ), + DeveloperMessage( + id='msg_4', + role=Role.DEVELOPER.value, + content='Developer note', + ), + UserMessage( + id='msg_5', + role=Role.USER.value, + content='Second message', + ), + ], + ), + ], + ), + AdapterRunTest( + id='messages_with_history', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='First message', + ), + UserMessage( + id='msg_2', + role=Role.USER.value, + content='Second message', + ), + ], + ), + ], + ), + AdapterRunTest( + id='tool_ag_ui', + call_tools=['get_weather'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + 
role=Role.USER.value, + content='Please call get_weather for Paris', + ), + ], + tools=[get_weather()], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather()], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + ], + ), + AdapterRunTest( + id='tool_ag_ui_multiple', + call_tools=['get_weather', 
'get_weather_parts'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather and get_weather_parts for Paris', + ), + ], + tools=[get_weather(), get_weather('get_weather_parts')], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + AssistantMessage( + id='msg_4', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather_parts', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_5', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather(), get_weather('get_weather_parts')], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + 
'{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"get_weather\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather_parts\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000006"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + ], + ), + AdapterRunTest( + id='tool_ag_ui_parts', + call_tools=['get_weather_parts'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather_parts for Paris', + ), + ], + tools=[get_weather('get_weather_parts')], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather_parts for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='get_weather_parts', + arguments='{"location": "Paris"}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + ], + tools=[get_weather('get_weather_parts')], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather_parts"}', + 
'{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000003","delta":"{\\"location\\":\\"a\\"}"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather_parts\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000005"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + ], + ), + AdapterRunTest( + id='tool_local_single_event', + call_tools=['send_snapshot'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call send_snapshot', + ), + ], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"STATE_SNAPSHOT","snapshot":{"key":"value"}}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_snapshot\\":{\\"type\\":\\"STATE_SNAPSHOT\\",\\"timestam"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"p\\":null,\\"rawEvent\\":null,\\"snapshot\\":{\\"key\\":\\"value\\"}}}"}', + 
'{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_multiple_events', + call_tools=['send_custom'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call send_custom', + ), + ], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"CUSTOM","name":"custom_event1","value":{"key1":"value1"}}', + '{"type":"CUSTOM","name":"custom_event2","value":{"key2":"value2"}}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_custom\\":[{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event1\\",\\"value\\":{\\"key1\\":\\"va"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"lue1\\"}},{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"rawEvent\\":null,\\"name\\":\\"custom_event2\\",\\"value\\":{\\"key2\\":\\"value2\\"}}]}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_parts', + call_tools=['current_time'], + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call current_time', + ), + ], + ), + ], + expected_events=[ + 
'{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"current_time\\":\\"21T1"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"2:08:45.485981+00:00\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000004"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + ], + ), + AdapterRunTest( + id='tool_local_then_ag_ui', + call_tools=['current_time', 'get_weather'], + runs=[ + Run( + nodes=[ + TestNode( + parts=[TestToolCallPart(call_tools=['current_time'])], + ), + TestNode( + parts=[TestToolCallPart(call_tools=['get_weather'])], + ), + ], + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please tell me the time and then call get_weather for Paris', + ), + ], + tools=[get_weather()], + ), + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Please call get_weather for Paris', + ), + AssistantMessage( + id='msg_2', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000003', + type='function', + function=FunctionCall( + name='current_time', + arguments='{}', + ), + ), + ], + ), + ToolMessage( + id='msg_3', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000003', + ), + AssistantMessage( + id='msg_4', + role=Role.ASSISTANT.value, + tool_calls=[ + ToolCall( + id='pyd_ai_00000000000000000000000000000004', + type='function', + function=FunctionCall( + name='get_weather', + arguments='{"location": "Paris"}', + 
), + ), + ], + ), + ToolMessage( + id='msg_5', + role=Role.TOOL.value, + content='Tool result', + tool_call_id='pyd_ai_00000000000000000000000000000004', + ), + ], + tools=[get_weather()], + ), + ], + expected_events=[ + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000004","toolCallName":"get_weather"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', + '{"type":"MESSAGES_SNAPSHOT","messages":[{"id":"msg_1","role":"user","content":"Please tell me the time and then call get_weather for Paris"},' + + '{"id":"00000000000000000000000000000005","role":"assistant","toolCalls":[{"id":"pyd_ai_00000000000000000000000000000003","type":"function",' + + '"function":{"name":"current_time","arguments":"{}"}}]},{"id":"result-pyd_ai_00000000000000000000000000000003","role":"tool","content":' + + '"21T12:08:45.485981+00:00","toolCallId":"pyd_ai_00000000000000000000000000000003"},{"id":"00000000000000000000000000000006","role":"assistant",' + + '"toolCalls":[{"id":"pyd_ai_00000000000000000000000000000004","type":"function","function":{"name":"get_weather","arguments":"{\\"location\\": \\"a\\"}"}}]}]}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000008","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"{\\"current_time\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\",\\"get_weather\\":\\"Tool "}', + 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000008"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', + ], + ), + AdapterRunTest( + id='request_with_state', + runs=[ + Run( + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Hello, how are you?', + ), + ], + state={'value': 42}, + ), + ], + expected_state=42, + ), + ] + + +@pytest.mark.parametrize('tc', tc_parameters(), ids=lambda tc: tc.id) +async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: + """Test the Adapter.run method with various scenarios. + + Args: + mock_uuid: The mock UUID generator fixture. + tc: The test case parameters. + """ + + run: Run + events: list[str] = [] + thread_id: str = f'{THREAD_ID_PREFIX}{mock_uuid()}' + adapter: Adapter[StateDeps[StateInt], str] = await create_adapter(tc.call_tools) + deps: StateDeps[StateInt] = StateDeps[StateInt](state_type=StateInt) + for run in tc.runs: + if run.nodes is not None: + assert isinstance(adapter.agent.model, TestModel), 'Agent model is not TestModel' + adapter.agent.model.custom_response_nodes = run.nodes + + run_input: RunAgentInput = run.run_input( + thread_id=thread_id, + run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', + ) + + events.extend([event async for event in adapter.run(run_input, deps=deps)]) + + assert_events(events, tc.expected_events) + if tc.expected_state is not None: + assert deps.state.value == tc.expected_state + + +async def test_concurrent_runs(mock_uuid: _MockUUID, adapter: Adapter[None, str]) -> None: + """Test concurrent execution of multiple runs.""" + + async def collect_events(run_input: RunAgentInput) -> list[str]: + """Collect all events from an adapter run. 
+ + Args: + run_input: The input configuration for the adapter run. + + Returns: + List of all events generated by the adapter run. + """ + return [event async for event in adapter.run(run_input)] + + concurrent_tasks: list[asyncio.Task[list[str]]] = [] + + for i in range(20): + run_input: RunAgentInput = RunAgentInput( + thread_id=f'{THREAD_ID_PREFIX}{mock_uuid()}', + run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id=f'msg_{i}', + role=Role.USER.value, + content=f'Message {i}', + ), + ], + state=None, + context=[], + tools=[], + forwarded_props=None, + ) + + task = asyncio.create_task(collect_events(run_input)) + concurrent_tasks.append(task) + + results = await asyncio.gather(*concurrent_tasks) + + for events in results: + assert_events(events, EXPECTED_EVENTS, loose=True) + assert len(events) == len(EXPECTED_EVENTS) + @pytest.fixture async def agent() -> Agent[None, str]: @@ -44,7 +835,7 @@ class ToAGUITest: logger: logging.Logger | None = None tool_prefix: str | None = None expected_logger: logging.Logger = field( - default_factory=lambda: adapter_logger if has_ag_ui else logging.getLogger(__name__) # type: ignore[reportPossiblyUnboundVariable] + default_factory=lambda: adapter_logger if has_ag_ui else logging.getLogger(__name__) ) expected_tool_prefix: str = '' diff --git a/uv.lock b/uv.lock index 34c328578..4572bf17b 100644 --- a/uv.lock +++ b/uv.lock @@ -22,7 +22,6 @@ members = [ "fasta2a", "mcp-run-python", "pydantic-ai", - "pydantic-ai-ag-ui", "pydantic-ai-examples", "pydantic-ai-slim", "pydantic-evals", @@ -2959,16 +2958,17 @@ wheels = [ [[package]] name = "pydantic" -version = "2.10.6" +version = "2.11.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, + { name = "typing-inspection" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681, upload-time = "2025-01-24T01:42:12.693Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696, upload-time = "2025-01-24T01:42:10.371Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, ] [[package]] @@ -3032,22 +3032,6 @@ lint = [ { name = "ruff", specifier = ">=0.6.9" }, ] -[[package]] -name = "pydantic-ai-ag-ui" -source = { editable = "pydantic_ai_ag_ui" } -dependencies = [ - { name = "ag-ui-protocol" }, - { name = "pydantic" }, - { name = "pydantic-ai" }, -] - -[package.metadata] -requires-dist = [ - { name = "ag-ui-protocol", specifier = ">=0.1.5" }, - { name = "pydantic", specifier = ">=2.10" }, - { name = "pydantic-ai", editable = "." 
}, -] - [[package]] name = "pydantic-ai-examples" source = { editable = "examples" } @@ -3101,7 +3085,7 @@ a2a = [ { name = "fasta2a" }, ] ag-ui = [ - { name = "pydantic-ai-ag-ui" }, + { name = "ag-ui-protocol" }, ] anthropic = [ { name = "anthropic" }, @@ -3171,6 +3155,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "ag-ui-protocol", marker = "extra == 'ag-ui'", specifier = ">=0.1.5" }, { name = "anthropic", marker = "extra == 'anthropic'", specifier = ">=0.52.0" }, { name = "argcomplete", marker = "extra == 'cli'", specifier = ">=3.5.0" }, { name = "boto3", marker = "extra == 'bedrock'", specifier = ">=1.37.24" }, @@ -3191,7 +3176,6 @@ requires-dist = [ { name = "opentelemetry-api", specifier = ">=1.28.0" }, { name = "prompt-toolkit", marker = "extra == 'cli'", specifier = ">=3" }, { name = "pydantic", specifier = ">=2.10" }, - { name = "pydantic-ai-ag-ui", marker = "extra == 'ag-ui'", editable = "pydantic_ai_ag_ui" }, { name = "pydantic-evals", marker = "extra == 'evals'", editable = "pydantic_evals" }, { name = "pydantic-graph", editable = "pydantic_graph" }, { name = "requests", marker = "extra == 'vertexai'", specifier = ">=2.32.2" }, @@ -3222,99 +3206,111 @@ dev = [ [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload-time = "2024-12-18T11:31:54.917Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938, upload-time = 
"2024-12-18T11:27:14.406Z" }, - { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684, upload-time = "2024-12-18T11:27:16.489Z" }, - { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169, upload-time = "2024-12-18T11:27:22.16Z" }, - { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227, upload-time = "2024-12-18T11:27:25.097Z" }, - { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695, upload-time = "2024-12-18T11:27:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662, upload-time = "2024-12-18T11:27:30.798Z" }, - { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", 
size = 1993370, upload-time = "2024-12-18T11:27:33.692Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813, upload-time = "2024-12-18T11:27:37.111Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287, upload-time = "2024-12-18T11:27:40.566Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414, upload-time = "2024-12-18T11:27:43.757Z" }, - { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301, upload-time = "2024-12-18T11:27:47.36Z" }, - { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685, upload-time = "2024-12-18T11:27:50.508Z" }, - { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876, upload-time = "2024-12-18T11:27:53.54Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421, upload-time = "2024-12-18T11:27:55.409Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998, upload-time = "2024-12-18T11:27:57.252Z" }, - { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167, upload-time = "2024-12-18T11:27:59.146Z" }, - { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071, upload-time = "2024-12-18T11:28:02.625Z" }, - { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244, upload-time = "2024-12-18T11:28:04.442Z" }, - { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470, upload-time = "2024-12-18T11:28:07.679Z" }, - { 
url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291, upload-time = "2024-12-18T11:28:10.297Z" }, - { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613, upload-time = "2024-12-18T11:28:13.362Z" }, - { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355, upload-time = "2024-12-18T11:28:16.587Z" }, - { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661, upload-time = "2024-12-18T11:28:18.407Z" }, - { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261, upload-time = "2024-12-18T11:28:21.471Z" }, - { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361, upload-time = "2024-12-18T11:28:23.53Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484, upload-time = "2024-12-18T11:28:25.391Z" }, - { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102, upload-time = "2024-12-18T11:28:28.593Z" }, - { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127, upload-time = "2024-12-18T11:28:30.346Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340, upload-time = "2024-12-18T11:28:32.521Z" }, - { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900, upload-time = "2024-12-18T11:28:34.507Z" }, - { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177, upload-time = "2024-12-18T11:28:36.488Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046, upload-time = "2024-12-18T11:28:39.409Z" }, - { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386, upload-time = "2024-12-18T11:28:41.221Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060, upload-time = "2024-12-18T11:28:44.709Z" }, - { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870, upload-time = "2024-12-18T11:28:46.839Z" }, - { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822, upload-time = "2024-12-18T11:28:48.896Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364, upload-time = "2024-12-18T11:28:50.755Z" }, - { url 
= "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303, upload-time = "2024-12-18T11:28:54.122Z" }, - { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064, upload-time = "2024-12-18T11:28:56.074Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046, upload-time = "2024-12-18T11:28:58.107Z" }, - { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092, upload-time = "2024-12-18T11:29:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709, upload-time = "2024-12-18T11:29:03.193Z" }, - { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273, upload-time = "2024-12-18T11:29:05.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027, upload-time = "2024-12-18T11:29:07.294Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888, upload-time = "2024-12-18T11:29:09.249Z" }, - { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738, upload-time = "2024-12-18T11:29:11.23Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138, upload-time = "2024-12-18T11:29:16.396Z" }, - { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025, upload-time = "2024-12-18T11:29:20.25Z" }, - { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633, 
upload-time = "2024-12-18T11:29:23.877Z" }, - { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404, upload-time = "2024-12-18T11:29:25.872Z" }, - { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130, upload-time = "2024-12-18T11:29:29.252Z" }, - { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946, upload-time = "2024-12-18T11:29:31.338Z" }, - { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387, upload-time = "2024-12-18T11:29:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453, upload-time = "2024-12-18T11:29:35.533Z" }, - { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186, upload-time = "2024-12-18T11:29:37.649Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475, upload-time = "2024-12-18T11:30:18.316Z" }, - { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279, upload-time = "2024-12-18T11:30:20.547Z" }, - { url = "https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112, upload-time = "2024-12-18T11:30:23.255Z" }, - { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780, upload-time = "2024-12-18T11:30:25.742Z" }, - { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943, upload-time = "2024-12-18T11:30:28.036Z" }, - { url = "https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492, upload-time = "2024-12-18T11:30:30.412Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714, upload-time = "2024-12-18T11:30:34.358Z" }, - { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163, upload-time = "2024-12-18T11:30:37.979Z" }, - { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217, upload-time = "2024-12-18T11:30:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899, upload-time = "2024-12-18T11:30:42.737Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726, upload-time = "2024-12-18T11:30:45.279Z" }, - { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219, upload-time = "2024-12-18T11:30:47.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382, upload-time = "2024-12-18T11:30:51.871Z" }, - { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159, upload-time = "2024-12-18T11:30:54.382Z" }, - { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331, upload-time = "2024-12-18T11:30:58.178Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467, upload-time = "2024-12-18T11:31:00.6Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797, upload-time = "2024-12-18T11:31:07.243Z" }, - { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839, upload-time = "2024-12-18T11:31:09.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861, upload-time = "2024-12-18T11:31:13.469Z" }, - { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582, upload-time = "2024-12-18T11:31:17.423Z" }, - { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985, upload-time = "2024-12-18T11:31:19.901Z" }, - { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715, upload-time = "2024-12-18T11:31:22.821Z" }, - { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733, upload-time = "2024-12-18T11:31:26.876Z" }, - { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375, upload-time = "2024-12-18T11:31:29.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307, upload-time = "2024-12-18T11:31:33.123Z" }, - { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", size = 1979971, upload-time = "2024-12-18T11:31:35.755Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616, upload-time = "2024-12-18T11:31:38.534Z" }, - { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943, upload-time = "2024-12-18T11:31:41.853Z" }, - { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654, upload-time = "2024-12-18T11:31:44.756Z" }, - { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292, upload-time = "2024-12-18T11:31:48.613Z" 
}, - { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961, upload-time = "2024-12-18T11:31:52.446Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = 
"2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = 
"2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", size = 2028677, upload-time = "2025-04-23T18:32:27.227Z" }, + { url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", size = 1864735, upload-time = "2025-04-23T18:32:29.019Z" }, + { url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467, upload-time = "2025-04-23T18:32:31.119Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041, upload-time = "2025-04-23T18:32:33.655Z" }, + { url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503, upload-time = "2025-04-23T18:32:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079, upload-time = "2025-04-23T18:32:37.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508, upload-time = "2025-04-23T18:32:39.637Z" }, + { url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693, upload-time = "2025-04-23T18:32:41.818Z" }, + { url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224, upload-time = "2025-04-23T18:32:44.033Z" }, + { url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403, upload-time = "2025-04-23T18:32:45.836Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331, upload-time = "2025-04-23T18:32:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", size = 1910571, upload-time = "2025-04-23T18:32:49.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", size = 1956504, upload-time = "2025-04-23T18:32:51.287Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = 
"2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", size = 2024034, upload-time = "2025-04-23T18:33:32.843Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", size = 1858578, upload-time = "2025-04-23T18:33:34.912Z" }, + { url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858, upload-time = "2025-04-23T18:33:36.933Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498, upload-time = "2025-04-23T18:33:38.997Z" }, + { url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428, upload-time = 
"2025-04-23T18:33:41.18Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854, upload-time = "2025-04-23T18:33:43.446Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859, upload-time = "2025-04-23T18:33:45.56Z" }, + { url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059, upload-time = "2025-04-23T18:33:47.735Z" }, + { url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661, upload-time = "2025-04-23T18:33:49.995Z" }, ] [[package]] From 4a5b4731ec67d826d70d94369b8f5e3e0b87dae0 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Tue, 24 Jun 2025 00:52:59 +0100 Subject: [PATCH 03/13] feat(ag-ui): ASGI server Refactor to_ag_ui so it now returns a ASGI compatible server based off starlette. This makes it easier for users setup apps with minimal code. Fix some invalid references missed in the package refactor for cli and examples. Made enums, and exceptions private to the package, so they are not exposed in the public API. 
--- docs/ag-ui.md | 109 ++-- .../pydantic_ai_ag_ui_examples/api/agent.py | 2 +- examples/pydantic_ai_ag_ui_examples/basic.py | 35 +- .../pydantic_ai_ag_ui_examples/cli/args.py | 2 +- .../pydantic_ai_ag_ui_examples/dojo_server.py | 2 +- mkdocs.yml | 2 +- pydantic_ai_slim/pydantic_ai/ag_ui.py | 553 ++++++++++++------ pydantic_ai_slim/pydantic_ai/agent.py | 60 +- pydantic_ai_slim/pydantic_ai/models/test.py | 14 +- pydantic_ai_slim/pyproject.toml | 2 +- tests/test_ag_ui.py | 134 ++--- uv.lock | 2 + 12 files changed, 533 insertions(+), 384 deletions(-) diff --git a/docs/ag-ui.md b/docs/ag-ui.md index 0646587b9..a4bd8f79b 100644 --- a/docs/ag-ui.md +++ b/docs/ag-ui.md @@ -12,9 +12,9 @@ The team at [Rocket Science](https://www.rocketscience.gg/), contributed the protocol with PydanticAI agents. This also includes an [`Agent.to_ag_ui`][pydantic_ai.Agent.to_ag_ui] convenience -method which simplifies the creation of [`Adapter`][pydantic_ai.ag_ui.Adapter] -for PydanticAI agents, which can then be used by as part of a -[fastapi](https://fastapi.tiangolo.com/) app. +method which simplifies the creation of [`FastAGUI`][pydantic_ai.ag_ui.FastAGUI] +for PydanticAI agents, which is built on top of [Starlette](https://www.starlette.io/), +meaning it's fully compatible with any ASGI server. 
## AG-UI Adapter @@ -27,8 +27,6 @@ for all aspects of spec including: - [State Management](https://docs.ag-ui.com/concepts/state) - [Tools](https://docs.ag-ui.com/concepts/tools) -Let's have a quick look at how to use it: - ### Installation The only dependencies are: @@ -40,14 +38,14 @@ The only dependencies are: To run the examples you'll also need: -- [fastapi](https://fastapi.tiangolo.com/): to provide ASGI compatible server +- [uvicorn](https://www.uvicorn.org/) or another ASGI compatible server ```bash -pip/uv-add 'fastapi' +pip/uv-add 'uvicorn' ``` -You can install PydanticAI with the `ag-ui` extra to include -[Adapter][pydantic_ai.ag_ui.Adapter] run: +You can install PydanticAI with the `ag-ui` extra to ensure you have all the +required AG-UI dependencies: ```bash pip/uv-add 'pydantic-ai-slim[ag-ui]' @@ -60,30 +58,10 @@ pip/uv-add 'pydantic-ai-slim[ag-ui]' from __future__ import annotations -from typing import TYPE_CHECKING, Annotated - -from fastapi import FastAPI, Header -from fastapi.responses import StreamingResponse -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE - from pydantic_ai import Agent -if TYPE_CHECKING: - from ag_ui.core import RunAgentInput - agent = Agent('openai:gpt-4.1', instructions='Be fun!') -adapter = agent.to_ag_ui() -app = FastAPI(title='AG-UI Endpoint') - - -@app.post('/') -async def root( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - return StreamingResponse( - adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) +app = agent.to_ag_ui() ``` You can run the example with: @@ -109,13 +87,16 @@ streamed back to the caller as Server-Sent Events (SSE). A user request may require multiple round trips between client UI and PydanticAI server, depending on the tools and events needed. -[Adapter][pydantic_ai.ag_ui.Adapter] can be used with any ASGI server. 
+In addition to the [Adapter][pydantic_ai.ag_ui.Adapter] there is also +[FastAGUI][pydantic_ai.ag_ui.FastAGUI] which is slim wrapper around +[Starlette](https://www.starlette.io/) providing easy access to run a PydanticAI +server with AG-UI support with any ASGI server. ### Features To expose a PydanticAI agent as an AG-UI server including state support, you can -use the [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] method in combination -with [fastapi](https://fastapi.tiangolo.com/). +use the [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] method create an ASGI +compatible server. In the example below we have document state which is shared between the UI and server using the [`StateDeps`][pydantic_ai.ag_ui.StateDeps] which implements the @@ -134,17 +115,10 @@ real-time synchronization between agents and frontend applications. from __future__ import annotations -from typing import TYPE_CHECKING, Annotated - -from fastapi import FastAPI, Header -from fastapi.responses import StreamingResponse from pydantic import BaseModel -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from pydantic_ai import Agent - -if TYPE_CHECKING: - from ag_ui.core import RunAgentInput +from pydantic_ai.ag_ui import StateDeps class DocumentState(BaseModel): @@ -158,29 +132,27 @@ agent = Agent( instructions='Be fun!', deps_type=StateDeps[DocumentState], ) -adapter = agent.to_ag_ui() -app = FastAPI(title='AG-UI Endpoint') - - -@app.post('/') -async def root( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - return StreamingResponse( - adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), - media_type=SSE_CONTENT_TYPE, - ) +app = agent.to_ag_ui(deps=StateDeps(state_type=DocumentState)) ``` -Since `app` is an ASGI application, it can be used with any ASGI server. +Since `app` is an ASGI application, it can be used with any ASGI server e.g. 
```bash uvicorn agent_to_ag_ui:app --host 0.0.0.0 --port 8000 ``` Since the goal of [`to_ag_ui`][pydantic_ai.agent.Agent.to_ag_ui] is to be a -convenience method, it accepts the same arguments as the -[`Adapter`][pydantic_ai.ag_ui.Adapter] constructor. +convenience method, it accepts the same a combination of the arguments require +for: + +- [`Adapter`][pydantic_ai.ag_ui.Adapter] constructor +- [`Agent.iter`][pydantic_ai.agent.Agent.iter] method + +If you want more control you can either use +[`agent_to_ag_ui`][pydantic_ai.ag_ui.agent_to_ag_ui] helper method or create +and [`Agent`][pydantic_ai.ag_ui.Agent] directly which also provide +the ability to customise [`Starlette`](https://www.starlette.io/applications/#starlette.applications.Starlette) +options. #### Tools @@ -200,18 +172,16 @@ for custom events and state updates. from __future__ import annotations -from typing import TYPE_CHECKING, Annotated +from typing import TYPE_CHECKING from ag_ui.core import CustomEvent, EventType, StateSnapshotEvent -from fastapi import FastAPI, Header -from fastapi.responses import StreamingResponse from pydantic import BaseModel -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps from pydantic_ai import Agent, RunContext +from pydantic_ai.ag_ui import StateDeps if TYPE_CHECKING: - from ag_ui.core import RunAgentInput + pass class DocumentState(BaseModel): @@ -225,8 +195,7 @@ agent = Agent( instructions='Be fun!', deps_type=StateDeps[DocumentState], ) -adapter = agent.to_ag_ui() -app = FastAPI(title='AG-UI Endpoint') +app = agent.to_ag_ui(deps=StateDeps(state_type=DocumentState)) @agent.tool @@ -251,16 +220,6 @@ def custom_events() -> list[CustomEvent]: value=2, ), ] - - -@app.post('/') -async def root( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - return StreamingResponse( - adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), - media_type=SSE_CONTENT_TYPE, - ) ``` ### Examples @@ -296,11 
+255,11 @@ options: Run with adapter debug logging: ```shell -python -m pydantic_ai.ag_ui_examples.dojo_server --log-level debug +python -m pydantic_ai_ag_ui_examples.dojo_server --log-level debug ``` Using uvicorn: ```shell -uvicorn pydantic_ai.ag_ui_examples.dojo_server:app --port 9000 +uvicorn pydantic_ai_ag_ui_examples.dojo_server:app --port 9000 ``` diff --git a/examples/pydantic_ai_ag_ui_examples/api/agent.py b/examples/pydantic_ai_ag_ui_examples/api/agent.py index 18841506d..b69c5e274 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agent.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agent.py @@ -40,4 +40,4 @@ def __init__( instructions=instructions, deps_type=deps_type, ) - self.adapter = self.agent.to_ag_ui() + self.adapter = Adapter(agent=self.agent) diff --git a/examples/pydantic_ai_ag_ui_examples/basic.py b/examples/pydantic_ai_ag_ui_examples/basic.py index e448e2ec1..0b42276ff 100644 --- a/examples/pydantic_ai_ag_ui_examples/basic.py +++ b/examples/pydantic_ai_ag_ui_examples/basic.py @@ -2,44 +2,13 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Annotated - -from fastapi import FastAPI, Header -from fastapi.responses import StreamingResponse - from pydantic_ai import Agent -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, Adapter - -if TYPE_CHECKING: - from ag_ui.core import RunAgentInput - -app = FastAPI(title='AG-UI Endpoint') agent: Agent[None, str] = Agent( 'openai:gpt-4o-mini', instructions='You are a helpful assistant.', ) -adapter: Adapter[None, str] = agent.to_ag_ui() - - -@app.post('/agent') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. 
- """ - return StreamingResponse( - adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) - +app = agent.to_ag_ui() if __name__ == '__main__': import uvicorn @@ -49,7 +18,7 @@ async def handler( args: Args = parse_args() uvicorn.run( - 'pydantic_ai.ag_ui_examples.dojo_server:app', + 'pydantic_ai_ag_ui_examples.dojo_server:app', port=args.port, reload=args.reload, log_level=args.log_level, diff --git a/examples/pydantic_ai_ag_ui_examples/cli/args.py b/examples/pydantic_ai_ag_ui_examples/cli/args.py index ee7485bef..9962fa7bf 100644 --- a/examples/pydantic_ai_ag_ui_examples/cli/args.py +++ b/examples/pydantic_ai_ag_ui_examples/cli/args.py @@ -65,7 +65,7 @@ def parse_args() -> Args: '--loggers', nargs='*', default=[ - 'pydantic_ai.ag_ui.adapter', + 'pydantic_ai.ag_ui', ], help='Logger names to configure (default: adapter and model loggers)', ) diff --git a/examples/pydantic_ai_ag_ui_examples/dojo_server.py b/examples/pydantic_ai_ag_ui_examples/dojo_server.py index 42db92dba..6ce7156f2 100644 --- a/examples/pydantic_ai_ag_ui_examples/dojo_server.py +++ b/examples/pydantic_ai_ag_ui_examples/dojo_server.py @@ -41,7 +41,7 @@ args: Args = parse_args() uvicorn.run( - 'pydantic_ai.ag_ui_examples.dojo_server:app', + 'pydantic_ai_ag_ui_examples.dojo_server:app', port=args.port, reload=args.reload, log_config=args.log_config(), diff --git a/mkdocs.yml b/mkdocs.yml index 39e344763..d86854e64 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -63,6 +63,7 @@ nav: - examples/question-graph.md - examples/slack-lead-qualifier.md - API Reference: + - api/ag_ui.md - api/agent.md - api/tools.md - api/common_tools.md @@ -104,7 +105,6 @@ nav: - api/pydantic_evals/otel.md - api/pydantic_evals/generation.md - api/fasta2a.md - - api/ag_ui.md extra: # hide the "Made with Material for MkDocs" message diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index e675f0a23..2122babfe 100644 --- a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ 
b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -9,10 +9,12 @@ import json import logging import uuid -from collections.abc import Iterable, Sequence +from collections.abc import Iterable, Mapping, Sequence from dataclasses import InitVar, dataclass, field from enum import Enum -from typing import TYPE_CHECKING, Any, Final, Generic, Protocol, TypeVar, cast, runtime_checkable +from typing import TYPE_CHECKING, Any, Callable, Final, Generic, Protocol, TypeVar, cast, runtime_checkable + +from starlette.responses import Response, StreamingResponse try: from ag_ui.core import ( @@ -41,17 +43,29 @@ UserMessage, ) from ag_ui.encoder import EventEncoder -except ImportError as e: +except ImportError as e: # pragma: no cover raise ImportError( 'Please install the `ag-ui-protocol` package to use `Agent.to_ag_ui()` method, ' 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' ) from e +try: + from starlette.applications import Starlette + from starlette.middleware import Middleware + from starlette.requests import Request + from starlette.responses import Response, StreamingResponse + from starlette.routing import BaseRoute + from starlette.types import ExceptionHandler, Lifespan +except ImportError as e: # pragma: no cover + raise ImportError( + 'Please install the `fasta2a` package to use `Agent.to_ag_ui()` method, ' + 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' + ) from e + from pydantic import BaseModel, ValidationError from . 
import Agent, models from ._agent_graph import ModelRequestNode -from ._output import OutputType from ._parts_manager import ModelResponsePartsManager from .agent import RunOutputDataT from .mcp import ToolResult @@ -75,7 +89,8 @@ ToolReturnPart, UserPromptPart, ) -from .result import AgentStream, OutputDataT +from .output import OutputDataT, OutputSpec +from .result import AgentStream from .settings import ModelSettings from .tools import AgentDepsT, Tool from .usage import Usage, UsageLimits @@ -91,154 +106,210 @@ from .agent import AgentRun from .result import FinalResult - +# Variables. _LOGGER: logging.Logger = logging.getLogger(__name__) - # Constants. SSE_CONTENT_TYPE: Final[str] = 'text/event-stream' """Content type header value for Server-Sent Events (SSE).""" -# Enums. -# TODO(steve): Remove this and all uses once https://github.com/ag-ui-protocol/ag-ui/pull/49 is merged. -class Role(str, Enum): - """Enum for message roles in AG-UI protocol.""" - - ASSISTANT = 'assistant' - USER = 'user' - DEVELOPER = 'developer' - SYSTEM = 'system' - TOOL = 'tool' - +class FastAGUI(Generic[AgentDepsT, OutputDataT], Starlette): + """A FastAPI-like application for running PydanticAI agents with AG-UI protocol support.""" -# Exceptions. -@dataclass -class RunError(Exception): - """Exception raised for errors during agent runs.""" - - message: str - code: str - - def __str__(self) -> str: - return self.message - - -@dataclass(kw_only=True) -class UnexpectedToolCallError(RunError): - """Exception raised when an unexpected tool call is encountered.""" - - tool_name: InitVar[str] - message: str = '' - code: str = 'unexpected_tool_call' - - def __post_init__(self, tool_name: str) -> None: - """Set the message for the unexpected tool call. + def __init__( + self, + *, + # Adapter for the agent. + adapter: Adapter[AgentDepsT, OutputDataT], + path: str = '/', + # Agent.iter parameters. 
+ output_type: OutputSpec[OutputDataT] = str, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: UsageLimits | None = None, + usage: Usage | None = None, + infer_name: bool = True, + additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + # Starlette + debug: bool = False, + routes: Sequence[BaseRoute] | None = None, + middleware: Sequence[Middleware] | None = None, + exception_handlers: Mapping[Any, ExceptionHandler] | None = None, + on_startup: Sequence[Callable[[], Any]] | None = None, + on_shutdown: Sequence[Callable[[], Any]] | None = None, + lifespan: Lifespan[FastAGUI[AgentDepsT, OutputDataT]] | None = None, + ) -> None: + """Initialize the FastAGUI application. Args: - tool_name: The name of the tool that was unexpectedly called. - """ - self.message = f'unexpected tool call name={tool_name}' # pragma: no cover - - -@dataclass -class NoMessagesError(RunError): - """Exception raised when no messages are found in the input.""" - - message: str = 'no messages found in the input' - code: str = 'no_messages' - - -@dataclass -class InvalidStateError(RunError, ValidationError): - """Exception raised when an invalid state is provided.""" - - message: str = 'invalid state provided' - code: str = 'invalid_state' - - -# Protocols. -@runtime_checkable -class StateHandler(Protocol): - """Protocol for state handlers in agent runs.""" - - def set_state(self, state: State) -> None: - """Set the state of the agent run. - - This method is called to update the state of the agent run with the - provided state. + adapter: The adapter to use for running the agent. + path: The path to serve the agent run endpoint. - Args: - state: The run state. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has + no output validators since output validators would expect an argument that matches the agent's + output type. 
+ model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + additional_tools: Additional tools to use for this run. - Raises: - ValidationError: If `state` does not match the expected model. + debug: Boolean indicating if debug tracebacks should be returned on errors. + routes: A list of routes to serve incoming HTTP and WebSocket requests. + middleware: A list of middleware to run for every request. A starlette application will always + automatically include two middleware classes. `ServerErrorMiddleware` is added as the very + outermost middleware, to handle any uncaught errors occurring anywhere in the entire stack. + `ExceptionMiddleware` is added as the very innermost middleware, to deal with handled + exception cases occurring in the routing or endpoints. + exception_handlers: A mapping of either integer status codes, or exception class types onto + callables which handle the exceptions. Exception handler callables should be of the form + `handler(request, exc) -> response` and may be either standard functions, or async functions. + on_startup: A list of callables to run on application startup. Startup handler callables do not + take any arguments, and may be either standard functions, or async functions. + on_shutdown: A list of callables to run on application shutdown. Shutdown handler callables do + not take any arguments, and may be either standard functions, or async functions. + lifespan: A lifespan context function, which can be used to perform startup and shutdown tasks. 
+ This is a newer style that replaces the `on_startup` and `on_shutdown` handlers. Use one or + the other, not both. """ - ... - - -StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) -"""Type variable for the state type, which must be a subclass of `BaseModel`.""" - + super().__init__( + debug=debug, + routes=routes, + middleware=middleware, + exception_handlers=exception_handlers, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + ) -@dataclass(kw_only=True) -class StateDeps(Generic[StateT]): - """Provides AG-UI state management. + async def endpoint(request: Request) -> Response | StreamingResponse: + """Endpoint to run the agent with the provided input data.""" + accept: str = request.headers.get('accept', SSE_CONTENT_TYPE) + try: + input_data: RunAgentInput = RunAgentInput.model_validate_json(await request.body()) + except ValidationError as e: # pragma: no cover + _LOGGER.error('invalid request: %s', e) + return Response( + content=json.dumps(e.json()), + media_type='application/json', + status_code=400, + ) - This class is used to manage the state of an agent run. It allows setting - the state of the agent run with a specific type of state model, which must - be a subclass of `BaseModel`. + return StreamingResponse( + adapter.run( + input_data, + accept, + output_type=output_type, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + additional_tools=additional_tools, + ), + media_type=SSE_CONTENT_TYPE, + ) - The state is set using the `set_state` when the run starts by the `Adapter`. + self.router.add_route(path, endpoint, methods=['POST'], name='run_agent') + + +def agent_to_ag_ui( + *, + # Adapter parameters. + agent: Agent[AgentDepsT, OutputDataT], + path: str = '/', + tool_prefix: str = '', + logger: logging.Logger | None = None, + # Agent.iter parameters. 
+ output_type: OutputSpec[OutputDataT] = str, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: UsageLimits | None = None, + usage: Usage | None = None, + infer_name: bool = True, + additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + # Starlette parameters. + debug: bool = False, + routes: Sequence[BaseRoute] | None = None, + middleware: Sequence[Middleware] | None = None, + exception_handlers: Mapping[Any, ExceptionHandler] | None = None, + on_startup: Sequence[Callable[[], Any]] | None = None, + on_shutdown: Sequence[Callable[[], Any]] | None = None, + lifespan: Lifespan[FastAGUI[AgentDepsT, OutputDataT]] | None = None, +) -> FastAGUI[AgentDepsT, OutputDataT]: + """Create a FastAGUI server from an agent. - Implements the `StateHandler` protocol. + Args: + agent: The PydanticAI agent to adapt for AG-UI protocol. + path: The path to serve the agent run endpoint. + tool_prefix: Optional prefix to add to tool names. + logger: Optional logger to use for the adapter, defaults to the module's logger. + + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has + no output validators since output validators would expect an argument that matches the agent's + output type. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + additional_tools: Additional tools to use for this run. + + debug: Boolean indicating if debug tracebacks should be returned on errors. 
+ routes: A list of routes to serve incoming HTTP and WebSocket requests. + middleware: A list of middleware to run for every request. A starlette application will always + automatically include two middleware classes. `ServerErrorMiddleware` is added as the very + outermost middleware, to handle any uncaught errors occurring anywhere in the entire stack. + `ExceptionMiddleware` is added as the very innermost middleware, to deal with handled + exception cases occurring in the routing or endpoints. + exception_handlers: A mapping of either integer status codes, or exception class types onto + callables which handle the exceptions. Exception handler callables should be of the form + `handler(request, exc) -> response` and may be either standard functions, or async functions. + on_startup: A list of callables to run on application startup. Startup handler callables do not + take any arguments, and may be either standard functions, or async functions. + on_shutdown: A list of callables to run on application shutdown. Shutdown handler callables do + not take any arguments, and may be either standard functions, or async functions. + lifespan: A lifespan context function, which can be used to perform startup and shutdown tasks. + This is a newer style that replaces the `on_startup` and `on_shutdown` handlers. Use one or + the other, not both. """ + if logger is None: # pragma: no branch + logger = _LOGGER - state_type: type[StateT] - state: StateT = field(init=False) - - def set_state(self, state: State) -> None: - """Set the state of the agent run. - - This method is called to update the state of the agent run with the - provided state. - - Implements the `StateHandler` protocol. - - Args: - state: The run state, which should match the expected model type or be `None`. - - Raises: - InvalidStateError: If `state` does not match the expected model and is not `None`. 
- """ - if state is None: - return - - try: - self.state = self.state_type.model_validate(state) - except ValidationError as e: # pragma: no cover - raise InvalidStateError from e - - -@dataclass(repr=False) -class _RequestStreamContext: - """Data class to hold request stream context.""" - - message_id: str = '' - last_tool_call_id: str | None = None - part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[BaseEvent | None]()) - local_tool_calls: set[str] = field(default_factory=set) - - def new_message_id(self) -> str: - """Generate a new message ID for the request stream. - - Assigns a new UUID to the `message_id` and returns it. + adapter: Adapter[AgentDepsT, OutputDataT] = Adapter( + agent=agent, + tool_prefix=tool_prefix, + logger=logger, + ) - Returns: - A new message ID. - """ - self.message_id = str(uuid.uuid4()) - return self.message_id + return FastAGUI( + adapter=adapter, + path=path, + # Agent.iter parameter + output_type=output_type, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + additional_tools=additional_tools, + # Starlette + debug=debug, + routes=routes, + middleware=middleware, + exception_handlers=exception_handlers, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + ) @dataclass(kw_only=True, repr=False) @@ -252,39 +323,32 @@ class Adapter(Generic[AgentDepsT, OutputDataT]): Examples: This is an example of base usage with FastAPI. ```python - from __future__ import annotations + from pydantic_ai import Agent - from typing import TYPE_CHECKING, Annotated + agent = Agent('openai:gpt-4.1', instructions='Be fun!') + app = agent.to_ag_ui() + ``` - from fastapi import FastAPI, Header - from fastapi.responses import StreamingResponse - from pydantic_ai import Agent + PydanticAI tools which return AG-UI events will be sent to the client + as part of the event stream, single events and event iterables are + supported. 
+ ```python + from ag_ui.core import CustomEvent, EventType, StateSnapshotEvent + from pydantic import BaseModel + + from pydantic_ai import Agent, RunContext + from pydantic_ai.ag_ui import StateDeps - from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, Adapter - if TYPE_CHECKING: - from ag_ui.core import RunAgentInput + class DocumentState(BaseModel): + document: str + - app = FastAPI(title="AG-UI Endpoint") agent = Agent( - "openai:gpt-4o-mini", - deps_type=int, - instructions="You are a helpful assistant.", + 'openai:gpt-4.1', instructions='Be fun!', deps_type=StateDeps[DocumentState] ) - adapter = agent.to_ag_ui() - @app.post("/") - async def root(input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE) -> StreamingResponse: - return StreamingResponse( - adapter.run(input_data, accept, deps=42), - media_type=SSE_CONTENT_TYPE, - ) - ``` - PydanticAI tools which return AG-UI events will be sent to the client - as part of the event stream, single events and event iterables are - supported. 
- ```python @agent.tool def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEvent: return StateSnapshotEvent( @@ -292,17 +356,18 @@ def update_state(ctx: RunContext[StateDeps[DocumentState]]) -> StateSnapshotEven snapshot=ctx.deps.state, ) + @agent.tool_plain def custom_events() -> list[CustomEvent]: return [ CustomEvent( type=EventType.CUSTOM, - name="count", + name='count', value=1, ), CustomEvent( type=EventType.CUSTOM, - name="count", + name='count', value=2, ), ] @@ -322,7 +387,7 @@ async def run( run_input: RunAgentInput, accept: str = SSE_CONTENT_TYPE, *, - output_type: OutputType[RunOutputDataT] | None = None, + output_type: OutputSpec[RunOutputDataT] | None = None, model: models.Model | models.KnownModelName | str | None = None, deps: AgentDepsT = None, model_settings: ModelSettings | None = None, @@ -369,7 +434,7 @@ async def run( ) if not run_input.messages: - raise NoMessagesError + raise _NoMessagesError if isinstance(deps, StateHandler): deps.set_state(run_input.state) @@ -405,7 +470,7 @@ async def run( break yield encoder.encode(event) - except RunError as e: + except _RunError as e: self.logger.exception('agent run') yield encoder.encode( RunErrorEvent(type=EventType.RUN_ERROR, message=e.message, code=e.code), @@ -458,7 +523,7 @@ def _message_snapshot( match msg: case ModelRequest(): for request_part in msg.parts: - if isinstance(request_part, ToolReturnPart): + if isinstance(request_part, ToolReturnPart): # pragma: no branch messages.append( ToolMessage( id='result-' + request_part.tool_call_id, @@ -467,7 +532,7 @@ def _message_snapshot( tool_call_id=request_part.tool_call_id, ) ) - case ModelResponse(): + case ModelResponse(): # pragma: no branch self._convert_response_parts(msg.parts, messages) self._convert_response_parts(parts_manager.get_parts(), messages) @@ -589,9 +654,9 @@ def _tool_stub(*args: Any, **kwargs: Any) -> ToolResult: Never returns as it always raises an exception. 
Raises: - UnexpectedToolCallError: Always raised since this should never be called. + _UnexpectedToolCallError: Always raised since this should never be called. """ - raise UnexpectedToolCallError(tool_name=tool.name) # pragma: no cover + raise _UnexpectedToolCallError(tool_name=tool.name) # pragma: no cover # TODO(steve): See it we can avoid the cast here. return cast( @@ -718,7 +783,7 @@ async def _handle_agent_event( ), None, # Signal continuation of the stream. ] - case ThinkingPart(): # pragma: no branch + case ThinkingPart(): # pragma: no cover # No equivalent AG-UI event yet. pass case PartDeltaEvent(): @@ -749,7 +814,7 @@ async def _handle_agent_event( if isinstance(agent_event.delta.args_delta, str) else json.dumps(agent_event.delta.args_delta), ) - case ThinkingPartDelta(): # pragma: no branch + case ThinkingPartDelta(): # pragma: no cover # No equivalent AG-UI event yet. pass case FinalResultEvent(): @@ -815,18 +880,152 @@ def _convert_history(messages: list[Message]) -> list[ModelMessage]: return result -# ===================================================================================== -# Exports -# ===================================================================================== - __all__ = [ 'Adapter', 'SSE_CONTENT_TYPE', 'StateDeps', 'StateHandler', - 'Role', - 'RunError', - 'UnexpectedToolCallError', - 'NoMessagesError', - 'InvalidStateError', + 'FastAGUI', + 'agent_to_ag_ui', ] + + +# Enums. +# TODO(steve): Remove this and all uses once https://github.com/ag-ui-protocol/ag-ui/pull/49 is merged. +class Role(str, Enum): + """Enum for message roles in AG-UI protocol.""" + + ASSISTANT = 'assistant' + USER = 'user' + DEVELOPER = 'developer' + SYSTEM = 'system' + TOOL = 'tool' + + +# Exceptions. 
+@dataclass +class _RunError(Exception): + """Exception raised for errors during agent runs.""" + + message: str + code: str + + def __str__(self) -> str: + return self.message + + +@dataclass(kw_only=True) +class _UnexpectedToolCallError(_RunError): + """Exception raised when an unexpected tool call is encountered.""" + + tool_name: InitVar[str] + message: str = '' + code: str = 'unexpected_tool_call' + + def __post_init__(self, tool_name: str) -> None: + """Set the message for the unexpected tool call. + + Args: + tool_name: The name of the tool that was unexpectedly called. + """ + self.message = f'unexpected tool call name={tool_name}' # pragma: no cover + + +@dataclass +class _NoMessagesError(_RunError): + """Exception raised when no messages are found in the input.""" + + message: str = 'no messages found in the input' + code: str = 'no_messages' + + +@dataclass +class _InvalidStateError(_RunError, ValidationError): + """Exception raised when an invalid state is provided.""" + + message: str = 'invalid state provided' + code: str = 'invalid_state' + + +# Protocols. +@runtime_checkable +class StateHandler(Protocol): + """Protocol for state handlers in agent runs.""" + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. + + Args: + state: The run state. + + Raises: + ValidationError: If `state` does not match the expected model. + """ + ... + + +StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) +"""Type variable for the state type, which must be a subclass of `BaseModel`.""" + + +@dataclass(kw_only=True) +class StateDeps(Generic[StateT]): + """Provides AG-UI state management. + + This class is used to manage the state of an agent run. It allows setting + the state of the agent run with a specific type of state model, which must + be a subclass of `BaseModel`. 
+ + The state is set using the `set_state` when the run starts by the `Adapter`. + + Implements the `StateHandler` protocol. + """ + + state_type: type[StateT] + state: StateT = field(init=False) + + def set_state(self, state: State) -> None: + """Set the state of the agent run. + + This method is called to update the state of the agent run with the + provided state. + + Implements the `StateHandler` protocol. + + Args: + state: The run state, which should match the expected model type or be `None`. + + Raises: + InvalidStateError: If `state` does not match the expected model and is not `None`. + """ + if state is None: + return + + try: + self.state = self.state_type.model_validate(state) + except ValidationError as e: # pragma: no cover + raise _InvalidStateError from e + + +@dataclass(repr=False) +class _RequestStreamContext: + """Data class to hold request stream context.""" + + message_id: str = '' + last_tool_call_id: str | None = None + part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[BaseEvent | None]()) + local_tool_calls: set[str] = field(default_factory=set) + + def new_message_id(self) -> str: + """Generate a new message ID for the request stream. + + Assigns a new UUID to the `message_id` and returns it. + + Returns: + A new message ID. 
+ """ + self.message_id = str(uuid.uuid4()) + return self.message_id diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index 9c1026d35..da1818fff 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -55,6 +55,7 @@ from .toolsets.combined import CombinedToolset from .toolsets.function import FunctionToolset from .toolsets.prepared import PreparedToolset +from .usage import Usage, UsageLimits # Re-exporting like this improves auto-import behavior in PyCharm capture_run_messages = _agent_graph.capture_run_messages @@ -74,7 +75,7 @@ from fasta2a.storage import Storage from pydantic_ai.mcp import MCPServer - from .ag_ui import Adapter + from .ag_ui import FastAGUI __all__ = ( 'Agent', @@ -1858,35 +1859,64 @@ async def run_mcp_servers( def to_ag_ui( self, *, + # Adapter parameters. + tool_prefix: str = '', logger: logging.Logger | None = None, - tool_prefix: str | None = None, - ) -> Adapter[AgentDepsT, OutputDataT]: + # Agent.iter parameters + output_type: OutputSpec[OutputDataT] = str, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: UsageLimits | None = None, + usage: Usage | None = None, + infer_name: bool = True, + additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + ) -> FastAGUI[AgentDepsT, OutputDataT]: """Convert the agent to an Adapter instance. This allows you to use the agent with a compatible AG-UI frontend. + The first two arguments are specific to `Adapter` the rest map directly to the `Agent.iter` method. + Args: logger: Optional logger to use for the adapter. tool_prefix: Optional prefix to add to tool names in the AG-UI. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. 
+ model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + additional_tools: Additional tools to use for this run. + Returns: An adapter that converts between AG-UI protocol and PydanticAI. """ try: - from .ag_ui import Adapter - except ImportError as _import_error: + from .ag_ui import agent_to_ag_ui + except ImportError as e: # pragma: no cover raise ImportError( 'Please install the `ag-ui` dependencies to use `Agent.to_ag_ui()` method, ' - 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag_ui]"`' - ) from _import_error - - kwargs: dict[str, Any] = {} - if tool_prefix is not None: - kwargs['tool_prefix'] = tool_prefix - if logger is not None: - kwargs['logger'] = logger - - return Adapter(agent=self, **kwargs) + 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' + ) from e + + return agent_to_ag_ui( + agent=self, + tool_prefix=tool_prefix, + logger=logger, + # Agent.iter parameters + output_type=output_type, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + additional_tools=additional_tools, + ) def to_a2a( self, diff --git a/pydantic_ai_slim/pydantic_ai/models/test.py b/pydantic_ai_slim/pydantic_ai/models/test.py index b67131744..ce8054eee 100644 --- a/pydantic_ai_slim/pydantic_ai/models/test.py +++ b/pydantic_ai_slim/pydantic_ai/models/test.py @@ -214,15 +214,17 @@ def _node_response( parts: list[ModelResponsePart] = [] part: TestPart for part in node.parts: - if isinstance(part, TestTextPart): - assert 
model_request_parameters.allow_text_output, ( + if isinstance(part, TestTextPart): # pragma: no branch + assert model_request_parameters.allow_text_output, ( # pragma: no cover 'Plain response not allowed, but `part` is a `TestText`.' ) - parts.append(TextPart(part.text)) - elif isinstance(part, TestToolCallPart): + parts.append(TextPart(part.text)) # pragma: no cover + elif isinstance(part, TestToolCallPart): # pragma: no branch tool_calls = self._get_tool_calls(model_request_parameters) - if part.call_tools == 'all': - parts.extend(ToolCallPart(name, self.gen_tool_args(args)) for name, args in tool_calls) + if part.call_tools == 'all': # pragma: no branch + parts.extend( + ToolCallPart(name, self.gen_tool_args(args)) for name, args in tool_calls + ) # pragma: no cover else: parts.extend( ToolCallPart(name, self.gen_tool_args(args)) diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index e918e895c..15eae061d 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -81,7 +81,7 @@ evals = ["pydantic-evals=={{ version }}"] # A2A a2a = ["fasta2a=={{ version }}"] # AG UI Adapter -ag-ui = ["ag-ui-protocol>=0.1.5"] +ag-ui = ["ag-ui-protocol>=0.1.5", "starlette>=0.45.3"] [dependency-groups] dev = [ diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index e58422f15..1fef8484c 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -5,7 +5,6 @@ import asyncio import contextlib -import logging import re import uuid from collections.abc import Callable @@ -13,12 +12,14 @@ from itertools import count from typing import Any, Final, Literal +import httpx import pytest +from asgi_lifespan import LifespanManager from pydantic import BaseModel from pydantic_ai import Agent from pydantic_ai.ag_ui import ( - _LOGGER as adapter_logger, # type: ignore[reportPrivateUsage] + SSE_CONTENT_TYPE, Adapter, Role, StateDeps, @@ -51,8 +52,6 @@ pytest.mark.skipif(not has_ag_ui, reason='ag-ui-protocol not installed'), ] -# 
Constants. -CUSTOM_LOGGER: Final[logging.Logger] = logging.getLogger('test_logger') # Type aliases. _MockUUID = Callable[[], str] @@ -117,14 +116,16 @@ async def create_adapter( Returns: An Adapter instance configured with the specified tools. """ - return Agent( - model=TestModel( - call_tools=call_tools, - tool_call_deltas={'get_weather_parts', 'current_time'}, - ), - deps_type=StateDeps[StateInt], - tools=[send_snapshot, send_custom, current_time], - ).to_ag_ui() + return Adapter( + agent=Agent( + model=TestModel( + call_tools=call_tools, + tool_call_deltas={'get_weather_parts', 'current_time'}, + ), + deps_type=StateDeps[StateInt], + tools=[send_snapshot, send_custom, current_time], + ) + ) @pytest.fixture @@ -289,8 +290,8 @@ class AdapterRunTest: # Test parameter data def tc_parameters() -> list[AdapterRunTest]: - if not has_ag_ui: - return [AdapterRunTest(id='skipped', runs=[])] + if not has_ag_ui: # pragma: no branch + return [AdapterRunTest(id='skipped', runs=[])] # pragma: no cover return [ AdapterRunTest( @@ -823,63 +824,50 @@ async def collect_events(run_input: RunAgentInput) -> list[str]: assert len(events) == len(EXPECTED_EVENTS) -@pytest.fixture -async def agent() -> Agent[None, str]: - """Create an Adapter instance for testing.""" - return Agent(model=TestModel()) - - -@dataclass -class ToAGUITest: - id: str - logger: logging.Logger | None = None - tool_prefix: str | None = None - expected_logger: logging.Logger = field( - default_factory=lambda: adapter_logger if has_ag_ui else logging.getLogger(__name__) - ) - expected_tool_prefix: str = '' - - -TEST_PARAMETERS = [ - ToAGUITest( - id='defaults', - ), - ToAGUITest( - id='custom_logger', - logger=CUSTOM_LOGGER, - expected_logger=CUSTOM_LOGGER, - ), - ToAGUITest( - id='custom_tool_prefix', - tool_prefix='test_prefix', - expected_tool_prefix='test_prefix', - ), - ToAGUITest( - id='custom_tool_timeout', - ), - ToAGUITest( - id='custom_all', - logger=CUSTOM_LOGGER, - tool_prefix='test_prefix', - 
expected_logger=CUSTOM_LOGGER, - expected_tool_prefix='test_prefix', - ), -] - - -@pytest.mark.parametrize('tc', TEST_PARAMETERS, ids=lambda tc: tc.id) @pytest.mark.anyio -async def test_to_ag_ui(agent: Agent[None, str], tc: ToAGUITest) -> None: - """Test the agent.to_ag_ui method. - - Args: - agent: The agent instance to test. - tc: Test case parameters including logger, tool prefix, and timeout. - """ - - adapter: Adapter[None, str] = agent.to_ag_ui( - logger=tc.logger, - tool_prefix=tc.tool_prefix, - ) - assert adapter.logger == tc.expected_logger - assert adapter.tool_prefix == tc.expected_tool_prefix +async def test_to_ag_ui(mock_uuid: _MockUUID) -> None: + """Test the agent.to_ag_ui method.""" + + agent: Agent[None, str] = Agent(model=TestModel()) + app = agent.to_ag_ui() + async with LifespanManager(app): + transport = httpx.ASGITransport(app) + async with httpx.AsyncClient(transport=transport) as client: + client.base_url = 'http://localhost:8000' + run_input: RunAgentInput = RunAgentInput( + state=None, + thread_id=f'{THREAD_ID_PREFIX}test_thread', + run_id=f'{RUN_ID_PREFIX}test_run', + messages=[ # pyright: ignore[reportArgumentType] + UserMessage( + id='msg_1', + role=Role.USER.value, + content='Hello, world!', + ), + ], + tools=[], + context=[], + forwarded_props=None, + ) + events: list[str] + async with client.stream( + 'POST', + '/', + content=run_input.model_dump_json(), + headers={'Content-Type': 'application/json', 'Accept': SSE_CONTENT_TYPE}, + ) as response: + assert response.status_code == 200, f'Unexpected status code: {response.status_code}' + events = [line + '\n\n' async for line in response.aiter_lines() if line.startswith('data: ')] + + assert events, 'No parts received from the server' + expected: list[str] = [ + '{"type":"RUN_STARTED","threadId":"thread_test_thread","runId":"run_test_run"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000001","role":"assistant"}', + 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000001","delta":"success "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000001","delta":"(no "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000001","delta":"tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000001","delta":"calls)"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000001"}', + '{"type":"RUN_FINISHED","threadId":"thread_test_thread","runId":"run_test_run"}', + ] + assert_events(events, expected) diff --git a/uv.lock b/uv.lock index 4572bf17b..55b0551e0 100644 --- a/uv.lock +++ b/uv.lock @@ -3086,6 +3086,7 @@ a2a = [ ] ag-ui = [ { name = "ag-ui-protocol" }, + { name = "starlette" }, ] anthropic = [ { name = "anthropic" }, @@ -3180,6 +3181,7 @@ requires-dist = [ { name = "pydantic-graph", editable = "pydantic_graph" }, { name = "requests", marker = "extra == 'vertexai'", specifier = ">=2.32.2" }, { name = "rich", marker = "extra == 'cli'", specifier = ">=13" }, + { name = "starlette", marker = "extra == 'ag-ui'", specifier = ">=0.45.3" }, { name = "tavily-python", marker = "extra == 'tavily'", specifier = ">=0.5.0" }, { name = "typing-inspection", specifier = ">=0.4.0" }, ] From 58a9c576c856a900b7491ef0bc503f6439fb89c6 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Tue, 24 Jun 2025 09:49:16 +0100 Subject: [PATCH 04/13] chore(ag-ui): python 3.9 support Eliminate the use of match statement and dataclass(kw_only=True) to ensure compatibility with Python 3.9. Remove duplicate import of starlette outside try block, causing tests to fail when it's not installed. 
--- .github/workflows/ci.yml | 1 + docs/ag-ui.md | 4 +- examples/pydantic_ai_ag_ui_examples/README.md | 29 +- pydantic_ai_slim/pydantic_ai/ag_ui.py | 407 +++++++++--------- pydantic_ai_slim/pydantic_ai/models/test.py | 26 +- pyproject.toml | 9 + tests/conftest.py | 6 + tests/test_ag_ui.py | 26 +- uv.lock | 197 ++++----- 9 files changed, 361 insertions(+), 344 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 30c8c4584..ca6620e03 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -204,6 +204,7 @@ jobs: enable-cache: true - run: uv sync --package pydantic-ai-slim --only-dev + - run: rm coverage/.coverage.*-py3.9-* # Exclude 3.9 coverage as it gets the wrong line numbers, causing invalid failures. - run: uv run coverage combine coverage - run: uv run coverage html --show-contexts --title "PydanticAI coverage for ${{ github.sha }}" diff --git a/docs/ag-ui.md b/docs/ag-ui.md index a4bd8f79b..f322559b3 100644 --- a/docs/ag-ui.md +++ b/docs/ag-ui.md @@ -225,7 +225,7 @@ def custom_events() -> list[CustomEvent]: ### Examples For more examples of how to use [`Adapter`][pydantic_ai.ag_ui.Adapter] see -[`pydantic_ai.ag_ui_examples`](https://github.com/pydantic/pydantic-ai/tree/main/examples/pydantic_ai.ag_ui_examples), +[`pydantic_ai_ag_ui_examples`](https://github.com/pydantic/pydantic-ai/tree/main/examples/pydantic_ai_ag_ui_examples), which includes working server for the with the [AG-UI Dojo](https://docs.ag-ui.com/tutorials/debugging#the-ag-ui-dojo) which can be run from a clone of the repo or with the `pydantic-ai-examples` package @@ -238,7 +238,7 @@ pip/uv-add pydantic-ai-examples Direct, which supports command line flags: ```shell -python -m pydantic_ai.ag_ui_examples.dojo_server --help +python -m pydantic_ai_ag_ui_examples.dojo_server --help usage: dojo_server.py [-h] [--port PORT] [--reload] [--no-reload] [--log-level {critical,error,warning,info,debug,trace}] PydanticAI AG-UI Dojo server diff --git 
a/examples/pydantic_ai_ag_ui_examples/README.md b/examples/pydantic_ai_ag_ui_examples/README.md index 52d472475..5a6c0b12a 100644 --- a/examples/pydantic_ai_ag_ui_examples/README.md +++ b/examples/pydantic_ai_ag_ui_examples/README.md @@ -7,34 +7,36 @@ Implementation of the AG-UI protocol for PydanticAI. This example uses a PydanticAI agent using an OpenAI model and the AG-UI dojo. 1. An [OpenAI API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key) -2. A clone of this repository -3. A clone of the [AG-UI protocol repository](https://github.com/ag-ui-protocol/ag-ui) +2. A clone of the [AG-UI protocol repository](https://github.com/ag-ui-protocol/ag-ui) ## Running To run this integration you need to: -1. Make a copy of `jobs-agent/.env.local-example` as `.env` +1. Make a copy of `.env-sample` as `.env` in the `typescript-sdk/integrations/pydantic-ai` directory 2. Open it in your editor and set `OPENAI_API_KEY` to a valid OpenAI key -3. Open terminal in the root directory of this repository clone -4. Install the required modules and run the server +3. Open terminal in the `typescript-sdk/integrations/pydantic-ai` of the `ag-ui` repo +4. Install the `pydantic-ai-examples` package ```shell - cd jobs-agent - just install-deps - source .venv/bin/activate - python -m examples.pydantic_ai_ag_ui_examples.dojo_server + pip/uv-add pydantic-ai-examples ``` -5. Open another terminal in root directory of the `ag-ui` repository clone -6. Start the integration ag-ui dojo: +5. Run the example dojo server + + ```shell + python -m pydantic_ai_ag_ui_examples.dojo_server + ``` + +6. Open another terminal in root directory of the `ag-ui` repository clone +7. Start the integration ag-ui dojo: ```shell cd typescript-sdk pnpm install && pnpm run dev ``` -7. Finally visit [http://localhost:3000/pydantic-ai](http://localhost:3000/pydantic-ai) +8. 
Finally visit [http://localhost:3000/pydantic-ai](http://localhost:3000/pydantic-ai) ## Feature Demos @@ -97,7 +99,8 @@ Generate a list of steps for cleaning a car for me to review ### [Predictive State Updates](http://localhost:3000/pydantic-ai/feature/predictive_state_updates) Demonstrates how to use the predictive state updates feature to update the state -of the UI based on agent responses, including user interaction via git aconfirmation. +of the UI based on agent responses, including user interaction via user +confirmation. #### Story Tools diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index 2122babfe..a21b14a84 100644 --- a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -10,11 +10,21 @@ import logging import uuid from collections.abc import Iterable, Mapping, Sequence -from dataclasses import InitVar, dataclass, field +from dataclasses import dataclass, field from enum import Enum -from typing import TYPE_CHECKING, Any, Callable, Final, Generic, Protocol, TypeVar, cast, runtime_checkable - -from starlette.responses import Response, StreamingResponse +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Final, + Generic, + NoReturn, + Protocol, + TypeVar, + Union, + cast, + runtime_checkable, +) try: from ag_ui.core import ( @@ -68,7 +78,6 @@ from ._agent_graph import ModelRequestNode from ._parts_manager import ModelResponsePartsManager from .agent import RunOutputDataT -from .mcp import ToolResult from .messages import ( AgentStreamEvent, FinalResultEvent, @@ -312,7 +321,7 @@ def agent_to_ag_ui( ) -@dataclass(kw_only=True, repr=False) +@dataclass(repr=False) class Adapter(Generic[AgentDepsT, OutputDataT]): """An agent adapter providing AG-UI protocol support for PydanticAI agents. @@ -520,20 +529,19 @@ def _message_snapshot( # Tool calls were made, so we need to create a snapshot. 
for msg in new_messages: - match msg: - case ModelRequest(): - for request_part in msg.parts: - if isinstance(request_part, ToolReturnPart): # pragma: no branch - messages.append( - ToolMessage( - id='result-' + request_part.tool_call_id, - role=Role.TOOL, - content=request_part.content, - tool_call_id=request_part.tool_call_id, - ) + if isinstance(msg, ModelRequest): + for request_part in msg.parts: + if isinstance(request_part, ToolReturnPart): # pragma: no branch + messages.append( + ToolMessage( + id='result-' + request_part.tool_call_id, + role=Role.TOOL, + content=request_part.content, + tool_call_id=request_part.tool_call_id, ) - case ModelResponse(): # pragma: no branch - self._convert_response_parts(msg.parts, messages) + ) + elif isinstance(msg, ModelResponse): # pragma: no branch + self._convert_response_parts(msg.parts, messages) self._convert_response_parts(parts_manager.get_parts(), messages) @@ -551,41 +559,40 @@ def _convert_response_parts(self, parts: list[ModelResponsePart], messages: list """ response_part: ModelResponsePart for response_part in parts: - match response_part: - case TextPart(): # pragma: no cover - # This is not expected, but we handle it gracefully. - messages.append( - AssistantMessage( - id=uuid.uuid4().hex, - role=Role.ASSISTANT, - content=response_part.content, - ) - ) - case ToolCallPart(): - args: str = ( - json.dumps(response_part.args) - if isinstance(response_part.args, dict) - else response_part.args or '{}' + if isinstance(response_part, TextPart): # pragma: no cover + # This is not expected, but we handle it gracefully. 
+ messages.append( + AssistantMessage( + id=uuid.uuid4().hex, + role=Role.ASSISTANT, + content=response_part.content, ) - messages.append( - AssistantMessage( - id=uuid.uuid4().hex, - role=Role.ASSISTANT, - tool_calls=[ - ToolCall( - id=response_part.tool_call_id, - type='function', - function=FunctionCall( - name=response_part.tool_name, - arguments=args, - ), - ) - ], - ), - ) - case ThinkingPart(): # pragma: no cover - # No AG-UI equivalent for thinking parts, so we skip them. - pass + ) + elif isinstance(response_part, ToolCallPart): + args: str = ( + json.dumps(response_part.args) + if isinstance(response_part.args, dict) + else response_part.args or '{}' + ) + messages.append( + AssistantMessage( + id=uuid.uuid4().hex, + role=Role.ASSISTANT, + tool_calls=[ + ToolCall( + id=response_part.tool_call_id, + type='function', + function=FunctionCall( + name=response_part.tool_name, + arguments=args, + ), + ) + ], + ), + ) + elif isinstance(response_part, ThinkingPart): # pragma: no cover + # No AG-UI equivalent for thinking parts, so we skip them. + pass async def _tool_events(self, parts: list[ModelRequestPart]) -> AsyncGenerator[BaseEvent | None, None]: """Check for tool call results that are AG-UI events. @@ -606,21 +613,22 @@ async def _tool_events(self, parts: list[ModelRequestPart]) -> AsyncGenerator[Ba continue iter: Iterable[Any] - match part.content: - case BaseEvent(): - self.logger.debug('ag-ui event: %s', part.content) - yield part.content - case str() | bytes(): - # Avoid strings and bytes being checked as iterable. - pass - case Iterable() as iter: - for item in iter: - if isinstance(item, BaseEvent): # pragma: no branch - self.logger.debug('ag-ui event: %s', item) - yield item - case _: # pragma: no cover - # Not currently interested in other types. 
- pass + if isinstance(part.content, BaseEvent): + self.logger.debug('ag-ui event: %s', part.content) + yield part.content + elif isinstance(part.content, (str, bytes)): + # Avoid strings and bytes being checked as iterable. + pass + elif isinstance(part.content, Iterable): + # Type: ignore to handle partially unknown type + iter = part.content # type: ignore[assignment] + for item in iter: + if isinstance(item, BaseEvent): # pragma: no branch + self.logger.debug('ag-ui event: %s', item) + yield item + else: # pragma: no cover + # Not currently interested in other types. + pass def _convert_tools(self, run_tools: list[ToolAGUI]) -> list[Tool[AgentDepsT]]: """Convert AG-UI tools to PydanticAI tools. @@ -647,7 +655,7 @@ def _tool_call(self, tool: ToolAGUI) -> Tool[AgentDepsT]: A PydanticAI `Tool` object that calls the AG-UI tool. """ - def _tool_stub(*args: Any, **kwargs: Any) -> ToolResult: + def _tool_stub(*args: Any, **kwargs: Any) -> NoReturn: """Stub function which is never called. Returns: @@ -658,7 +666,6 @@ def _tool_stub(*args: Any, **kwargs: Any) -> ToolResult: """ raise _UnexpectedToolCallError(tool_name=tool.name) # pragma: no cover - # TODO(steve): See it we can avoid the cast here. return cast( 'Tool[AgentDepsT]', Tool.from_schema( @@ -729,97 +736,94 @@ async def _handle_agent_event( AG-UI Server-Sent Events (SSE) based on the agent event. """ self.logger.debug('agent_event: %s', agent_event) - match agent_event: - case PartStartEvent(): - # If we have a previous part end it. 
- part_end: BaseEvent | None - for part_end in stream_ctx.part_ends: - yield part_end - stream_ctx.part_ends.clear() - - match agent_event.part: - case TextPart(): - message_id: str = stream_ctx.new_message_id() - yield TextMessageStartEvent( - type=EventType.TEXT_MESSAGE_START, - message_id=message_id, - role=Role.ASSISTANT.value, - ) - stream_ctx.part_ends = [ - TextMessageEndEvent( - type=EventType.TEXT_MESSAGE_END, - message_id=message_id, - ), - ] - if agent_event.part.content: - yield TextMessageContentEvent( # pragma: no cover - type=EventType.TEXT_MESSAGE_CONTENT, - message_id=message_id, - delta=agent_event.part.content, - ) - case ToolCallPart(): # pragma: no branch - tool_name: str | None = tool_names.get(agent_event.part.tool_name) - if not tool_name: - # Local tool calls are not sent as events to the UI. - stream_ctx.local_tool_calls.add(agent_event.part.tool_call_id) - return - - parts_manager.handle_tool_call_part( - vendor_part_id=None, - tool_name=agent_event.part.tool_name, - args=agent_event.part.args, - tool_call_id=agent_event.part.tool_call_id, - ) - stream_ctx.last_tool_call_id = agent_event.part.tool_call_id - yield ToolCallStartEvent( - type=EventType.TOOL_CALL_START, - tool_call_id=agent_event.part.tool_call_id, - tool_call_name=tool_name or agent_event.part.tool_name, - ) - stream_ctx.part_ends = [ - ToolCallEndEvent( - type=EventType.TOOL_CALL_END, - tool_call_id=agent_event.part.tool_call_id, - ), - None, # Signal continuation of the stream. - ] - case ThinkingPart(): # pragma: no cover - # No equivalent AG-UI event yet. - pass - case PartDeltaEvent(): - match agent_event.delta: - case TextPartDelta(): - yield TextMessageContentEvent( - type=EventType.TEXT_MESSAGE_CONTENT, - message_id=stream_ctx.message_id, - delta=agent_event.delta.content_delta, - ) - case ToolCallPartDelta(): # pragma: no branch - if agent_event.delta.tool_call_id in stream_ctx.local_tool_calls: - # Local tool calls are not sent as events to the UI. 
- return - - parts_manager.handle_tool_call_delta( - vendor_part_id=None, - tool_name=None, - args=agent_event.delta.args_delta, - tool_call_id=agent_event.delta.tool_call_id, - ) - yield ToolCallArgsEvent( - type=EventType.TOOL_CALL_ARGS, - tool_call_id=agent_event.delta.tool_call_id - or stream_ctx.last_tool_call_id - or 'unknown', # Should never be unknown, but just in case. - delta=agent_event.delta.args_delta - if isinstance(agent_event.delta.args_delta, str) - else json.dumps(agent_event.delta.args_delta), - ) - case ThinkingPartDelta(): # pragma: no cover - # No equivalent AG-UI event yet. - pass - case FinalResultEvent(): + if isinstance(agent_event, PartStartEvent): + # If we have a previous part end it. + part_end: BaseEvent | None + for part_end in stream_ctx.part_ends: + yield part_end + stream_ctx.part_ends.clear() + + if isinstance(agent_event.part, TextPart): + message_id: str = stream_ctx.new_message_id() + yield TextMessageStartEvent( + type=EventType.TEXT_MESSAGE_START, + message_id=message_id, + role=Role.ASSISTANT.value, + ) + stream_ctx.part_ends = [ + TextMessageEndEvent( + type=EventType.TEXT_MESSAGE_END, + message_id=message_id, + ), + ] + if agent_event.part.content: + yield TextMessageContentEvent( # pragma: no cover + type=EventType.TEXT_MESSAGE_CONTENT, + message_id=message_id, + delta=agent_event.part.content, + ) + elif isinstance(agent_event.part, ToolCallPart): # pragma: no branch + tool_name: str | None = tool_names.get(agent_event.part.tool_name) + if not tool_name: + # Local tool calls are not sent as events to the UI. 
+ stream_ctx.local_tool_calls.add(agent_event.part.tool_call_id) + return + + parts_manager.handle_tool_call_part( + vendor_part_id=None, + tool_name=agent_event.part.tool_name, + args=agent_event.part.args, + tool_call_id=agent_event.part.tool_call_id, + ) + stream_ctx.last_tool_call_id = agent_event.part.tool_call_id + yield ToolCallStartEvent( + type=EventType.TOOL_CALL_START, + tool_call_id=agent_event.part.tool_call_id, + tool_call_name=tool_name or agent_event.part.tool_name, + ) + stream_ctx.part_ends = [ + ToolCallEndEvent( + type=EventType.TOOL_CALL_END, + tool_call_id=agent_event.part.tool_call_id, + ), + None, # Signal continuation of the stream. + ] + elif isinstance(agent_event.part, ThinkingPart): # pragma: no cover + # No equivalent AG-UI event yet. + pass + elif isinstance(agent_event, PartDeltaEvent): + if isinstance(agent_event.delta, TextPartDelta): + yield TextMessageContentEvent( + type=EventType.TEXT_MESSAGE_CONTENT, + message_id=stream_ctx.message_id, + delta=agent_event.delta.content_delta, + ) + elif isinstance(agent_event.delta, ToolCallPartDelta): # pragma: no branch + if agent_event.delta.tool_call_id in stream_ctx.local_tool_calls: + # Local tool calls are not sent as events to the UI. + return + + parts_manager.handle_tool_call_delta( + vendor_part_id=None, + tool_name=None, + args=agent_event.delta.args_delta, + tool_call_id=agent_event.delta.tool_call_id, + ) + yield ToolCallArgsEvent( + type=EventType.TOOL_CALL_ARGS, + tool_call_id=agent_event.delta.tool_call_id + or stream_ctx.last_tool_call_id + or 'unknown', # Should never be unknown, but just in case. + delta=agent_event.delta.args_delta + if isinstance(agent_event.delta.args_delta, str) + else json.dumps(agent_event.delta.args_delta), + ) + elif isinstance(agent_event.delta, ThinkingPartDelta): # pragma: no cover # No equivalent AG-UI event yet. pass + elif isinstance(agent_event, FinalResultEvent): + # No equivalent AG-UI event yet. 
+ pass def _convert_history(messages: list[Message]) -> list[ModelMessage]: @@ -835,47 +839,46 @@ def _convert_history(messages: list[Message]) -> list[ModelMessage]: result: list[ModelMessage] = [] tool_calls: dict[str, str] = {} for msg in messages: - match msg: - case UserMessage(): - result.append(ModelRequest(parts=[UserPromptPart(content=msg.content)])) - case AssistantMessage(): - if msg.tool_calls: - for tool_call in msg.tool_calls: - tool_calls[tool_call.id] = tool_call.function.name - - result.append( - ModelResponse( - parts=[ - ToolCallPart( - tool_name=tool_call.function.name, - tool_call_id=tool_call.id, - args=tool_call.function.arguments, - ) - for tool_call in msg.tool_calls - ] - ) - ) + if isinstance(msg, UserMessage): + result.append(ModelRequest(parts=[UserPromptPart(content=msg.content)])) + elif isinstance(msg, AssistantMessage): + if msg.tool_calls: + for tool_call in msg.tool_calls: + tool_calls[tool_call.id] = tool_call.function.name - if msg.content: - result.append(ModelResponse(parts=[TextPart(content=msg.content)])) - case SystemMessage(): - # TODO(steve): Should we handle as instructions instead of system prompt? - result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) - case ToolMessage(): result.append( - ModelRequest( + ModelResponse( parts=[ - ToolReturnPart( - tool_name=tool_calls.get(msg.tool_call_id, 'unknown'), - content=msg.content, - tool_call_id=msg.tool_call_id, + ToolCallPart( + tool_name=tool_call.function.name, + tool_call_id=tool_call.id, + args=tool_call.function.arguments, ) + for tool_call in msg.tool_calls ] ) ) - case DeveloperMessage(): # pragma: no branch - # TODO(steve): Should these be handled differently? - result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) + + if msg.content: + result.append(ModelResponse(parts=[TextPart(content=msg.content)])) + elif isinstance(msg, SystemMessage): + # TODO(steve): Should we handle as instructions instead of system prompt? 
+ result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) + elif isinstance(msg, ToolMessage): + result.append( + ModelRequest( + parts=[ + ToolReturnPart( + tool_name=tool_calls.get(msg.tool_call_id, 'unknown'), + content=msg.content, + tool_call_id=msg.tool_call_id, + ) + ] + ) + ) + elif isinstance(msg, DeveloperMessage): # pragma: no branch + # TODO(steve): Should these be handled differently? + result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) return result @@ -914,21 +917,19 @@ def __str__(self) -> str: return self.message -@dataclass(kw_only=True) class _UnexpectedToolCallError(_RunError): """Exception raised when an unexpected tool call is encountered.""" - tool_name: InitVar[str] - message: str = '' - code: str = 'unexpected_tool_call' - - def __post_init__(self, tool_name: str) -> None: - """Set the message for the unexpected tool call. + def __init__(self, *, tool_name: str) -> None: + """Initialize the unexpected tool call error. Args: tool_name: The name of the tool that was unexpectedly called. """ - self.message = f'unexpected tool call name={tool_name}' # pragma: no cover + super().__init__( + message=f'unexpected tool call name={tool_name}', # pragma: no cover + code='unexpected_tool_call', + ) @dataclass @@ -971,7 +972,7 @@ def set_state(self, state: State) -> None: """Type variable for the state type, which must be a subclass of `BaseModel`.""" -@dataclass(kw_only=True) +@dataclass class StateDeps(Generic[StateT]): """Provides AG-UI state management. 
@@ -1016,7 +1017,7 @@ class _RequestStreamContext: message_id: str = '' last_tool_call_id: str | None = None - part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[BaseEvent | None]()) + part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[Union[BaseEvent, None]]()) local_tool_calls: set[str] = field(default_factory=set) def new_message_id(self) -> str: diff --git a/pydantic_ai_slim/pydantic_ai/models/test.py b/pydantic_ai_slim/pydantic_ai/models/test.py index ce8054eee..c4bac7573 100644 --- a/pydantic_ai_slim/pydantic_ai/models/test.py +++ b/pydantic_ai_slim/pydantic_ai/models/test.py @@ -6,10 +6,10 @@ from contextlib import asynccontextmanager from dataclasses import InitVar, dataclass, field from datetime import date, datetime, timedelta -from typing import Any, Literal +from typing import Any, Literal, Union import pydantic_core -from typing_extensions import assert_never +from typing_extensions import TypeAlias, assert_never from .. import _utils from ..messages import ( @@ -49,6 +49,9 @@ class _WrappedToolOutput: class TestToolCallPart: """Represents a tool call in the test model.""" + # NOTE: Avoid test discovery by pytest. + __test__ = False + call_tools: list[str] | Literal['all'] = 'all' deltas: bool = False @@ -57,10 +60,13 @@ class TestToolCallPart: class TestTextPart: """Represents a text part in the test model.""" + # NOTE: Avoid test discovery by pytest. + __test__ = False + text: str -TestPart = TestTextPart | TestToolCallPart +TestPart: TypeAlias = Union[TestTextPart, TestToolCallPart] """A part of the test model response.""" @@ -68,6 +74,9 @@ class TestTextPart: class TestNode: """A node in the test model.""" + # NOTE: Avoid test discovery by pytest. 
+ __test__ = False + parts: list[TestPart] id: str = field(default_factory=_utils.generate_tool_call_id) @@ -175,14 +184,14 @@ def _get_output(self, model_request_parameters: ModelRequestParameters) -> _Wrap if k := output_tool.outer_typed_dict_key: return _WrappedToolOutput({k: self.custom_output_args}) - else: - return _WrappedToolOutput(self.custom_output_args) + + return _WrappedToolOutput(self.custom_output_args) elif model_request_parameters.allow_text_output: return _WrappedTextOutput(None) - elif model_request_parameters.output_tools: + elif model_request_parameters.output_tools: # pragma: no branch return _WrappedToolOutput(None) else: - return _WrappedTextOutput(None) + return _WrappedTextOutput(None) # pragma: no cover def _node_response( self, @@ -302,8 +311,7 @@ def _request( output_tool = output_tools[self.seed % len(output_tools)] if custom_output_args is not None: return ModelResponse( - parts=[ToolCallPart(output_tool.name, custom_output_args)], - model_name=self._model_name, + parts=[ToolCallPart(output_tool.name, custom_output_args)], model_name=self._model_name ) else: response_args = self.gen_tool_args(output_tool) diff --git a/pyproject.toml b/pyproject.toml index 901ebb869..0551b2c21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -226,6 +226,15 @@ omit = [ "pydantic_ai_slim/pydantic_ai/ext/aci.py", # aci-sdk requires Python 3.10+ so cannot be added as an (optional) dependency ] branch = true +disable_warnings = ["include-ignored"] + +[tool.coverage.paths] +# Allow CI run assets to be downloaded an replicated locally. 
+source = [ + ".", + "/home/runner/work/pydantic-ai/pydantic-ai", + "/System/Volumes/Data/home/runner/work/pydantic-ai/pydantic-ai" +] # https://coverage.readthedocs.io/en/latest/config.html#report [tool.coverage.report] diff --git a/tests/conftest.py b/tests/conftest.py index ce95301d3..475fd4f23 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ import asyncio import importlib.util +import logging import os import re import secrets @@ -28,6 +29,11 @@ __all__ = 'IsDatetime', 'IsFloat', 'IsNow', 'IsStr', 'IsInt', 'IsInstance', 'TestEnv', 'ClientWithHandler', 'try_import' +# Configure VCR logger to WARNING as it is too verbose by default +# specifically, it logs every request and response including binary +# content in Cassette.append, which is causing log downloads from +# GitHub action to fail. +logging.getLogger('vcr.cassette').setLevel(logging.WARNING) pydantic_ai.models.ALLOW_MODEL_REQUESTS = False diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index 1fef8484c..93d80581e 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -18,12 +18,6 @@ from pydantic import BaseModel from pydantic_ai import Agent -from pydantic_ai.ag_ui import ( - SSE_CONTENT_TYPE, - Adapter, - Role, - StateDeps, -) from pydantic_ai.models.test import TestModel, TestNode, TestToolCallPart has_ag_ui: bool = False @@ -44,6 +38,13 @@ UserMessage, ) + from pydantic_ai.ag_ui import ( + SSE_CONTENT_TYPE, + Adapter, + Role, + StateDeps, + ) + has_ag_ui = True @@ -122,7 +123,7 @@ async def create_adapter( call_tools=call_tools, tool_call_deltas={'get_weather_parts', 'current_time'}, ), - deps_type=StateDeps[StateInt], + deps_type=StateDeps[StateInt], # type: ignore[reportUnknownArgumentType] tools=[send_snapshot, send_custom, current_time], ) ) @@ -171,11 +172,12 @@ def _fake_uuid4() -> uuid.UUID: def assert_events(events: list[str], expected_events: list[str], *, loose: bool = False) -> None: expected: str event: str - for event, expected in zip(events, 
expected_events, strict=True): + for event, expected in zip(events, expected_events): if loose: expected = normalize_uuids(expected) event = normalize_uuids(event) assert event == f'data: {expected}\n\n' + assert len(events) == len(expected_events) def normalize_uuids(text: str) -> str: @@ -291,7 +293,7 @@ class AdapterRunTest: # Test parameter data def tc_parameters() -> list[AdapterRunTest]: if not has_ag_ui: # pragma: no branch - return [AdapterRunTest(id='skipped', runs=[])] # pragma: no cover + return [AdapterRunTest(id='skipped', runs=[])] return [ AdapterRunTest( @@ -763,7 +765,7 @@ async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: events: list[str] = [] thread_id: str = f'{THREAD_ID_PREFIX}{mock_uuid()}' adapter: Adapter[StateDeps[StateInt], str] = await create_adapter(tc.call_tools) - deps: StateDeps[StateInt] = StateDeps[StateInt](state_type=StateInt) + deps: StateDeps[StateInt] = StateDeps[StateInt](state_type=StateInt) # type: ignore[reportUnknownArgumentType] for run in tc.runs: if run.nodes is not None: assert isinstance(adapter.agent.model, TestModel), 'Agent model is not TestModel' @@ -774,11 +776,11 @@ async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', ) - events.extend([event async for event in adapter.run(run_input, deps=deps)]) + events.extend([event async for event in adapter.run(run_input, deps=deps)]) # type: ignore[reportUnknownArgumentType] assert_events(events, tc.expected_events) if tc.expected_state is not None: - assert deps.state.value == tc.expected_state + assert deps.state.value == tc.expected_state # type: ignore[reportUnknownArgumentType] async def test_concurrent_runs(mock_uuid: _MockUUID, adapter: Adapter[None, str]) -> None: diff --git a/uv.lock b/uv.lock index 55b0551e0..59ae6a0ef 100644 --- a/uv.lock +++ b/uv.lock @@ -2958,17 +2958,16 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.10.6" source = { 
registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, - { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681, upload-time = "2025-01-24T01:42:12.693Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696, upload-time = "2025-01-24T01:42:10.371Z" }, ] [[package]] @@ -3208,111 +3207,99 @@ dev = [ [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { 
url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = 
"https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = 
"2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = 
"2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", size = 2028677, upload-time = "2025-04-23T18:32:27.227Z" }, - { url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", size = 1864735, upload-time = "2025-04-23T18:32:29.019Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467, upload-time = "2025-04-23T18:32:31.119Z" }, - { url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041, upload-time = "2025-04-23T18:32:33.655Z" }, - { url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503, upload-time = "2025-04-23T18:32:35.519Z" }, - { url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079, upload-time = "2025-04-23T18:32:37.659Z" }, - { url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508, upload-time = "2025-04-23T18:32:39.637Z" }, - { url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693, upload-time = 
"2025-04-23T18:32:41.818Z" }, - { url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224, upload-time = "2025-04-23T18:32:44.033Z" }, - { url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403, upload-time = "2025-04-23T18:32:45.836Z" }, - { url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331, upload-time = "2025-04-23T18:32:47.618Z" }, - { url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", size = 1910571, upload-time = "2025-04-23T18:32:49.401Z" }, - { url = "https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", size = 1956504, upload-time = "2025-04-23T18:32:51.287Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = 
"2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = 
"2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, - { url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", size = 2024034, upload-time = "2025-04-23T18:33:32.843Z" }, - { 
url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", size = 1858578, upload-time = "2025-04-23T18:33:34.912Z" }, - { url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858, upload-time = "2025-04-23T18:33:36.933Z" }, - { url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498, upload-time = "2025-04-23T18:33:38.997Z" }, - { url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428, upload-time = "2025-04-23T18:33:41.18Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854, upload-time = "2025-04-23T18:33:43.446Z" }, - { url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859, upload-time = "2025-04-23T18:33:45.56Z" 
}, - { url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059, upload-time = "2025-04-23T18:33:47.735Z" }, - { url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661, upload-time = "2025-04-23T18:33:49.995Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload-time = "2024-12-18T11:31:54.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938, upload-time = "2024-12-18T11:27:14.406Z" }, + { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684, upload-time = "2024-12-18T11:27:16.489Z" }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169, upload-time = "2024-12-18T11:27:22.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227, upload-time = "2024-12-18T11:27:25.097Z" }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695, upload-time = "2024-12-18T11:27:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662, upload-time = "2024-12-18T11:27:30.798Z" }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370, upload-time = "2024-12-18T11:27:33.692Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813, upload-time = "2024-12-18T11:27:37.111Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287, upload-time = 
"2024-12-18T11:27:40.566Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414, upload-time = "2024-12-18T11:27:43.757Z" }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301, upload-time = "2024-12-18T11:27:47.36Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685, upload-time = "2024-12-18T11:27:50.508Z" }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876, upload-time = "2024-12-18T11:27:53.54Z" }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421, upload-time = "2024-12-18T11:27:55.409Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998, upload-time = "2024-12-18T11:27:57.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167, upload-time = "2024-12-18T11:27:59.146Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071, upload-time = "2024-12-18T11:28:02.625Z" }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244, upload-time = "2024-12-18T11:28:04.442Z" }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470, upload-time = "2024-12-18T11:28:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291, upload-time = "2024-12-18T11:28:10.297Z" }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613, 
upload-time = "2024-12-18T11:28:13.362Z" }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355, upload-time = "2024-12-18T11:28:16.587Z" }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661, upload-time = "2024-12-18T11:28:18.407Z" }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261, upload-time = "2024-12-18T11:28:21.471Z" }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361, upload-time = "2024-12-18T11:28:23.53Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484, upload-time = "2024-12-18T11:28:25.391Z" }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102, upload-time = "2024-12-18T11:28:28.593Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127, upload-time = "2024-12-18T11:28:30.346Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340, upload-time = "2024-12-18T11:28:32.521Z" }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900, upload-time = "2024-12-18T11:28:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177, upload-time = "2024-12-18T11:28:36.488Z" }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046, upload-time = "2024-12-18T11:28:39.409Z" }, + { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386, upload-time = "2024-12-18T11:28:41.221Z" }, + { 
url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060, upload-time = "2024-12-18T11:28:44.709Z" }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870, upload-time = "2024-12-18T11:28:46.839Z" }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822, upload-time = "2024-12-18T11:28:48.896Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364, upload-time = "2024-12-18T11:28:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303, upload-time = "2024-12-18T11:28:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064, upload-time = "2024-12-18T11:28:56.074Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046, upload-time = "2024-12-18T11:28:58.107Z" }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092, upload-time = "2024-12-18T11:29:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709, upload-time = "2024-12-18T11:29:03.193Z" }, + { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273, upload-time = "2024-12-18T11:29:05.306Z" }, + { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027, upload-time = "2024-12-18T11:29:07.294Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888, upload-time = "2024-12-18T11:29:09.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738, upload-time = "2024-12-18T11:29:11.23Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138, upload-time = "2024-12-18T11:29:16.396Z" }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025, upload-time = "2024-12-18T11:29:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633, upload-time = "2024-12-18T11:29:23.877Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404, upload-time = "2024-12-18T11:29:25.872Z" }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130, upload-time = "2024-12-18T11:29:29.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946, upload-time = "2024-12-18T11:29:31.338Z" }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387, upload-time = "2024-12-18T11:29:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453, upload-time = "2024-12-18T11:29:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186, upload-time = "2024-12-18T11:29:37.649Z" }, + { url = "https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475, upload-time = "2024-12-18T11:30:18.316Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279, upload-time = "2024-12-18T11:30:20.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112, upload-time = "2024-12-18T11:30:23.255Z" }, + { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780, upload-time = "2024-12-18T11:30:25.742Z" }, + { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943, upload-time = "2024-12-18T11:30:28.036Z" }, + { url = "https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492, upload-time = "2024-12-18T11:30:30.412Z" }, + { url = "https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714, upload-time = "2024-12-18T11:30:34.358Z" }, + { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163, upload-time = 
"2024-12-18T11:30:37.979Z" }, + { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217, upload-time = "2024-12-18T11:30:40.367Z" }, + { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899, upload-time = "2024-12-18T11:30:42.737Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726, upload-time = "2024-12-18T11:30:45.279Z" }, + { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219, upload-time = "2024-12-18T11:30:47.718Z" }, + { url = "https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382, upload-time = "2024-12-18T11:30:51.871Z" }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159, upload-time = "2024-12-18T11:30:54.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331, upload-time = "2024-12-18T11:30:58.178Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467, upload-time = "2024-12-18T11:31:00.6Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797, upload-time = "2024-12-18T11:31:07.243Z" }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839, upload-time = "2024-12-18T11:31:09.775Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861, upload-time = "2024-12-18T11:31:13.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582, upload-time = 
"2024-12-18T11:31:17.423Z" }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985, upload-time = "2024-12-18T11:31:19.901Z" }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715, upload-time = "2024-12-18T11:31:22.821Z" }, + { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733, upload-time = "2024-12-18T11:31:26.876Z" }, + { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375, upload-time = "2024-12-18T11:31:29.276Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307, upload-time = "2024-12-18T11:31:33.123Z" }, + { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", size = 1979971, upload-time = 
"2024-12-18T11:31:35.755Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616, upload-time = "2024-12-18T11:31:38.534Z" }, + { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943, upload-time = "2024-12-18T11:31:41.853Z" }, + { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654, upload-time = "2024-12-18T11:31:44.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292, upload-time = "2024-12-18T11:31:48.613Z" }, + { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961, upload-time = "2024-12-18T11:31:52.446Z" }, ] [[package]] From 2e24bfebfef9173a286c3f0fe3521c3022b48bd1 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Fri, 20 Jun 2025 22:20:58 +0100 Subject: [PATCH 05/13] chore(mistral): disable model_fields deprecation warning Disable the deprecation warning for model_fields in Mistral. 
--- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0551b2c21..ad3fc4fed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,6 +198,8 @@ filterwarnings = [ "error", # Issue with python-multipart - we don't want to bump the minimum version of starlette. "ignore::PendingDeprecationWarning:starlette", + # mistralai accesses model_fields on the instance, which is deprecated in Pydantic 2.11. + "ignore:Accessing the 'model_fields' attribute", # boto3 "ignore::DeprecationWarning:botocore.*", "ignore::RuntimeWarning:pydantic_ai.mcp", From 34e76725693ba2600c1432da8fadc9e4e30f3e47 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Sat, 21 Jun 2025 15:51:24 +0100 Subject: [PATCH 06/13] chore(mistral): remove invalid no cover Remove invalid no cover comment from Mistral model flagged by strict-no-cover check. --- pydantic_ai_slim/pydantic_ai/models/mistral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/models/mistral.py b/pydantic_ai_slim/pydantic_ai/models/mistral.py index a8de70274..0fd15df07 100644 --- a/pydantic_ai_slim/pydantic_ai/models/mistral.py +++ b/pydantic_ai_slim/pydantic_ai/models/mistral.py @@ -423,7 +423,7 @@ def _get_python_type(cls, value: dict[str, Any]) -> str: if value_type == 'object': additional_properties = value.get('additionalProperties', {}) if isinstance(additional_properties, bool): - return 'bool' # pragma: no cover + return 'bool' additional_properties_type = additional_properties.get('type') if ( additional_properties_type in SIMPLE_JSON_TYPE_MAPPING From 0ff3a7fd11dabef8a21edebd9b832977b5119a49 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Thu, 26 Jun 2025 17:50:07 +0100 Subject: [PATCH 07/13] feat(ag-ui): Tool call returns and thinking parts Add support for the new tool call returns and thinking parts to the AG UI adapter. 
This eliminates the need to build a message snapshot while sill providing the necessary information for the UI. --- pydantic_ai_slim/pydantic_ai/ag_ui.py | 230 ++++++-------------- pydantic_ai_slim/pydantic_ai/models/test.py | 22 +- pydantic_ai_slim/pyproject.toml | 2 +- tests/test_ag_ui.py | 52 +++-- uv.lock | 8 +- 5 files changed, 129 insertions(+), 185 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index a21b14a84..422033a77 100644 --- a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -32,9 +32,7 @@ BaseEvent, DeveloperMessage, EventType, - FunctionCall, Message, - MessagesSnapshotEvent, RunAgentInput, RunErrorEvent, RunFinishedEvent, @@ -44,10 +42,13 @@ TextMessageContentEvent, TextMessageEndEvent, TextMessageStartEvent, + ThinkingTextMessageContentEvent, + ThinkingTextMessageEndEvent, + ThinkingTextMessageStartEvent, Tool as ToolAGUI, - ToolCall, ToolCallArgsEvent, ToolCallEndEvent, + ToolCallResultEvent, ToolCallStartEvent, ToolMessage, UserMessage, @@ -76,7 +77,6 @@ from . 
import Agent, models from ._agent_graph import ModelRequestNode -from ._parts_manager import ModelResponsePartsManager from .agent import RunOutputDataT from .messages import ( AgentStreamEvent, @@ -85,7 +85,6 @@ ModelRequest, ModelRequestPart, ModelResponse, - ModelResponsePart, PartDeltaEvent, PartStartEvent, SystemPromptPart, @@ -448,11 +447,13 @@ async def run( if isinstance(deps, StateHandler): deps.set_state(run_input.state) + history: _History = _convert_history(run_input.messages) + run: AgentRun[AgentDepsT, Any] async with self.agent.iter( user_prompt=None, output_type=output_type, - message_history=_convert_history(run_input.messages), + message_history=history.messages, model=model, deps=deps, model_settings=model_settings, @@ -461,21 +462,10 @@ async def run( infer_name=infer_name, additional_tools=run_tools, ) as run: - parts_manager: ModelResponsePartsManager = ModelResponsePartsManager() - async for event in self._agent_stream(tool_names, run, parts_manager): + async for event in self._agent_stream(tool_names, run, history.prompt_message_id): if event is None: # Tool call signals early return, so we stop processing. self.logger.debug('tool call early return') - - # TODO(steve): Remove this workaround, it's only needed as AG-UI doesn't - # currently have a way to add server side tool calls to the message history - # via events. To workaround this we create a full snapshot of the messages - # and send that. 
- snapshot: MessagesSnapshotEvent | None = self._message_snapshot( - run, run_input.messages, parts_manager - ) - if snapshot is not None: - yield encoder.encode(snapshot) break yield encoder.encode(event) @@ -500,135 +490,49 @@ async def run( self.logger.info('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) - def _message_snapshot( - self, run: AgentRun[AgentDepsT, Any], messages: list[Message], parts_manager: ModelResponsePartsManager - ) -> MessagesSnapshotEvent | None: - """Create a message snapshot to replicate the current state of the run. - - This method collects all messages from the run's state and the parts - manager, converting them into AG-UI messages. - - Args: - run: The agent run instance. - messages: The initial messages from the run input. - parts_manager: The parts manager containing the response parts. - - Returns: - A full snapshot of the messages so far in the run if local tool - calls were made, otherwise `None`. - """ - new_messages: list[ModelMessage] = run.ctx.state.message_history[len(messages) :] - if not any( - isinstance(request_part, ToolReturnPart) - for msg in new_messages - if isinstance(msg, ModelRequest) - for request_part in msg.parts - ): - # No tool calls were made, so we don't need a snapshot. - return None - - # Tool calls were made, so we need to create a snapshot. 
- for msg in new_messages: - if isinstance(msg, ModelRequest): - for request_part in msg.parts: - if isinstance(request_part, ToolReturnPart): # pragma: no branch - messages.append( - ToolMessage( - id='result-' + request_part.tool_call_id, - role=Role.TOOL, - content=request_part.content, - tool_call_id=request_part.tool_call_id, - ) - ) - elif isinstance(msg, ModelResponse): # pragma: no branch - self._convert_response_parts(msg.parts, messages) - - self._convert_response_parts(parts_manager.get_parts(), messages) - - return MessagesSnapshotEvent( - type=EventType.MESSAGES_SNAPSHOT, - messages=messages, - ) - - def _convert_response_parts(self, parts: list[ModelResponsePart], messages: list[Message]) -> None: - """Convert model response parts to AG-UI messages. - - Args: - parts: The list of model response parts to convert. - messages: The list of messages to append the converted parts to. - """ - response_part: ModelResponsePart - for response_part in parts: - if isinstance(response_part, TextPart): # pragma: no cover - # This is not expected, but we handle it gracefully. - messages.append( - AssistantMessage( - id=uuid.uuid4().hex, - role=Role.ASSISTANT, - content=response_part.content, - ) - ) - elif isinstance(response_part, ToolCallPart): - args: str = ( - json.dumps(response_part.args) - if isinstance(response_part.args, dict) - else response_part.args or '{}' - ) - messages.append( - AssistantMessage( - id=uuid.uuid4().hex, - role=Role.ASSISTANT, - tool_calls=[ - ToolCall( - id=response_part.tool_call_id, - type='function', - function=FunctionCall( - name=response_part.tool_name, - arguments=args, - ), - ) - ], - ), - ) - elif isinstance(response_part, ThinkingPart): # pragma: no cover - # No AG-UI equivalent for thinking parts, so we skip them. 
- pass - - async def _tool_events(self, parts: list[ModelRequestPart]) -> AsyncGenerator[BaseEvent | None, None]: + async def _tool_events( + self, + parts: list[ModelRequestPart], + prompt_message_id: str, + ) -> AsyncGenerator[BaseEvent | None, None]: """Check for tool call results that are AG-UI events. Args: encoder: The event encoder to use for encoding events. parts: The list of request parts to check for tool event returns. + prompt_message_id: The message ID of the user prompt to use for tool call results. Yields: AG-UI Server-Sent Events (SSE). """ - # TODO(steve): Determine how to handle multiple parts. Currently - # AG-UI only supports a single tool call per request, but that - # may change in the future. part: ModelRequestPart for part in parts: if not isinstance(part, ToolReturnPart): continue + yield ToolCallResultEvent( + message_id=prompt_message_id, + type=EventType.TOOL_CALL_RESULT, + role=Role.TOOL.value, + tool_call_id=part.tool_call_id, + content=part.model_response_str(), + ) + + # Now check for AG-UI events returned by the tool calls. iter: Iterable[Any] if isinstance(part.content, BaseEvent): self.logger.debug('ag-ui event: %s', part.content) yield part.content - elif isinstance(part.content, (str, bytes)): - # Avoid strings and bytes being checked as iterable. + elif isinstance(part.content, (str, bytes)): # pragma: no branch + # Avoid iterable check for strings and bytes. pass - elif isinstance(part.content, Iterable): + elif isinstance(part.content, Iterable): # pragma: no branch # Type: ignore to handle partially unknown type iter = part.content # type: ignore[assignment] for item in iter: if isinstance(item, BaseEvent): # pragma: no branch self.logger.debug('ag-ui event: %s', item) yield item - else: # pragma: no cover - # Not currently interested in other types. - pass def _convert_tools(self, run_tools: list[ToolAGUI]) -> list[Tool[AgentDepsT]]: """Convert AG-UI tools to PydanticAI tools. 
@@ -680,14 +584,14 @@ async def _agent_stream( self, tool_names: dict[str, str], run: AgentRun[AgentDepsT, Any], - parts_manager: ModelResponsePartsManager, + prompt_message_id: str, ) -> AsyncGenerator[BaseEvent | None, None]: """Run the agent streaming responses using AG-UI protocol events. Args: tool_names: A mapping of tool names to their AG-UI names. run: The agent run to process. - parts_manager: The parts manager to handle tool call parts. + prompt_message_id: The message ID of the user prompt to use for tool call results. Yields: AG-UI Server-Sent Events (SSE). @@ -700,17 +604,16 @@ async def _agent_stream( # Not interested UserPromptNode, CallToolsNode or End. continue - # Check for state updates. - snapshot: BaseEvent | None - async for snapshot in self._tool_events(node.request.parts): - yield snapshot + # Check for tool results. + async for msg in self._tool_events(node.request.parts, prompt_message_id): + yield msg stream_ctx: _RequestStreamContext = _RequestStreamContext() request_stream: AgentStream[AgentDepsT] async with node.stream(run.ctx) as request_stream: agent_event: AgentStreamEvent async for agent_event in request_stream: - async for msg in self._handle_agent_event(tool_names, stream_ctx, agent_event, parts_manager): + async for msg in self._handle_agent_event(tool_names, stream_ctx, agent_event): yield msg for part_end in stream_ctx.part_ends: @@ -721,7 +624,6 @@ async def _handle_agent_event( tool_names: dict[str, str], stream_ctx: _RequestStreamContext, agent_event: AgentStreamEvent, - parts_manager: ModelResponsePartsManager, ) -> AsyncGenerator[BaseEvent | None, None]: """Handle an agent event and yield AG-UI protocol events. @@ -730,7 +632,6 @@ async def _handle_agent_event( tool_names: A mapping of tool names to their AG-UI names. stream_ctx: The request stream context to manage state. agent_event: The agent event to process. - parts_manager: The parts manager to handle tool call parts. 
Yields: AG-UI Server-Sent Events (SSE) based on the agent event. @@ -764,17 +665,6 @@ async def _handle_agent_event( ) elif isinstance(agent_event.part, ToolCallPart): # pragma: no branch tool_name: str | None = tool_names.get(agent_event.part.tool_name) - if not tool_name: - # Local tool calls are not sent as events to the UI. - stream_ctx.local_tool_calls.add(agent_event.part.tool_call_id) - return - - parts_manager.handle_tool_call_part( - vendor_part_id=None, - tool_name=agent_event.part.tool_name, - args=agent_event.part.args, - tool_call_id=agent_event.part.tool_call_id, - ) stream_ctx.last_tool_call_id = agent_event.part.tool_call_id yield ToolCallStartEvent( type=EventType.TOOL_CALL_START, @@ -786,11 +676,25 @@ async def _handle_agent_event( type=EventType.TOOL_CALL_END, tool_call_id=agent_event.part.tool_call_id, ), - None, # Signal continuation of the stream. ] - elif isinstance(agent_event.part, ThinkingPart): # pragma: no cover - # No equivalent AG-UI event yet. - pass + if tool_name: + # AG-UI tool, signal continuation of the stream. + stream_ctx.part_ends.append(None) + + elif isinstance(agent_event.part, ThinkingPart): # pragma: no branch + yield ThinkingTextMessageStartEvent( + type=EventType.THINKING_TEXT_MESSAGE_START, + ) + if agent_event.part.content: # pragma: no branch + yield ThinkingTextMessageContentEvent( + type=EventType.THINKING_TEXT_MESSAGE_CONTENT, + delta=agent_event.part.content, + ) + stream_ctx.part_ends = [ + ThinkingTextMessageEndEvent( + type=EventType.THINKING_TEXT_MESSAGE_END, + ), + ] elif isinstance(agent_event, PartDeltaEvent): if isinstance(agent_event.delta, TextPartDelta): yield TextMessageContentEvent( @@ -799,16 +703,6 @@ async def _handle_agent_event( delta=agent_event.delta.content_delta, ) elif isinstance(agent_event.delta, ToolCallPartDelta): # pragma: no branch - if agent_event.delta.tool_call_id in stream_ctx.local_tool_calls: - # Local tool calls are not sent as events to the UI. 
- return - - parts_manager.handle_tool_call_delta( - vendor_part_id=None, - tool_name=None, - args=agent_event.delta.args_delta, - tool_call_id=agent_event.delta.tool_call_id, - ) yield ToolCallArgsEvent( type=EventType.TOOL_CALL_ARGS, tool_call_id=agent_event.delta.tool_call_id @@ -819,14 +713,24 @@ async def _handle_agent_event( else json.dumps(agent_event.delta.args_delta), ) elif isinstance(agent_event.delta, ThinkingPartDelta): # pragma: no cover - # No equivalent AG-UI event yet. - pass + yield ThinkingTextMessageContentEvent( + type=EventType.THINKING_TEXT_MESSAGE_CONTENT, + delta=agent_event.delta.content_delta or '', + ) elif isinstance(agent_event, FinalResultEvent): # No equivalent AG-UI event yet. pass -def _convert_history(messages: list[Message]) -> list[ModelMessage]: +@dataclass +class _History: + """A simple history representation for AG-UI protocol.""" + + prompt_message_id: str # The ID of the last user message. + messages: list[ModelMessage] + + +def _convert_history(messages: list[Message]) -> _History: """Convert a AG-UI history to a PydanticAI one. Args: @@ -836,10 +740,12 @@ def _convert_history(messages: list[Message]) -> list[ModelMessage]: List of PydanticAI model messages. """ msg: Message + prompt_message_id: str = '' result: list[ModelMessage] = [] tool_calls: dict[str, str] = {} for msg in messages: if isinstance(msg, UserMessage): + prompt_message_id = msg.id result.append(ModelRequest(parts=[UserPromptPart(content=msg.content)])) elif isinstance(msg, AssistantMessage): if msg.tool_calls: @@ -880,7 +786,10 @@ def _convert_history(messages: list[Message]) -> list[ModelMessage]: # TODO(steve): Should these be handled differently? 
result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) - return result + return _History( + prompt_message_id=prompt_message_id, + messages=result, + ) __all__ = [ @@ -1018,7 +927,6 @@ class _RequestStreamContext: message_id: str = '' last_tool_call_id: str | None = None part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[Union[BaseEvent, None]]()) - local_tool_calls: set[str] = field(default_factory=set) def new_message_id(self) -> str: """Generate a new message ID for the request stream. diff --git a/pydantic_ai_slim/pydantic_ai/models/test.py b/pydantic_ai_slim/pydantic_ai/models/test.py index c4bac7573..4d69841df 100644 --- a/pydantic_ai_slim/pydantic_ai/models/test.py +++ b/pydantic_ai_slim/pydantic_ai/models/test.py @@ -66,7 +66,20 @@ class TestTextPart: text: str -TestPart: TypeAlias = Union[TestTextPart, TestToolCallPart] +@dataclass +class TestThinkingPart: + """Represents a thinking part in the test model. + + This is used to simulate the model thinking about the response. + """ + + # NOTE: Avoid test discovery by pytest. + __test__ = False + + content: str = 'Thinking...' + + +TestPart: TypeAlias = Union[TestTextPart, TestToolCallPart, TestThinkingPart] """A part of the test model response.""" @@ -240,6 +253,8 @@ def _node_response( for name, args in tool_calls if name in part.call_tools ) + elif isinstance(part, TestThinkingPart): # pragma: no branch + parts.append(ThinkingPart(content=part.content)) return ModelResponse(vendor_id=node.id, parts=parts, model_name=self._model_name) def _request( @@ -372,9 +387,8 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: yield self._parts_manager.handle_tool_call_part( vendor_part_id=i, tool_name=part.tool_name, args=part.args, tool_call_id=part.tool_call_id ) - elif isinstance(part, ThinkingPart): # pragma: no cover - # NOTE: There's no way to reach this part of the code, since we don't generate ThinkingPart on TestModel. 
- assert False, "This should be unreachable — we don't generate ThinkingPart on TestModel." + elif isinstance(part, ThinkingPart): + yield self._parts_manager.handle_thinking_delta(vendor_part_id=i, content=part.content) else: assert_never(part) diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 15eae061d..b82198289 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -81,7 +81,7 @@ evals = ["pydantic-evals=={{ version }}"] # A2A a2a = ["fasta2a=={{ version }}"] # AG UI Adapter -ag-ui = ["ag-ui-protocol>=0.1.5", "starlette>=0.45.3"] +ag-ui = ["ag-ui-protocol>=0.1.7", "starlette>=0.45.3"] [dependency-groups] dev = [ diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index 93d80581e..59c0cf1e9 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -18,7 +18,7 @@ from pydantic import BaseModel from pydantic_ai import Agent -from pydantic_ai.models.test import TestModel, TestNode, TestToolCallPart +from pydantic_ai.models.test import TestModel, TestNode, TestThinkingPart, TestToolCallPart has_ag_ui: bool = False with contextlib.suppress(ImportError): @@ -424,6 +424,7 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather\\":\\"Tool "}', 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', @@ -502,6 +503,7 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"get_weather\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather_parts\\":\\"Tool "}', @@ -562,6 +564,7 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather_parts\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', @@ -585,6 +588,9 @@ def tc_parameters() -> 
list[AdapterRunTest]: ], expected_events=[ '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"send_snapshot"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"{\\"type\\":\\"STATE_SNAPSHOT\\",\\"timestamp\\":null,\\"raw_event\\":null,\\"snapshot\\":{\\"key\\":\\"value\\"}}","role":"tool"}', '{"type":"STATE_SNAPSHOT","snapshot":{"key":"value"}}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"send_snapshot\\":{\\"type\\":\\"STATE_SNAPSHOT\\",\\"timestam"}', @@ -609,6 +615,9 @@ def tc_parameters() -> list[AdapterRunTest]: ], expected_events=[ '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"send_custom"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"[{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"raw_event\\":null,\\"name\\":\\"custom_event1\\",\\"value\\":{\\"key1\\":\\"value1\\"}},{\\"type\\":\\"CUSTOM\\",\\"timestamp\\":null,\\"raw_event\\":null,\\"name\\":\\"custom_event2\\",\\"value\\":{\\"key2\\":\\"value2\\"}}]","role":"tool"}', '{"type":"CUSTOM","name":"custom_event1","value":{"key1":"value1"}}', '{"type":"CUSTOM","name":"custom_event2","value":{"key2":"value2"}}', 
'{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', @@ -634,6 +643,10 @@ def tc_parameters() -> list[AdapterRunTest]: ], expected_events=[ '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"current_time"}', + '{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000003","delta":"{}"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"21T12:08:45.485981+00:00","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000004","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"{\\"current_time\\":\\"21T1"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000004","delta":"2:08:45.485981+00:00\\"}"}', @@ -648,7 +661,10 @@ def tc_parameters() -> list[AdapterRunTest]: Run( nodes=[ TestNode( - parts=[TestToolCallPart(call_tools=['current_time'])], + parts=[ + TestToolCallPart(call_tools=['current_time']), + TestThinkingPart(content='Thinking about the weather'), + ], ), TestNode( parts=[TestToolCallPart(call_tools=['get_weather'])], @@ -716,21 +732,24 @@ def tc_parameters() -> list[AdapterRunTest]: ], expected_events=[ '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"current_time"}', + '{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000003","delta":"{}"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + 
'{"type":"THINKING_TEXT_MESSAGE_START"}', + '{"type":"THINKING_TEXT_MESSAGE_CONTENT","delta":"Thinking about the weather"}', + '{"type":"THINKING_TEXT_MESSAGE_END"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"21T12:08:45.485981+00:00","role":"tool"}', '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000004","toolCallName":"get_weather"}', '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', - '{"type":"MESSAGES_SNAPSHOT","messages":[{"id":"msg_1","role":"user","content":"Please tell me the time and then call get_weather for Paris"},' - + '{"id":"00000000000000000000000000000005","role":"assistant","toolCalls":[{"id":"pyd_ai_00000000000000000000000000000003","type":"function",' - + '"function":{"name":"current_time","arguments":"{}"}}]},{"id":"result-pyd_ai_00000000000000000000000000000003","role":"tool","content":' - + '"21T12:08:45.485981+00:00","toolCallId":"pyd_ai_00000000000000000000000000000003"},{"id":"00000000000000000000000000000006","role":"assistant",' - + '"toolCalls":[{"id":"pyd_ai_00000000000000000000000000000004","type":"function","function":{"name":"get_weather","arguments":"{\\"location\\": \\"a\\"}"}}]}]}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', - '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', - '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000008","role":"assistant"}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"{\\"current_time\\":\\"Tool "}', - '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\",\\"get_weather\\":\\"Tool "}', - 
'{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000008","delta":"result\\"}"}', - '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000008"}', - '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000007"}', + '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', + '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000004","content":"Tool result","role":"tool"}', + '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"current_time\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather\\":\\"Tool "}', + '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\"}"}', + '{"type":"TEXT_MESSAGE_END","messageId":"00000000-0000-0000-0000-000000000006"}', + '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', ], ), AdapterRunTest( @@ -768,7 +787,9 @@ async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: deps: StateDeps[StateInt] = StateDeps[StateInt](state_type=StateInt) # type: ignore[reportUnknownArgumentType] for run in tc.runs: if run.nodes is not None: - assert isinstance(adapter.agent.model, TestModel), 'Agent model is not TestModel' + assert isinstance(adapter.agent.model, TestModel), ( + 'Agent model is not TestModel' + ) adapter.agent.model.custom_response_nodes = run.nodes run_input: 
RunAgentInput = run.run_input( diff --git a/uv.lock b/uv.lock index 59ae6a0ef..628954d84 100644 --- a/uv.lock +++ b/uv.lock @@ -30,14 +30,14 @@ members = [ [[package]] name = "ag-ui-protocol" -version = "0.1.5" +version = "0.1.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/90/26/1d5530e3fa84da37a8b58300f7a4352f763be43b2c393b0fad4d119f8653/ag_ui_protocol-0.1.5.tar.gz", hash = "sha256:48757afe82a4ee88eb078f31ef9672e09df624573d82045054f5a5b5dc021832", size = 4175, upload-time = "2025-05-20T11:37:06.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/c0/f2d24d92be950dd6b12f66dbde5fb839dd01e8af34d3a0305b2309a68907/ag_ui_protocol-0.1.7.tar.gz", hash = "sha256:0e93fd9f7c74d52afbd824d6e9738bd3422e859503905ba7582481cbc3c67ab2", size = 4446, upload-time = "2025-06-26T09:37:08.895Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/39/c488044d3195f82e35102c190f92b605a8af1ad63f26b9166e9be460e1c1/ag_ui_protocol-0.1.5-py3-none-any.whl", hash = "sha256:d51a0ad9635059b629b4cb57a9a2ec425b4cc8220e91d50a8f9d559571737ae9", size = 5819, upload-time = "2025-05-20T11:37:05.521Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c3/c216f5ad4d78f4030a63fec23f00a71f984f10275ccfc7d3902c3c34b7cd/ag_ui_protocol-0.1.7-py3-none-any.whl", hash = "sha256:8c821662ca6e9852569022f449b9f7aeb3f16aa75390fa8c28ceae2cce642baa", size = 6165, upload-time = "2025-06-26T09:37:07.755Z" }, ] [[package]] @@ -3155,7 +3155,7 @@ dev = [ [package.metadata] requires-dist = [ - { name = "ag-ui-protocol", marker = "extra == 'ag-ui'", specifier = ">=0.1.5" }, + { name = "ag-ui-protocol", marker = "extra == 'ag-ui'", specifier = ">=0.1.7" }, { name = "anthropic", marker = "extra == 'anthropic'", specifier = ">=0.52.0" }, { name = "argcomplete", marker = "extra == 'cli'", specifier = ">=3.5.0" }, { name = "boto3", marker = "extra == 'bedrock'", specifier = ">=1.37.24" }, From 
82c5feecf5d08ac2cd040d470728f8a7aaf4ba59 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Fri, 27 Jun 2025 18:12:05 +0100 Subject: [PATCH 08/13] feat(ag-ui): expose agent path in adapter Allow the agent path to be specified when using to_ag_ui. --- pydantic_ai_slim/pydantic_ai/agent.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index da1818fff..e4ede4bae 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -1858,8 +1858,9 @@ async def run_mcp_servers( def to_ag_ui( self, - *, # Adapter parameters. + path: str = '/', + *, tool_prefix: str = '', logger: logging.Logger | None = None, # Agent.iter parameters @@ -1880,6 +1881,7 @@ def to_ag_ui( Args: logger: Optional logger to use for the adapter. + path: Path to expose the agent at, defaults to the root path. tool_prefix: Optional prefix to add to tool names in the AG-UI. output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no @@ -1905,6 +1907,7 @@ def to_ag_ui( return agent_to_ag_ui( agent=self, + path=path, tool_prefix=tool_prefix, logger=logger, # Agent.iter parameters From 8c4c49c5a87021be8c0d9597c5b2f7fe79f9e7ce Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Fri, 27 Jun 2025 23:38:06 +0100 Subject: [PATCH 09/13] chore(ag-ui): reduce log level for request done Reduce the log level for the request done message in the AG-UI adapter to debug level. 
--- pydantic_ai_slim/pydantic_ai/ag_ui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index 422033a77..8b9eb1559 100644 --- a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -488,7 +488,7 @@ async def run( ), ) - self.logger.info('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) + self.logger.debug('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) async def _tool_events( self, From 0593f4df2cf58c417fe564933647c4c484372e3c Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Fri, 27 Jun 2025 23:53:21 +0100 Subject: [PATCH 10/13] chore(ag-ui): add missing __future__ imports Add missing `from __future__ import annotations` imports to ensure that only relevant type annotations are processed at runtime. Fix incorrect import of `dataclass` in `args.py`. --- .../pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py | 2 ++ examples/pydantic_ai_ag_ui_examples/cli/__init__.py | 2 ++ examples/pydantic_ai_ag_ui_examples/cli/args.py | 5 +++-- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py index 5df1308fc..001455de7 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py @@ -1,5 +1,7 @@ """Agentic Generative UI feature.""" +from __future__ import annotations + from enum import StrEnum from typing import Annotated, Any, Literal diff --git a/examples/pydantic_ai_ag_ui_examples/cli/__init__.py b/examples/pydantic_ai_ag_ui_examples/cli/__init__.py index dc71f4f1e..e4a3ba3cb 100644 --- a/examples/pydantic_ai_ag_ui_examples/cli/__init__.py +++ b/examples/pydantic_ai_ag_ui_examples/cli/__init__.py @@ -1,5 +1,7 @@ """Command line interface for the PydanticAI AG-UI 
servers.""" +from __future__ import annotations + from .args import Args, parse_args __all__ = [ diff --git a/examples/pydantic_ai_ag_ui_examples/cli/args.py b/examples/pydantic_ai_ag_ui_examples/cli/args.py index 9962fa7bf..ceb3476fc 100644 --- a/examples/pydantic_ai_ag_ui_examples/cli/args.py +++ b/examples/pydantic_ai_ag_ui_examples/cli/args.py @@ -1,12 +1,13 @@ """CLI argument parser for the PydanticAI AG-UI servers.""" +from __future__ import annotations + import argparse +from dataclasses import dataclass from typing import Any from uvicorn.config import LOGGING_CONFIG -from pydantic_ai.models import dataclass - @dataclass class Args: From acb590288a044216936bcd8f88377d94208ce904 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Mon, 30 Jun 2025 22:24:10 +0100 Subject: [PATCH 11/13] chore(ag-ui): use toolsets Switch from additional tools to the new toolset system. --- examples/pydantic_ai_ag_ui_examples/basic.py | 2 +- pydantic_ai_slim/pydantic_ai/ag_ui.py | 186 ++++++++----------- pydantic_ai_slim/pydantic_ai/agent.py | 6 +- tests/test_ag_ui.py | 7 +- 4 files changed, 80 insertions(+), 121 deletions(-) diff --git a/examples/pydantic_ai_ag_ui_examples/basic.py b/examples/pydantic_ai_ag_ui_examples/basic.py index 0b42276ff..48828535c 100644 --- a/examples/pydantic_ai_ag_ui_examples/basic.py +++ b/examples/pydantic_ai_ag_ui_examples/basic.py @@ -1,4 +1,4 @@ -"""Basic example of using pydantic_ai.ag_ui with FastAPI.""" +"""Basic example of using Agent.to_ag_ui with FastAPI.""" from __future__ import annotations diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index 8b9eb1559..c02846f66 100644 --- a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -18,11 +18,8 @@ Callable, Final, Generic, - NoReturn, Protocol, TypeVar, - Union, - cast, runtime_checkable, ) @@ -75,6 +72,11 @@ from pydantic import BaseModel, ValidationError +from pydantic_ai.output import DeferredToolCalls +from 
pydantic_ai.tools import ToolDefinition +from pydantic_ai.toolsets import AbstractToolset +from pydantic_ai.toolsets.deferred import DeferredToolset + from . import Agent, models from ._agent_graph import ModelRequestNode from .agent import RunOutputDataT @@ -100,7 +102,7 @@ from .output import OutputDataT, OutputSpec from .result import AgentStream from .settings import ModelSettings -from .tools import AgentDepsT, Tool +from .tools import AgentDepsT from .usage import Usage, UsageLimits if TYPE_CHECKING: @@ -139,7 +141,7 @@ def __init__( usage_limits: UsageLimits | None = None, usage: Usage | None = None, infer_name: bool = True, - additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, # Starlette debug: bool = False, routes: Sequence[BaseRoute] | None = None, @@ -164,7 +166,7 @@ def __init__( usage_limits: Optional limits on model request count or token usage. usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. infer_name: Whether to try to infer the agent name from the call frame if it's not set. - additional_tools: Additional tools to use for this run. + toolsets: Optional list of toolsets to use for this agent, defaults to the agent's toolset. debug: Boolean indicating if debug tracebacks should be returned on errors. routes: A list of routes to serve incoming HTTP and WebSocket requests. @@ -218,7 +220,7 @@ async def endpoint(request: Request) -> Response | StreamingResponse: usage_limits=usage_limits, usage=usage, infer_name=infer_name, - additional_tools=additional_tools, + toolsets=toolsets, ), media_type=SSE_CONTENT_TYPE, ) @@ -241,7 +243,7 @@ def agent_to_ag_ui( usage_limits: UsageLimits | None = None, usage: Usage | None = None, infer_name: bool = True, - additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, # Starlette parameters. 
debug: bool = False, routes: Sequence[BaseRoute] | None = None, @@ -268,7 +270,7 @@ def agent_to_ag_ui( usage_limits: Optional limits on model request count or token usage. usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. infer_name: Whether to try to infer the agent name from the call frame if it's not set. - additional_tools: Additional tools to use for this run. + toolsets: Optional list of toolsets to use for this agent, defaults to the agent's toolset. debug: Boolean indicating if debug tracebacks should be returned on errors. routes: A list of routes to serve incoming HTTP and WebSocket requests. @@ -308,7 +310,7 @@ def agent_to_ag_ui( usage_limits=usage_limits, usage=usage, infer_name=infer_name, - additional_tools=additional_tools, + toolsets=toolsets, # Starlette debug=debug, routes=routes, @@ -402,7 +404,7 @@ async def run( usage_limits: UsageLimits | None = None, usage: Usage | None = None, infer_name: bool = True, - additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, ) -> AsyncGenerator[str, None]: """Run the agent with streaming response using AG-UI protocol events. @@ -420,7 +422,7 @@ async def run( usage_limits: Optional limits on model request count or token usage. usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. infer_name: Whether to try to infer the agent name from the call frame if it's not set. - additional_tools: Additional tools to use for this run. + toolsets: Optional list of toolsets to use for this agent, defaults to the agent's toolset. Yields: Streaming SSE-formatted event chunks. 
@@ -429,8 +431,9 @@ async def run( tool_names: dict[str, str] = {self.tool_prefix + tool.name: tool.name for tool in run_input.tools} encoder: EventEncoder = EventEncoder(accept=accept) - run_tools: list[Tool[AgentDepsT]] = list(additional_tools) if additional_tools else [] - run_tools.extend(self._convert_tools(run_input.tools)) + run_toolset: list[AbstractToolset[AgentDepsT]] = list(toolsets) if toolsets else [] + if run_input.tools: + run_toolset.append(_AGUIToolset[AgentDepsT](run_input.tools)) try: yield encoder.encode( @@ -449,10 +452,18 @@ async def run( history: _History = _convert_history(run_input.messages) + output_type_: OutputSpec[OutputDataT | DeferredToolCalls | RunOutputDataT] + if output_type is None: + # Use the agent's output type if not specified. + output_type_ = [self.agent.output_type, DeferredToolCalls] + else: + output_type_ = [output_type, DeferredToolCalls] + run: AgentRun[AgentDepsT, Any] async with self.agent.iter( user_prompt=None, - output_type=output_type, + # TODO(steve): Could or should it just accept: [output_type, DeferredToolCalls] + output_type=output_type_, message_history=history.messages, model=model, deps=deps, @@ -460,9 +471,9 @@ async def run( usage_limits=usage_limits, usage=usage, infer_name=infer_name, - additional_tools=run_tools, + toolsets=run_toolset, ) as run: - async for event in self._agent_stream(tool_names, run, history.prompt_message_id): + async for event in self._agent_stream(tool_names, run, history): if event is None: # Tool call signals early return, so we stop processing. self.logger.debug('tool call early return') @@ -493,14 +504,14 @@ async def run( async def _tool_events( self, parts: list[ModelRequestPart], - prompt_message_id: str, + history: _History, ) -> AsyncGenerator[BaseEvent | None, None]: """Check for tool call results that are AG-UI events. Args: encoder: The event encoder to use for encoding events. parts: The list of request parts to check for tool event returns. 
- prompt_message_id: The message ID of the user prompt to use for tool call results. + history: The history of messages and tool calls to use for the run. Yields: AG-UI Server-Sent Events (SSE). @@ -510,8 +521,12 @@ async def _tool_events( if not isinstance(part, ToolReturnPart): continue + if part.tool_call_id in history.tool_calls: + # Tool call was passed in the history, so we skip it. + continue + yield ToolCallResultEvent( - message_id=prompt_message_id, + message_id=history.prompt_message_id, type=EventType.TOOL_CALL_RESULT, role=Role.TOOL.value, tool_call_id=part.tool_call_id, @@ -534,64 +549,18 @@ async def _tool_events( self.logger.debug('ag-ui event: %s', item) yield item - def _convert_tools(self, run_tools: list[ToolAGUI]) -> list[Tool[AgentDepsT]]: - """Convert AG-UI tools to PydanticAI tools. - - Creates `Tool` objects from AG-UI tool definitions. These tools don't - actually execute anything, that is done by AG-UI client - they just - provide the necessary tool definitions to PydanticAI agent. - - Args: - run_tools: List of AG-UI tool definitions to convert. - - Returns: - List of PydanticAI Tool objects that call the AG-UI tools. - """ - return [self._tool_call(tool) for tool in run_tools] - - def _tool_call(self, tool: ToolAGUI) -> Tool[AgentDepsT]: - """Create a PydanticAI tool from an AG-UI tool definition. - - Args: - tool: The AG-UI tool definition to convert. - - Returns: - A PydanticAI `Tool` object that calls the AG-UI tool. - """ - - def _tool_stub(*args: Any, **kwargs: Any) -> NoReturn: - """Stub function which is never called. - - Returns: - Never returns as it always raises an exception. - - Raises: - _UnexpectedToolCallError: Always raised since this should never be called. 
- """ - raise _UnexpectedToolCallError(tool_name=tool.name) # pragma: no cover - - return cast( - 'Tool[AgentDepsT]', - Tool.from_schema( - function=_tool_stub, - name=tool.name, - description=tool.description, - json_schema=tool.parameters, - ), - ) - async def _agent_stream( self, tool_names: dict[str, str], run: AgentRun[AgentDepsT, Any], - prompt_message_id: str, + history: _History, ) -> AsyncGenerator[BaseEvent | None, None]: """Run the agent streaming responses using AG-UI protocol events. Args: tool_names: A mapping of tool names to their AG-UI names. run: The agent run to process. - prompt_message_id: The message ID of the user prompt to use for tool call results. + history: The history of messages and tool calls to use for the run. Yields: AG-UI Server-Sent Events (SSE). @@ -605,7 +574,7 @@ async def _agent_stream( continue # Check for tool results. - async for msg in self._tool_events(node.request.parts, prompt_message_id): + async for msg in self._tool_events(node.request.parts, history): yield msg stream_ctx: _RequestStreamContext = _RequestStreamContext() @@ -616,8 +585,9 @@ async def _agent_stream( async for msg in self._handle_agent_event(tool_names, stream_ctx, agent_event): yield msg - for part_end in stream_ctx.part_ends: - yield part_end + if stream_ctx.part_end: + yield stream_ctx.part_end + stream_ctx.part_end = None async def _handle_agent_event( self, @@ -638,11 +608,10 @@ async def _handle_agent_event( """ self.logger.debug('agent_event: %s', agent_event) if isinstance(agent_event, PartStartEvent): - # If we have a previous part end it. - part_end: BaseEvent | None - for part_end in stream_ctx.part_ends: - yield part_end - stream_ctx.part_ends.clear() + if stream_ctx.part_end: + # End the previous part. 
+ yield stream_ctx.part_end + stream_ctx.part_end = None if isinstance(agent_event.part, TextPart): message_id: str = stream_ctx.new_message_id() @@ -651,12 +620,10 @@ async def _handle_agent_event( message_id=message_id, role=Role.ASSISTANT.value, ) - stream_ctx.part_ends = [ - TextMessageEndEvent( - type=EventType.TEXT_MESSAGE_END, - message_id=message_id, - ), - ] + stream_ctx.part_end = TextMessageEndEvent( + type=EventType.TEXT_MESSAGE_END, + message_id=message_id, + ) if agent_event.part.content: yield TextMessageContentEvent( # pragma: no cover type=EventType.TEXT_MESSAGE_CONTENT, @@ -671,15 +638,10 @@ async def _handle_agent_event( tool_call_id=agent_event.part.tool_call_id, tool_call_name=tool_name or agent_event.part.tool_name, ) - stream_ctx.part_ends = [ - ToolCallEndEvent( - type=EventType.TOOL_CALL_END, - tool_call_id=agent_event.part.tool_call_id, - ), - ] - if tool_name: - # AG-UI tool, signal continuation of the stream. - stream_ctx.part_ends.append(None) + stream_ctx.part_end = ToolCallEndEvent( + type=EventType.TOOL_CALL_END, + tool_call_id=agent_event.part.tool_call_id, + ) elif isinstance(agent_event.part, ThinkingPart): # pragma: no branch yield ThinkingTextMessageStartEvent( @@ -690,11 +652,10 @@ async def _handle_agent_event( type=EventType.THINKING_TEXT_MESSAGE_CONTENT, delta=agent_event.part.content, ) - stream_ctx.part_ends = [ - ThinkingTextMessageEndEvent( - type=EventType.THINKING_TEXT_MESSAGE_END, - ), - ] + stream_ctx.part_end = ThinkingTextMessageEndEvent( + type=EventType.THINKING_TEXT_MESSAGE_END, + ) + elif isinstance(agent_event, PartDeltaEvent): if isinstance(agent_event.delta, TextPartDelta): yield TextMessageContentEvent( @@ -728,6 +689,7 @@ class _History: prompt_message_id: str # The ID of the last user message. 
messages: list[ModelMessage] + tool_calls: set[str] = field(default_factory=set) def _convert_history(messages: list[Message]) -> _History: @@ -742,7 +704,7 @@ def _convert_history(messages: list[Message]) -> _History: msg: Message prompt_message_id: str = '' result: list[ModelMessage] = [] - tool_calls: dict[str, str] = {} + tool_calls: dict[str, str] = {} # Tool call ID to tool name mapping. for msg in messages: if isinstance(msg, UserMessage): prompt_message_id = msg.id @@ -789,6 +751,7 @@ def _convert_history(messages: list[Message]) -> _History: return _History( prompt_message_id=prompt_message_id, messages=result, + tool_calls=set(tool_calls.keys()), ) @@ -826,21 +789,6 @@ def __str__(self) -> str: return self.message -class _UnexpectedToolCallError(_RunError): - """Exception raised when an unexpected tool call is encountered.""" - - def __init__(self, *, tool_name: str) -> None: - """Initialize the unexpected tool call error. - - Args: - tool_name: The name of the tool that was unexpectedly called. - """ - super().__init__( - message=f'unexpected tool call name={tool_name}', # pragma: no cover - code='unexpected_tool_call', - ) - - @dataclass class _NoMessagesError(_RunError): """Exception raised when no messages are found in the input.""" @@ -926,7 +874,7 @@ class _RequestStreamContext: message_id: str = '' last_tool_call_id: str | None = None - part_ends: list[BaseEvent | None] = field(default_factory=lambda: list[Union[BaseEvent, None]]()) + part_end: BaseEvent | None = None def new_message_id(self) -> str: """Generate a new message ID for the request stream. 
@@ -938,3 +886,15 @@ def new_message_id(self) -> str: """ self.message_id = str(uuid.uuid4()) return self.message_id + + +class _AGUIToolset(DeferredToolset[AgentDepsT]): + """A toolset that is used for AG-UI.""" + + def __init__(self, tools: list[ToolAGUI]) -> None: + super().__init__( + [ + ToolDefinition(name=tool.name, description=tool.description, parameters_json_schema=tool.parameters) + for tool in tools + ] + ) diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index e4ede4bae..f3e7cea9b 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -1871,7 +1871,7 @@ def to_ag_ui( usage_limits: UsageLimits | None = None, usage: Usage | None = None, infer_name: bool = True, - additional_tools: Sequence[Tool[AgentDepsT]] | None = None, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, ) -> FastAGUI[AgentDepsT, OutputDataT]: """Convert the agent to an Adapter instance. @@ -1892,7 +1892,7 @@ def to_ag_ui( usage_limits: Optional limits on model request count or token usage. usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. infer_name: Whether to try to infer the agent name from the call frame if it's not set. - additional_tools: Additional tools to use for this run. + toolsets: Optional list of toolsets to use for this agent, defaults to the agent's toolset. Returns: An adapter that converts between AG-UI protocol and PydanticAI. 
@@ -1918,7 +1918,7 @@ def to_ag_ui( usage_limits=usage_limits, usage=usage, infer_name=infer_name, - additional_tools=additional_tools, + toolsets=toolsets, ) def to_a2a( diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index 59c0cf1e9..f696c33b2 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -424,7 +424,6 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', @@ -501,9 +500,11 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000003","toolCallName":"get_weather"}', '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', + '{"type":"TOOL_CALL_START","toolCallId":"pyd_ai_00000000000000000000000000000004","toolCallName":"get_weather_parts"}', + '{"type":"TOOL_CALL_ARGS","toolCallId":"pyd_ai_00000000000000000000000000000004","delta":"{\\"location\\":\\"a\\"}"}', + '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', 
'{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', - '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"get_weather\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather_parts\\":\\"Tool "}', @@ -564,7 +565,6 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000003"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', '{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000004"}', - '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000003","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000005","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"{\\"get_weather_parts\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000005","delta":"result\\"}"}', @@ -743,7 +743,6 @@ def tc_parameters() -> list[AdapterRunTest]: '{"type":"TOOL_CALL_END","toolCallId":"pyd_ai_00000000000000000000000000000004"}', '{"type":"RUN_FINISHED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000002"}', 
'{"type":"RUN_STARTED","threadId":"thread_00000000-0000-0000-0000-000000000001","runId":"run_00000000-0000-0000-0000-000000000005"}', - '{"type":"TOOL_CALL_RESULT","messageId":"msg_1","toolCallId":"pyd_ai_00000000000000000000000000000004","content":"Tool result","role":"tool"}', '{"type":"TEXT_MESSAGE_START","messageId":"00000000-0000-0000-0000-000000000006","role":"assistant"}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"{\\"current_time\\":\\"Tool "}', '{"type":"TEXT_MESSAGE_CONTENT","messageId":"00000000-0000-0000-0000-000000000006","delta":"result\\",\\"get_weather\\":\\"Tool "}', From 07d5221a20846425cef81aa5ec17a21a01ea5f50 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Mon, 7 Jul 2025 15:03:29 +0100 Subject: [PATCH 12/13] chore(ag-ui): address PR feedback * Update examples to use `to_ag_ui` * Use relative imports where appropriate * Remove `path` and `logger` parameters from `to_ag_ui` and dependencies * Clean up import failure messages * Improve FastAGUI doc comment * Use `PrefixedToolset` eliminating manual tool prefixing * Use CallToolsNode to handle tool calls Also make StateDeps more strict, to avoid runtime failures. 
--- docs/ag-ui.md | 8 +- .../api/__init__.py | 24 +- .../pydantic_ai_ag_ui_examples/api/agent.py | 58 ++-- .../api/agentic_chat.py | 37 +- .../api/agentic_generative_ui.py | 39 +-- .../api/human_in_the_loop.py | 39 +-- .../api/predictive_state_updates.py | 40 +-- .../api/shared_state.py | 61 ++-- .../api/tool_based_generative_ui.py | 36 +- .../pydantic_ai_ag_ui_examples/dojo_server.py | 32 +- pydantic_ai_slim/pydantic_ai/ag_ui.py | 327 ++++++++---------- pydantic_ai_slim/pydantic_ai/agent.py | 74 +++- tests/test_ag_ui.py | 8 +- 13 files changed, 323 insertions(+), 460 deletions(-) diff --git a/docs/ag-ui.md b/docs/ag-ui.md index f322559b3..6d0df700b 100644 --- a/docs/ag-ui.md +++ b/docs/ag-ui.md @@ -124,7 +124,7 @@ from pydantic_ai.ag_ui import StateDeps class DocumentState(BaseModel): """State for the document being written.""" - document: str + document: str = '' agent = Agent( @@ -132,7 +132,7 @@ agent = Agent( instructions='Be fun!', deps_type=StateDeps[DocumentState], ) -app = agent.to_ag_ui(deps=StateDeps(state_type=DocumentState)) +app = agent.to_ag_ui(deps=StateDeps(DocumentState())) ``` Since `app` is an ASGI application, it can be used with any ASGI server e.g. 
@@ -187,7 +187,7 @@ if TYPE_CHECKING: class DocumentState(BaseModel): """State for the document being written.""" - document: str + document: str = '' agent = Agent( @@ -195,7 +195,7 @@ agent = Agent( instructions='Be fun!', deps_type=StateDeps[DocumentState], ) -app = agent.to_ag_ui(deps=StateDeps(state_type=DocumentState)) +app = agent.to_ag_ui(deps=StateDeps(DocumentState())) @agent.tool diff --git a/examples/pydantic_ai_ag_ui_examples/api/__init__.py b/examples/pydantic_ai_ag_ui_examples/api/__init__.py index c595aac5a..d17cab009 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/__init__.py +++ b/examples/pydantic_ai_ag_ui_examples/api/__init__.py @@ -2,18 +2,18 @@ from __future__ import annotations -from .agentic_chat import router as agentic_chat_router -from .agentic_generative_ui import router as agentic_generative_ui_router -from .human_in_the_loop import router as human_in_the_loop_router -from .predictive_state_updates import router as predictive_state_updates_router -from .shared_state import router as shared_state_router -from .tool_based_generative_ui import router as tool_based_generative_ui_router +from .agentic_chat import app as agentic_chat_app +from .agentic_generative_ui import app as agentic_generative_ui_app +from .human_in_the_loop import app as human_in_the_loop_app +from .predictive_state_updates import app as predictive_state_updates_app +from .shared_state import app as shared_state_app +from .tool_based_generative_ui import app as tool_based_generative_ui_app __all__: list[str] = [ - 'agentic_chat_router', - 'agentic_generative_ui_router', - 'human_in_the_loop_router', - 'predictive_state_updates_router', - 'shared_state_router', - 'tool_based_generative_ui_router', + 'agentic_chat_app', + 'agentic_generative_ui_app', + 'human_in_the_loop_app', + 'predictive_state_updates_app', + 'shared_state_app', + 'tool_based_generative_ui_app', ] diff --git a/examples/pydantic_ai_ag_ui_examples/api/agent.py 
b/examples/pydantic_ai_ag_ui_examples/api/agent.py index b69c5e274..ddc5a29d2 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agent.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agent.py @@ -2,42 +2,34 @@ from __future__ import annotations -from dataclasses import dataclass -from types import NoneType -from typing import Generic - from dotenv import load_dotenv from pydantic_ai import Agent -from pydantic_ai.ag_ui import Adapter -from pydantic_ai.result import OutputDataT +from pydantic_ai.ag_ui import FastAGUI from pydantic_ai.tools import AgentDepsT -@dataclass(init=False, repr=False) -class AGUIAgent(Generic[AgentDepsT, OutputDataT]): - """Pydantic AI agent with AG-UI adapter.""" - - agent: Agent[AgentDepsT, str] - adapter: Adapter[AgentDepsT, str] - instructions: str | None - - def __init__( - self, deps_type: type[AgentDepsT] = NoneType, instructions: str | None = None - ) -> None: - """Initialize the API agent with AG-UI adapter. - - Args: - deps_type: Type annotation for the agent dependencies. - instructions: Optional instructions for the agent. - """ - # Ensure environment variables are loaded. - load_dotenv() - - self.agent = Agent( - 'openai:gpt-4o-mini', - output_type=str, - instructions=instructions, - deps_type=deps_type, - ) - self.adapter = Adapter(agent=self.agent) +def agent( + model: str = 'openai:gpt-4o-mini', + deps: AgentDepsT = None, + instructions: str | None = None, +) -> FastAGUI[AgentDepsT, str]: + """Create a Pydantic AI agent with AG-UI adapter. + + Args: + model: The model to use for the agent. + deps: Optional dependencies for the agent. + instructions: Optional instructions for the agent. + + Returns: + An instance of FastAGUI with the agent and adapter. + """ + # Ensure environment variables are loaded. 
+ load_dotenv() + + return Agent( + model, + output_type=str, + instructions=instructions, + deps_type=type(deps), + ).to_ag_ui(deps=deps) diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py index d490c0451..a369d1ef7 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_chat.py @@ -3,26 +3,16 @@ from __future__ import annotations from datetime import datetime -from typing import TYPE_CHECKING, Annotated from zoneinfo import ZoneInfo -from ag_ui.core import RunAgentInput -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from pydantic_ai.ag_ui import FastAGUI -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE +from .agent import agent -from .agent import AGUIAgent +app: FastAGUI = agent() -if TYPE_CHECKING: # pragma: no cover - from ag_ui.core import RunAgentInput - -router: APIRouter = APIRouter(prefix='/agentic_chat') -agui: AGUIAgent = AGUIAgent() - - -@agui.agent.tool_plain +@app.adapter.agent.tool_plain async def current_time(timezone: str = 'UTC') -> str: """Get the current time in ISO format. @@ -34,22 +24,3 @@ async def current_time(timezone: str = 'UTC') -> str: """ tz: ZoneInfo = ZoneInfo(timezone) return datetime.now(tz=tz).isoformat() - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. 
- """ - return StreamingResponse( - agui.adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py index 001455de7..cdc131095 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py +++ b/examples/pydantic_ai_ag_ui_examples/api/agentic_generative_ui.py @@ -3,19 +3,17 @@ from __future__ import annotations from enum import StrEnum -from typing import Annotated, Any, Literal +from typing import Any, Literal -from ag_ui.core import EventType, RunAgentInput, StateDeltaEvent, StateSnapshotEvent -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from ag_ui.core import EventType, StateDeltaEvent, StateSnapshotEvent from pydantic import BaseModel, Field -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE +from pydantic_ai.ag_ui import FastAGUI -from .agent import AGUIAgent +from .agent import agent -router: APIRouter = APIRouter(prefix='/agentic_generative_ui') -instructions: str = """When planning use tools only, without any other messages. +app: FastAGUI = agent( + instructions="""When planning use tools only, without any other messages. IMPORTANT: - Use the `create_plan` tool to set the initial state of the steps - Use the `update_plan_step` tool to update the status of each step @@ -26,7 +24,7 @@ Only one plan can be active at a time, so do not call the `create_plan` tool again until all the steps in current plan are completed. """ -agui: AGUIAgent = AGUIAgent(instructions=instructions) +) class StepStatus(StrEnum): @@ -73,7 +71,7 @@ class JSONPatchOp(BaseModel): ) -@agui.agent.tool_plain +@app.adapter.agent.tool_plain def create_plan(steps: list[str]) -> StateSnapshotEvent: """Create a plan with multiple steps. 
@@ -92,7 +90,7 @@ def create_plan(steps: list[str]) -> StateSnapshotEvent: ) -@agui.agent.tool_plain +@app.adapter.agent.tool_plain def update_plan_step( index: int, description: str | None = None, status: StepStatus | None = None ) -> StateDeltaEvent: @@ -121,22 +119,3 @@ def update_plan_step( type=EventType.STATE_DELTA, delta=changes, ) - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. - """ - return StreamingResponse( - agui.adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py index 51d72bdd7..e27b85c30 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py +++ b/examples/pydantic_ai_ag_ui_examples/api/human_in_the_loop.py @@ -5,45 +5,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Annotated +from pydantic_ai.ag_ui import FastAGUI -from ag_ui.core import RunAgentInput -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from .agent import agent -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE - -from .agent import AGUIAgent - -if TYPE_CHECKING: # pragma: no cover - from ag_ui.core import RunAgentInput - - -instructions: str = """When planning tasks use tools only, without any other messages. +app: FastAGUI = agent( + instructions="""When planning tasks use tools only, without any other messages. 
IMPORTANT: - Use the `generate_task_steps` tool to display the suggested steps to the user - Never repeat the plan, or send a message detailing steps - If accepted, confirm the creation of the plan and the number of selected (enabled) steps only - If not accepted, ask the user for more information, DO NOT use the `generate_task_steps` tool again """ -router: APIRouter = APIRouter(prefix='/human_in_the_loop') -agui: AGUIAgent = AGUIAgent(instructions=instructions) - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. - """ - return StreamingResponse( - agui.adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) +) diff --git a/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py index 9cfb7ce05..c82d3647c 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py +++ b/examples/pydantic_ai_ag_ui_examples/api/predictive_state_updates.py @@ -3,16 +3,14 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Annotated +from typing import TYPE_CHECKING -from ag_ui.core import CustomEvent, EventType, RunAgentInput -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from ag_ui.core import CustomEvent, EventType from pydantic import BaseModel -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps +from pydantic_ai.ag_ui import FastAGUI, StateDeps -from .agent import AGUIAgent +from .agent import agent if TYPE_CHECKING: # pragma: no cover from pydantic_ai import RunContext @@ -24,18 +22,15 @@ class DocumentState(BaseModel): """State for the document being 
written.""" - document: str + document: str = '' -router: APIRouter = APIRouter(prefix='/predictive_state_updates') -agui: AGUIAgent[StateDeps[DocumentState]] = AGUIAgent( - deps_type=StateDeps[DocumentState] -) +app: FastAGUI = agent(deps=StateDeps(DocumentState())) # Tools which return AG-UI events will be sent to the client as part of the # event stream, single events and iterables of events are supported. -@agui.agent.tool_plain +@app.adapter.agent.tool_plain def document_predict_state() -> list[CustomEvent]: """Enable document state prediction. @@ -58,7 +53,7 @@ def document_predict_state() -> list[CustomEvent]: ] -@agui.agent.instructions() +@app.adapter.agent.instructions() def story_instructions(ctx: RunContext[StateDeps[DocumentState]]) -> str: """Provide instructions for writing document if present. @@ -86,22 +81,3 @@ def story_instructions(ctx: RunContext[StateDeps[DocumentState]]) -> str: {ctx.deps.state.document} """ - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. 
- """ - return StreamingResponse( - agui.adapter.run(input_data, accept, deps=StateDeps(state_type=DocumentState)), - media_type=SSE_CONTENT_TYPE, - ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/shared_state.py b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py index 6b01399b3..3ab8bcd0a 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/shared_state.py +++ b/examples/pydantic_ai_ag_ui_examples/api/shared_state.py @@ -5,16 +5,14 @@ import json import logging from enum import StrEnum -from typing import TYPE_CHECKING, Annotated +from typing import TYPE_CHECKING -from ag_ui.core import EventType, RunAgentInput, StateSnapshotEvent -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from ag_ui.core import EventType, StateSnapshotEvent from pydantic import BaseModel, Field -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE, StateDeps +from pydantic_ai.ag_ui import FastAGUI, StateDeps -from .agent import AGUIAgent +from .agent import agent if TYPE_CHECKING: # pragma: no cover from pydantic_ai import RunContext @@ -67,29 +65,37 @@ class Recipe(BaseModel): """A class representing a recipe.""" skill_level: SkillLevel = Field( - description='The skill level required for the recipe' + default=SkillLevel.BEGINNER, + description='The skill level required for the recipe', ) special_preferences: list[SpecialPreferences] = Field( - description='Any special preferences for the recipe' + default_factory=lambda: list[SpecialPreferences](), + description='Any special preferences for the recipe', + ) + cooking_time: CookingTime = Field( + default=CookingTime.FIVE_MIN, description='The cooking time of the recipe' + ) + ingredients: list[Ingredient] = Field( + default_factory=lambda: list[Ingredient](), + description='Ingredients for the recipe', + ) + instructions: list[str] = Field( + default_factory=lambda: list[str](), description='Instructions for the recipe' ) - cooking_time: CookingTime = Field(description='The cooking time of 
the recipe') - ingredients: list[Ingredient] = Field(description='Ingredients for the recipe') - instructions: list[str] = Field(description='Instructions for the recipe') class RecipeSnapshot(BaseModel): """A class representing the state of the recipe.""" - recipe: Recipe = Field(description='The current state of the recipe') + recipe: Recipe = Field( + default_factory=Recipe, description='The current state of the recipe' + ) -router: APIRouter = APIRouter(prefix='/shared_state') -agui: AGUIAgent[StateDeps[RecipeSnapshot]] = AGUIAgent( - deps_type=StateDeps[RecipeSnapshot] -) +app: FastAGUI = agent(deps=StateDeps(RecipeSnapshot())) -@agui.agent.tool_plain +@app.adapter.agent.tool_plain def display_recipe(recipe: Recipe) -> StateSnapshotEvent: """Display the recipe to the user. @@ -105,7 +111,7 @@ def display_recipe(recipe: Recipe) -> StateSnapshotEvent: ) -@agui.agent.instructions +@app.adapter.agent.instructions def recipe_instructions(ctx: RunContext[StateDeps[RecipeSnapshot]]) -> str: """Instructions for the recipe generation agent. @@ -137,22 +143,3 @@ def recipe_instructions(ctx: RunContext[StateDeps[RecipeSnapshot]]) -> str: {ctx.deps.state.recipe.model_dump_json(indent=2)} """ - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. 
- """ - return StreamingResponse( - agui.adapter.run(input_data, accept, deps=StateDeps(state_type=RecipeSnapshot)), - media_type=SSE_CONTENT_TYPE, - ) diff --git a/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py index f9f5394f3..9d04040f5 100644 --- a/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py +++ b/examples/pydantic_ai_ag_ui_examples/api/tool_based_generative_ui.py @@ -5,38 +5,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Annotated +from pydantic_ai.ag_ui import FastAGUI -from ag_ui.core import RunAgentInput -from fastapi import APIRouter, Header -from fastapi.responses import StreamingResponse +from .agent import agent -from pydantic_ai.ag_ui import SSE_CONTENT_TYPE - -from .agent import AGUIAgent - -if TYPE_CHECKING: # pragma: no cover - from ag_ui.core import RunAgentInput - - -router: APIRouter = APIRouter(prefix='/tool_based_generative_ui') -agui: AGUIAgent = AGUIAgent() - - -@router.post('') -async def handler( - input_data: RunAgentInput, accept: Annotated[str, Header()] = SSE_CONTENT_TYPE -) -> StreamingResponse: - """Endpoint to handle AG-UI protocol requests and stream responses. - - Args: - input_data: The AG-UI run input. - accept: The Accept header to specify the response format. - - Returns: - A streaming response with event-stream media type. 
- """ - return StreamingResponse( - agui.adapter.run(input_data, accept), - media_type=SSE_CONTENT_TYPE, - ) +app: FastAGUI = agent() diff --git a/examples/pydantic_ai_ag_ui_examples/dojo_server.py b/examples/pydantic_ai_ag_ui_examples/dojo_server.py index 6ce7156f2..597c034e6 100644 --- a/examples/pydantic_ai_ag_ui_examples/dojo_server.py +++ b/examples/pydantic_ai_ag_ui_examples/dojo_server.py @@ -16,21 +16,29 @@ from fastapi import FastAPI from .api import ( - agentic_chat_router, - agentic_generative_ui_router, - human_in_the_loop_router, - predictive_state_updates_router, - shared_state_router, - tool_based_generative_ui_router, + agentic_chat_app, + agentic_generative_ui_app, + human_in_the_loop_app, + predictive_state_updates_app, + shared_state_app, + tool_based_generative_ui_app, ) app = FastAPI(title='PydanticAI AG-UI server') -app.include_router(agentic_chat_router, tags=['Agentic Chat']) -app.include_router(agentic_generative_ui_router, tags=['Agentic Generative UI']) -app.include_router(human_in_the_loop_router, tags=['Human in the Loop']) -app.include_router(predictive_state_updates_router, tags=['Predictive State Updates']) -app.include_router(shared_state_router, tags=['Shared State']) -app.include_router(tool_based_generative_ui_router, tags=['Tool Based Generative UI']) +app.mount('/agentic_chat', agentic_chat_app, 'Agentic Chat') +app.mount('/agentic_generative_ui', agentic_generative_ui_app, 'Agentic Generative UI') +app.mount('/human_in_the_loop', human_in_the_loop_app, 'Human in the Loop') +app.mount( + '/predictive_state_updates', + predictive_state_updates_app, + 'Predictive State Updates', +) +app.mount('/shared_state', shared_state_app, 'Shared State') +app.mount( + '/tool_based_generative_ui', + tool_based_generative_ui_app, + 'Tool Based Generative UI', +) if __name__ == '__main__': diff --git a/pydantic_ai_slim/pydantic_ai/ag_ui.py b/pydantic_ai_slim/pydantic_ai/ag_ui.py index c02846f66..eecd8e6c8 100644 --- 
a/pydantic_ai_slim/pydantic_ai/ag_ui.py +++ b/pydantic_ai_slim/pydantic_ai/ag_ui.py @@ -42,7 +42,6 @@ ThinkingTextMessageContentEvent, ThinkingTextMessageEndEvent, ThinkingTextMessageStartEvent, - Tool as ToolAGUI, ToolCallArgsEvent, ToolCallEndEvent, ToolCallResultEvent, @@ -66,27 +65,22 @@ from starlette.types import ExceptionHandler, Lifespan except ImportError as e: # pragma: no cover raise ImportError( - 'Please install the `fasta2a` package to use `Agent.to_ag_ui()` method, ' + 'Please install the `starlette` package to use `Agent.to_ag_ui()` method, ' 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' ) from e from pydantic import BaseModel, ValidationError -from pydantic_ai.output import DeferredToolCalls -from pydantic_ai.tools import ToolDefinition -from pydantic_ai.toolsets import AbstractToolset -from pydantic_ai.toolsets.deferred import DeferredToolset - -from . import Agent, models -from ._agent_graph import ModelRequestNode -from .agent import RunOutputDataT +from ._agent_graph import CallToolsNode, ModelRequestNode +from .agent import Agent, RunOutputDataT from .messages import ( AgentStreamEvent, FinalResultEvent, + FunctionToolResultEvent, ModelMessage, ModelRequest, - ModelRequestPart, ModelResponse, + ModelResponsePart, PartDeltaEvent, PartStartEvent, SystemPromptPart, @@ -99,10 +93,13 @@ ToolReturnPart, UserPromptPart, ) -from .output import OutputDataT, OutputSpec +from .models import KnownModelName, Model +from .output import DeferredToolCalls, OutputDataT, OutputSpec from .result import AgentStream from .settings import ModelSettings -from .tools import AgentDepsT +from .tools import AgentDepsT, ToolDefinition +from .toolsets import AbstractToolset +from .toolsets.deferred import DeferredToolset from .usage import Usage, UsageLimits if TYPE_CHECKING: @@ -125,17 +122,16 @@ class FastAGUI(Generic[AgentDepsT, OutputDataT], Starlette): - """A FastAPI-like application for running PydanticAI agents with AG-UI 
protocol support.""" + """ASGI application for running PydanticAI agents with AG-UI protocol support.""" def __init__( self, *, # Adapter for the agent. adapter: Adapter[AgentDepsT, OutputDataT], - path: str = '/', # Agent.iter parameters. - output_type: OutputSpec[OutputDataT] = str, - model: models.Model | models.KnownModelName | str | None = None, + output_type: OutputSpec[OutputDataT] | None = None, + model: Model | KnownModelName | str | None = None, deps: AgentDepsT = None, model_settings: ModelSettings | None = None, usage_limits: UsageLimits | None = None, @@ -155,7 +151,6 @@ def __init__( Args: adapter: The adapter to use for running the agent. - path: The path to serve the agent run endpoint. output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no output validators since output validators would expect an argument that matches the agent's @@ -195,6 +190,7 @@ def __init__( on_shutdown=on_shutdown, lifespan=lifespan, ) + self.adapter: Adapter[AgentDepsT, OutputDataT] = adapter async def endpoint(request: Request) -> Response | StreamingResponse: """Endpoint to run the agent with the provided input data.""" @@ -225,19 +221,16 @@ async def endpoint(request: Request) -> Response | StreamingResponse: media_type=SSE_CONTENT_TYPE, ) - self.router.add_route(path, endpoint, methods=['POST'], name='run_agent') + self.router.add_route('/', endpoint, methods=['POST'], name='run_agent') def agent_to_ag_ui( *, # Adapter parameters. agent: Agent[AgentDepsT, OutputDataT], - path: str = '/', - tool_prefix: str = '', - logger: logging.Logger | None = None, # Agent.iter parameters. 
- output_type: OutputSpec[OutputDataT] = str, - model: models.Model | models.KnownModelName | str | None = None, + output_type: OutputSpec[OutputDataT] | None = None, + model: Model | KnownModelName | str | None = None, deps: AgentDepsT = None, model_settings: ModelSettings | None = None, usage_limits: UsageLimits | None = None, @@ -257,9 +250,6 @@ def agent_to_ag_ui( Args: agent: The PydanticAI agent to adapt for AG-UI protocol. - path: The path to serve the agent run endpoint. - tool_prefix: Optional prefix to add to tool names. - logger: Optional logger to use for the adapter, defaults to the module's logger. output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no output validators since output validators would expect an argument that matches the agent's @@ -290,19 +280,11 @@ def agent_to_ag_ui( This is a newer style that replaces the `on_startup` and `on_shutdown` handlers. Use one or the other, not both. """ - if logger is None: # pragma: no branch - logger = _LOGGER - - adapter: Adapter[AgentDepsT, OutputDataT] = Adapter( - agent=agent, - tool_prefix=tool_prefix, - logger=logger, - ) + adapter: Adapter[AgentDepsT, OutputDataT] = Adapter(agent=agent) return FastAGUI( adapter=adapter, - path=path, - # Agent.iter parameter + # Agent.iter parameters output_type=output_type, model=model, deps=deps, @@ -331,7 +313,7 @@ class Adapter(Generic[AgentDepsT, OutputDataT]): responses using the AG-UI protocol. Examples: - This is an example of base usage with FastAPI. + This is an example of basic usage with FastAGUI. ```python from pydantic_ai import Agent @@ -384,13 +366,10 @@ def custom_events() -> list[CustomEvent]: ``` Args: agent: The PydanticAI `Agent` to adapt. - tool_prefix: Optional prefix to add to tool names. - logger: The logger to use for logging. 
""" agent: Agent[AgentDepsT, OutputDataT] = field(repr=False) - tool_prefix: str = field(default='', repr=False) - logger: logging.Logger = field(default=_LOGGER, repr=False) + _logger: logging.Logger = field(default=_LOGGER, repr=False, init=False) async def run( self, @@ -398,7 +377,7 @@ async def run( accept: str = SSE_CONTENT_TYPE, *, output_type: OutputSpec[RunOutputDataT] | None = None, - model: models.Model | models.KnownModelName | str | None = None, + model: Model | KnownModelName | str | None = None, deps: AgentDepsT = None, model_settings: ModelSettings | None = None, usage_limits: UsageLimits | None = None, @@ -427,13 +406,24 @@ async def run( Yields: Streaming SSE-formatted event chunks. """ - self.logger.debug('starting run: %s', json.dumps(run_input.model_dump(), indent=2)) + self._logger.debug('starting run: %s', json.dumps(run_input.model_dump(), indent=2)) - tool_names: dict[str, str] = {self.tool_prefix + tool.name: tool.name for tool in run_input.tools} encoder: EventEncoder = EventEncoder(accept=accept) - run_toolset: list[AbstractToolset[AgentDepsT]] = list(toolsets) if toolsets else [] if run_input.tools: - run_toolset.append(_AGUIToolset[AgentDepsT](run_input.tools)) + # AG-UI tools can't be prefixed as that would result in a mismatch between the tool names in the + # PydanticAI events and actual AG-UI tool names, preventing the tool from being called. If any + # conflicts arise, the AG-UI tool should be renamed or a `PrefixedToolset` used for local toolsets. 
+ toolset: AbstractToolset[AgentDepsT] = DeferredToolset[AgentDepsT]( + [ + ToolDefinition( + name=tool.name, + description=tool.description, + parameters_json_schema=tool.parameters, + ) + for tool in run_input.tools + ] + ) + toolsets = [toolset] if toolsets is None else [toolset] + list(toolsets) try: yield encoder.encode( @@ -448,22 +438,14 @@ async def run( raise _NoMessagesError if isinstance(deps, StateHandler): - deps.set_state(run_input.state) + deps.state = run_input.state history: _History = _convert_history(run_input.messages) - output_type_: OutputSpec[OutputDataT | DeferredToolCalls | RunOutputDataT] - if output_type is None: - # Use the agent's output type if not specified. - output_type_ = [self.agent.output_type, DeferredToolCalls] - else: - output_type_ = [output_type, DeferredToolCalls] - run: AgentRun[AgentDepsT, Any] async with self.agent.iter( user_prompt=None, - # TODO(steve): Could or should it just accept: [output_type, DeferredToolCalls] - output_type=output_type_, + output_type=[output_type or self.agent.output_type, DeferredToolCalls], message_history=history.messages, model=model, deps=deps, @@ -471,22 +453,17 @@ async def run( usage_limits=usage_limits, usage=usage, infer_name=infer_name, - toolsets=run_toolset, + toolsets=toolsets, ) as run: - async for event in self._agent_stream(tool_names, run, history): - if event is None: - # Tool call signals early return, so we stop processing. 
- self.logger.debug('tool call early return') - break - + async for event in self._agent_stream(run, history): yield encoder.encode(event) except _RunError as e: - self.logger.exception('agent run') + self._logger.exception('agent run') yield encoder.encode( RunErrorEvent(type=EventType.RUN_ERROR, message=e.message, code=e.code), ) except Exception as e: # pragma: no cover - self.logger.exception('unexpected error in agent run') + self._logger.exception('unexpected error in agent run') yield encoder.encode( RunErrorEvent(type=EventType.RUN_ERROR, message=str(e), code='run_error'), ) @@ -499,66 +476,53 @@ async def run( ), ) - self.logger.debug('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) + self._logger.debug('done thread_id=%s run_id=%s', run_input.thread_id, run_input.run_id) - async def _tool_events( + async def _tool_result_event( self, - parts: list[ModelRequestPart], - history: _History, - ) -> AsyncGenerator[BaseEvent | None, None]: - """Check for tool call results that are AG-UI events. + result: ToolReturnPart, + prompt_message_id: str, + ) -> AsyncGenerator[BaseEvent, None]: + """Convert a tool call result to AG-UI events. Args: - encoder: The event encoder to use for encoding events. - parts: The list of request parts to check for tool event returns. - history: The history of messages and tool calls to use for the run. + result: The tool call result to process. + prompt_message_id: The message ID of the prompt that initiated the tool call. Yields: AG-UI Server-Sent Events (SSE). """ - part: ModelRequestPart - for part in parts: - if not isinstance(part, ToolReturnPart): - continue - - if part.tool_call_id in history.tool_calls: - # Tool call was passed in the history, so we skip it. 
- continue - - yield ToolCallResultEvent( - message_id=history.prompt_message_id, - type=EventType.TOOL_CALL_RESULT, - role=Role.TOOL.value, - tool_call_id=part.tool_call_id, - content=part.model_response_str(), - ) + yield ToolCallResultEvent( + message_id=prompt_message_id, + type=EventType.TOOL_CALL_RESULT, + role=Role.TOOL.value, + tool_call_id=result.tool_call_id, + content=result.model_response_str(), + ) - # Now check for AG-UI events returned by the tool calls. - iter: Iterable[Any] - if isinstance(part.content, BaseEvent): - self.logger.debug('ag-ui event: %s', part.content) - yield part.content - elif isinstance(part.content, (str, bytes)): # pragma: no branch - # Avoid iterable check for strings and bytes. - pass - elif isinstance(part.content, Iterable): # pragma: no branch - # Type: ignore to handle partially unknown type - iter = part.content # type: ignore[assignment] - for item in iter: - if isinstance(item, BaseEvent): # pragma: no branch - self.logger.debug('ag-ui event: %s', item) - yield item + # Now check for AG-UI events returned by the tool calls. + content: Any = result.content + if isinstance(content, BaseEvent): + self._logger.debug('ag-ui event: %s', content) + yield content + elif isinstance(content, (str, bytes)): # pragma: no branch + # Avoid iterable check for strings and bytes. + pass + elif isinstance(content, Iterable): # pragma: no branch + item: Any + for item in content: # type: ignore[reportUnknownMemberType] + if isinstance(item, BaseEvent): # pragma: no branch + self._logger.debug('ag-ui event: %s', item) + yield item async def _agent_stream( self, - tool_names: dict[str, str], run: AgentRun[AgentDepsT, Any], history: _History, - ) -> AsyncGenerator[BaseEvent | None, None]: + ) -> AsyncGenerator[BaseEvent, None]: """Run the agent streaming responses using AG-UI protocol events. Args: - tool_names: A mapping of tool names to their AG-UI names. run: The agent run to process. 
history: The history of messages and tool calls to use for the run. @@ -566,54 +530,53 @@ async def _agent_stream( AG-UI Server-Sent Events (SSE). """ node: AgentNode[AgentDepsT, Any] | End[FinalResult[Any]] - msg: BaseEvent | None + msg: BaseEvent async for node in run: - self.logger.debug('processing node=%r', node) - if not isinstance(node, ModelRequestNode): - # Not interested UserPromptNode, CallToolsNode or End. - continue - - # Check for tool results. - async for msg in self._tool_events(node.request.parts, history): - yield msg - - stream_ctx: _RequestStreamContext = _RequestStreamContext() - request_stream: AgentStream[AgentDepsT] - async with node.stream(run.ctx) as request_stream: - agent_event: AgentStreamEvent - async for agent_event in request_stream: - async for msg in self._handle_agent_event(tool_names, stream_ctx, agent_event): - yield msg - - if stream_ctx.part_end: - yield stream_ctx.part_end - stream_ctx.part_end = None - - async def _handle_agent_event( + self._logger.debug('processing node=%r', node) + if isinstance(node, CallToolsNode): + # Handle tool results. + async with node.stream(run.ctx) as handle_stream: + async for event in handle_stream: + if isinstance(event, FunctionToolResultEvent) and isinstance(event.result, ToolReturnPart): + async for msg in self._tool_result_event(event.result, history.prompt_message_id): + yield msg + elif isinstance(node, ModelRequestNode): + # Handle model requests. 
+ stream_ctx: _RequestStreamContext = _RequestStreamContext() + request_stream: AgentStream[AgentDepsT] + async with node.stream(run.ctx) as request_stream: + agent_event: AgentStreamEvent + async for agent_event in request_stream: + async for msg in self._agent_event(stream_ctx, agent_event): + yield msg + + if stream_ctx.part_end: # pragma: no branch + yield stream_ctx.part_end + stream_ctx.part_end = None + + async def _agent_event( self, - tool_names: dict[str, str], stream_ctx: _RequestStreamContext, agent_event: AgentStreamEvent, - ) -> AsyncGenerator[BaseEvent | None, None]: + ) -> AsyncGenerator[BaseEvent, None]: """Handle an agent event and yield AG-UI protocol events. Args: - encoder: The event encoder to use for encoding events. - tool_names: A mapping of tool names to their AG-UI names. stream_ctx: The request stream context to manage state. agent_event: The agent event to process. Yields: AG-UI Server-Sent Events (SSE) based on the agent event. """ - self.logger.debug('agent_event: %s', agent_event) + self._logger.debug('agent_event: %s', agent_event) if isinstance(agent_event, PartStartEvent): if stream_ctx.part_end: # End the previous part. 
yield stream_ctx.part_end stream_ctx.part_end = None - if isinstance(agent_event.part, TextPart): + part: ModelResponsePart = agent_event.part + if isinstance(part, TextPart): message_id: str = stream_ctx.new_message_id() yield TextMessageStartEvent( type=EventType.TEXT_MESSAGE_START, @@ -624,33 +587,32 @@ async def _handle_agent_event( type=EventType.TEXT_MESSAGE_END, message_id=message_id, ) - if agent_event.part.content: + if part.content: yield TextMessageContentEvent( # pragma: no cover type=EventType.TEXT_MESSAGE_CONTENT, message_id=message_id, - delta=agent_event.part.content, + delta=part.content, ) - elif isinstance(agent_event.part, ToolCallPart): # pragma: no branch - tool_name: str | None = tool_names.get(agent_event.part.tool_name) - stream_ctx.last_tool_call_id = agent_event.part.tool_call_id + elif isinstance(part, ToolCallPart): # pragma: no branch + stream_ctx.last_tool_call_id = part.tool_call_id yield ToolCallStartEvent( type=EventType.TOOL_CALL_START, - tool_call_id=agent_event.part.tool_call_id, - tool_call_name=tool_name or agent_event.part.tool_name, + tool_call_id=part.tool_call_id, + tool_call_name=part.tool_name, ) stream_ctx.part_end = ToolCallEndEvent( type=EventType.TOOL_CALL_END, - tool_call_id=agent_event.part.tool_call_id, + tool_call_id=part.tool_call_id, ) - elif isinstance(agent_event.part, ThinkingPart): # pragma: no branch + elif isinstance(part, ThinkingPart): # pragma: no branch yield ThinkingTextMessageStartEvent( type=EventType.THINKING_TEXT_MESSAGE_START, ) - if agent_event.part.content: # pragma: no branch + if part.content: # pragma: no branch yield ThinkingTextMessageContentEvent( type=EventType.THINKING_TEXT_MESSAGE_CONTENT, - delta=agent_event.part.content, + delta=part.content, ) stream_ctx.part_end = ThinkingTextMessageEndEvent( type=EventType.THINKING_TEXT_MESSAGE_END, @@ -689,7 +651,6 @@ class _History: prompt_message_id: str # The ID of the last user message. 
messages: list[ModelMessage] - tool_calls: set[str] = field(default_factory=set) def _convert_history(messages: list[Message]) -> _History: @@ -730,7 +691,6 @@ def _convert_history(messages: list[Message]) -> _History: if msg.content: result.append(ModelResponse(parts=[TextPart(content=msg.content)])) elif isinstance(msg, SystemMessage): - # TODO(steve): Should we handle as instructions instead of system prompt? result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) elif isinstance(msg, ToolMessage): result.append( @@ -745,13 +705,11 @@ def _convert_history(messages: list[Message]) -> _History: ) ) elif isinstance(msg, DeveloperMessage): # pragma: no branch - # TODO(steve): Should these be handled differently? result.append(ModelRequest(parts=[SystemPromptPart(content=msg.content)])) return _History( prompt_message_id=prompt_message_id, messages=result, - tool_calls=set(tool_calls.keys()), ) @@ -798,7 +756,15 @@ class _NoMessagesError(_RunError): @dataclass -class _InvalidStateError(_RunError, ValidationError): +class StateNotSetError(_RunError, AttributeError): + """Exception raised when the state has not been set.""" + + message: str = 'state is not set' + code: str = 'state_not_set' + + +@dataclass +class InvalidStateError(_RunError, ValidationError): """Exception raised when an invalid state is provided.""" message: str = 'invalid state provided' @@ -810,7 +776,13 @@ class _InvalidStateError(_RunError, ValidationError): class StateHandler(Protocol): """Protocol for state handlers in agent runs.""" - def set_state(self, state: State) -> None: + @property + def state(self) -> State: + """Get the current state of the agent run.""" + ... + + @state.setter + def state(self, state: State) -> None: """Set the state of the agent run. This method is called to update the state of the agent run with the @@ -820,16 +792,15 @@ def set_state(self, state: State) -> None: state: The run state. 
Raises: - ValidationError: If `state` does not match the expected model. + InvalidStateError: If `state` does not match the expected model. """ ... -StateT = TypeVar('StateT', bound=BaseModel, contravariant=True) +StateT = TypeVar('StateT', bound=BaseModel) """Type variable for the state type, which must be a subclass of `BaseModel`.""" -@dataclass class StateDeps(Generic[StateT]): """Provides AG-UI state management. @@ -837,15 +808,26 @@ class StateDeps(Generic[StateT]): the state of the agent run with a specific type of state model, which must be a subclass of `BaseModel`. - The state is set using the `set_state` when the run starts by the `Adapter`. + The state is set using the `state` setter by the `Adapter` when the run starts. Implements the `StateHandler` protocol. """ - state_type: type[StateT] - state: StateT = field(init=False) + def __init__(self, default: StateT) -> None: + """Initialize the state with the provided state type.""" + self._state = default + + @property + def state(self) -> StateT: + """Get the current state of the agent run. + + Returns: + The current run state. + """ + return self._state - def set_state(self, state: State) -> None: + @state.setter + def state(self, state: State) -> None: """Set the state of the agent run. This method is called to update the state of the agent run with the @@ -854,18 +836,19 @@ def set_state(self, state: State) -> None: Implements the `StateHandler` protocol. Args: - state: The run state, which should match the expected model type or be `None`. + state: The run state, which must be `None` or model validate for the state type. Raises: - InvalidStateError: If `state` does not match the expected model and is not `None`. + InvalidStateError: If `state` does not validate. """ if state is None: + # If state is None, we keep the current state, which will be the default state. 
return try: - self.state = self.state_type.model_validate(state) + self._state = type(self._state).model_validate(state) except ValidationError as e: # pragma: no cover - raise _InvalidStateError from e + raise InvalidStateError from e @dataclass(repr=False) @@ -886,15 +869,3 @@ def new_message_id(self) -> str: """ self.message_id = str(uuid.uuid4()) return self.message_id - - -class _AGUIToolset(DeferredToolset[AgentDepsT]): - """A toolset that is used for AG-UI.""" - - def __init__(self, tools: list[ToolAGUI]) -> None: - super().__init__( - [ - ToolDefinition(name=tool.name, description=tool.description, parameters_json_schema=tool.parameters) - for tool in tools - ] - ) diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py index f3e7cea9b..c8a00d619 100644 --- a/pydantic_ai_slim/pydantic_ai/agent.py +++ b/pydantic_ai_slim/pydantic_ai/agent.py @@ -3,10 +3,9 @@ import dataclasses import inspect import json -import logging import warnings from asyncio import Lock -from collections.abc import AsyncIterator, Awaitable, Iterator, Sequence +from collections.abc import AsyncIterator, Awaitable, Iterator, Mapping, Sequence from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager, contextmanager from contextvars import ContextVar from copy import deepcopy @@ -66,7 +65,7 @@ if TYPE_CHECKING: from starlette.middleware import Middleware - from starlette.routing import Route + from starlette.routing import BaseRoute, Route from starlette.types import ExceptionHandler, Lifespan from fasta2a.applications import FastA2A @@ -1858,13 +1857,9 @@ async def run_mcp_servers( def to_ag_ui( self, - # Adapter parameters. 
- path: str = '/', *, - tool_prefix: str = '', - logger: logging.Logger | None = None, # Agent.iter parameters - output_type: OutputSpec[OutputDataT] = str, + output_type: OutputSpec[OutputDataT] | None = None, model: models.Model | models.KnownModelName | str | None = None, deps: AgentDepsT = None, model_settings: ModelSettings | None = None, @@ -1872,6 +1867,14 @@ def to_ag_ui( usage: Usage | None = None, infer_name: bool = True, toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + # Starlette + debug: bool = False, + routes: Sequence[BaseRoute] | None = None, + middleware: Sequence[Middleware] | None = None, + exception_handlers: Mapping[Any, ExceptionHandler] | None = None, + on_startup: Sequence[Callable[[], Any]] | None = None, + on_shutdown: Sequence[Callable[[], Any]] | None = None, + lifespan: Lifespan[FastAGUI[AgentDepsT, OutputDataT]] | None = None, ) -> FastAGUI[AgentDepsT, OutputDataT]: """Convert the agent to an Adapter instance. @@ -1879,13 +1882,25 @@ def to_ag_ui( The first two arguments are specific to `Adapter` the rest map directly to the `Agent.iter` method. - Args: - logger: Optional logger to use for the adapter. - path: Path to expose the agent at, defaults to the root path. - tool_prefix: Optional prefix to add to tool names in the AG-UI. + Example: + ```python + from pydantic_ai import Agent - output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no - output validators since output validators would expect an argument that matches the agent's output type. + agent = Agent('openai:gpt-4o') + app = agent.to_ag_ui() + ``` + + The `app` is an ASGI application that can be used with any ASGI server. 
+ + To run the application, you can use the following command: + + ```bash + uvicorn app:app --host 0.0.0.0 --port 8000 + ``` + Args: + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has + no output validators since output validators would expect an argument that matches the agent's + output type. model: Optional model to use for this run, required if `model` was not set when creating the agent. deps: Optional dependencies to use for this run. model_settings: Optional settings to use for this model's request. @@ -1894,6 +1909,24 @@ def to_ag_ui( infer_name: Whether to try to infer the agent name from the call frame if it's not set. toolsets: Optional list of toolsets to use for this agent, defaults to the agent's toolset. + debug: Boolean indicating if debug tracebacks should be returned on errors. + routes: A list of routes to serve incoming HTTP and WebSocket requests. + middleware: A list of middleware to run for every request. A starlette application will always + automatically include two middleware classes. `ServerErrorMiddleware` is added as the very + outermost middleware, to handle any uncaught errors occurring anywhere in the entire stack. + `ExceptionMiddleware` is added as the very innermost middleware, to deal with handled + exception cases occurring in the routing or endpoints. + exception_handlers: A mapping of either integer status codes, or exception class types onto + callables which handle the exceptions. Exception handler callables should be of the form + `handler(request, exc) -> response` and may be either standard functions, or async functions. + on_startup: A list of callables to run on application startup. Startup handler callables do not + take any arguments, and may be either standard functions, or async functions. + on_shutdown: A list of callables to run on application shutdown. 
Shutdown handler callables do + not take any arguments, and may be either standard functions, or async functions. + lifespan: A lifespan context function, which can be used to perform startup and shutdown tasks. + This is a newer style that replaces the `on_startup` and `on_shutdown` handlers. Use one or + the other, not both. + Returns: An adapter that converts between AG-UI protocol and PydanticAI. """ @@ -1901,15 +1934,12 @@ def to_ag_ui( from .ag_ui import agent_to_ag_ui except ImportError as e: # pragma: no cover raise ImportError( - 'Please install the `ag-ui` dependencies to use `Agent.to_ag_ui()` method, ' + 'Please install the `ag-ui-protocol` and `starlette` packages to use `Agent.to_ag_ui()` method, ' 'you can use the `ag-ui` optional group — `pip install "pydantic-ai-slim[ag-ui]"`' ) from e return agent_to_ag_ui( agent=self, - path=path, - tool_prefix=tool_prefix, - logger=logger, # Agent.iter parameters output_type=output_type, model=model, @@ -1919,6 +1949,14 @@ def to_ag_ui( usage=usage, infer_name=infer_name, toolsets=toolsets, + # Starlette + debug=debug, + routes=routes, + middleware=middleware, + exception_handlers=exception_handlers, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, ) def to_a2a( diff --git a/tests/test_ag_ui.py b/tests/test_ag_ui.py index f696c33b2..f58886e0f 100644 --- a/tests/test_ag_ui.py +++ b/tests/test_ag_ui.py @@ -125,7 +125,7 @@ async def create_adapter( ), deps_type=StateDeps[StateInt], # type: ignore[reportUnknownArgumentType] tools=[send_snapshot, send_custom, current_time], - ) + ), ) @@ -783,7 +783,7 @@ async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: events: list[str] = [] thread_id: str = f'{THREAD_ID_PREFIX}{mock_uuid()}' adapter: Adapter[StateDeps[StateInt], str] = await create_adapter(tc.call_tools) - deps: StateDeps[StateInt] = StateDeps[StateInt](state_type=StateInt) # type: ignore[reportUnknownArgumentType] + deps: StateDeps[StateInt] = 
StateDeps(StateInt()) for run in tc.runs: if run.nodes is not None: assert isinstance(adapter.agent.model, TestModel), ( @@ -797,11 +797,11 @@ async def test_run_method(mock_uuid: _MockUUID, tc: AdapterRunTest) -> None: run_id=f'{RUN_ID_PREFIX}{mock_uuid()}', ) - events.extend([event async for event in adapter.run(run_input, deps=deps)]) # type: ignore[reportUnknownArgumentType] + events.extend([event async for event in adapter.run(run_input, deps=deps)]) assert_events(events, tc.expected_events) if tc.expected_state is not None: - assert deps.state.value == tc.expected_state # type: ignore[reportUnknownArgumentType] + assert deps.state.value == tc.expected_state async def test_concurrent_runs(mock_uuid: _MockUUID, adapter: Adapter[None, str]) -> None: From d8c6cb547805e393c9b4149416ca9ec41bc9d315 Mon Sep 17 00:00:00 2001 From: Steven Hartland Date: Mon, 7 Jul 2025 18:50:36 +0100 Subject: [PATCH 13/13] chore: fix coverage failure Add some no cover pragmas in existing code to avoid coverage check failure. Correct raise to avoid resetting traceback in `AgentGraph`. 
--- pydantic_ai_slim/pydantic_ai/_agent_graph.py | 2 +- tests/test_agent.py | 4 ++-- tests/test_toolsets.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/_agent_graph.py b/pydantic_ai_slim/pydantic_ai/_agent_graph.py index 3c9c8f0dc..3378bc345 100644 --- a/pydantic_ai_slim/pydantic_ai/_agent_graph.py +++ b/pydantic_ai_slim/pydantic_ai/_agent_graph.py @@ -621,7 +621,7 @@ async def process_function_tools( # noqa: C901 result_data = await toolset.call_tool(call, run_context) except exceptions.UnexpectedModelBehavior as e: ctx.state.increment_retries(ctx.deps.max_result_retries, e) - raise e + raise # pragma: no cover except ToolRetryError as e: ctx.state.increment_retries(ctx.deps.max_result_retries, e) yield _messages.FunctionToolCallEvent(call) diff --git a/tests/test_agent.py b/tests/test_agent.py index fd8f5e538..45bbd1b39 100644 --- a/tests/test_agent.py +++ b/tests/test_agent.py @@ -3606,7 +3606,7 @@ async def only_if_plan_presented( async def test_context_manager(): try: from pydantic_ai.mcp import MCPServerStdio - except ImportError: + except ImportError: # pragma: no cover return server1 = MCPServerStdio('python', ['-m', 'tests.mcp_server']) @@ -3626,7 +3626,7 @@ async def test_context_manager(): def test_set_mcp_sampling_model(): try: from pydantic_ai.mcp import MCPServerStdio - except ImportError: + except ImportError: # pragma: no cover return test_model = TestModel() diff --git a/tests/test_toolsets.py b/tests/test_toolsets.py index ba2ec479c..623f2502f 100644 --- a/tests/test_toolsets.py +++ b/tests/test_toolsets.py @@ -482,7 +482,7 @@ async def prepare_add_context(ctx: RunContext[TestDeps], tool_defs: list[ToolDef async def test_context_manager(): try: from pydantic_ai.mcp import MCPServerStdio - except ImportError: + except ImportError: # pragma: no cover pytest.skip('mcp is not installed') server1 = MCPServerStdio('python', ['-m', 'tests.mcp_server'])