diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 6f8229ad..3985a757 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -20,6 +20,7 @@ JSONRPCt Llm POSTGRES RUF +Tful aconnect adk agentic @@ -38,6 +39,7 @@ drivername dunders euo excinfo +fernet fetchrow fetchval genai diff --git a/.github/workflows/linter.yaml b/.github/workflows/linter.yaml index 0ec19d90..d3f35913 100644 --- a/.github/workflows/linter.yaml +++ b/.github/workflows/linter.yaml @@ -26,6 +26,8 @@ jobs: run: uv sync --dev - name: Run Ruff Linter run: uv run ruff check . + - name: Run Ruff Format Check + run: uv run ruff format --check . - name: Run MyPy Type Checker run: uv run mypy src - name: Run Pyright (Pylance equivalent) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index b07a0e0a..88beeb1c 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -15,11 +15,24 @@ jobs: postgres: image: postgres:15-alpine env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres + POSTGRES_USER: a2a + POSTGRES_PASSWORD: a2a_password POSTGRES_DB: a2a_test ports: - 5432:5432 + options: >- + --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + mysql: + image: mysql:8.0 + env: + MYSQL_ROOT_PASSWORD: root + MYSQL_DATABASE: a2a_test + MYSQL_USER: a2a + MYSQL_PASSWORD: a2a_password + ports: + - 3306:3306 + options: >- + --health-cmd="mysqladmin ping -h localhost -u root -proot" --health-interval=10s --health-timeout=5s --health-retries=5 strategy: matrix: @@ -31,11 +44,10 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Set postgres for tests - run: | - sudo apt-get update && sudo apt-get install -y postgresql-client - PGPASSWORD=postgres psql -h localhost -p 5432 -U postgres -d a2a_test -f ${{ github.workspace }}/docker/postgres/init.sql - export POSTGRES_TEST_DSN="postgresql+asyncpg://postgres:postgres@localhost:5432/a2a_test" + - name: Set up test environment variables + run: | + echo "POSTGRES_TEST_DSN=postgresql+asyncpg://a2a:a2a_password@localhost:5432/a2a_test" >> $GITHUB_ENV + echo "MYSQL_TEST_DSN=mysql+aiomysql://a2a:a2a_password@localhost:3306/a2a_test" >> $GITHUB_ENV - name: Install uv uses: astral-sh/setup-uv@v6 @@ -43,7 +55,7 @@ jobs: run: | echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Install dependencies - run: uv sync --dev --extra sql + run: uv sync --dev --extra sql --extra encryption - name: Run tests and check coverage run: uv run pytest --cov=a2a --cov-report=xml --cov-fail-under=89 - name: Show coverage summary in log diff --git a/CHANGELOG.md b/CHANGELOG.md index c5555e6d..10eec020 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [0.2.12](https://github.com/a2aproject/a2a-python/compare/v0.2.11...v0.2.12) (2025-07-14) + + +### Features + +* add `metadata` property to `RequestContext` ([#302](https://github.com/a2aproject/a2a-python/issues/302)) ([e781ced](https://github.com/a2aproject/a2a-python/commit/e781ced3b082ef085f9aeef02ceebb9b35c68280)) +* add A2ABaseModel ([#292](https://github.com/a2aproject/a2a-python/issues/292)) ([24f2eb0](https://github.com/a2aproject/a2a-python/commit/24f2eb0947112539cbd4e493c98d0d9dadc87f05)) +* add support for notification tokens in PushNotificationSender ([#266](https://github.com/a2aproject/a2a-python/issues/266)) ([75aa4ed](https://github.com/a2aproject/a2a-python/commit/75aa4ed866a6b4005e59eb000e965fb593e0888f)) +* Update 
A2A types from specification 🤖 ([#289](https://github.com/a2aproject/a2a-python/issues/289)) ([ecb321a](https://github.com/a2aproject/a2a-python/commit/ecb321a354d691ca90b52cc39e0a397a576fd7d7)) + + +### Bug Fixes + +* add proper a2a request body documentation to Swagger UI ([#276](https://github.com/a2aproject/a2a-python/issues/276)) ([4343be9](https://github.com/a2aproject/a2a-python/commit/4343be99ad0df5eb6908867b71d55b1f7d0fafc6)), closes [#274](https://github.com/a2aproject/a2a-python/issues/274) +* Handle asyncio.cancellederror and raise to propagate back ([#293](https://github.com/a2aproject/a2a-python/issues/293)) ([9d6cb68](https://github.com/a2aproject/a2a-python/commit/9d6cb68a1619960b9c9fd8e7aa08ffb27047343f)) +* Improve error handling in task creation ([#294](https://github.com/a2aproject/a2a-python/issues/294)) ([6412c75](https://github.com/a2aproject/a2a-python/commit/6412c75413e26489bd3d33f59e41b626a71807d3)) +* Resolve dependency issue with sql stores ([#303](https://github.com/a2aproject/a2a-python/issues/303)) ([2126828](https://github.com/a2aproject/a2a-python/commit/2126828b5cb6291f47ca15d56c0e870950f17536)) +* Send push notifications for message/send ([#298](https://github.com/a2aproject/a2a-python/issues/298)) ([0274112](https://github.com/a2aproject/a2a-python/commit/0274112bb5b077c17b344da3a65277f2ad67d38f)) +* **server:** Improve event consumer error handling ([#282](https://github.com/a2aproject/a2a-python/issues/282)) ([a5786a1](https://github.com/a2aproject/a2a-python/commit/a5786a112779a21819d28e4dfee40fa11f1bb49a)) + ## [0.2.11](https://github.com/a2aproject/a2a-python/compare/v0.2.10...v0.2.11) (2025-07-07) diff --git a/docker/postgres/init.sql b/docker/postgres/init.sql deleted file mode 100644 index 86b1dd02..00000000 --- a/docker/postgres/init.sql +++ /dev/null @@ -1,8 +0,0 @@ --- Create a dedicated user for the application -CREATE USER a2a WITH PASSWORD 'a2a_password'; - --- Create the tasks database -CREATE DATABASE a2a_tasks; - -GRANT ALL PRIVILEGES ON DATABASE a2a_test TO a2a; - diff --git a/pyproject.toml b/pyproject.toml index e73df213..57dbc3ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ postgresql = ["sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0"] mysql = ["sqlalchemy[asyncio,aiomysql]>=2.0.0"] sqlite = ["sqlalchemy[asyncio,aiosqlite]>=2.0.0"] sql = ["sqlalchemy[asyncio,postgresql-asyncpg,aiomysql,aiosqlite]>=2.0.0"] +encryption = ["cryptography>=43.0.0"] [project.urls] homepage = "https://a2aproject.github.io/A2A/" diff --git a/src/a2a/client/__init__.py b/src/a2a/client/__init__.py index 3d673b31..393d85ec 100644 --- a/src/a2a/client/__init__.py +++ b/src/a2a/client/__init__.py @@ -10,6 +10,7 @@ A2AClientError, A2AClientHTTPError, A2AClientJSONError, + A2AClientTimeoutError, ) from a2a.client.grpc_client import A2AGrpcClient from a2a.client.helpers import create_text_message_object @@ -22,6 +23,7 @@ 'A2AClientError', 'A2AClientHTTPError', 'A2AClientJSONError', + 'A2AClientTimeoutError', 'A2AGrpcClient', 'AuthInterceptor', 'ClientCallContext', diff --git a/src/a2a/client/client.py b/src/a2a/client/client.py index 66a1e49b..66dfe0a4 100644 --- a/src/a2a/client/client.py +++ b/src/a2a/client/client.py @@ -10,7 +10,11 @@ from httpx_sse import SSEError, aconnect_sse from pydantic import ValidationError -from a2a.client.errors import A2AClientHTTPError, A2AClientJSONError +from a2a.client.errors import ( + A2AClientHTTPError, + A2AClientJSONError, + A2AClientTimeoutError, +) from a2a.client.middleware import ClientCallContext, 
ClientCallInterceptor from a2a.types import ( AgentCard, @@ -340,6 +344,8 @@ async def _send_request( ) response.raise_for_status() return response.json() + except httpx.ReadTimeout as e: + raise A2AClientTimeoutError('Client request timed out') from e except httpx.HTTPStatusError as e: raise A2AClientHTTPError(e.response.status_code, str(e)) from e except json.JSONDecodeError as e: diff --git a/src/a2a/client/errors.py b/src/a2a/client/errors.py index da02e582..5fe5512a 100644 --- a/src/a2a/client/errors.py +++ b/src/a2a/client/errors.py @@ -31,3 +31,16 @@ def __init__(self, message: str): """ self.message = message super().__init__(f'JSON Error: {message}') + + +class A2AClientTimeoutError(A2AClientError): + """Client exception for timeout errors during a request.""" + + def __init__(self, message: str): + """Initializes the A2AClientTimeoutError. + + Args: + message: A descriptive error message. + """ + self.message = message + super().__init__(f'Timeout Error: {message}') diff --git a/src/a2a/server/agent_execution/context.py b/src/a2a/server/agent_execution/context.py index 01055840..782d488b 100644 --- a/src/a2a/server/agent_execution/context.py +++ b/src/a2a/server/agent_execution/context.py @@ -1,5 +1,7 @@ import uuid +from typing import Any + from a2a.server.context import ServerCallContext from a2a.types import ( InvalidParamsError, @@ -134,6 +136,13 @@ def call_context(self) -> ServerCallContext | None: """The server call context associated with this request.""" return self._call_context + @property + def metadata(self) -> dict[str, Any]: + """Metadata associated with the request, if available.""" + if not self._params: + return {} + return self._params.metadata or {} + def _check_or_generate_task_id(self) -> None: """Ensures a task ID is present, generating one if necessary.""" if not self._params: diff --git a/src/a2a/server/apps/__init__.py b/src/a2a/server/apps/__init__.py index a73e05c8..4d42ee8c 100644 --- a/src/a2a/server/apps/__init__.py +++ b/src/a2a/server/apps/__init__.py @@ -6,11 +6,17 @@ CallContextBuilder, JSONRPCApplication, ) +from a2a.server.apps.rest import ( + A2ARESTFastAPIApplication, + RESTApplication, +) __all__ = [ 'A2AFastAPIApplication', + 'A2ARESTFastAPIApplication', 'A2AStarletteApplication', 'CallContextBuilder', 'JSONRPCApplication', + 'RESTApplication', ] diff --git a/src/a2a/server/apps/jsonrpc/__init__.py b/src/a2a/server/apps/jsonrpc/__init__.py index ab803d4e..1121fdbc 100644 --- a/src/a2a/server/apps/jsonrpc/__init__.py +++ b/src/a2a/server/apps/jsonrpc/__init__.py @@ -3,7 +3,9 @@ from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication from a2a.server.apps.jsonrpc.jsonrpc_app import ( CallContextBuilder, + DefaultCallContextBuilder, JSONRPCApplication, + StarletteUserProxy, ) from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication @@ -12,5 +14,7 @@ 'A2AFastAPIApplication', 'A2AStarletteApplication', 'CallContextBuilder', + 'DefaultCallContextBuilder', 'JSONRPCApplication', + 'StarletteUserProxy', ] diff --git a/src/a2a/server/apps/rest/__init__.py b/src/a2a/server/apps/rest/__init__.py new file mode 100644 index 00000000..c57b0f38 --- /dev/null +++ b/src/a2a/server/apps/rest/__init__.py @@ -0,0 +1,10 @@ +"""A2A REST Applications.""" + +from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication +from a2a.server.apps.rest.rest_app import RESTApplication + + +__all__ = [ + 'A2ARESTFastAPIApplication', + 'RESTApplication', +]
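The new `A2AClientTimeoutError` lets callers distinguish a slow agent from other HTTP failures. A minimal sketch of catching it (assumes an already-constructed `A2AClient`; the per-request `http_kwargs` timeout shown here is illustrative):

```python
import httpx

from a2a.client import A2AClient, A2AClientTimeoutError
from a2a.types import SendMessageRequest


async def send_with_deadline(
    client: A2AClient, request: SendMessageRequest
) -> None:
    try:
        # An httpx.ReadTimeout inside the client now surfaces as
        # A2AClientTimeoutError rather than a raw httpx exception.
        await client.send_message(
            request, http_kwargs={'timeout': httpx.Timeout(10.0)}
        )
    except A2AClientTimeoutError as e:
        print(f'Agent did not respond in time: {e.message}')
```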
diff --git a/src/a2a/server/apps/rest/fastapi_app.py b/src/a2a/server/apps/rest/fastapi_app.py new file mode 100644 index 00000000..76fa887f --- /dev/null +++ b/src/a2a/server/apps/rest/fastapi_app.py @@ -0,0 +1,81 @@ +import logging + +from typing import Any + +from fastapi import APIRouter, FastAPI, Request, Response + +from a2a.server.apps.jsonrpc.jsonrpc_app import ( + CallContextBuilder, +) +from a2a.server.apps.rest.rest_app import ( + RESTApplication, +) +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import AgentCard + + +logger = logging.getLogger(__name__) + + +class A2ARESTFastAPIApplication: + """A FastAPI application implementing the A2A protocol server REST endpoints. + + Handles incoming REST requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). + """ + + def __init__( + self, + agent_card: AgentCard, + http_handler: RequestHandler, + context_builder: CallContextBuilder | None = None, + ): + """Initializes the A2ARESTFastAPIApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via HTTP. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + """ + self._handler = RESTApplication( + agent_card=agent_card, + http_handler=http_handler, + context_builder=context_builder, + ) + + def build( + self, + agent_card_url: str = '/.well-known/agent.json', + rpc_url: str = '', + **kwargs: Any, + ) -> FastAPI: + """Builds and returns the FastAPI application instance. + + Args: + agent_card_url: The URL for the agent card endpoint. + rpc_url: The URL prefix for the A2A REST endpoints. + **kwargs: Additional keyword arguments to pass to the FastAPI constructor. + + Returns: + A configured FastAPI application instance.
+ """ + app = FastAPI(**kwargs) + router = APIRouter() + for route, callback in self._handler.routes().items(): + router.add_api_route( + f'{rpc_url}{route[0]}', callback, methods=[route[1]] + ) + + @router.get(f'{rpc_url}{agent_card_url}') + async def get_agent_card(request: Request) -> Response: + return await self._handler._handle_get_agent_card(request) + + app.include_router(router) + return app diff --git a/src/a2a/server/apps/rest/rest_app.py b/src/a2a/server/apps/rest/rest_app.py new file mode 100644 index 00000000..717c6e9f --- /dev/null +++ b/src/a2a/server/apps/rest/rest_app.py @@ -0,0 +1,228 @@ +import functools +import json +import logging +import traceback + +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable +from typing import Any + +from pydantic import ValidationError +from sse_starlette.sse import EventSourceResponse +from starlette.requests import Request +from starlette.responses import JSONResponse + +from a2a.server.apps.jsonrpc import ( + CallContextBuilder, + DefaultCallContextBuilder, +) +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.request_handlers.rest_handler import ( + RESTHandler, +) +from a2a.types import ( + AgentCard, + InternalError, + InvalidRequestError, + JSONParseError, + UnsupportedOperationError, +) +from a2a.utils.errors import MethodNotImplementedError + + +logger = logging.getLogger(__name__) + + +class RESTApplication: + """Base class for A2A REST applications. + + Defines REST requests processors and the routes to attach them too, as well as + manages response generation including Server-Sent Events (SSE). + """ + + def __init__( + self, + agent_card: AgentCard, + http_handler: RequestHandler, + context_builder: CallContextBuilder | None = None, + ): + """Initializes the RESTApplication. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + http_handler: The handler instance responsible for processing A2A + requests via http. + context_builder: The CallContextBuilder used to construct the + ServerCallContext passed to the http_handler. If None, no + ServerCallContext is passed. + """ + self.agent_card = agent_card + self.handler = RESTHandler( + agent_card=agent_card, request_handler=http_handler + ) + self._context_builder = context_builder or DefaultCallContextBuilder() + + def _generate_error_response(self, error) -> JSONResponse: + """Creates a JSONResponse for a errors. + + Logs the error based on its type. + + Args: + error: The Error object. + + Returns: + A `JSONResponse` object formatted as a JSON error response. 
+ """ + log_level = ( + logging.ERROR + if isinstance(error, InternalError) + else logging.WARNING + ) + logger.log( + log_level, + 'Request Error: ' + f"Code={error.code}, Message='{error.message}'" + f'{", Data=" + str(error.data) if error.data else ""}', + ) + return JSONResponse( + '{"message": ' + error.message + '}', + status_code=404, + ) + + def _handle_error(self, error: Exception) -> JSONResponse: + traceback.print_exc() + if isinstance(error, MethodNotImplementedError): + return self._generate_error_response(UnsupportedOperationError()) + if isinstance(error, json.decoder.JSONDecodeError): + return self._generate_error_response( + JSONParseError(message=str(error)) + ) + if isinstance(error, ValidationError): + return self._generate_error_response( + InvalidRequestError(data=json.loads(error.json())), + ) + logger.error(f'Unhandled exception: {error}') + return self._generate_error_response(InternalError(message=str(error))) + + async def _handle_request( + self, + method: Callable[[Request, ServerCallContext], Awaitable[str]], + request: Request, + ) -> JSONResponse: + try: + call_context = self._context_builder.build(request) + response = await method(request, call_context) + return JSONResponse(content=response) + except Exception as e: + return self._handle_error(e) + + async def _handle_streaming_request( + self, + method: Callable[[Request, ServerCallContext], AsyncIterator[str]], + request: Request, + ) -> EventSourceResponse: + try: + call_context = self._context_builder.build(request) + + async def event_generator( + stream: AsyncGenerator[str], + ) -> AsyncGenerator[dict[str, str]]: + async for item in stream: + yield {'data': item} + + return EventSourceResponse( + event_generator(method(request, call_context)) + ) + except Exception as e: + # Since the stream has started, we can't return a JSONResponse. + # Instead, we runt the error handling logic (provides logging) + # and reraise the error and let server framework manage + self._handle_error(e) + raise e + + async def _handle_get_agent_card(self, request: Request) -> JSONResponse: + """Handles GET requests for the agent card endpoint. + + Args: + request: The incoming Starlette Request object. + + Returns: + A JSONResponse containing the agent card data. + """ + # The public agent card is a direct serialization of the agent_card + # provided at initialization. + return JSONResponse( + self.agent_card.model_dump(mode='json', exclude_none=True) + ) + + async def handle_authenticated_agent_card( + self, request: Request + ) -> JSONResponse: + """Hook for per credential agent card response. + + If a dynamic card is needed based on the credentials provided in the request + override this method and return the customized content. + + Args: + request: The incoming Starlette Request object. + + Returns: + A JSONResponse containing the authenticated card. 
+ """ + if not self.agent_card.supportsAuthenticatedExtendedCard: + return JSONResponse( + '{"detail": "Authenticated card not supported"}', + status_code=404, + ) + return JSONResponse( + self.agent_card.model_dump(mode='json', exclude_none=True) + ) + + def routes(self) -> dict[tuple[str, str], Callable[[Request], Any]]: + routes = { + ('/v1/message:send', 'POST'): ( + functools.partial( + self._handle_request, self.handler.on_message_send + ), + ), + ('/v1/message:stream', 'POST'): ( + functools.partial( + self._handle_streaming_request, + self.handler.on_message_send_stream, + ), + ), + ('/v1/tasks/{id}:subscribe', 'POST'): ( + functools.partial( + self._handle_streaming_request, + self.handler.on_resubscribe_to_task, + ), + ), + ('/v1/tasks/{id}', 'GET'): ( + functools.partial( + self._handle_request, self.handler.on_get_task + ), + ), + ('/v1/tasks/{id}/pushNotificationConfigs/{push_id}', 'GET'): ( + functools.partial( + self._handle_request, self.handler.get_push_notification + ), + ), + ('/v1/tasks/{id}/pushNotificationConfigs', 'POST'): ( + functools.partial( + self._handle_request, self.handler.set_push_notification + ), + ), + ('/v1/tasks/{id}/pushNotificationConfigs', 'GET'): ( + functools.partial( + self._handle_request, self.handler.list_push_notifications + ), + ), + ('/v1/tasks', 'GET'): ( + functools.partial( + self._handle_request, self.handler.list_tasks + ), + ), + } + if self.agent_card.supportsAuthenticatedExtendedCard: + routes['/v1/card'] = (self.handle_authenticated_agent_card, 'GET') + return routes diff --git a/src/a2a/server/models.py b/src/a2a/server/models.py index 639c0729..09db5641 100644 --- a/src/a2a/server/models.py +++ b/src/a2a/server/models.py @@ -16,7 +16,7 @@ def override(func): # noqa: ANN001, ANN201 try: - from sqlalchemy import JSON, Dialect, String + from sqlalchemy import JSON, Dialect, LargeBinary, String from sqlalchemy.orm import ( DeclarativeBase, Mapped, @@ -208,3 +208,58 @@ class TaskModel(TaskMixin, Base): """Default task model with standard table name.""" __tablename__ = 'tasks' + + +# PushNotificationConfigMixin that can be used with any table name +class PushNotificationConfigMixin: + """Mixin providing standard push notification config columns.""" + + task_id: Mapped[str] = mapped_column(String(36), primary_key=True) + config_id: Mapped[str] = mapped_column(String(255), primary_key=True) + config_data: Mapped[bytes] = mapped_column(LargeBinary, nullable=False) + + @override + def __repr__(self) -> str: + """Return a string representation of the push notification config.""" + repr_template = '<{CLS}(task_id="{TID}", config_id="{CID}")>' + return repr_template.format( + CLS=self.__class__.__name__, + TID=self.task_id, + CID=self.config_id, + ) + + +def create_push_notification_config_model( + table_name: str = 'push_notification_configs', + base: type[DeclarativeBase] = Base, +) -> type: + """Create a PushNotificationConfigModel class with a configurable table name.""" + + class PushNotificationConfigModel(PushNotificationConfigMixin, base): + __tablename__ = table_name + + @override + def __repr__(self) -> str: + """Return a string representation of the push notification config.""" + repr_template = '' + return repr_template.format( + TABLE=table_name, + TID=self.task_id, + CID=self.config_id, + ) + + PushNotificationConfigModel.__name__ = ( + f'PushNotificationConfigModel_{table_name}' + ) + PushNotificationConfigModel.__qualname__ = ( + f'PushNotificationConfigModel_{table_name}' + ) + + return PushNotificationConfigModel + + +# 
Default PushNotificationConfigModel for backward compatibility +class PushNotificationConfigModel(PushNotificationConfigMixin, Base): + """Default push notification config model with standard table name.""" + + __tablename__ = 'push_notification_configs' diff --git a/src/a2a/server/request_handlers/__init__.py b/src/a2a/server/request_handlers/__init__.py index 8cf2fe8c..f3f8e020 100644 --- a/src/a2a/server/request_handlers/__init__.py +++ b/src/a2a/server/request_handlers/__init__.py @@ -10,12 +10,14 @@ build_error_response, prepare_response_object, ) +from a2a.server.request_handlers.rest_handler import RESTHandler __all__ = [ 'DefaultRequestHandler', 'GrpcHandler', 'JSONRPCHandler', + 'RESTHandler', 'RequestHandler', 'build_error_response', 'prepare_response_object', diff --git a/src/a2a/server/request_handlers/default_request_handler.py b/src/a2a/server/request_handlers/default_request_handler.py index f3b584d4..a1b8d565 100644 --- a/src/a2a/server/request_handlers/default_request_handler.py +++ b/src/a2a/server/request_handlers/default_request_handler.py @@ -33,9 +33,7 @@ InvalidParamsError, ListTaskPushNotificationConfigParams, Message, - MessageSendConfiguration, MessageSendParams, - PushNotificationConfig, Task, TaskIdParams, TaskNotFoundError, @@ -202,18 +200,6 @@ async def _setup_message_execution( ) task = task_manager.update_with_message(params.message, task) - if self.should_add_push_info(params): - assert self._push_config_store is not None - assert isinstance( - params.configuration, MessageSendConfiguration - ) - assert isinstance( - params.configuration.pushNotificationConfig, - PushNotificationConfig, - ) - await self._push_config_store.set_info( - task.id, params.configuration.pushNotificationConfig - ) # Build request context request_context = await self._request_context_builder.build( @@ -228,6 +214,16 @@ async def _setup_message_execution( # Always assign a task ID. We may not actually upgrade to a task, but # dictating the task ID at this layer is useful for tracking running # agents. + + if ( + self._push_config_store + and params.configuration + and params.configuration.pushNotificationConfig + ): + await self._push_config_store.set_info( + task_id, params.configuration.pushNotificationConfig + ) + queue = await self._queue_manager.create_or_tap(task_id) result_aggregator = ResultAggregator(task_manager) # TODO: to manage the non-blocking flows. 
@@ -333,16 +329,6 @@ async def on_message_send_stream( if isinstance(event, Task): self._validate_task_id_match(task_id, event.id) - if ( - self._push_config_store - and params.configuration - and params.configuration.pushNotificationConfig - ): - await self._push_config_store.set_info( - task_id, - params.configuration.pushNotificationConfig, - ) - await self._send_push_notification_if_needed( task_id, result_aggregator ) @@ -509,11 +495,3 @@ async def on_delete_task_push_notification_config( await self._push_config_store.delete_info( params.id, params.pushNotificationConfigId ) - - def should_add_push_info(self, params: MessageSendParams) -> bool: - """Determines if push notification info should be set for a task.""" - return bool( - self._push_config_store - and params.configuration - and params.configuration.pushNotificationConfig - ) diff --git a/src/a2a/server/request_handlers/jsonrpc_handler.py b/src/a2a/server/request_handlers/jsonrpc_handler.py index 3e79bc09..5de7dae9 100644 --- a/src/a2a/server/request_handlers/jsonrpc_handler.py +++ b/src/a2a/server/request_handlers/jsonrpc_handler.py @@ -347,10 +347,8 @@ async def list_push_notification_config( A `ListTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. """ try: - config = ( - await self.request_handler.on_list_task_push_notification_config( - request.params, context - ) + config = await self.request_handler.on_list_task_push_notification_config( + request.params, context ) return prepare_response_object( request.id, diff --git a/src/a2a/server/request_handlers/response_helpers.py b/src/a2a/server/request_handlers/response_helpers.py index 7f005099..4c55c419 100644 --- a/src/a2a/server/request_handlers/response_helpers.py +++ b/src/a2a/server/request_handlers/response_helpers.py @@ -41,7 +41,7 @@ GetTaskPushNotificationConfigResponse, SendStreamingMessageResponse, ListTaskPushNotificationConfigResponse, - DeleteTaskPushNotificationConfigResponse + DeleteTaskPushNotificationConfigResponse, ) """Type variable for RootModel response types.""" @@ -55,7 +55,7 @@ GetTaskPushNotificationConfigSuccessResponse, SendStreamingMessageSuccessResponse, ListTaskPushNotificationConfigSuccessResponse, - DeleteTaskPushNotificationConfigSuccessResponse + DeleteTaskPushNotificationConfigSuccessResponse, ) """Type variable for SuccessResponse types.""" diff --git a/src/a2a/server/request_handlers/rest_handler.py b/src/a2a/server/request_handlers/rest_handler.py new file mode 100644 index 00000000..930318d2 --- /dev/null +++ b/src/a2a/server/request_handlers/rest_handler.py @@ -0,0 +1,319 @@ +import logging + +from collections.abc import AsyncIterable + +from google.protobuf.json_format import MessageToJson, Parse +from starlette.requests import Request + +from a2a.grpc import a2a_pb2 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types import ( + A2AError, + AgentCard, + GetTaskPushNotificationConfigParams, + InternalError, + Task, + TaskIdParams, + TaskNotFoundError, + TaskPushNotificationConfig, + TaskQueryParams, +) +from a2a.utils import proto_utils +from a2a.utils.errors import ServerError +from a2a.utils.helpers import validate +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.SERVER) +class RESTHandler: + """Maps incoming REST-like (JSON+HTTP) requests to the appropriate request handler method and formats responses. 
+ + This uses the protobuf definitions of the gRPC service as the source of truth. By + doing this, it ensures that this implementation and the gRPC transcoding + (via Envoy) are equivalent. This handler should be used if using the gRPC handler + with Envoy is not feasible for a given deployment solution. Use this handler + and a related application if you desire to ONLY serve the RESTful API. + """ + + def __init__( + self, + agent_card: AgentCard, + request_handler: RequestHandler, + ): + """Initializes the RESTHandler. + + Args: + agent_card: The AgentCard describing the agent's capabilities. + request_handler: The underlying `RequestHandler` instance to delegate requests to. + """ + self.agent_card = agent_card + self.request_handler = request_handler + + async def on_message_send( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> str: + """Handles the 'message/send' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `str` containing the JSON result (Task or Message). + + Raises: + A2AError if a `ServerError` is raised by the handler. + """ + # TODO: Wrap in error handler to return error states + try: + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + # Transform the proto object to the Python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + params, + ) + task_or_message = await self.request_handler.on_message_send( + a2a_request, context + ) + return MessageToJson( + proto_utils.ToProto.task_or_message(task_or_message) + ) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) from e + + @validate( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_message_send_stream( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> AsyncIterable[str]: + """Handles the 'message/stream' REST method. + + Yields response objects as they are produced by the underlying handler's stream. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + `str` objects containing streaming events + (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) as JSON + + Raises: + `A2AError` + """ + try: + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + # Transform the proto object to the Python internal objects + a2a_request = proto_utils.FromProto.message_send_params( + params, + ) + async for event in self.request_handler.on_message_send_stream( + a2a_request, context + ): + response = proto_utils.ToProto.stream_response(event) + yield MessageToJson(response) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) from e + + async def on_cancel_task( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> str: + """Handles the 'tasks/cancel' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `str` containing the updated Task in JSON format. + + Raises: + A2AError.
+ """ + try: + task_id = request.path_params['id'] + task = await self.request_handler.on_cancel_task( + TaskIdParams(id=task_id), context + ) + if task: + return MessageToJson(proto_utils.ToProto.task(task)) + raise ServerError(error=TaskNotFoundError()) + except ServerError as e: + raise A2AError( + error=e.error if e.error else InternalError(), + ) from e + + @validate( + lambda self: self.agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_resubscribe_to_task( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> AsyncIterable[str]: + """Handles the 'tasks/resubscribe' REST method. + + Yields response objects as they are produced by the underlying handler's stream. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + `str` containing streaming events in JSON format + + Raises: + A A2AError if an error is encountered + """ + try: + task_id = request.path_params['id'] + async for event in self.request_handler.on_resubscribe_to_task( + TaskIdParams(id=task_id), context + ): + yield ( + MessageToJson(proto_utils.ToProto.stream_response(event)) + ) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) from e + + async def get_push_notification( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> str: + """Handles the 'tasks/pushNotificationConfig/get' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `str` containing the config as JSON + Raises: + A2AError. + """ + try: + task_id = request.path_params['id'] + push_id = request.path_params['push_id'] + if push_id: + params = GetTaskPushNotificationConfigParams( + id=task_id, push_id=push_id + ) + else: + params = TaskIdParams['id'] + config = ( + await self.request_handler.on_get_task_push_notification_config( + params, context + ) + ) + return MessageToJson( + proto_utils.ToProto.task_push_notification_config(config) + ) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) + + @validate( + lambda self: self.agent_card.capabilities.pushNotifications, + 'Push notifications are not supported by the agent', + ) + async def set_push_notification( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> str: + """Handles the 'tasks/pushNotificationConfig/set' REST method. + + Requires the agent to support push notifications. + + Args: + request: The incoming `TaskPushNotificationConfig` object. + context: Context provided by the server. + + Returns: + A `str` containing the config as JSON object. + + Raises: + ServerError: If push notifications are not supported by the agent + (due to the `@validate` decorator), A2AError if processing error is + found. 
+ """ + try: + task_id = request.path_params['id'] + body = await request.body() + params = a2a_pb2.TaskPushNotificationConfig() + Parse(body, params) + params = TaskPushNotificationConfig.validate_model(body) + a2a_request = ( + proto_utils.FromProto.task_push_notification_config( + params, + ), + ) + config = ( + await self.request_handler.on_set_task_push_notification_config( + a2a_request, context + ) + ) + return MessageToJson( + proto_utils.ToProto.task_push_notification_config(config) + ) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) from e + + async def on_get_task( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> str: + """Handles the 'v1/tasks/{id}' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `Task` object containing the Task. + + Raises: + A2AError + """ + try: + task_id = request.path_params['id'] + historyLength = None + if 'historyLength' in request.query_params: + historyLength = request.query_params['historyLength'] + params = TaskQueryParams(id=task_id, historyLength=historyLength) + task = await self.request_handler.on_get_task(params, context) + if task: + return MessageToJson(proto_utils.ToProto.task(task)) + raise ServerError(error=TaskNotFoundError()) + except ServerError as e: + raise A2AError(error=e.error if e.error else InternalError()) from e + + async def list_push_notifications( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> list[TaskPushNotificationConfig]: + raise NotImplementedError('list notifications not implemented') + + async def list_tasks( + self, + request: Request, + context: ServerCallContext | None = None, + ) -> list[Task]: + raise NotImplementedError('list tasks not implemented') diff --git a/src/a2a/server/tasks/__init__.py b/src/a2a/server/tasks/__init__.py index f46ef2cb..641195ea 100644 --- a/src/a2a/server/tasks/__init__.py +++ b/src/a2a/server/tasks/__init__.py @@ -1,9 +1,10 @@ """Components for managing tasks within the A2A server.""" +import logging + from a2a.server.tasks.base_push_notification_sender import ( BasePushNotificationSender, ) -from a2a.server.tasks.database_task_store import DatabaseTaskStore from a2a.server.tasks.inmemory_push_notification_config_store import ( InMemoryPushNotificationConfigStore, ) @@ -18,8 +19,55 @@ from a2a.server.tasks.task_updater import TaskUpdater +logger = logging.getLogger(__name__) + +try: + from a2a.server.tasks.database_task_store import ( + DatabaseTaskStore, # type: ignore + ) +except ImportError as e: + _original_error = e + # If the database task store is not available, we can still use in-memory stores. + logger.debug( + 'DatabaseTaskStore not loaded. This is expected if database dependencies are not installed. Error: %s', + e, + ) + + class DatabaseTaskStore: # type: ignore + """Placeholder for DatabaseTaskStore when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use DatabaseTaskStore, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[sql]"\'' + ) from _original_error + + +try: + from a2a.server.tasks.database_push_notification_config_store import ( + DatabasePushNotificationConfigStore, # type: ignore + ) +except ImportError as e: + _original_error = e + # If the database push notification config store is not available, we can still use in-memory stores. 
+ logger.debug( + 'DatabasePushNotificationConfigStore not loaded. This is expected if database dependencies are not installed. Error: %s', + e, + ) + + class DatabasePushNotificationConfigStore: # type: ignore + """Placeholder for DatabasePushNotificationConfigStore when dependencies are not installed.""" + + def __init__(self, *args, **kwargs): + raise ImportError( + 'To use DatabasePushNotificationConfigStore, its dependencies must be installed. ' + 'You can install them with \'pip install "a2a-sdk[sql]"\'' + ) from _original_error + + __all__ = [ 'BasePushNotificationSender', + 'DatabasePushNotificationConfigStore', 'DatabaseTaskStore', 'InMemoryPushNotificationConfigStore', 'InMemoryTaskStore', diff --git a/src/a2a/server/tasks/base_push_notification_sender.py b/src/a2a/server/tasks/base_push_notification_sender.py index 5f9efba2..51558d42 100644 --- a/src/a2a/server/tasks/base_push_notification_sender.py +++ b/src/a2a/server/tasks/base_push_notification_sender.py @@ -58,7 +58,7 @@ async def _dispatch_notification( response = await self._client.post( url, json=task.model_dump(mode='json', exclude_none=True), - headers=headers + headers=headers, ) response.raise_for_status() logger.info( diff --git a/src/a2a/server/tasks/database_push_notification_config_store.py b/src/a2a/server/tasks/database_push_notification_config_store.py new file mode 100644 index 00000000..21e38069 --- /dev/null +++ b/src/a2a/server/tasks/database_push_notification_config_store.py @@ -0,0 +1,288 @@ +import json +import logging + +from typing import TYPE_CHECKING + +from pydantic import ValidationError + + +try: + from sqlalchemy import ( + Table, + delete, + select, + ) + from sqlalchemy.ext.asyncio import ( + AsyncEngine, + AsyncSession, + async_sessionmaker, + ) + from sqlalchemy.orm import class_mapper +except ImportError as e: + raise ImportError( + 'DatabasePushNotificationConfigStore requires SQLAlchemy and a database driver. ' + 'Install with one of: ' + "'pip install a2a-sdk[postgresql]', " + "'pip install a2a-sdk[mysql]', " + "'pip install a2a-sdk[sqlite]', " + "or 'pip install a2a-sdk[sql]'" + ) from e + +from a2a.server.models import ( + Base, + PushNotificationConfigModel, + create_push_notification_config_model, +) +from a2a.server.tasks.push_notification_config_store import ( + PushNotificationConfigStore, +) +from a2a.types import PushNotificationConfig + + +if TYPE_CHECKING: + from cryptography.fernet import Fernet + + +logger = logging.getLogger(__name__) + + +class DatabasePushNotificationConfigStore(PushNotificationConfigStore): + """SQLAlchemy-based implementation of PushNotificationConfigStore. + + Stores push notification configurations in a database supported by SQLAlchemy. + """ + + engine: AsyncEngine + async_session_maker: async_sessionmaker[AsyncSession] + create_table: bool + _initialized: bool + config_model: type[PushNotificationConfigModel] + _fernet: 'Fernet | None' + + def __init__( + self, + engine: AsyncEngine, + create_table: bool = True, + table_name: str = 'push_notification_configs', + encryption_key: str | bytes | None = None, + ) -> None: + """Initializes the DatabasePushNotificationConfigStore. + + Args: + engine: An existing SQLAlchemy AsyncEngine to be used by the store. + create_table: If true, create the table on initialization. + table_name: Name of the database table. Defaults to 'push_notification_configs'. + encryption_key: A key for encrypting sensitive configuration data. + If provided, `config_data` will be encrypted in the database. 
+ The key must be a URL-safe base64-encoded 32-byte key. + """ + logger.debug( + f'Initializing DatabasePushNotificationConfigStore with existing engine, table: {table_name}' + ) + self.engine = engine + self.async_session_maker = async_sessionmaker( + self.engine, expire_on_commit=False + ) + self.create_table = create_table + self._initialized = False + self.config_model = ( + PushNotificationConfigModel + if table_name == 'push_notification_configs' + else create_push_notification_config_model(table_name) + ) + self._fernet = None + + if encryption_key: + try: + from cryptography.fernet import Fernet # noqa: PLC0415 + except ImportError as e: + raise ImportError( + "DatabasePushNotificationConfigStore with encryption requires the 'cryptography' " + 'library. Install with: ' + "'pip install a2a-sdk[encryption]'" + ) from e + + if isinstance(encryption_key, str): + encryption_key = encryption_key.encode('utf-8') + self._fernet = Fernet(encryption_key) + logger.debug( + 'Encryption enabled for push notification config store.' + ) + + async def initialize(self) -> None: + """Initialize the database and create the table if needed.""" + if self._initialized: + return + + logger.debug( + 'Initializing database schema for push notification configs...' + ) + if self.create_table: + async with self.engine.begin() as conn: + mapper = class_mapper(self.config_model) + tables_to_create = [ + table for table in mapper.tables if isinstance(table, Table) + ] + await conn.run_sync( + Base.metadata.create_all, tables=tables_to_create + ) + self._initialized = True + logger.debug( + 'Database schema for push notification configs initialized.' + ) + + async def _ensure_initialized(self) -> None: + """Ensure the database connection is initialized.""" + if not self._initialized: + await self.initialize() + + def _to_orm( + self, task_id: str, config: PushNotificationConfig + ) -> PushNotificationConfigModel: + """Maps a Pydantic PushNotificationConfig to a SQLAlchemy model instance. + + The config data is serialized to JSON bytes, and encrypted if a key is configured. + """ + json_payload = config.model_dump_json().encode('utf-8') + + if self._fernet: + data_to_store = self._fernet.encrypt(json_payload) + else: + data_to_store = json_payload + + return self.config_model( + task_id=task_id, + config_id=config.id, + config_data=data_to_store, + ) + + def _from_orm( + self, model_instance: PushNotificationConfigModel + ) -> PushNotificationConfig: + """Maps a SQLAlchemy model instance to a Pydantic PushNotificationConfig. + + Handles decryption if a key is configured, with a fallback to plain JSON. + """ + payload = model_instance.config_data + + if self._fernet: + from cryptography.fernet import InvalidToken # noqa: PLC0415 + + try: + decrypted_payload = self._fernet.decrypt(payload) + return PushNotificationConfig.model_validate_json( + decrypted_payload + ) + except (json.JSONDecodeError, ValidationError) as e: + logger.error( + 'Failed to parse decrypted push notification config for task %s, config %s. ' + 'Data is corrupted or not valid JSON after decryption.', + model_instance.task_id, + model_instance.config_id, + ) + raise ValueError( + 'Failed to parse decrypted push notification config data' + ) from e + except InvalidToken: + # Decryption failed. This could be because the data is not encrypted. + # We'll log a warning and try to parse it as plain JSON as a fallback. + logger.warning( + 'Failed to decrypt push notification config for task %s, config %s. ' + 'Attempting to parse as unencrypted JSON. 
' + 'This may indicate an incorrect encryption key or unencrypted data in the database.', + model_instance.task_id, + model_instance.config_id, + ) + # Fall through to the unencrypted parsing logic below. + + # Try to parse as plain JSON. + try: + return PushNotificationConfig.model_validate_json(payload) + except (json.JSONDecodeError, ValidationError) as e: + if self._fernet: + logger.error( + 'Failed to parse push notification config for task %s, config %s. ' + 'Decryption failed and the data is not valid JSON. ' + 'This likely indicates the data is corrupted or encrypted with a different key.', + model_instance.task_id, + model_instance.config_id, + ) + else: + # if no key is configured and the payload is not valid JSON. + logger.error( + 'Failed to parse push notification config for task %s, config %s. ' + 'Data is not valid JSON and no encryption key is configured.', + model_instance.task_id, + model_instance.config_id, + ) + raise ValueError( + 'Failed to parse push notification config data. ' + 'Data is not valid JSON, or it is encrypted with the wrong key.' + ) from e + + async def set_info( + self, task_id: str, notification_config: PushNotificationConfig + ) -> None: + """Sets or updates the push notification configuration for a task.""" + await self._ensure_initialized() + + config_to_save = notification_config.model_copy() + if config_to_save.id is None: + config_to_save.id = task_id + + db_config = self._to_orm(task_id, config_to_save) + async with self.async_session_maker.begin() as session: + await session.merge(db_config) + logger.debug( + f'Push notification config for task {task_id} with config id {config_to_save.id} saved/updated.' + ) + + async def get_info(self, task_id: str) -> list[PushNotificationConfig]: + """Retrieves all push notification configurations for a task.""" + await self._ensure_initialized() + async with self.async_session_maker() as session: + stmt = select(self.config_model).where( + self.config_model.task_id == task_id + ) + result = await session.execute(stmt) + models = result.scalars().all() + + configs = [] + for model in models: + try: + configs.append(self._from_orm(model)) + except ValueError as e: + logger.error( + 'Could not deserialize push notification config for task %s, config %s: %s', + model.task_id, + model.config_id, + e, + ) + return configs + + async def delete_info( + self, task_id: str, config_id: str | None = None + ) -> None: + """Deletes push notification configurations for a task. + + If config_id is provided, only that specific configuration is deleted. + If config_id is None, all configurations for the task are deleted. + """ + await self._ensure_initialized() + async with self.async_session_maker.begin() as session: + stmt = delete(self.config_model).where( + self.config_model.task_id == task_id + ) + if config_id is not None: + stmt = stmt.where(self.config_model.config_id == config_id) + + result = await session.execute(stmt) + + if result.rowcount > 0: + logger.info( + f'Deleted {result.rowcount} push notification config(s) for task {task_id}.' + ) + else: + logger.warning( + f'Attempted to delete push notification config for task {task_id} with config_id: {config_id} that does not exist.' 
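)

The encryption support above deserves a usage sketch. A minimal example (assumes the `encryption` extra is installed; the engine URL is illustrative, and a real deployment would load a persisted key from secret storage rather than generate one at startup):

```python
from cryptography.fernet import Fernet
from sqlalchemy.ext.asyncio import create_async_engine

from a2a.server.tasks import DatabasePushNotificationConfigStore

engine = create_async_engine(
    'postgresql+asyncpg://a2a:a2a_password@localhost:5432/a2a_test'
)

# Fernet requires a URL-safe base64-encoded 32-byte key.
key = Fernet.generate_key()  # illustrative; load from secret storage in practice

store = DatabasePushNotificationConfigStore(engine, encryption_key=key)
# config_data rows are now encrypted at rest; on read, the store falls
# back to plain JSON for rows written before encryption was enabled.
```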
diff --git a/src/a2a/server/tasks/database_task_store.py b/src/a2a/server/tasks/database_task_store.py index 7e85ddd4..70b02e10 100644 --- a/src/a2a/server/tasks/database_task_store.py +++ b/src/a2a/server/tasks/database_task_store.py @@ -2,12 +2,13 @@ try: - from sqlalchemy import delete, select + from sqlalchemy import Table, delete, select from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, async_sessionmaker, ) + from sqlalchemy.orm import class_mapper except ImportError as e: raise ImportError( 'DatabaseTaskStore requires SQLAlchemy and a database driver. ' @@ -75,8 +76,13 @@ async def initialize(self) -> None: logger.debug('Initializing database schema...') if self.create_table: async with self.engine.begin() as conn: - # This will create the 'tasks' table based on TaskModel's definition - await conn.run_sync(Base.metadata.create_all) + mapper = class_mapper(self.task_model) + tables_to_create = [ + table for table in mapper.tables if isinstance(table, Table) + ] + await conn.run_sync( + Base.metadata.create_all, tables=tables_to_create + ) self._initialized = True logger.debug('Database schema initialized.') diff --git a/src/a2a/server/tasks/push_notification_config_store.py b/src/a2a/server/tasks/push_notification_config_store.py index dd93791f..efe46b40 100644 --- a/src/a2a/server/tasks/push_notification_config_store.py +++ b/src/a2a/server/tasks/push_notification_config_store.py @@ -7,7 +7,9 @@ class PushNotificationConfigStore(ABC): """Interface for storing and retrieving push notification configurations for tasks.""" @abstractmethod - async def set_info(self, task_id: str, notification_config: PushNotificationConfig) -> None: + async def set_info( + self, task_id: str, notification_config: PushNotificationConfig + ) -> None: """Sets or updates the push notification configuration for a task.""" @abstractmethod @@ -15,5 +17,7 @@ async def get_info(self, task_id: str) -> list[PushNotificationConfig]: """Retrieves the push notification configuration for a task.""" @abstractmethod - async def delete_info(self, task_id: str, config_id: str | None = None) -> None: + async def delete_info( + self, task_id: str, config_id: str | None = None + ) -> None: """Deletes the push notification configuration for a task.""" diff --git a/src/a2a/server/tasks/task_updater.py b/src/a2a/server/tasks/task_updater.py index d8fa21af..11157456 100644 --- a/src/a2a/server/tasks/task_updater.py +++ b/src/a2a/server/tasks/task_updater.py @@ -60,12 +60,18 @@ async def update_status( """ async with self._lock: if self._terminal_state_reached: - raise RuntimeError(f"Task {self.task_id} is already in a terminal state.") + raise RuntimeError( + f'Task {self.task_id} is already in a terminal state.' + ) if state in self._terminal_states: self._terminal_state_reached = True final = True - current_timestamp = timestamp if timestamp else datetime.now(timezone.utc).isoformat() + current_timestamp = ( + timestamp + if timestamp + else datetime.now(timezone.utc).isoformat() + ) await self.event_queue.enqueue_event( TaskStatusUpdateEvent( taskId=self.task_id, @@ -112,7 +118,7 @@ async def add_artifact( # noqa: PLR0913 metadata=metadata, ), append=append, - lastChunk=last_chunk + lastChunk=last_chunk, ) ) diff --git a/src/a2a/utils/task.py b/src/a2a/utils/task.py index 9cf4df43..9b8c82a9 100644 --- a/src/a2a/utils/task.py +++ b/src/a2a/utils/task.py @@ -15,7 +15,16 @@ def new_task(request: Message) -> Task: Returns: A new `Task` object initialized with 'submitted' status and the input message in history. 
+ + Raises: + TypeError: If the message role is None. + ValueError: If the message parts are empty. """ + if not request.role: + raise TypeError('Message role cannot be None') + if not request.parts: + raise ValueError('Message parts cannot be empty') + return Task( status=TaskStatus(state=TaskState.submitted), id=(request.taskId if request.taskId else str(uuid.uuid4())), @@ -46,6 +55,11 @@ def completed_task( Returns: A `Task` object with status set to 'completed'. """ + if not artifacts or not all(isinstance(a, Artifact) for a in artifacts): + raise ValueError( + 'artifacts must be a non-empty list of Artifact objects' + ) + if history is None: history = [] return Task( diff --git a/tests/client/test_auth_middleware.py b/tests/client/test_auth_middleware.py index 94c0abde..34db5b95 100644 --- a/tests/client/test_auth_middleware.py +++ b/tests/client/test_auth_middleware.py @@ -1,3 +1,5 @@ +from collections.abc import Callable +from dataclasses import dataclass from typing import Any import httpx @@ -11,6 +13,7 @@ AgentCapabilities, AgentCard, AuthorizationCodeOAuthFlow, + HTTPAuthSecurityScheme, In, Message, MessageSendParams, @@ -20,11 +23,13 @@ Role, SecurityScheme, SendMessageRequest, + SendMessageSuccessResponse, ) -# A simple mock interceptor for testing basic middleware functionality class HeaderInterceptor(ClientCallInterceptor): + """A simple mock interceptor for testing basic middleware functionality.""" + def __init__(self, header_name: str, header_value: str): self.header_name = header_name self.header_value = header_value @@ -43,220 +48,248 @@ async def intercept( return request_payload, http_kwargs -@pytest.mark.asyncio -@respx.mock -async def test_client_with_simple_interceptor(): - """ - Tests that a basic interceptor is called and successfully - modifies the outgoing request headers. 
- """ - # Arrange - test_url = 'http://fake-agent.com/rpc' - header_interceptor = HeaderInterceptor('X-Test-Header', 'Test-Value-123') - async with httpx.AsyncClient() as http_client: - client = A2AClient( - httpx_client=http_client, - url=test_url, - interceptors=[header_interceptor], - ) +def build_success_response() -> dict: + """Creates a valid JSON-RPC success response as dict.""" + return SendMessageSuccessResponse( + id='1', + jsonrpc='2.0', + result=Message( + kind='message', + messageId='message-id', + role=Role.agent, + parts=[], + ), + ).model_dump(mode='json') + + +def build_send_message_request() -> SendMessageRequest: + """Builds a minimal SendMessageRequest.""" + return SendMessageRequest( + id='1', + params=MessageSendParams( + message=Message( + messageId='msg1', + role=Role.user, + parts=[], + ) + ), + ) - # Mock the HTTP response with a minimal valid success response - minimal_success_response = { - 'jsonrpc': '2.0', - 'id': '1', - 'result': { - 'kind': 'message', - 'messageId': 'response-msg', - 'role': 'agent', - 'parts': [], - }, - } - respx.post(test_url).mock( - return_value=httpx.Response(200, json=minimal_success_response) - ) - # Act - await client.send_message( - request=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - messageId='msg1', - role=Role.user, - parts=[], - ) - ), - ) +async def send_message( + client: A2AClient, + url: str, + session_id: str | None = None, +) -> httpx.Request: + """Mocks the response and sends a message using the client.""" + respx.post(url).mock( + return_value=httpx.Response( + 200, + json=build_success_response(), ) + ) + context = ClientCallContext( + state={'sessionId': session_id} if session_id else {} + ) + await client.send_message( + request=build_send_message_request(), + context=context, + ) + return respx.calls.last.request - # Assert - assert len(respx.calls) == 1 - request = respx.calls.last.request - assert 'x-test-header' in request.headers - assert request.headers['x-test-header'] == 'Test-Value-123' + +@pytest.fixture +def store(): + store = InMemoryContextCredentialStore() + yield store @pytest.mark.asyncio -async def test_in_memory_context_credential_store(): +async def test_auth_interceptor_skips_when_no_agent_card(store): """ - Tests the functionality of the InMemoryContextCredentialStore to ensure - it correctly stores and retrieves credentials based on sessionId. + Tests that the AuthInterceptor does not modify the request when no AgentCard is provided. """ - # Arrange - store = InMemoryContextCredentialStore() - session_id = 'test-session-123' + request_payload = {'foo': 'bar'} + http_kwargs = {'fizz': 'buzz'} + auth_interceptor = AuthInterceptor(credential_service=store) + + new_payload, new_kwargs = await auth_interceptor.intercept( + method_name='message/send', + request_payload=request_payload, + http_kwargs=http_kwargs, + agent_card=None, + context=ClientCallContext(state={}), + ) + assert new_payload == request_payload + assert new_kwargs == http_kwargs + + +@pytest.mark.asyncio +async def test_in_memory_context_credential_store(store): + """ + Verifies that InMemoryContextCredentialStore correctly stores and retrieves + credentials based on the session ID in the client context. 
+ """ + session_id = 'session-id' scheme_name = 'test-scheme' credential = 'test-token' - - # Act await store.set_credentials(session_id, scheme_name, credential) # Assert: Successful retrieval context = ClientCallContext(state={'sessionId': session_id}) retrieved_credential = await store.get_credentials(scheme_name, context) assert retrieved_credential == credential - # Assert: Retrieval with wrong session ID returns None wrong_context = ClientCallContext(state={'sessionId': 'wrong-session'}) retrieved_credential_wrong = await store.get_credentials( scheme_name, wrong_context ) assert retrieved_credential_wrong is None - # Assert: Retrieval with no context returns None retrieved_credential_none = await store.get_credentials(scheme_name, None) assert retrieved_credential_none is None - # Assert: Retrieval with context but no sessionId returns None empty_context = ClientCallContext(state={}) retrieved_credential_empty = await store.get_credentials( scheme_name, empty_context ) assert retrieved_credential_empty is None + # Assert: Overwrite the credential when session_id already exists + new_credential = 'new-token' + await store.set_credentials(session_id, scheme_name, new_credential) + assert await store.get_credentials(scheme_name, context) == new_credential @pytest.mark.asyncio @respx.mock -async def test_auth_interceptor_with_api_key(): +async def test_client_with_simple_interceptor(): """ - Tests the authentication flow with an API key in the header. + Ensures that a custom HeaderInterceptor correctly injects a static header + into outbound HTTP requests from the A2AClient. """ - # Arrange - test_url = 'http://apikey-agent.com/rpc' - session_id = 'user-session-2' - scheme_name = 'apiKeyAuth' - api_key = 'secret-api-key' + url = 'http://agent.com/rpc' + interceptor = HeaderInterceptor('X-Test-Header', 'Test-Value-123') - cred_store = InMemoryContextCredentialStore() - await cred_store.set_credentials(session_id, scheme_name, api_key) + async with httpx.AsyncClient() as http_client: + client = A2AClient( + httpx_client=http_client, url=url, interceptors=[interceptor] + ) + request = await send_message(client, url) + assert request.headers['x-test-header'] == 'Test-Value-123' - auth_interceptor = AuthInterceptor(credential_service=cred_store) - api_key_scheme_params = { - 'type': 'apiKey', - 'name': 'X-API-Key', - 'in': In.header, - } +@dataclass +class AuthTestCase: + """ + Represents a test scenario for verifying authentication behavior in AuthInterceptor. + """ - agent_card = AgentCard( - url=test_url, - name='ApiKeyBot', - description='A bot that requires an API Key', - version='1.0', - defaultInputModes=[], - defaultOutputModes=[], - skills=[], - capabilities=AgentCapabilities(), - security=[{scheme_name: []}], - securitySchemes={ - scheme_name: SecurityScheme( - root=APIKeySecurityScheme(**api_key_scheme_params) + url: str + """The endpoint URL of the agent to which the request is sent.""" + session_id: str + """The client session ID used to fetch credentials from the credential store.""" + scheme_name: str + """The name of the security scheme defined in the agent card.""" + credential: str + """The actual credential value (e.g., API key, access token) to be injected.""" + security_scheme: Any + """The security scheme object (e.g., APIKeySecurityScheme, OAuth2SecurityScheme, etc.) 
to define behavior.""" + expected_header_key: str + """The expected HTTP header name to be set by the interceptor.""" + expected_header_value_func: Callable[[str], str] + """A function that maps the credential to its expected header value (e.g., lambda c: f"Bearer {c}").""" + + +api_key_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='apikey', + credential='secret-api-key', + security_scheme=APIKeySecurityScheme( + type='apiKey', + name='X-API-Key', + in_=In.header, + ), + expected_header_key='x-api-key', + expected_header_value_func=lambda c: c, +) + + +oauth2_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='oauth2', + credential='secret-oauth-access-token', + security_scheme=OAuth2SecurityScheme( + type='oauth2', + flows=OAuthFlows( + authorizationCode=AuthorizationCodeOAuthFlow( + authorizationUrl='http://provider.com/auth', + tokenUrl='http://provider.com/token', + scopes={'read': 'Read scope'}, ) - }, - ) + ), + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) - async with httpx.AsyncClient() as http_client: - client = A2AClient( - httpx_client=http_client, - agent_card=agent_card, - interceptors=[auth_interceptor], - ) - minimal_success_response = { - 'jsonrpc': '2.0', - 'id': '1', - 'result': { - 'kind': 'message', - 'messageId': 'response-msg', - 'role': 'agent', - 'parts': [], - }, - } - respx.post(test_url).mock( - return_value=httpx.Response(200, json=minimal_success_response) - ) +oidc_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='oidc', + credential='secret-oidc-id-token', + security_scheme=OpenIdConnectSecurityScheme( + type='openIdConnect', + openIdConnectUrl='http://provider.com/.well-known/openid-configuration', + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) - # Act - context = ClientCallContext(state={'sessionId': session_id}) - await client.send_message( - request=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - messageId='msg1', - role=Role.user, - parts=[], - ) - ), - ), - context=context, - ) - # Assert - assert len(respx.calls) == 1 - request = respx.calls.last.request - assert 'x-api-key' in request.headers - assert request.headers['x-api-key'] == api_key +bearer_test_case = AuthTestCase( + url='http://agent.com/rpc', + session_id='session-id', + scheme_name='bearer', + credential='bearer-token-123', + security_scheme=HTTPAuthSecurityScheme( + scheme='bearer', + ), + expected_header_key='Authorization', + expected_header_value_func=lambda c: f'Bearer {c}', +) @pytest.mark.asyncio +@pytest.mark.parametrize( + 'test_case', + [api_key_test_case, oauth2_test_case, oidc_test_case, bearer_test_case], +) @respx.mock -async def test_auth_interceptor_with_oauth2_scheme(): +async def test_auth_interceptor_variants(test_case, store): """ - Tests the AuthInterceptor with an OAuth2 security scheme defined in AgentCard. - Ensures it correctly sets the Authorization: Bearer header. + Parametrized test verifying that AuthInterceptor correctly attaches credentials + based on the defined security scheme in the AgentCard. 
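# --- Illustrative sketch ---
# The four test cases above encode one mapping: apiKey schemes put the raw
# credential into the header named by the scheme, while oauth2, openIdConnect
# and HTTP bearer schemes all produce 'Authorization: Bearer <credential>'.
# A hedged sketch of that dispatch; AuthInterceptor's real structure is not
# shown in this diff and may differ.
def header_for_scheme(scheme, credential: str) -> tuple[str, str]:
    if getattr(scheme, 'type', None) == 'apiKey':
        return scheme.name, credential  # e.g. 'X-API-Key'
    return 'Authorization', f'Bearer {credential}'
# ---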
""" - test_url = 'http://oauth-agent.com/rpc' - session_id = 'user-session-oauth' - scheme_name = 'myOAuthScheme' - access_token = 'secret-oauth-access-token' - - cred_store = InMemoryContextCredentialStore() - await cred_store.set_credentials(session_id, scheme_name, access_token) - - auth_interceptor = AuthInterceptor(credential_service=cred_store) - - oauth_flows = OAuthFlows( - authorizationCode=AuthorizationCodeOAuthFlow( - authorizationUrl='http://provider.com/auth', - tokenUrl='http://provider.com/token', - scopes={'read': 'Read scope'}, - ) + await store.set_credentials( + test_case.session_id, test_case.scheme_name, test_case.credential ) - + auth_interceptor = AuthInterceptor(credential_service=store) agent_card = AgentCard( - url=test_url, - name='OAuthBot', - description='A bot that uses OAuth2', + url=test_case.url, + name=f'{test_case.scheme_name}bot', + description=f'A bot that uses {test_case.scheme_name}', version='1.0', defaultInputModes=[], defaultOutputModes=[], skills=[], capabilities=AgentCapabilities(), - security=[{scheme_name: ['read']}], + security=[{test_case.scheme_name: []}], securitySchemes={ - scheme_name: SecurityScheme( - root=OAuth2SecurityScheme(type='oauth2', flows=oauth_flows) + test_case.scheme_name: SecurityScheme( + root=test_case.security_scheme ) }, ) @@ -267,121 +300,48 @@ async def test_auth_interceptor_with_oauth2_scheme(): agent_card=agent_card, interceptors=[auth_interceptor], ) - - minimal_success_response = { - 'jsonrpc': '2.0', - 'id': 'oauth_test_1', - 'result': { - 'kind': 'message', - 'messageId': 'response-msg-oauth', - 'role': 'agent', - 'parts': [], - }, - } - respx.post(test_url).mock( - return_value=httpx.Response(200, json=minimal_success_response) - ) - - # Act - context = ClientCallContext(state={'sessionId': session_id}) - await client.send_message( - request=SendMessageRequest( - id='oauth_test_1', - params=MessageSendParams( - message=Message( - messageId='msg-oauth', - role=Role.user, - parts=[], - ) - ), - ), - context=context, + request = await send_message( + client, test_case.url, test_case.session_id ) - - # Assert - assert len(respx.calls) == 1 - request_sent = respx.calls.last.request - assert 'Authorization' in request_sent.headers - assert request_sent.headers['Authorization'] == f'Bearer {access_token}' + assert request.headers[ + test_case.expected_header_key + ] == test_case.expected_header_value_func(test_case.credential) @pytest.mark.asyncio -@respx.mock -async def test_auth_interceptor_with_oidc_scheme(): +async def test_auth_interceptor_skips_when_scheme_not_in_security_schemes( + store, +): """ - Tests the AuthInterceptor with an OpenIdConnectSecurityScheme. - Ensures it correctly sets the Authorization: Bearer header. + Tests that AuthInterceptor skips a scheme if it's listed in security requirements + but not defined in securitySchemes. 
""" - # Arrange - test_url = 'http://oidc-agent.com/rpc' - session_id = 'user-session-oidc' - scheme_name = 'myOidcScheme' - id_token = 'secret-oidc-id-token' - - cred_store = InMemoryContextCredentialStore() - await cred_store.set_credentials(session_id, scheme_name, id_token) - - auth_interceptor = AuthInterceptor(credential_service=cred_store) - + scheme_name = 'missing' + session_id = 'session-id' + credential = 'dummy-token' + request_payload = {'foo': 'bar'} + http_kwargs = {'fizz': 'buzz'} + await store.set_credentials(session_id, scheme_name, credential) + auth_interceptor = AuthInterceptor(credential_service=store) agent_card = AgentCard( - url=test_url, - name='OidcBot', - description='A bot that uses OpenID Connect', + url='http://agent.com/rpc', + name='missingbot', + description='A bot that uses missing scheme definition', version='1.0', defaultInputModes=[], defaultOutputModes=[], skills=[], capabilities=AgentCapabilities(), security=[{scheme_name: []}], - securitySchemes={ - scheme_name: SecurityScheme( - root=OpenIdConnectSecurityScheme( - type='openIdConnect', - openIdConnectUrl='http://provider.com/.well-known/openid-configuration', - ) - ) - }, + securitySchemes={}, ) - async with httpx.AsyncClient() as http_client: - client = A2AClient( - httpx_client=http_client, - agent_card=agent_card, - interceptors=[auth_interceptor], - ) - - minimal_success_response = { - 'jsonrpc': '2.0', - 'id': 'oidc_test_1', - 'result': { - 'kind': 'message', - 'messageId': 'response-msg-oidc', - 'role': 'agent', - 'parts': [], - }, - } - respx.post(test_url).mock( - return_value=httpx.Response(200, json=minimal_success_response) - ) - - # Act - context = ClientCallContext(state={'sessionId': session_id}) - await client.send_message( - request=SendMessageRequest( - id='oidc_test_1', - params=MessageSendParams( - message=Message( - messageId='msg-oidc', - role=Role.user, - parts=[], - ) - ), - ), - context=context, - ) - - # Assert - assert len(respx.calls) == 1 - request_sent = respx.calls.last.request - assert 'Authorization' in request_sent.headers - assert request_sent.headers['Authorization'] == f'Bearer {id_token}' + new_payload, new_kwargs = await auth_interceptor.intercept( + method_name='message/send', + request_payload=request_payload, + http_kwargs=http_kwargs, + agent_card=agent_card, + context=ClientCallContext(state={'sessionId': session_id}), + ) + assert new_payload == request_payload + assert new_kwargs == http_kwargs diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 5b6e9491..00ab8796 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -14,6 +14,7 @@ A2AClient, A2AClientHTTPError, A2AClientJSONError, + A2AClientTimeoutError, create_text_message_object, ) from a2a.types import ( @@ -1266,3 +1267,25 @@ async def test_cancel_task_error_response( mode='json', exclude_none=True ) == error_details.model_dump(exclude_none=True) assert response.root.id == 'err_cancel_req' + + @pytest.mark.asyncio + async def test_send_message_client_timeout( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + mock_httpx_client.post.side_effect = httpx.ReadTimeout( + 'Request timed out' + ) + client = A2AClient( + httpx_client=mock_httpx_client, agent_card=mock_agent_card + ) + + params = MessageSendParams( + message=create_text_message_object(content='Hello') + ) + + request = SendMessageRequest(id=123, params=params) + + with pytest.raises(A2AClientTimeoutError) as exc_info: + await client.send_message(request=request) + + 
assert 'Request timed out' in str(exc_info.value)
diff --git a/tests/server/agent_execution/test_context.py b/tests/server/agent_execution/test_context.py
index ea90631c..aa72b5a6 100644
--- a/tests/server/agent_execution/test_context.py
+++ b/tests/server/agent_execution/test_context.py
@@ -211,6 +211,17 @@ def test_message_property_with_params(self, mock_params):
         context = RequestContext(request=mock_params)
         assert context.message == mock_params.message

+    def test_metadata_property_without_content(self):
+        """Test metadata property returns an empty dict when no params are provided."""
+        context = RequestContext()
+        assert context.metadata == {}
+
+    def test_metadata_property_with_content(self, mock_params):
+        """Test metadata property returns the metadata from params."""
+        mock_params.metadata = {'key': 'value'}
+        context = RequestContext(request=mock_params)
+        assert context.metadata == {'key': 'value'}
+
     def test_init_with_existing_ids_in_message(self, mock_message, mock_params):
         """Test initialization with existing IDs in the message."""
         mock_message.taskId = 'existing-task-id'
diff --git a/tests/server/request_handlers/test_default_request_handler.py b/tests/server/request_handlers/test_default_request_handler.py
index fdf100f7..1eee49b1 100644
--- a/tests/server/request_handlers/test_default_request_handler.py
+++ b/tests/server/request_handlers/test_default_request_handler.py
@@ -20,17 +20,20 @@
 from a2a.server.events import EventQueue, InMemoryQueueManager, QueueManager
 from a2a.server.request_handlers import DefaultRequestHandler
 from a2a.server.tasks import (
+    InMemoryPushNotificationConfigStore,
     InMemoryTaskStore,
+    PushNotificationConfigStore,
+    PushNotificationSender,
     ResultAggregator,
     TaskStore,
     TaskUpdater,
-    PushNotificationConfigStore,
-    PushNotificationSender,
-    InMemoryPushNotificationConfigStore,
 )
 from a2a.types import (
+    DeleteTaskPushNotificationConfigParams,
+    GetTaskPushNotificationConfigParams,
     InternalError,
     InvalidParamsError,
+    ListTaskPushNotificationConfigParams,
     Message,
     MessageSendConfiguration,
     MessageSendParams,
@@ -46,9 +49,6 @@
     TaskStatus,
     TextPart,
     UnsupportedOperationError,
-    GetTaskPushNotificationConfigParams,
-    ListTaskPushNotificationConfigParams,
-    DeleteTaskPushNotificationConfigParams,
 )


@@ -401,6 +401,90 @@ async def get_current_result():
     mock_agent_executor.execute.assert_awaited_once()


+@pytest.mark.asyncio
+async def test_on_message_send_with_push_notification_no_existing_task():
+    """Test on_message_send for new task sets push notification info if provided."""
+    mock_task_store = AsyncMock(spec=TaskStore)
+    mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore)
+    mock_agent_executor = AsyncMock(spec=AgentExecutor)
+    mock_request_context_builder = AsyncMock(spec=RequestContextBuilder)
+
+    task_id = 'push_task_1'
+    context_id = 'push_ctx_1'
+
+    mock_task_store.get.return_value = (
+        None  # Simulate new task scenario for TaskManager
+    )
+
+    # Mock _request_context_builder.build to return a context with the generated/confirmed IDs
+    mock_request_context = MagicMock(spec=RequestContext)
+    mock_request_context.task_id = task_id
+    mock_request_context.context_id = context_id
+    mock_request_context_builder.build.return_value = mock_request_context
+
+    request_handler = DefaultRequestHandler(
+        agent_executor=mock_agent_executor,
+        task_store=mock_task_store,
+        push_config_store=mock_push_notification_store,
+        request_context_builder=mock_request_context_builder,
+    )
+
+    push_config = 
PushNotificationConfig(url='http://callback.com/push') + message_config = MessageSendConfiguration( + pushNotificationConfig=push_config, + acceptedOutputModes=['text/plain'], # Added required field + ) + params = MessageSendParams( + message=Message( + role=Role.user, + messageId='msg_push', + parts=[], + taskId=task_id, + contextId=context_id, + ), + configuration=message_config, + ) + + # Mock ResultAggregator and its consume_and_break_on_interrupt + mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) + final_task_result = create_sample_task( + task_id=task_id, context_id=context_id, status_state=TaskState.completed + ) + mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( + final_task_result, + False, + ) + + # Mock the current_result property to return the final task result + async def get_current_result(): + return final_task_result + + # Configure the 'current_result' property on the type of the mock instance + type(mock_result_aggregator_instance).current_result = PropertyMock( + return_value=get_current_result() + ) + + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.ResultAggregator', + return_value=mock_result_aggregator_instance, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + await request_handler.on_message_send( + params, create_server_call_context() + ) + + mock_push_notification_store.set_info.assert_awaited_once_with( + task_id, push_config + ) + # Other assertions for full flow if needed (e.g., agent execution) + mock_agent_executor.execute.assert_awaited_once() + + @pytest.mark.asyncio async def test_on_message_send_no_result_from_aggregator(): """Test on_message_send when aggregator returns (None, False).""" diff --git a/tests/server/request_handlers/test_jsonrpc_handler.py b/tests/server/request_handlers/test_jsonrpc_handler.py index a4400ead..1f421aef 100644 --- a/tests/server/request_handlers/test_jsonrpc_handler.py +++ b/tests/server/request_handlers/test_jsonrpc_handler.py @@ -16,7 +16,13 @@ from a2a.server.events import QueueManager from a2a.server.events.event_queue import EventQueue from a2a.server.request_handlers import DefaultRequestHandler, JSONRPCHandler -from a2a.server.tasks import TaskStore, InMemoryPushNotificationConfigStore, BasePushNotificationSender, PushNotificationConfigStore, PushNotificationSender +from a2a.server.tasks import ( + TaskStore, + InMemoryPushNotificationConfigStore, + BasePushNotificationSender, + PushNotificationConfigStore, + PushNotificationSender, +) from a2a.types import ( AgentCapabilities, AgentCard, @@ -436,8 +442,10 @@ async def streaming_coro(): async def test_set_push_notification_success(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_task_store = AsyncMock(spec=TaskStore) - mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) - + mock_push_notification_store = AsyncMock( + spec=PushNotificationConfigStore + ) + request_handler = DefaultRequestHandler( mock_agent_executor, mock_task_store, @@ -471,10 +479,12 @@ async def test_set_push_notification_success(self) -> None: async def test_get_push_notification_success(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store = AsyncMock(spec=TaskStore) push_notification_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store, 
push_config_store=push_notification_store + mock_agent_executor, + mock_task_store, + push_config_store=push_notification_store, ) self.mock_agent_card.capabilities = AgentCapabilities( streaming=True, pushNotifications=True @@ -516,9 +526,14 @@ async def test_on_message_stream_new_message_send_push_notification_success( mock_task_store = AsyncMock(spec=TaskStore) mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) push_notification_store = InMemoryPushNotificationConfigStore() - push_notification_sender = BasePushNotificationSender(mock_httpx_client, push_notification_store) + push_notification_sender = BasePushNotificationSender( + mock_httpx_client, push_notification_store + ) request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store, push_config_store=push_notification_store, push_sender=push_notification_sender + mock_agent_executor, + mock_task_store, + push_config_store=push_notification_store, + push_sender=push_notification_sender, ) self.mock_agent_card.capabilities = AgentCapabilities( streaming=True, pushNotifications=True @@ -585,7 +600,7 @@ async def streaming_coro(): 'kind': 'task', 'status': {'state': 'submitted'}, }, - headers=None + headers=None, ), call( 'http://example.com', @@ -606,7 +621,7 @@ async def streaming_coro(): 'kind': 'task', 'status': {'state': 'submitted'}, }, - headers=None + headers=None, ), call( 'http://example.com', @@ -627,7 +642,7 @@ async def streaming_coro(): 'kind': 'task', 'status': {'state': 'completed'}, }, - headers=None + headers=None, ), ] mock_httpx_client.post.assert_has_calls(calls) @@ -727,7 +742,7 @@ async def test_streaming_not_supported_error( pass self.assertEqual( - str(context.exception.error.message), # type: ignore + str(context.exception.error.message), # type: ignore 'Streaming is not supported by the agent', ) @@ -761,7 +776,7 @@ async def test_push_notifications_not_supported_error(self) -> None: await handler.set_push_notification_config(request) self.assertEqual( - str(context.exception.error.message), # type: ignore + str(context.exception.error.message), # type: ignore 'Push notifications are not supported by the agent', ) @@ -960,7 +975,7 @@ async def consume_raises_error(*args, **kwargs) -> NoReturn: # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore + self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore async def test_on_message_send_task_id_mismatch(self) -> None: mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -1031,24 +1046,35 @@ async def test_on_get_push_notification(self) -> None: mock_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = mock_task - # Create request handler without a push notifier request_handler = AsyncMock(spec=DefaultRequestHandler) - task_push_config = TaskPushNotificationConfig(taskId=mock_task.id, pushNotificationConfig=PushNotificationConfig(id="config1", url='http://example.com')) - request_handler.on_get_task_push_notification_config.return_value = task_push_config + task_push_config = TaskPushNotificationConfig( + taskId=mock_task.id, + pushNotificationConfig=PushNotificationConfig( + id='config1', url='http://example.com' + ), + ) + request_handler.on_get_task_push_notification_config.return_value = ( + task_push_config + ) self.mock_agent_card.capabilities = AgentCapabilities( pushNotifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) list_request = GetTaskPushNotificationConfigRequest( 
- id='1', params=GetTaskPushNotificationConfigParams(id=mock_task.id, pushNotificationConfigId="config1") + id='1', + params=GetTaskPushNotificationConfigParams( + id=mock_task.id, pushNotificationConfigId='config1' + ), ) response = await handler.get_push_notification_config(list_request) # Assert - self.assertIsInstance(response.root, GetTaskPushNotificationConfigSuccessResponse) - self.assertEqual(response.root.result, task_push_config) # type: ignore + self.assertIsInstance( + response.root, GetTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, task_push_config) # type: ignore async def test_on_list_push_notification(self) -> None: """Test list_push_notification_config handling""" @@ -1056,12 +1082,18 @@ async def test_on_list_push_notification(self) -> None: mock_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = mock_task - # Create request handler without a push notifier request_handler = AsyncMock(spec=DefaultRequestHandler) - task_push_config = TaskPushNotificationConfig(taskId=mock_task.id, pushNotificationConfig=PushNotificationConfig(url='http://example.com')) - request_handler.on_list_task_push_notification_config.return_value = [task_push_config] + task_push_config = TaskPushNotificationConfig( + taskId=mock_task.id, + pushNotificationConfig=PushNotificationConfig( + url='http://example.com' + ), + ) + request_handler.on_list_task_push_notification_config.return_value = [ + task_push_config + ] self.mock_agent_card.capabilities = AgentCapabilities( pushNotifications=True @@ -1072,8 +1104,10 @@ async def test_on_list_push_notification(self) -> None: ) response = await handler.list_push_notification_config(list_request) # Assert - self.assertIsInstance(response.root, ListTaskPushNotificationConfigSuccessResponse) - self.assertEqual(response.root.result, [task_push_config]) # type: ignore + self.assertIsInstance( + response.root, ListTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, [task_push_config]) # type: ignore async def test_on_list_push_notification_error(self) -> None: """Test list_push_notification_config handling""" @@ -1081,14 +1115,19 @@ async def test_on_list_push_notification_error(self) -> None: mock_task = Task(**MINIMAL_TASK) mock_task_store.get.return_value = mock_task - # Create request handler without a push notifier request_handler = AsyncMock(spec=DefaultRequestHandler) - task_push_config = TaskPushNotificationConfig(taskId=mock_task.id, pushNotificationConfig=PushNotificationConfig(url='http://example.com')) + task_push_config = TaskPushNotificationConfig( + taskId=mock_task.id, + pushNotificationConfig=PushNotificationConfig( + url='http://example.com' + ), + ) # throw server error - request_handler.on_list_task_push_notification_config.side_effect = ServerError(InternalError()) - + request_handler.on_list_task_push_notification_config.side_effect = ( + ServerError(InternalError()) + ) self.mock_agent_card.capabilities = AgentCapabilities( pushNotifications=True @@ -1100,45 +1139,55 @@ async def test_on_list_push_notification_error(self) -> None: response = await handler.list_push_notification_config(list_request) # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, InternalError()) # type: ignore - + self.assertEqual(response.root.error, InternalError()) # type: ignore + async def test_on_delete_push_notification(self) -> None: """Test delete_push_notification_config handling""" # Create request handler without a 
push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - request_handler.on_delete_task_push_notification_config.return_value = None + request_handler = AsyncMock(spec=DefaultRequestHandler) + request_handler.on_delete_task_push_notification_config.return_value = ( + None + ) self.mock_agent_card.capabilities = AgentCapabilities( pushNotifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) delete_request = DeleteTaskPushNotificationConfigRequest( - id='1', params=DeleteTaskPushNotificationConfigParams(id="task1", pushNotificationConfigId="config1") + id='1', + params=DeleteTaskPushNotificationConfigParams( + id='task1', pushNotificationConfigId='config1' + ), ) response = await handler.delete_push_notification_config(delete_request) # Assert - self.assertIsInstance(response.root, DeleteTaskPushNotificationConfigSuccessResponse) - self.assertEqual(response.root.result, None) # type: ignore + self.assertIsInstance( + response.root, DeleteTaskPushNotificationConfigSuccessResponse + ) + self.assertEqual(response.root.result, None) # type: ignore async def test_on_delete_push_notification_error(self) -> None: """Test delete_push_notification_config error handling""" - # Create request handler without a push notifier request_handler = AsyncMock(spec=DefaultRequestHandler) # throw server error - request_handler.on_delete_task_push_notification_config.side_effect = ServerError(UnsupportedOperationError()) - + request_handler.on_delete_task_push_notification_config.side_effect = ( + ServerError(UnsupportedOperationError()) + ) self.mock_agent_card.capabilities = AgentCapabilities( pushNotifications=True ) handler = JSONRPCHandler(self.mock_agent_card, request_handler) delete_request = DeleteTaskPushNotificationConfigRequest( - id='1', params=DeleteTaskPushNotificationConfigParams(id="task1", pushNotificationConfigId="config1") + id='1', + params=DeleteTaskPushNotificationConfigParams( + id='task1', pushNotificationConfigId='config1' + ), ) response = await handler.delete_push_notification_config(delete_request) # Assert self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore \ No newline at end of file + self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore diff --git a/tests/server/tasks/test_database_push_notification_config_store.py b/tests/server/tasks/test_database_push_notification_config_store.py new file mode 100644 index 00000000..95222558 --- /dev/null +++ b/tests/server/tasks/test_database_push_notification_config_store.py @@ -0,0 +1,564 @@ +import os + +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio + +from _pytest.mark.structures import ParameterSet +from sqlalchemy import select +from sqlalchemy.ext.asyncio import ( + async_sessionmaker, + create_async_engine, +) + + +# Skip entire test module if SQLAlchemy is not installed +pytest.importorskip('sqlalchemy', reason='Database tests require SQLAlchemy') +pytest.importorskip( + 'cryptography', + reason='Database tests require Cryptography. 
Install extra encryption',
+)
+
+# Now safe to import SQLAlchemy-dependent modules
+from cryptography.fernet import Fernet
+from sqlalchemy.inspection import inspect
+
+from a2a.server.models import (
+    Base,
+    PushNotificationConfigModel,
+)  # Important: To get Base.metadata
+from a2a.server.tasks import DatabasePushNotificationConfigStore
+from a2a.types import (
+    PushNotificationConfig,
+    Task,
+    TaskState,
+    TaskStatus,
+)
+
+
+# DSNs for different databases
+SQLITE_TEST_DSN = (
+    'sqlite+aiosqlite:///file:testdb?mode=memory&cache=shared&uri=true'
+)
+POSTGRES_TEST_DSN = os.environ.get(
+    'POSTGRES_TEST_DSN'
+)  # e.g., "postgresql+asyncpg://user:pass@host:port/dbname"
+MYSQL_TEST_DSN = os.environ.get(
+    'MYSQL_TEST_DSN'
+)  # e.g., "mysql+aiomysql://user:pass@host:port/dbname"
+
+# Parameterization for the db_store fixture
+DB_CONFIGS: list[ParameterSet | tuple[str | None, str]] = [
+    pytest.param((SQLITE_TEST_DSN, 'sqlite'), id='sqlite')
+]
+
+if POSTGRES_TEST_DSN:
+    DB_CONFIGS.append(
+        pytest.param((POSTGRES_TEST_DSN, 'postgresql'), id='postgresql')
+    )
+else:
+    DB_CONFIGS.append(
+        pytest.param(
+            (None, 'postgresql'),
+            marks=pytest.mark.skip(reason='POSTGRES_TEST_DSN not set'),
+            id='postgresql_skipped',
+        )
+    )
+
+if MYSQL_TEST_DSN:
+    DB_CONFIGS.append(pytest.param((MYSQL_TEST_DSN, 'mysql'), id='mysql'))
+else:
+    DB_CONFIGS.append(
+        pytest.param(
+            (None, 'mysql'),
+            marks=pytest.mark.skip(reason='MYSQL_TEST_DSN not set'),
+            id='mysql_skipped',
+        )
+    )
+
+
+# Minimal Task object for testing - remains the same
+task_status_submitted = TaskStatus(
+    state=TaskState.submitted, timestamp='2023-01-01T00:00:00Z'
+)
+MINIMAL_TASK_OBJ = Task(
+    id='task-abc',
+    contextId='session-xyz',
+    status=task_status_submitted,
+    kind='task',
+    metadata={'test_key': 'test_value'},
+    artifacts=[],
+    history=[],
+)
+
+
+@pytest_asyncio.fixture(params=DB_CONFIGS)
+async def db_store_parameterized(
+    request,
+) -> AsyncGenerator[DatabasePushNotificationConfigStore, None]:
+    """
+    Fixture that provides a DatabasePushNotificationConfigStore connected to
+    different databases based on parameterization (SQLite, PostgreSQL, MySQL).
+    """
+    db_url, dialect_name = request.param
+
+    if db_url is None:
+        pytest.skip(f'DSN for {dialect_name} not set in environment variables.')
+
+    engine = create_async_engine(db_url)
+    store = None  # Initialize store to None for the finally block
+
+    try:
+        # Create tables
+        async with engine.begin() as conn:
+            await conn.run_sync(Base.metadata.create_all)
+
+        # create_table=False as we've explicitly created tables above.
+        store = DatabasePushNotificationConfigStore(
+            engine=engine,
+            create_table=False,
+            encryption_key=Fernet.generate_key(),
+        )
+        # Initialize the store (connects, etc.). Safe to call even if tables exist.
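# --- Illustrative sketch ---
# Fernet (pulled in by the new 'encryption' extra) provides symmetric,
# authenticated encryption; the fixture above generates a fresh key per run.
# A minimal round-trip for orientation, independent of the store itself:
from cryptography.fernet import Fernet

key = Fernet.generate_key()  # urlsafe-base64-encoded 32-byte key
f = Fernet(key)
token = f.encrypt(b'{"url": "http://callback.example"}')
assert f.decrypt(token) == b'{"url": "http://callback.example"}'
# ---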
+        await store.initialize()
+
+        yield store
+
+    finally:
+        if engine:  # If engine was created for setup/teardown
+            # Drop tables using the fixture's engine
+            async with engine.begin() as conn:
+                await conn.run_sync(Base.metadata.drop_all)
+            await engine.dispose()  # Dispose the engine created in the fixture
+
+
+@pytest.mark.asyncio
+async def test_initialize_creates_table(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+) -> None:
+    """Test that tables are created (implicitly by fixture setup)."""
+    # Ensure store is initialized (already done by fixture, but good for clarity)
+    await db_store_parameterized._ensure_initialized()
+
+    # Use the store's engine for inspection
+    async with db_store_parameterized.engine.connect() as conn:
+
+        def has_table_sync(sync_conn):
+            inspector = inspect(sync_conn)
+            return inspector.has_table(
+                PushNotificationConfigModel.__tablename__
+            )
+
+        assert await conn.run_sync(has_table_sync)
+
+
+@pytest.mark.asyncio
+async def test_initialize_is_idempotent(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+) -> None:
+    """Test that calling initialize() more than once is safe."""
+    # The fixture has already initialized the store once
+    await db_store_parameterized.initialize()
+    # Call initialize again to check idempotency
+    await db_store_parameterized.initialize()
+
+
+@pytest.mark.asyncio
+async def test_set_and_get_info_single_config(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test setting and retrieving a single configuration."""
+    task_id = 'task-1'
+    config = PushNotificationConfig(id='config-1', url='http://example.com')
+
+    await db_store_parameterized.set_info(task_id, config)
+    retrieved_configs = await db_store_parameterized.get_info(task_id)
+
+    assert len(retrieved_configs) == 1
+    assert retrieved_configs[0] == config
+
+
+@pytest.mark.asyncio
+async def test_set_and_get_info_multiple_configs(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test setting and retrieving multiple configurations for a single task."""
+
+    task_id = 'task-1'
+    config1 = PushNotificationConfig(id='config-1', url='http://example.com/1')
+    config2 = PushNotificationConfig(id='config-2', url='http://example.com/2')
+
+    await db_store_parameterized.set_info(task_id, config1)
+    await db_store_parameterized.set_info(task_id, config2)
+    retrieved_configs = await db_store_parameterized.get_info(task_id)
+
+    assert len(retrieved_configs) == 2
+    assert config1 in retrieved_configs
+    assert config2 in retrieved_configs
+
+
+@pytest.mark.asyncio
+async def test_set_info_updates_existing_config(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test that setting an existing config ID updates the record."""
+    task_id = 'task-1'
+    config_id = 'config-1'
+    initial_config = PushNotificationConfig(
+        id=config_id, url='http://initial.url'
+    )
+    updated_config = PushNotificationConfig(
+        id=config_id, url='http://updated.url'
+    )
+
+    await db_store_parameterized.set_info(task_id, initial_config)
+    await db_store_parameterized.set_info(task_id, updated_config)
+    retrieved_configs = await db_store_parameterized.get_info(task_id)
+
+    assert len(retrieved_configs) == 1
+    assert retrieved_configs[0].url == 'http://updated.url'
+
+
+@pytest.mark.asyncio
+async def test_set_info_defaults_config_id_to_task_id(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test that config.id defaults to task_id if not provided."""
+    task_id = 'task-1'
+    
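# --- Illustrative sketch ---
# test_initialize_is_idempotent above only holds if initialize() guards
# against repeated setup. A hedged sketch of such a guard; the attribute
# names are illustrative, not the store's actual internals.
class InitOnce:
    def __init__(self) -> None:
        self._initialized = False

    async def initialize(self) -> None:
        if self._initialized:
            return  # already set up; safe to call again
        # ... create tables / prepare connections here ...
        self._initialized = True
# ---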
config = PushNotificationConfig(url='http://example.com') # id is None + + await db_store_parameterized.set_info(task_id, config) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0].id == task_id + + +@pytest.mark.asyncio +async def test_get_info_not_found( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test getting info for a task with no configs returns an empty list.""" + retrieved_configs = await db_store_parameterized.get_info( + 'non-existent-task' + ) + assert retrieved_configs == [] + + +@pytest.mark.asyncio +async def test_delete_info_specific_config( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test deleting a single, specific configuration.""" + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://a.com') + config2 = PushNotificationConfig(id='config-2', url='http://b.com') + + await db_store_parameterized.set_info(task_id, config1) + await db_store_parameterized.set_info(task_id, config2) + + await db_store_parameterized.delete_info(task_id, 'config-1') + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config2 + + +@pytest.mark.asyncio +async def test_delete_info_all_for_task( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test deleting all configurations for a task when config_id is None.""" + + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://a.com') + config2 = PushNotificationConfig(id='config-2', url='http://b.com') + + await db_store_parameterized.set_info(task_id, config1) + await db_store_parameterized.set_info(task_id, config2) + + await db_store_parameterized.delete_info(task_id, None) + retrieved_configs = await db_store_parameterized.get_info(task_id) + + assert retrieved_configs == [] + + +@pytest.mark.asyncio +async def test_delete_info_not_found( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that deleting a non-existent config does not raise an error.""" + # Should not raise + await db_store_parameterized.delete_info('task-1', 'non-existent-config') + + +@pytest.mark.asyncio +async def test_data_is_encrypted_in_db( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Verify that the data stored in the database is actually encrypted.""" + task_id = 'encrypted-task' + config = PushNotificationConfig( + id='config-1', url='http://secret.url', token='secret-token' + ) + plain_json = config.model_dump_json() + + await db_store_parameterized.set_info(task_id, config) + + # Directly query the database to inspect the raw data + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + stmt = select(PushNotificationConfigModel).where( + PushNotificationConfigModel.task_id == task_id + ) + result = await session.execute(stmt) + db_model = result.scalar_one() + + assert db_model.config_data != plain_json.encode('utf-8') + + fernet = db_store_parameterized._fernet + + decrypted_data = fernet.decrypt(db_model.config_data) # type: ignore + assert decrypted_data.decode('utf-8') == plain_json + + +@pytest.mark.asyncio +async def test_decryption_error_with_wrong_key( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that using the wrong key to decrypt raises a ValueError.""" + # 1. 
Store with one key
+
+    task_id = 'wrong-key-task'
+    config = PushNotificationConfig(id='config-1', url='http://secret.url')
+    await db_store_parameterized.set_info(task_id, config)
+
+    # 2. Try to read with a different key
+    # Directly query the database to inspect the raw data
+    wrong_key = Fernet.generate_key()
+    store2 = DatabasePushNotificationConfigStore(
+        db_store_parameterized.engine, encryption_key=wrong_key
+    )
+
+    retrieved_configs = await store2.get_info(task_id)
+    assert retrieved_configs == []
+
+    # _from_orm should raise a ValueError
+    async_session = async_sessionmaker(
+        db_store_parameterized.engine, expire_on_commit=False
+    )
+    async with async_session() as session:
+        db_model = await session.get(
+            PushNotificationConfigModel, (task_id, 'config-1')
+        )
+
+    with pytest.raises(ValueError) as exc_info:
+        store2._from_orm(db_model)  # type: ignore
+
+
+@pytest.mark.asyncio
+async def test_decryption_error_with_no_key(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test that reading encrypted data with no key configured raises a ValueError."""
+    # 1. Store with one key
+
+    task_id = 'wrong-key-task'
+    config = PushNotificationConfig(id='config-1', url='http://secret.url')
+    await db_store_parameterized.set_info(task_id, config)
+
+    # 2. Try to read with no key set
+    # Directly query the database to inspect the raw data
+    store2 = DatabasePushNotificationConfigStore(db_store_parameterized.engine)
+
+    retrieved_configs = await store2.get_info(task_id)
+    assert retrieved_configs == []
+
+    # _from_orm should raise a ValueError
+    async_session = async_sessionmaker(
+        db_store_parameterized.engine, expire_on_commit=False
+    )
+    async with async_session() as session:
+        db_model = await session.get(
+            PushNotificationConfigModel, (task_id, 'config-1')
+        )
+
+    with pytest.raises(ValueError) as exc_info:
+        store2._from_orm(db_model)  # type: ignore
+
+
+@pytest.mark.asyncio
+async def test_custom_table_name(
+    db_store_parameterized: DatabasePushNotificationConfigStore,
+):
+    """Test that the store works correctly with a custom table name."""
+    table_name = 'my_custom_push_configs'
+    engine = db_store_parameterized.engine
+    custom_store = None
+    try:
+        # Use a new store with a custom table name
+        custom_store = DatabasePushNotificationConfigStore(
+            engine=engine,
+            create_table=True,
+            table_name=table_name,
+            encryption_key=Fernet.generate_key(),
+        )
+
+        task_id = 'custom-table-task'
+        config = PushNotificationConfig(id='config-1', url='http://custom.url')
+
+        # This will create the table on first use
+        await custom_store.set_info(task_id, config)
+        retrieved_configs = await custom_store.get_info(task_id)
+
+        assert len(retrieved_configs) == 1
+        assert retrieved_configs[0] == config
+
+        # Verify the custom table exists and has data
+        async with custom_store.engine.connect() as conn:
+
+            def has_table_sync(sync_conn):
+                inspector = inspect(sync_conn)
+                return inspector.has_table(table_name)
+
+            assert await conn.run_sync(has_table_sync)
+
+            result = await conn.execute(
+                select(custom_store.config_model).where(
+                    custom_store.config_model.task_id == task_id
+                )
+            )
+            assert result.scalar_one_or_none() is not None
+    finally:
+        if custom_store:
+            # Clean up the dynamically created table from the metadata
+            # to prevent errors in subsequent parameterized test runs.
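# --- Illustrative sketch ---
# The encryption tests in this file (wrong key, no key, plaintext fallback,
# corrupted payload) are all consistent with one read path: try Fernet
# decryption, fall back to treating the bytes as plain JSON, and raise
# ValueError when neither yields a parseable config. Hedged reconstruction;
# only Fernet/InvalidToken are known APIs here, the function is illustrative.
import json

from cryptography.fernet import Fernet, InvalidToken


def load_config_bytes(fernet: Fernet | None, raw: bytes) -> dict:
    data = raw
    if fernet is not None:
        try:
            data = fernet.decrypt(raw)
        except InvalidToken:
            pass  # possibly legacy plaintext; fall through to JSON parsing
    try:
        return json.loads(data)
    except ValueError as e:  # json.JSONDecodeError subclasses ValueError
        raise ValueError('could not parse push notification config') from e
# ---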
+ Base.metadata.remove(custom_store.config_model.__table__) # type: ignore + + +@pytest.mark.asyncio +async def test_set_and_get_info_multiple_configs_no_key( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test setting and retrieving multiple configurations for a single task.""" + + store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, + encryption_key=None, # No encryption key + ) + await store.initialize() + + task_id = 'task-1' + config1 = PushNotificationConfig(id='config-1', url='http://example.com/1') + config2 = PushNotificationConfig(id='config-2', url='http://example.com/2') + + await store.set_info(task_id, config1) + await store.set_info(task_id, config2) + retrieved_configs = await store.get_info(task_id) + + assert len(retrieved_configs) == 2 + assert config1 in retrieved_configs + assert config2 in retrieved_configs + + +@pytest.mark.asyncio +async def test_data_is_not_encrypted_in_db_if_no_key_is_set( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test data is not encrypted when no encryption key is set.""" + + store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, + encryption_key=None, # No encryption key + ) + await store.initialize() + + task_id = 'task-1' + config = PushNotificationConfig(id='config-1', url='http://example.com/1') + plain_json = config.model_dump_json() + + await store.set_info(task_id, config) + + # Directly query the database to inspect the raw data + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + stmt = select(PushNotificationConfigModel).where( + PushNotificationConfigModel.task_id == task_id + ) + result = await session.execute(stmt) + db_model = result.scalar_one() + + assert db_model.config_data == plain_json.encode('utf-8') + + +@pytest.mark.asyncio +async def test_decryption_fallback_for_unencrypted_data( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test reading unencrypted data with an encryption-enabled store.""" + # 1. Store unencrypted data using a new store instance without a key + unencrypted_store = DatabasePushNotificationConfigStore( + engine=db_store_parameterized.engine, + create_table=False, # Table already exists from fixture + encryption_key=None, + ) + await unencrypted_store.initialize() + + task_id = 'mixed-encryption-task' + config = PushNotificationConfig(id='config-1', url='http://plain.url') + await unencrypted_store.set_info(task_id, config) + + # 2. Try to read with the encryption-enabled store from the fixture + retrieved_configs = await db_store_parameterized.get_info(task_id) + + # Should fall back to parsing as plain JSON and not fail + assert len(retrieved_configs) == 1 + assert retrieved_configs[0] == config + + +@pytest.mark.asyncio +async def test_parsing_error_after_successful_decryption( + db_store_parameterized: DatabasePushNotificationConfigStore, +): + """Test that a parsing error after successful decryption is handled.""" + + task_id = 'corrupted-data-task' + config_id = 'config-1' + + # 1. Encrypt data that is NOT valid JSON + fernet = Fernet(Fernet.generate_key()) + corrupted_payload = b'this is not valid json' + encrypted_data = fernet.encrypt(corrupted_payload) + + # 2. 
Manually insert this corrupted data into the DB + async_session = async_sessionmaker( + db_store_parameterized.engine, expire_on_commit=False + ) + async with async_session() as session: + db_model = PushNotificationConfigModel( + task_id=task_id, + config_id=config_id, + config_data=encrypted_data, + ) + session.add(db_model) + await session.commit() + + # 3. get_info should log an error and return an empty list + retrieved_configs = await db_store_parameterized.get_info(task_id) + assert retrieved_configs == [] + + # 4. _from_orm should raise a ValueError + async with async_session() as session: + db_model_retrieved = await session.get( + PushNotificationConfigModel, (task_id, config_id) + ) + + with pytest.raises(ValueError) as exc_info: + db_store_parameterized._from_orm(db_model_retrieved) # type: ignore diff --git a/tests/server/tasks/test_inmemory_push_notifications.py b/tests/server/tasks/test_inmemory_push_notifications.py index 9fe4eee7..7913952e 100644 --- a/tests/server/tasks/test_inmemory_push_notifications.py +++ b/tests/server/tasks/test_inmemory_push_notifications.py @@ -161,7 +161,9 @@ async def test_send_notification_success(self): async def test_send_notification_with_token_success(self): task_id = 'task_send_success' task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/here', token='unique_token') + config = create_sample_push_config( + url='http://notify.me/here', token='unique_token' + ) await self.config_store.set_info(task_id, config) # Mock the post call to simulate success @@ -180,7 +182,7 @@ async def test_send_notification_with_token_success(self): ) self.assertEqual( called_kwargs['headers'], - {"X-A2A-Notification-Token": "unique_token"}, + {'X-A2A-Notification-Token': 'unique_token'}, ) self.assertNotIn( 'auth', called_kwargs diff --git a/tests/server/tasks/test_push_notification_sender.py b/tests/server/tasks/test_push_notification_sender.py index 50cfed68..7f547cd0 100644 --- a/tests/server/tasks/test_push_notification_sender.py +++ b/tests/server/tasks/test_push_notification_sender.py @@ -61,14 +61,16 @@ async def test_send_notification_success(self): self.mock_httpx_client.post.assert_awaited_once_with( config.url, json=task_data.model_dump(mode='json', exclude_none=True), - headers=None + headers=None, ) mock_response.raise_for_status.assert_called_once() async def test_send_notification_with_token_success(self): task_id = 'task_send_success' task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/here', token='unique_token') + config = create_sample_push_config( + url='http://notify.me/here', token='unique_token' + ) self.mock_config_store.get_info.return_value = [config] mock_response = AsyncMock(spec=httpx.Response) @@ -83,7 +85,7 @@ async def test_send_notification_with_token_success(self): self.mock_httpx_client.post.assert_awaited_once_with( config.url, json=task_data.model_dump(mode='json', exclude_none=True), - headers={'X-A2A-Notification-Token': 'unique_token'} + headers={'X-A2A-Notification-Token': 'unique_token'}, ) mock_response.raise_for_status.assert_called_once() @@ -120,7 +122,7 @@ async def test_send_notification_http_status_error( self.mock_httpx_client.post.assert_awaited_once_with( config.url, json=task_data.model_dump(mode='json', exclude_none=True), - headers=None + headers=None, ) mock_logger.error.assert_called_once() @@ -148,12 +150,12 @@ async def test_send_notification_multiple_configs(self): 
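# --- Illustrative sketch ---
# The reformatted assertions in these sender tests pin down the wire format:
# one POST per stored config, the task serialized via
# model_dump(mode='json', exclude_none=True), and a header only when the
# config carries a token. A hedged sketch of that send loop (names are
# illustrative, not the sender's actual code):
async def send_push(httpx_client, config_store, task):
    for config in await config_store.get_info(task.id):
        headers = (
            {'X-A2A-Notification-Token': config.token}
            if getattr(config, 'token', None)
            else None
        )
        await httpx_client.post(
            config.url,
            json=task.model_dump(mode='json', exclude_none=True),
            headers=headers,
        )
# ---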
self.mock_httpx_client.post.assert_any_call( config1.url, json=task_data.model_dump(mode='json', exclude_none=True), - headers=None + headers=None, ) # Check calls for config2 self.mock_httpx_client.post.assert_any_call( config2.url, json=task_data.model_dump(mode='json', exclude_none=True), - headers=None + headers=None, ) mock_response.raise_for_status.call_count = 2 diff --git a/tests/server/tasks/test_task_updater.py b/tests/server/tasks/test_task_updater.py index 39b64252..1a633e6a 100644 --- a/tests/server/tasks/test_task_updater.py +++ b/tests/server/tasks/test_task_updater.py @@ -508,7 +508,9 @@ async def test_cancel_with_message(task_updater, event_queue, sample_message): @pytest.mark.asyncio -async def test_update_status_raises_error_if_terminal_state_reached(task_updater, event_queue): +async def test_update_status_raises_error_if_terminal_state_reached( + task_updater, event_queue +): await task_updater.complete() event_queue.reset_mock() with pytest.raises(RuntimeError): @@ -520,8 +522,8 @@ async def test_update_status_raises_error_if_terminal_state_reached(task_updater async def test_concurrent_updates_race_condition(event_queue): task_updater = TaskUpdater( event_queue=event_queue, - task_id="test-task-id", - context_id="test-context-id", + task_id='test-task-id', + context_id='test-context-id', ) tasks = [ task_updater.complete(), diff --git a/tests/test_types.py b/tests/test_types.py index 7b5637a9..1bae000d 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -71,7 +71,7 @@ TaskStatusUpdateEvent, TextPart, UnsupportedOperationError, - GetTaskPushNotificationConfigParams + GetTaskPushNotificationConfigParams, ) @@ -1501,28 +1501,26 @@ def test_subclass_enums() -> None: def test_get_task_push_config_params() -> None: """Tests successful validation of GetTaskPushNotificationConfigParams.""" # Minimal valid data - params = { - "id":"task-1234" - } + params = {'id': 'task-1234'} TaskIdParams.model_validate(params) GetTaskPushNotificationConfigParams.model_validate(params) + def test_use_get_task_push_notification_params_for_request() -> None: # GetTaskPushNotificationConfigRequest get_push_notif_req_data: dict[str, Any] = { 'id': 1, 'jsonrpc': '2.0', 'method': 'tasks/pushNotificationConfig/get', - 'params': { - "id":"task-1234", - "pushNotificationConfigId":"c1" - } + 'params': {'id': 'task-1234', 'pushNotificationConfigId': 'c1'}, } a2a_req_get_push_req = A2ARequest.model_validate(get_push_notif_req_data) assert isinstance( a2a_req_get_push_req.root, GetTaskPushNotificationConfigRequest ) - assert isinstance(a2a_req_get_push_req.root.params, GetTaskPushNotificationConfigParams) + assert isinstance( + a2a_req_get_push_req.root.params, GetTaskPushNotificationConfigParams + ) assert ( a2a_req_get_push_req.root.method == 'tasks/pushNotificationConfig/get' - ) \ No newline at end of file + ) diff --git a/tests/utils/test_task.py b/tests/utils/test_task.py index 796a7ad8..0e391b74 100644 --- a/tests/utils/test_task.py +++ b/tests/utils/test_task.py @@ -3,7 +3,9 @@ from unittest.mock import patch -from a2a.types import Message, Part, Role, TextPart +import pytest + +from a2a.types import Artifact, Message, Part, Role, TextPart from a2a.utils.task import completed_task, new_task @@ -57,7 +59,12 @@ def test_new_task_initial_message_in_history(self): def test_completed_task_status(self): task_id = str(uuid.uuid4()) context_id = str(uuid.uuid4()) - artifacts = [] # Artifacts should be of type Artifact + artifacts = [ + Artifact( + artifactId='artifact_1', + 
parts=[Part(root=TextPart(text='some content'))], + ) + ] task = completed_task( task_id=task_id, context_id=context_id, @@ -69,7 +76,12 @@ def test_completed_task_status(self): def test_completed_task_assigns_ids_and_artifacts(self): task_id = str(uuid.uuid4()) context_id = str(uuid.uuid4()) - artifacts = [] # Artifacts should be of type Artifact + artifacts = [ + Artifact( + artifactId='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] task = completed_task( task_id=task_id, context_id=context_id, @@ -83,7 +95,12 @@ def test_completed_task_assigns_ids_and_artifacts(self): def test_completed_task_empty_history_if_not_provided(self): task_id = str(uuid.uuid4()) context_id = str(uuid.uuid4()) - artifacts = [] # Artifacts should be of type Artifact + artifacts = [ + Artifact( + artifactId='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] task = completed_task( task_id=task_id, context_id=context_id, artifacts=artifacts ) @@ -92,7 +109,12 @@ def test_completed_task_empty_history_if_not_provided(self): def test_completed_task_uses_provided_history(self): task_id = str(uuid.uuid4()) context_id = str(uuid.uuid4()) - artifacts = [] # Artifacts should be of type Artifact + artifacts = [ + Artifact( + artifactId='artifact_1', + parts=[Part(root=TextPart(text='some content'))], + ) + ] history = [ Message( role=Role.user, @@ -113,6 +135,49 @@ def test_completed_task_uses_provided_history(self): ) self.assertEqual(task.history, history) + def test_new_task_invalid_message_empty_parts(self): + with self.assertRaises(ValueError): + new_task( + Message( + role=Role.user, + parts=[], + messageId=str(uuid.uuid4()), + ) + ) + + def test_new_task_invalid_message_none_role(self): + with self.assertRaises(TypeError): + msg = Message.model_construct( + role=None, + parts=[Part(root=TextPart(text='test message'))], + messageId=str(uuid.uuid4()), + ) + new_task(msg) + + def test_completed_task_empty_artifacts(self): + with pytest.raises( + ValueError, + match='artifacts must be a non-empty list of Artifact objects', + ): + completed_task( + task_id='task-123', + context_id='ctx-456', + artifacts=[], + history=[], + ) + + def test_completed_task_invalid_artifact_type(self): + with pytest.raises( + ValueError, + match='artifacts must be a non-empty list of Artifact objects', + ): + completed_task( + task_id='task-123', + context_id='ctx-456', + artifacts=['not an artifact'], + history=[], + ) + if __name__ == '__main__': unittest.main() diff --git a/uv.lock b/uv.lock index 453d33d5..70a6421e 100644 --- a/uv.lock +++ b/uv.lock @@ -26,6 +26,9 @@ dependencies = [ ] [package.optional-dependencies] +encryption = [ + { name = "cryptography" }, +] mysql = [ { name = "sqlalchemy", extra = ["aiomysql", "asyncio"] }, ] @@ -57,6 +60,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "cryptography", marker = "extra == 'encryption'", specifier = ">=43.0.0" }, { name = "fastapi", specifier = ">=0.115.2" }, { name = "google-api-core", specifier = ">=1.26.0" }, { name = "grpcio", specifier = ">=1.60" }, @@ -75,7 +79,7 @@ requires-dist = [ { name = "sse-starlette" }, { name = "starlette" }, ] -provides-extras = ["mysql", "postgresql", "sql", "sqlite"] +provides-extras = ["encryption", "mysql", "postgresql", "sql", "sqlite"] [package.metadata.requires-dev] dev = [ @@ -254,6 +258,63 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = 
"sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650 }, ] +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", 
size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = 
"https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + [[package]] name = "cfgv" version = "3.4.0" @@ -414,6 +475,53 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] +[[package]] +name = "cryptography" +version = "45.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092 }, + { url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926 }, + { url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235 }, + { url = "https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785 }, + { url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050 }, + { url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379 }, + { url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355 }, + { url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087 }, + { url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873 }, + { url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651 }, + { url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050 }, + { url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224 }, + { url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143 }, + { url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780 }, + { url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091 }, + { url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711 }, + { url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299 }, + { url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558 }, + { url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020 }, + { url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759 }, + { url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991 }, + { url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189 }, + { url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769 }, + { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016 }, + { url = "https://files.pythonhosted.org/packages/f8/8b/34394337abe4566848a2bd49b26bcd4b07fd466afd3e8cce4cb79a390869/cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd", size = 3575762 }, + { url = 
"https://files.pythonhosted.org/packages/8b/5d/a19441c1e89afb0f173ac13178606ca6fab0d3bd3ebc29e9ed1318b507fc/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097", size = 4140906 }, + { url = "https://files.pythonhosted.org/packages/4b/db/daceb259982a3c2da4e619f45b5bfdec0e922a23de213b2636e78ef0919b/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e", size = 4374411 }, + { url = "https://files.pythonhosted.org/packages/6a/35/5d06ad06402fc522c8bf7eab73422d05e789b4e38fe3206a85e3d6966c11/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30", size = 4140942 }, + { url = "https://files.pythonhosted.org/packages/65/79/020a5413347e44c382ef1f7f7e7a66817cd6273e3e6b5a72d18177b08b2f/cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e", size = 4374079 }, + { url = "https://files.pythonhosted.org/packages/9b/c5/c0e07d84a9a2a8a0ed4f865e58f37c71af3eab7d5e094ff1b21f3f3af3bc/cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d", size = 3321362 }, + { url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878 }, + { url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447 }, + { url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778 }, + { url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627 }, + { url = "https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593 }, + { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106 }, +] + [[package]] name = "datamodel-code-generator" version = "0.31.1" @@ -1105,6 +1213,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, ] +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + [[package]] name = "pydantic" version = "2.11.7"