+from typing import Any
+from typing import Optional
+
+from google.adk.sessions import InMemorySessionService
+from google.adk.agents.invocation_context import InvocationContext
+from google.adk.agents.llm_agent import LlmAgent
+from google.adk.models.llm_request import LlmRequest
+from google.adk.models.llm_response import LlmResponse
+from google.adk.telemetry import trace_call_llm
+from google.genai import types
+import pytest
+
+
+async def _create_invocation_context(
+    agent: LlmAgent, state: Optional[dict[str, Any]] = None
+) -> InvocationContext:
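+  """Builds a minimal InvocationContext backed by an in-memory session."""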
+  session_service = InMemorySessionService()
+  session = await session_service.create_session(
+      app_name='test_app', user_id='test_user', state=state
+  )
+  invocation_context = InvocationContext(
+      invocation_id='test_id',
+      agent=agent,
+      session=session,
+      session_service=session_service,
+  )
+  return invocation_context
+
+
+@pytest.mark.asyncio
+async def test_trace_call_llm_function_response_includes_part_from_bytes():
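+  """Traces an LLM request whose function responses carry bytes payloads.
+
+  One response holds a raw bytes value, the other a types.Part built from
+  bytes; the test passes if trace_call_llm processes both without raising.
+  """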
+  agent = LlmAgent(name='test_agent')
+  invocation_context = await _create_invocation_context(agent)
+  llm_request = LlmRequest(
+      contents=[
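+          # Function response whose "result" is a raw bytes value.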
+          types.Content(
+              role="user",
+              parts=[
+                  types.Part.from_function_response(
+                      name="test_function_1",
+                      response={
+                          "result": b"test_data",
+                      },
+                  ),
+              ],
+          ),
+          types.Content(
+              role="user",
+              parts=[
+                  types.Part.from_function_response(
+                      name="test_function_2",
+                      response={
+                          "result": types.Part.from_bytes(
+                              data=b"test_data",
+                              mime_type="application/octet-stream",
+                          ),
+                      },
+                  ),
+              ],
+          ),
+      ],
+      config=types.GenerateContentConfig(system_instruction=""),
+  )
+  llm_response = LlmResponse(turn_complete=True)
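+  # No explicit assertions: the test passes as long as trace_call_llm
+  # accepts the bytes-bearing request and response without raising.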
+  trace_call_llm(invocation_context, 'test_event_id', llm_request, llm_response)