Skip to content

fix: propagate context at MCP server #1644

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 11 commits into
base: main
Choose a base branch
from
13 changes: 7 additions & 6 deletions .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,26 +1,27 @@
{
"python/openinference-semantic-conventions": "0.1.17",
"python/instrumentation/openinference-instrumentation-agno": "0.1.3",
"python/instrumentation/openinference-instrumentation-agno": "0.1.4",
"python/instrumentation/openinference-instrumentation-openai": "0.1.28",
"python/instrumentation/openinference-instrumentation-llama-index": "4.2.1",
"python/instrumentation/openinference-instrumentation-dspy": "0.1.23",
"python/instrumentation/openinference-instrumentation-langchain": "0.1.42",
"python/instrumentation/openinference-instrumentation-langchain": "0.1.43",
"python/instrumentation/openinference-instrumentation-bedrock": "0.1.22",
"python/instrumentation/openinference-instrumentation-mistralai": "1.3.3",
"python/openinference-instrumentation": "0.1.29",
"python/openinference-instrumentation": "0.1.30",
"python/instrumentation/openinference-instrumentation-guardrails": "0.1.9",
"python/instrumentation/openinference-instrumentation-vertexai": "0.1.11",
"python/instrumentation/openinference-instrumentation-crewai": "0.1.9",
"python/instrumentation/openinference-instrumentation-haystack": "0.1.23",
"python/instrumentation/openinference-instrumentation-litellm": "0.1.19",
"python/instrumentation/openinference-instrumentation-groq": "0.1.11",
"python/instrumentation/openinference-instrumentation-instructor": "0.1.9",
"python/instrumentation/openinference-instrumentation-anthropic": "0.1.17",
"python/instrumentation/openinference-instrumentation-anthropic": "0.1.18",
"python/instrumentation/openinference-instrumentation-smolagents": "0.1.11",
"python/instrumentation/openinference-instrumentation-autogen": "0.1.9",
"python/instrumentation/openinference-instrumentation-openai-agents": "0.1.12",
"python/instrumentation/openinference-instrumentation-portkey": "0.1.1",
"python/instrumentation/openinference-instrumentation-beeai": "0.1.6",
"python/instrumentation/openinference-instrumentation-mcp": "1.2.1",
"python/instrumentation/openinference-instrumentation-google-genai": "0.1.1"
"python/instrumentation/openinference-instrumentation-mcp": "1.3.0",
"python/instrumentation/openinference-instrumentation-google-genai": "0.1.1",
"python/instrumentation/openinference-instrumentation-autogen-agentchat": "0.1.0"
}
86 changes: 48 additions & 38 deletions README.md

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
# Changelog

## [0.1.4](https://github.com/Arize-ai/openinference/compare/python-openinference-instrumentation-agno-v0.1.3...python-openinference-instrumentation-agno-v0.1.4) (2025-05-20)


### Features

* Updates for new Agno version ([#1647](https://github.com/Arize-ai/openinference/issues/1647)) ([8292d0d](https://github.com/Arize-ai/openinference/commit/8292d0d5620a9c58c4646e553704a31fd3f8cba3))

## [0.1.3](https://github.com/Arize-ai/openinference/compare/python-openinference-instrumentation-agno-v0.1.2...python-openinference-instrumentation-agno-v0.1.3) (2025-05-13)


Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
duckduckgo-search
agno>=1.4.6
agno>=1.5.2
opentelemetry-sdk
opentelemetry-exporter-otlp
openinference-instrumentation-openai
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@ dependencies = [

[project.optional-dependencies]
instruments = [
"agno>=1.4.5",
"agno>=1.5.2",
]
test = [
"agno==1.4.5",
"agno==1.5.2",
"opentelemetry-sdk",
"pytest-recording",
"openai",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
)
from openinference.instrumentation.agno.version import __version__

_instruments = ("agno >= 1.4.5",)
_instruments = ("agno >= 1.5.2",)


# Find all model classes in agno.models that inherit from BaseModel
Expand Down Expand Up @@ -59,9 +59,13 @@ def find_model_subclasses() -> List[Type[Any]]:
class AgnoInstrumentor(BaseInstrumentor): # type: ignore
__slots__ = (
"_original_run_method",
"_original_run_stream_method",
"_original_arun_method",
"_original_arun_stream_method",
"_original_team_run_method",
"_original_team_run_stream_method",
"_original_team_arun_method",
"_original_team_arun_stream_method",
"_original_function_execute_method",
"_original_function_aexecute_method",
"_original_model_call_methods",
Expand Down Expand Up @@ -94,29 +98,50 @@ def _instrument(self, **kwargs: Any) -> None:
name="_run",
wrapper=run_wrapper.run,
)

# Register streaming and async wrappers
self._original_run_stream_method = getattr(Agent, "_run_stream", None)
wrap_function_wrapper(
module=Agent,
name="_run_stream",
wrapper=run_wrapper.run_stream,
)
self._original_arun_method = getattr(Agent, "_arun", None)
wrap_function_wrapper(
module=Agent,
name="_arun",
wrapper=run_wrapper.arun,
)
self._original_arun_stream_method = getattr(Agent, "_arun_stream", None)
wrap_function_wrapper(
module=Agent,
name="_arun_stream",
wrapper=run_wrapper.arun_stream,
)

# Register wrapper for team
self._original_team_run_method = getattr(Team, "_run", None)
wrap_function_wrapper(
module=Team,
name="_run",
wrapper=run_wrapper.run,
)

# Register streaming and async wrappers for team
self._original_team_run_stream_method = getattr(Team, "_run_stream", None)
wrap_function_wrapper(
module=Team,
name="_run_stream",
wrapper=run_wrapper.run_stream,
)
self._original_team_arun_method = getattr(Team, "_arun", None)
wrap_function_wrapper(
module=Team,
name="_arun",
wrapper=run_wrapper.arun,
)
self._original_team_arun_stream_method = getattr(Team, "_arun_stream", None)
wrap_function_wrapper(
module=Team,
name="_arun_stream",
wrapper=run_wrapper.arun_stream,
)

self._original_model_call_methods: Optional[dict[type, dict[str, Callable[..., Any]]]] = {}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,6 @@ def run(
) -> Any:
if context_api.get_value(context_api._SUPPRESS_INSTRUMENTATION_KEY):
return wrapped(*args, **kwargs)

agent = instance
if hasattr(agent, "name") and agent.name:
agent_name = agent.name.replace(" ", "_").replace("-", "_")
Expand All @@ -144,20 +143,56 @@ def run(
),
) as span:
try:
if "stream" in kwargs and kwargs["stream"] is True:
yield from wrapped(*args, **kwargs)
run_response = agent.run_response
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)
else:
response = wrapped(*args, **kwargs)
run_response = wrapped(*args, **kwargs)
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)
return run_response

except Exception as e:
span.set_status(trace_api.StatusCode.ERROR, str(e))
raise

def run_stream(
self,
wrapped: Callable[..., Any],
instance: Any,
args: Tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
if context_api.get_value(context_api._SUPPRESS_INSTRUMENTATION_KEY):
return wrapped(*args, **kwargs)

for run_response in response:
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)
yield run_response
agent = instance
if hasattr(agent, "name") and agent.name:
agent_name = agent.name.replace(" ", "_").replace("-", "_")
else:
agent_name = "Agent"
span_name = f"{agent_name}.run"

with self._tracer.start_as_current_span(
span_name,
attributes=dict(
_flatten(
{
OPENINFERENCE_SPAN_KIND: AGENT,
INPUT_VALUE: _get_input_value(
wrapped,
*args,
**kwargs,
),
**dict(_agent_run_attributes(agent)),
**dict(get_attributes_from_context()),
}
)
),
) as span:
try:
yield from wrapped(*args, **kwargs)
run_response = agent.run_response
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)

except Exception as e:
span.set_status(trace_api.StatusCode.ERROR, str(e))
Expand All @@ -171,15 +206,11 @@ async def arun(
kwargs: Mapping[str, Any],
) -> Any:
if context_api.get_value(context_api._SUPPRESS_INSTRUMENTATION_KEY):
if "stream" in kwargs and kwargs["stream"] is True:
async for response in await wrapped(*args, **kwargs):
yield response
else:
response = await wrapped(*args, **kwargs)
yield response
response = await wrapped(*args, **kwargs)
return response

agent = instance
if hasattr(agent, "name") and agent.name:
if hasattr(agent, "name"):
agent_name = agent.name.replace(" ", "_").replace("-", "_")
else:
agent_name = "Agent"
Expand All @@ -203,21 +234,57 @@ async def arun(
),
) as span:
try:
if "stream" in kwargs and kwargs["stream"] is True:
span.set_status(trace_api.StatusCode.OK)
async for response in wrapped(*args, **kwargs): # type: ignore[attr-defined]
yield response
run_response = agent.run_response
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
else:
response = wrapped(*args, **kwargs)
run_response = await wrapped(*args, **kwargs)
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)
return run_response
except Exception as e:
span.set_status(trace_api.StatusCode.ERROR, str(e))
raise

run_response = await response.__anext__()
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
yield run_response
async def arun_stream(
self,
wrapped: Callable[..., Awaitable[Any]],
instance: Any,
args: Tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
if context_api.get_value(context_api._SUPPRESS_INSTRUMENTATION_KEY):
async for response in await wrapped(*args, **kwargs):
yield response

agent = instance
if hasattr(agent, "name") and agent.name:
agent_name = agent.name.replace(" ", "_").replace("-", "_")
else:
agent_name = "Agent"
span_name = f"{agent_name}.run"

with self._tracer.start_as_current_span(
span_name,
attributes=dict(
_flatten(
{
OPENINFERENCE_SPAN_KIND: AGENT,
INPUT_VALUE: _get_input_value(
wrapped,
*args,
**kwargs,
),
**dict(_agent_run_attributes(agent)),
**dict(get_attributes_from_context()),
}
)
),
) as span:
try:
async for response in wrapped(*args, **kwargs): # type: ignore[attr-defined]
yield response
run_response = agent.run_response
span.set_status(trace_api.StatusCode.OK)
span.set_attribute(OUTPUT_VALUE, run_response.to_json())
span.set_attribute(OUTPUT_MIME_TYPE, JSON)
except Exception as e:
span.set_status(trace_api.StatusCode.ERROR, str(e))
raise
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.1.3"
__version__ = "0.1.4"
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
# Changelog

## [0.1.18](https://github.com/Arize-ai/openinference/compare/python-openinference-instrumentation-anthropic-v0.1.17...python-openinference-instrumentation-anthropic-v0.1.18) (2025-05-19)


### Features

* **anthropic:** add stream wrapper and tests ([#1572](https://github.com/Arize-ai/openinference/issues/1572)) ([918aa01](https://github.com/Arize-ai/openinference/commit/918aa017441fd4c8cffdbcaab287913349a41a60))

## [0.1.17](https://github.com/Arize-ai/openinference/compare/python-openinference-instrumentation-anthropic-v0.1.16...python-openinference-instrumentation-anthropic-v0.1.17) (2025-04-28)


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
from anthropic import Anthropic
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

from openinference.instrumentation.anthropic import AnthropicInstrumentor

# Configure AnthropicInstrumentor to export spans to a local Phoenix
# collector (the original comment mistakenly referenced Haystack).
endpoint = "http://127.0.0.1:6006/v1/traces"
tracer_provider = trace_sdk.TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter(endpoint)))

AnthropicInstrumentor().instrument(tracer_provider=tracer_provider)

# Reads ANTHROPIC_API_KEY from the environment.
client = Anthropic()

with client.messages.stream(
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}],
    model="claude-3-7-sonnet-20250219",
) as stream:
    # Iterate the text deltas of the response. Iterating the MessageStream
    # object directly yields raw stream *events*, not text, so use
    # `.text_stream` to print the assistant's reply as it arrives.
    for text in stream.text_stream:
        print(text, end="", flush=True)
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
_AsyncCompletionsWrapper,
_AsyncMessagesWrapper,
_CompletionsWrapper,
_MessagesStreamWrapper,
_MessagesWrapper,
)
from openinference.instrumentation.anthropic.version import __version__
Expand All @@ -29,6 +30,7 @@ class AnthropicInstrumentor(BaseInstrumentor): # type: ignore[misc]
"_original_async_completions_create",
"_original_messages_create",
"_original_async_messages_create",
"_original_messages_stream",
"_instruments",
"_tracer",
)
Expand Down Expand Up @@ -79,6 +81,13 @@ def _instrument(self, **kwargs: Any) -> None:
wrapper=_AsyncMessagesWrapper(tracer=self._tracer),
)

self._original_messages_stream = Messages.stream
wrap_function_wrapper(
module="anthropic.resources.messages",
name="Messages.stream",
wrapper=_MessagesStreamWrapper(tracer=self._tracer),
)

def _uninstrument(self, **kwargs: Any) -> None:
from anthropic.resources.completions import AsyncCompletions, Completions
from anthropic.resources.messages import AsyncMessages, Messages
Expand Down
Loading