Skip to content

Commit 02a916b

Browse files
dirkbrnd and manuhortet
authored and committed
feat: Arize Phoenix via OpenInference (#3136)
## Summary This adds Arize instrumentation via the OpenInference auto instrumentor. It requires the [related PR](Arize-ai/openinference#1603) to be merged first. ## Type of change - [ ] Bug fix - [x] New feature - [ ] Breaking change - [ ] Improvement - [ ] Model update - [ ] Other: --- ## Checklist - [ ] Code complies with style guidelines - [ ] Ran format/validation scripts (`./scripts/format.sh` and `./scripts/validate.sh`) - [ ] Self-review completed - [ ] Documentation updated (comments, docstrings) - [ ] Examples and guides: Relevant cookbook examples have been included or updated (if applicable) - [ ] Tested in clean environment - [ ] Tests added/updated (if applicable) --- ## Additional Notes Add any important context (deployment instructions, screenshots, security considerations, etc.) --------- Co-authored-by: manuhortet <manuhortet@gmail.com>
1 parent ad88dac commit 02a916b

File tree

6 files changed

+164
-3
lines changed

6 files changed

+164
-3
lines changed
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
"""
This example shows how to instrument your agno agent with OpenInference and send traces to Arize Phoenix.

1. Install dependencies: pip install arize-phoenix openai openinference-instrumentation-agno opentelemetry-sdk opentelemetry-exporter-otlp
2. Set up your Arize Phoenix account and get your API key: https://phoenix.arize.com/.
3. Set your Arize Phoenix API key as an environment variable:
  - export ARIZE_PHOENIX_API_KEY=<your-key>
"""

import os

from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.yfinance import YFinanceTools
from phoenix.otel import register

# Authenticate against the hosted Phoenix collector with the API key from the env.
# NOTE(review): if ARIZE_PHOENIX_API_KEY is unset this sends "api_key=None" — the
# request will be rejected upstream rather than failing fast here.
os.environ["PHOENIX_CLIENT_HEADERS"] = f"api_key={os.getenv('ARIZE_PHOENIX_API_KEY')}"
os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "https://app.phoenix.arize.com"

# Configure the Phoenix tracer
tracer_provider = register(
    project_name="agno-stock-price-agent",  # Default is 'default'
    auto_instrument=True,  # Automatically use the installed OpenInference instrumentation
)

agent = Agent(
    name="Stock Price Agent",
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[YFinanceTools()],
    instructions="You are a stock price agent. Answer questions in the style of a stock analyst.",
    debug_mode=True,
)

agent.print_response("What is the current price of Tesla?")
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
"""
This example shows how to instrument your agno agent with OpenInference and send traces to Arize Phoenix.

1. Install dependencies: pip install arize-phoenix openai openinference-instrumentation-agno opentelemetry-sdk opentelemetry-exporter-otlp
2. Run `phoenix serve` to start the local collector.
"""

import os

from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.yfinance import YFinanceTools
from phoenix.otel import register

# Point the exporter at the local `phoenix serve` collector (default port 6006).
os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "http://localhost:6006"

# Configure the Phoenix tracer
tracer_provider = register(
    project_name="agno-stock-price-agent",  # Default is 'default'
    auto_instrument=True,  # Automatically use the installed OpenInference instrumentation
)

agent = Agent(
    name="Stock Price Agent",
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[YFinanceTools()],
    instructions="You are a stock price agent. Answer questions in the style of a stock analyst.",
    debug_mode=True,
)

agent.print_response("What is the current price of Tesla?")
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
"""
This example shows how to instrument your agno agent with OpenInference and send traces to Langfuse.

1. Install dependencies: pip install openai langfuse opentelemetry-sdk opentelemetry-exporter-otlp openinference-instrumentation-agno
2. Set your Langfuse API keys as environment variables:
  - export LANGFUSE_PUBLIC_KEY=<your-key>
  - export LANGFUSE_SECRET_KEY=<your-key>
"""

import base64
import os

from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.yfinance import YFinanceTools
from openinference.instrumentation.agno import AgnoInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Langfuse authenticates OTLP requests with HTTP Basic auth: base64("public:secret").
LANGFUSE_AUTH = base64.b64encode(
    f"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}".encode()
).decode()
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (
    "https://us.cloud.langfuse.com/api/public/otel"  # 🇺🇸 US data region
)
# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"]="https://cloud.langfuse.com/api/public/otel" # 🇪🇺 EU data region
# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"]="http://localhost:3000/api/public/otel" # 🏠 Local deployment (>= v3.22.0)

os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"

# SimpleSpanProcessor exports each span synchronously — acceptable for a demo script,
# use BatchSpanProcessor in production.
tracer_provider = TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Start instrumenting agno
AgnoInstrumentor().instrument()

agent = Agent(
    name="Stock Price Agent",
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[YFinanceTools()],
    instructions="You are a stock price agent. Answer questions in the style of a stock analyst.",
    debug_mode=True,
)

agent.print_response("What is the current price of Tesla?")

cookbook/observability/langfuse_via_openlit.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@
99

1010
import base64
1111
import os
12-
1312
from agno.agent import Agent
1413
from agno.models.openai import OpenAIChat
1514
from agno.tools.duckduckgo import DuckDuckGoTools
1615

16+
1717
LANGFUSE_AUTH = base64.b64encode(
1818
f"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}".encode()
1919
).decode()
@@ -42,7 +42,6 @@
4242
tracer = trace.get_tracer(__name__)
4343

4444
import openlit
45-
4645
# Initialize OpenLIT instrumentation. The disable_batch flag is set to true to process traces immediately.
4746
openlit.init(tracer=tracer, disable_batch=True)
4847

@@ -53,4 +52,4 @@
5352
debug_mode=True,
5453
)
5554

56-
agent.run("What is currently trending on Twitter?")
55+
agent.print_response("What is currently trending on Twitter?")
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
"""
This example shows how to instrument your agno agent with OpenInference and send traces to LangSmith.

1. Create a LangSmith account and get your API key: https://smith.langchain.com/
2. Set your LangSmith API key as an environment variable:
  - export LANGSMITH_API_KEY=<your-key>
  - export LANGSMITH_TRACING=true
  - export LANGSMITH_ENDPOINT=https://eu.api.smith.langchain.com or https://api.smith.langchain.com
  - export LANGSMITH_PROJECT=<your-project-name>
3. Install dependencies: pip install openai openinference-instrumentation-agno opentelemetry-sdk opentelemetry-exporter-otlp
"""

import os

from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.duckduckgo import DuckDuckGoTools
from openinference.instrumentation.agno import AgnoInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Honor LANGSMITH_ENDPOINT as step 2 of the docstring instructs; default to the EU
# region (the previously hard-coded value) for backward compatibility.
base_endpoint = os.getenv("LANGSMITH_ENDPOINT", "https://eu.api.smith.langchain.com").rstrip("/")
endpoint = f"{base_endpoint}/otel/v1/traces"
headers = {
    "x-api-key": os.getenv("LANGSMITH_API_KEY"),
    "Langsmith-Project": os.getenv("LANGSMITH_PROJECT"),
}

tracer_provider = TracerProvider()
tracer_provider.add_span_processor(
    SimpleSpanProcessor(OTLPSpanExporter(endpoint=endpoint, headers=headers))
)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Start instrumenting agno
AgnoInstrumentor().instrument()

agent = Agent(
    name="Stock Market Agent",
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[DuckDuckGoTools()],
    markdown=True,
    debug_mode=True,
)

agent.print_response("What is news on the stock market?")

libs/agno/pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,9 @@ dev = ["mypy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock", "ruff",
5252
opentelemetry = ["opentelemetry-sdk", "opentelemetry-exporter-otlp"]
5353
weave = ["weave"]
5454
openlit = ["openlit", "agno[opentelemetry]"]
55+
arize = ["arize-phoenix", "agno[opentelemetry]", "opentelemetry-exporter-otlp-proto-grpc", "opentelemetry-distro"]
5556
langfuse = ["langfuse"]
57+
5658
# Dependencies for Models
5759
azure = ["azure-ai-inference", "aiohttp"]
5860
anthropic = ["anthropic"]

0 commit comments

Comments
 (0)