Commit b2955a2
Add AgentOps Integration to LiteLLM (#9685)
* feat(sidebars): add new item for agentops integration in Logging & Observability category
* Update agentops_integration.md to enhance title formatting and remove redundant section
* Enhance AgentOps integration in documentation and codebase by removing LiteLLMCallbackHandler references, adding environment variable configurations, and updating logging initialization for AgentOps support.
* Update AgentOps integration documentation to include instructions for obtaining API keys and clarify environment variable setup.
* Add unit tests for AgentOps integration and improve error handling in token fetching
* Add unit tests for AgentOps configuration and token fetching functionality
* Corrected agentops test directory
* Linting fix
* chore: add OpenTelemetry dependencies to pyproject.toml
* chore: update OpenTelemetry dependencies and add new packages in pyproject.toml and poetry.lock
1 parent ebfff97

9 files changed: +608 −67
docs/my-website/docs/observability/agentops_integration.md

Lines changed: 83 additions & 0 deletions
# 🖇️ AgentOps - LLM Observability Platform

:::tip

This is community maintained. Please make an issue if you run into a bug:
https://github.com/BerriAI/litellm

:::

[AgentOps](https://docs.agentops.ai) is an observability platform that enables tracing and monitoring of LLM calls, providing detailed insights into your AI operations.

## Using AgentOps with LiteLLM

LiteLLM provides `success_callback` and `failure_callback` hooks, allowing you to easily integrate AgentOps for comprehensive tracing and monitoring of your LLM operations.

### Integration

With just a few lines of code, you can instantly trace your responses **across all providers** with AgentOps. Get your AgentOps API key from https://app.agentops.ai/.

```python
import litellm

# Configure LiteLLM to use AgentOps
litellm.success_callback = ["agentops"]

# Make your LLM calls as usual
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
```
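
Because LiteLLM routes every provider through the same `completion` interface, the same callback traces other providers without further setup. A minimal sketch, assuming an Anthropic key is configured and using an illustrative model name:

```python
import litellm

litellm.success_callback = ["agentops"]

# The same AgentOps callback applies regardless of the underlying provider
response = litellm.completion(
    model="claude-3-haiku-20240307",  # illustrative model name
    messages=[{"role": "user", "content": "Hello from another provider"}],
)
```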

Complete Code:

```python
import os

import litellm
from litellm import completion

# Set env variables
os.environ["OPENAI_API_KEY"] = "your-openai-key"
os.environ["AGENTOPS_API_KEY"] = "your-agentops-api-key"

# Configure LiteLLM to use AgentOps
litellm.success_callback = ["agentops"]

# OpenAI call
response = completion(
    model="gpt-4",
    messages=[{"role": "user", "content": "Hi 👋 - I'm OpenAI"}],
)

print(response)
```
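
LiteLLM exposes a matching `failure_callback` hook. A minimal sketch for capturing failed calls as well, assuming the `"agentops"` callback name is accepted there the same way it is for `success_callback`:

```python
import litellm

# Trace successful calls with AgentOps
litellm.success_callback = ["agentops"]
# Assumption: the same callback name also records failed calls
litellm.failure_callback = ["agentops"]
```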

### Configuration Options

The AgentOps integration can be configured through environment variables:

- `AGENTOPS_API_KEY` (str, optional): Your AgentOps API key
- `AGENTOPS_ENVIRONMENT` (str, optional): Deployment environment (defaults to "production")
- `AGENTOPS_SERVICE_NAME` (str, optional): Service name for tracing (defaults to "agentops")

### Advanced Usage

You can configure additional settings through environment variables:

```python
import os

import litellm

# Configure AgentOps settings
os.environ["AGENTOPS_API_KEY"] = "your-agentops-api-key"
os.environ["AGENTOPS_ENVIRONMENT"] = "staging"
os.environ["AGENTOPS_SERVICE_NAME"] = "my-service"

# Enable AgentOps tracing
litellm.success_callback = ["agentops"]
```
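
These variables are read once by `AgentOpsConfig.from_env()` (see `agentops.py` below) when the callback is first initialized. A small sketch of inspecting the resulting config, assuming the environment is set as above:

```python
from litellm.integrations.agentops.agentops import AgentOpsConfig

config = AgentOpsConfig.from_env()
print(config.deployment_environment)  # "staging"
print(config.service_name)            # "my-service"
```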

### Support

For issues or questions, please refer to:

- [AgentOps Documentation](https://docs.agentops.ai)
- [LiteLLM Documentation](https://docs.litellm.ai)

docs/my-website/sidebars.js

Lines changed: 1 addition & 0 deletions

```diff
@@ -411,6 +411,7 @@ const sidebars = {
       type: "category",
       label: "Logging & Observability",
       items: [
+        "observability/agentops_integration",
         "observability/langfuse_integration",
         "observability/lunary_integration",
         "observability/mlflow",
```

litellm/__init__.py

Lines changed: 1 addition & 0 deletions

```diff
@@ -113,6 +113,7 @@
     "pagerduty",
     "humanloop",
     "gcs_pubsub",
+    "agentops",
     "anthropic_cache_control_hook",
 ]
 logged_real_time_event_types: Optional[Union[List[str], Literal["*"]]] = None
```
litellm/integrations/agentops/__init__.py

Lines changed: 3 additions & 0 deletions

```python
from .agentops import AgentOps

__all__ = ["AgentOps"]
```
litellm/integrations/agentops/agentops.py

Lines changed: 118 additions & 0 deletions

````python
"""
AgentOps integration for LiteLLM - Provides OpenTelemetry tracing for LLM calls
"""
import os
from dataclasses import dataclass
from typing import Optional, Dict, Any

from litellm.integrations.opentelemetry import OpenTelemetry, OpenTelemetryConfig
from litellm.llms.custom_httpx.http_handler import _get_httpx_client


@dataclass
class AgentOpsConfig:
    endpoint: str = "https://otlp.agentops.cloud/v1/traces"
    api_key: Optional[str] = None
    service_name: Optional[str] = None
    deployment_environment: Optional[str] = None
    auth_endpoint: str = "https://api.agentops.ai/v3/auth/token"

    @classmethod
    def from_env(cls):
        return cls(
            endpoint="https://otlp.agentops.cloud/v1/traces",
            api_key=os.getenv("AGENTOPS_API_KEY"),
            service_name=os.getenv("AGENTOPS_SERVICE_NAME", "agentops"),
            deployment_environment=os.getenv("AGENTOPS_ENVIRONMENT", "production"),
            auth_endpoint="https://api.agentops.ai/v3/auth/token",
        )


class AgentOps(OpenTelemetry):
    """
    AgentOps integration - built on top of OpenTelemetry

    Example usage:
        ```python
        import litellm

        litellm.success_callback = ["agentops"]

        response = litellm.completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hello, how are you?"}],
        )
        ```
    """

    def __init__(
        self,
        config: Optional[AgentOpsConfig] = None,
    ):
        if config is None:
            config = AgentOpsConfig.from_env()

        # Prefetch JWT token for authentication
        jwt_token = None
        project_id = None
        if config.api_key:
            try:
                response = self._fetch_auth_token(config.api_key, config.auth_endpoint)
                jwt_token = response.get("token")
                project_id = response.get("project_id")
            except Exception:
                pass

        headers = f"Authorization=Bearer {jwt_token}" if jwt_token else None

        otel_config = OpenTelemetryConfig(
            exporter="otlp_http",
            endpoint=config.endpoint,
            headers=headers,
        )

        # Initialize OpenTelemetry with our config
        super().__init__(
            config=otel_config,
            callback_name="agentops",
        )

        # Set AgentOps-specific resource attributes
        resource_attrs = {
            "service.name": config.service_name or "litellm",
            "deployment.environment": config.deployment_environment or "production",
            "telemetry.sdk.name": "agentops",
        }

        if project_id:
            resource_attrs["project.id"] = project_id

        self.resource_attributes = resource_attrs

    def _fetch_auth_token(self, api_key: str, auth_endpoint: str) -> Dict[str, Any]:
        """
        Fetch JWT authentication token from AgentOps API

        Args:
            api_key: AgentOps API key
            auth_endpoint: Authentication endpoint

        Returns:
            Dict containing JWT token and project ID
        """
        headers = {
            "Content-Type": "application/json",
            "Connection": "keep-alive",
        }

        client = _get_httpx_client()
        try:
            response = client.post(
                url=auth_endpoint,
                headers=headers,
                json={"api_key": api_key},
                timeout=10,
            )

            if response.status_code != 200:
                raise Exception(f"Failed to fetch auth token: {response.text}")

            return response.json()
        finally:
            client.close()
````
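
For illustration, a minimal sketch of constructing the logger directly with an explicit config instead of the `"agentops"` callback string (values are placeholders; in normal use LiteLLM builds the instance for you, and the auth response is expected to carry `token` and `project_id` keys as read above):

```python
from litellm.integrations.agentops.agentops import AgentOps, AgentOpsConfig

# Hypothetical explicit configuration, bypassing the environment variables
config = AgentOpsConfig(
    api_key="your-agentops-api-key",   # placeholder
    service_name="my-service",
    deployment_environment="staging",
)

# Instantiation prefetches the JWT and configures the OTLP HTTP exporter
agentops_logger = AgentOps(config=config)
```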

litellm/litellm_core_utils/litellm_logging.py

Lines changed: 11 additions & 1 deletion

```diff
@@ -28,6 +28,7 @@
 from litellm.batches.batch_utils import _handle_completed_batch
 from litellm.caching.caching import DualCache, InMemoryCache
 from litellm.caching.caching_handler import LLMCachingHandler
+
 from litellm.constants import (
     DEFAULT_MOCK_RESPONSE_COMPLETION_TOKEN_COUNT,
     DEFAULT_MOCK_RESPONSE_PROMPT_TOKEN_COUNT,
@@ -36,6 +37,7 @@
     RealtimeAPITokenUsageProcessor,
     _select_model_name_for_cost_calc,
 )
+from litellm.integrations.agentops import AgentOps
 from litellm.integrations.anthropic_cache_control_hook import AnthropicCacheControlHook
 from litellm.integrations.arize.arize import ArizeLogger
 from litellm.integrations.custom_guardrail import CustomGuardrail
@@ -2685,7 +2687,15 @@ def _init_custom_logger_compatible_class(  # noqa: PLR0915
     """
     try:
         custom_logger_init_args = custom_logger_init_args or {}
-        if logging_integration == "lago":
+        if logging_integration == "agentops":  # Add AgentOps initialization
+            for callback in _in_memory_loggers:
+                if isinstance(callback, AgentOps):
+                    return callback  # type: ignore
+
+            agentops_logger = AgentOps()
+            _in_memory_loggers.append(agentops_logger)
+            return agentops_logger  # type: ignore
+        elif logging_integration == "lago":
             for callback in _in_memory_loggers:
                 if isinstance(callback, LagoLogger):
                     return callback  # type: ignore
```
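
The `agentops` branch follows the same idempotent pattern as the surrounding integrations: the first lookup constructs an `AgentOps` logger (which fetches the JWT and sets up the exporter) and caches it in `_in_memory_loggers`; later lookups return the cached instance. A rough sketch of the effect from user code, with illustrative prompts:

```python
import litellm

litellm.success_callback = ["agentops"]

# The first traced call triggers one-time AgentOps initialization
first = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "first call"}],
)

# Later calls reuse the cached AgentOps logger; no second JWT fetch
second = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "second call"}],
)
```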
