Commit 6994d0e

fix(ai): Consider gen_ai valid op prefix for AI spans (#4859)
AI spans used to be identified by an `op` prefix of `ai.`, but that recently changed: most AI spans now have an `op` starting with `gen_ai.`. This PR updates span detection to also treat `gen_ai.` as a valid AI span `op` prefix.
1 parent 4468f5a commit 6994d0e
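
A minimal sketch of the detection rule this commit converges on, using a hypothetical standalone helper for illustration (the real check is the `is_ai_span` function added in relay-event-normalization below):

    /// Sketch only: a span counts as an AI span when its `op` starts with the
    /// legacy "ai." prefix or the newer "gen_ai." prefix.
    fn op_is_ai(op: &str) -> bool {
        op.starts_with("ai.") || op.starts_with("gen_ai.")
    }

    fn main() {
        assert!(op_is_ai("ai.chat_completions.openai")); // legacy prefix, already recognized
        assert!(op_is_ai("gen_ai.chat_completions.openai")); // new prefix, recognized after this change
        assert!(!op_is_ai("db.query")); // unrelated ops are unaffected
    }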

File tree

4 files changed: +112 −3 lines

- CHANGELOG.md
- relay-dynamic-config/src/defaults.rs
- relay-event-normalization/src/event.rs
- relay-event-normalization/src/normalize/span/ai.rs


CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@
 - Take into account more types of tokens when doing AI cost calculation. ([#4840](https://github.com/getsentry/relay/pull/4840))
 - Use the `FiniteF64` type for measurements. ([#4828](https://github.com/getsentry/relay/pull/4828))
 - Derive a `sentry.description` attribute for V2 spans ([#4832](https://github.com/getsentry/relay/pull/4832))
+- Consider `gen_ai` also as AI span op prefix. ([#4859](https://github.com/getsentry/relay/pull/4859))
 
 ## 25.6.1
 

relay-dynamic-config/src/defaults.rs

Lines changed: 1 addition & 1 deletion
@@ -112,7 +112,7 @@ pub fn add_span_metrics(project_config: &mut ProjectConfig) {
 /// These metrics are added to [`crate::GlobalConfig`] by the service and enabled
 /// by project configs in sentry.
 pub fn hardcoded_span_metrics() -> Vec<(GroupKey, Vec<MetricSpec>, Vec<TagMapping>)> {
-    let is_ai = RuleCondition::glob("span.op", "ai.*");
+    let is_ai = RuleCondition::glob("span.op", "ai.*") | RuleCondition::glob("span.op", "gen_ai.*");
 
     let is_db = RuleCondition::eq("span.sentry_tags.category", "db")
         & !RuleCondition::glob("span.op", DISABLED_DATABASES)
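
For the span-metrics side, a sketch of how the updated `is_ai` condition reads, assuming `RuleCondition` is imported from `relay_protocol` (import path is an assumption); `|` combines the two glob conditions into a logical OR, exactly as on the changed line:

    use relay_protocol::RuleCondition;

    // Sketch only: AI span metrics are enabled when either glob matches the
    // span's `op`, covering both the legacy and the new prefix.
    fn is_ai_condition() -> RuleCondition {
        RuleCondition::glob("span.op", "ai.*") | RuleCondition::glob("span.op", "gen_ai.*")
    }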

relay-event-normalization/src/event.rs

Lines changed: 100 additions & 0 deletions
@@ -2312,6 +2312,106 @@ mod tests {
 
     #[test]
     fn test_ai_data() {
+        let json = r#"
+        {
+            "spans": [
+                {
+                    "timestamp": 1702474613.0495,
+                    "start_timestamp": 1702474613.0175,
+                    "description": "OpenAI ",
+                    "op": "gen_ai.chat_completions.openai",
+                    "span_id": "9c01bd820a083e63",
+                    "parent_span_id": "a1e13f3f06239d69",
+                    "trace_id": "922dda2462ea4ac2b6a4b339bee90863",
+                    "data": {
+                        "gen_ai.usage.input_tokens": 1000,
+                        "gen_ai.usage.output_tokens": 2000,
+                        "gen_ai.usage.output_tokens.reasoning": 3000,
+                        "gen_ai.usage.input_tokens.cached": 4000,
+                        "gen_ai.request.model": "claude-2.1"
+                    }
+                },
+                {
+                    "timestamp": 1702474613.0495,
+                    "start_timestamp": 1702474613.0175,
+                    "description": "OpenAI ",
+                    "op": "gen_ai.chat_completions.openai",
+                    "span_id": "ac01bd820a083e63",
+                    "parent_span_id": "a1e13f3f06239d69",
+                    "trace_id": "922dda2462ea4ac2b6a4b339bee90863",
+                    "data": {
+                        "gen_ai.usage.input_tokens": 1000,
+                        "gen_ai.usage.output_tokens": 2000,
+                        "gen_ai.request.model": "gpt4-21-04"
+                    }
+                }
+            ]
+        }
+        "#;
+
+        let mut event = Annotated::<Event>::from_json(json).unwrap();
+
+        normalize_event(
+            &mut event,
+            &NormalizationConfig {
+                ai_model_costs: Some(&ModelCosts {
+                    version: 2,
+                    costs: vec![],
+                    models: HashMap::from([
+                        (
+                            "claude-2.1".to_owned(),
+                            ModelCostV2 {
+                                input_per_token: 0.01,
+                                output_per_token: 0.02,
+                                output_reasoning_per_token: 0.03,
+                                input_cached_per_token: 0.0,
+                            },
+                        ),
+                        (
+                            "gpt4-21-04".to_owned(),
+                            ModelCostV2 {
+                                input_per_token: 0.09,
+                                output_per_token: 0.05,
+                                output_reasoning_per_token: 0.06,
+                                input_cached_per_token: 0.0,
+                            },
+                        ),
+                    ]),
+                }),
+                ..NormalizationConfig::default()
+            },
+        );
+
+        let spans = event.value().unwrap().spans.value().unwrap();
+        assert_eq!(spans.len(), 2);
+        assert_eq!(
+            spans
+                .first()
+                .and_then(|span| span.value())
+                .and_then(|span| span.data.value())
+                .and_then(|data| data.gen_ai_usage_total_cost.value()),
+            Some(&Value::F64(140.0))
+        );
+        assert_eq!(
+            spans
+                .get(1)
+                .and_then(|span| span.value())
+                .and_then(|span| span.data.value())
+                .and_then(|data| data.gen_ai_usage_total_cost.value()),
+            Some(&Value::F64(190.0))
+        );
+        assert_eq!(
+            spans
+                .get(1)
+                .and_then(|span| span.value())
+                .and_then(|span| span.data.value())
+                .and_then(|data| data.gen_ai_usage_total_tokens.value()),
+            Some(&Value::F64(3000.0))
+        );
+    }
+
+    #[test]
+    fn test_ai_data_with_ai_op_prefix() {
         let json = r#"
         {
             "spans": [

relay-event-normalization/src/normalize/span/ai.rs

Lines changed: 10 additions & 2 deletions
@@ -42,7 +42,7 @@ fn calculate_ai_model_cost(model_cost: Option<ModelCostV2>, data: &SpanData) ->
 
 /// Maps AI-related measurements (legacy) to span data.
 pub fn map_ai_measurements_to_data(span: &mut Span) {
-    if !span.op.value().is_some_and(|op| op.starts_with("ai.")) {
+    if !is_ai_span(span) {
         return;
     };
 
@@ -86,7 +86,7 @@ pub fn map_ai_measurements_to_data(span: &mut Span) {
 
 /// Extract the gen_ai_usage_total_cost data into the span
 pub fn extract_ai_data(span: &mut Span, ai_model_costs: &ModelCosts) {
-    if !span.op.value().is_some_and(|op| op.starts_with("ai.")) {
+    if !is_ai_span(span) {
         return;
     }
 
@@ -123,3 +123,11 @@ pub fn enrich_ai_span_data(event: &mut Event, model_costs: Option<&ModelCosts>)
         }
     }
 }
+
+/// Returns true if the span is an AI span.
+/// AI spans are spans with op starting with "ai." (legacy) or "gen_ai." (new).
+pub fn is_ai_span(span: &Span) -> bool {
+    span.op
+        .value()
+        .is_some_and(|op| op.starts_with("ai.") || op.starts_with("gen_ai."))
+}
