Commit ebdd485

feat(node): update pipeline spans to use agent naming (#16712)
This matches our expectations for the new trace views being built and aligns with the Python SDK.

![image](https://github.com/user-attachments/assets/f1d430b3-547e-4bc0-9c52-8b2683397783)
1 parent 7d95254 commit ebdd485
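For SDK users who group or filter spans by `op`, the practical effect is that every Vercel AI pipeline entry point (`generateText`, `streamText`, `generateObject`, `streamObject`, `embed`, `embedMany`) now reports the single op `gen_ai.invoke_agent`. A minimal sketch of how that shows up on the client side, assuming the public `beforeSendTransaction` hook and the default transaction event shape (the filtering logic itself is illustrative, not part of this change):

```ts
import * as Sentry from '@sentry/node';

Sentry.init({
  dsn: '__YOUR_DSN__', // placeholder DSN
  tracesSampleRate: 1.0,
  beforeSendTransaction(event) {
    // After this change, all Vercel AI pipeline spans carry op 'gen_ai.invoke_agent'
    // instead of the old 'ai.pipeline.*' values.
    const agentSpans = (event.spans ?? []).filter(span => span.op === 'gen_ai.invoke_agent');
    console.log(`Vercel AI agent spans in this transaction: ${agentSpans.length}`);
    return event;
  },
});
```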

File tree (3 files changed, +19 / -19 lines)

  • dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts
  • dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
  • packages/node/src/integrations/tracing/vercelai/index.ts


dev-packages/e2e-tests/test-applications/nextjs-15/tests/ai-test.test.ts

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@ test('should create AI spans with correct attributes', async ({ page }) => {
   // TODO: For now, this is sadly not fully working - the monkey patching of the ai package is not working
   // because of this, only spans that are manually opted-in at call time will be captured
   // this may be fixed by https://github.com/vercel/ai/pull/6716 in the future
-  const aiPipelineSpans = spans.filter(span => span.op === 'ai.pipeline.generate_text');
+  const aiPipelineSpans = spans.filter(span => span.op === 'gen_ai.invoke_agent');
   const aiGenerateSpans = spans.filter(span => span.op === 'gen_ai.generate_text');
   const toolCallSpans = spans.filter(span => span.op === 'gen_ai.execute_tool');
 

dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts

Lines changed: 12 additions & 12 deletions

@@ -26,11 +26,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 20,
         'gen_ai.usage.total_tokens': 30,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
       origin: 'auto.vercelai.otel',
       status: 'ok',
     }),
@@ -83,11 +83,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 20,
         'gen_ai.usage.total_tokens': 30,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
       origin: 'auto.vercelai.otel',
       status: 'ok',
     }),
@@ -140,11 +140,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 25,
         'gen_ai.usage.total_tokens': 40,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
       origin: 'auto.vercelai.otel',
       status: 'ok',
     }),
@@ -220,11 +220,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 20,
         'gen_ai.usage.total_tokens': 30,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
       origin: 'auto.vercelai.otel',
       status: 'ok',
     }),
@@ -280,11 +280,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 20,
         'gen_ai.usage.total_tokens': 30,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
       origin: 'auto.vercelai.otel',
       status: 'ok',
     }),
@@ -341,11 +341,11 @@ describe('Vercel AI integration', () => {
         'gen_ai.usage.output_tokens': 25,
         'gen_ai.usage.total_tokens': 40,
         'operation.name': 'ai.generateText',
-        'sentry.op': 'ai.pipeline.generate_text',
+        'sentry.op': 'gen_ai.invoke_agent',
         'sentry.origin': 'auto.vercelai.otel',
       },
       description: 'generateText',
-      op: 'ai.pipeline.generate_text',
+      op: 'gen_ai.invoke_agent',
      origin: 'auto.vercelai.otel',
      status: 'ok',
    }),

packages/node/src/integrations/tracing/vercelai/index.ts

Lines changed: 6 additions & 6 deletions

@@ -102,7 +102,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
 
       // Generate Spans
       if (name === 'ai.generateText') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_text');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -113,7 +113,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.streamText') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_text');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -124,7 +124,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.generateObject') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_object');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -135,7 +135,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.streamObject') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_object');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -146,7 +146,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.embed') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
@@ -157,7 +157,7 @@ const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
       }
 
       if (name === 'ai.embedMany') {
-        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed_many');
+        span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
         return;
       }
 
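Since all six branches now set the same op, the mapping could equally be expressed as a single lookup. A hedged sketch of that consolidation (the set and helper names are made up for illustration; the committed change keeps the explicit per-name branches):

```ts
import type { Span } from '@sentry/core';
import { SEMANTIC_ATTRIBUTE_SENTRY_OP } from '@sentry/core';

// Vercel AI pipeline span names that are all reported as agent invocations after this change.
const AGENT_PIPELINE_SPAN_NAMES = new Set([
  'ai.generateText',
  'ai.streamText',
  'ai.generateObject',
  'ai.streamObject',
  'ai.embed',
  'ai.embedMany',
]);

// Hypothetical helper: tag a pipeline span with the shared agent op and report whether it matched.
function markAgentPipelineSpan(span: Span, name: string): boolean {
  if (!AGENT_PIPELINE_SPAN_NAMES.has(name)) {
    return false;
  }
  span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
  return true;
}
```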
