Skip to content

feat(node): Expand how vercel ai input/outputs can be set #16455

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Jun 5, 2025
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion dev-packages/node-integration-tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
"@types/mongodb": "^3.6.20",
"@types/mysql": "^2.15.21",
"@types/pg": "^8.6.5",
"ai": "^4.0.6",
"ai": "^4.3.16",
"amqplib": "^0.10.7",
"apollo-server": "^3.11.1",
"body-parser": "^1.20.3",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Integration-test instrumentation file: boots Sentry with `sendDefaultPii: true`
// so the Vercel AI integration records prompt inputs/outputs by default.
// Captured envelopes go through the logging transport so the test runner can
// assert on them instead of hitting the network.
Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337', // dummy DSN — nothing is actually sent upstream
  release: '1.0',
  tracesSampleRate: 1.0, // sample every transaction so the test always sees spans
  sendDefaultPii: true, // opt in to recording AI inputs/outputs on spans
  transport: loggingTransport, // write envelopes to stdout for the test harness
  integrations: [Sentry.vercelAIIntegration()],
});
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@ Sentry.init({
release: '1.0',
tracesSampleRate: 1.0,
transport: loggingTransport,
integrations: [Sentry.vercelAIIntegration()],
});
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,15 @@ import { afterAll, describe, expect } from 'vitest';
import { cleanupChildProcesses, createEsmAndCjsTests } from '../../../utils/runner';

// The `ai` SDK only supports Node 18+
describe('ai', () => {
describe('Vercel AI integration', () => {
afterAll(() => {
cleanupChildProcesses();
});

const EXPECTED_TRANSACTION = {
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - no telemetry config, should enable telemetry but not record inputs/outputs when sendDefaultPii: false
expect.objectContaining({
data: expect.objectContaining({
'ai.completion_tokens.used': 20,
Expand All @@ -35,48 +36,51 @@ describe('ai', () => {
origin: 'auto.vercelai.otel',
status: 'ok',
}),
// Second span - explicitly enabled telemetry but recordInputs/recordOutputs not set, should not record when sendDefaultPii: false
expect.objectContaining({
data: expect.objectContaining({
'sentry.origin': 'auto.vercelai.otel',
'sentry.op': 'ai.run.doGenerate',
'operation.name': 'ai.generateText.doGenerate',
'ai.operationId': 'ai.generateText.doGenerate',
'ai.model.provider': 'mock-provider',
'ai.completion_tokens.used': 20,
'ai.model.id': 'mock-model-id',
'ai.settings.maxRetries': 2,
'gen_ai.system': 'mock-provider',
'gen_ai.request.model': 'mock-model-id',
'ai.pipeline.name': 'generateText.doGenerate',
'ai.model.provider': 'mock-provider',
'ai.model_id': 'mock-model-id',
'ai.streaming': false,
'ai.response.finishReason': 'stop',
'ai.response.model': 'mock-model-id',
'ai.usage.promptTokens': 10,
'ai.usage.completionTokens': 20,
'gen_ai.response.finish_reasons': ['stop'],
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 20,
'ai.completion_tokens.used': 20,
'ai.operationId': 'ai.generateText',
'ai.pipeline.name': 'generateText',
'ai.prompt_tokens.used': 10,
'ai.response.finishReason': 'stop',
'ai.settings.maxRetries': 2,
'ai.settings.maxSteps': 1,
'ai.streaming': false,
'ai.total_tokens.used': 30,
'ai.usage.completionTokens': 20,
'ai.usage.promptTokens': 10,
'operation.name': 'ai.generateText',
'sentry.op': 'ai.pipeline.generateText',
'sentry.origin': 'auto.vercelai.otel',
}),
description: 'generateText.doGenerate',
op: 'ai.run.doGenerate',
description: 'generateText',
op: 'ai.pipeline.generateText',
origin: 'auto.vercelai.otel',
status: 'ok',
}),
]),
};

const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - no telemetry config, should enable telemetry AND record inputs/outputs when sendDefaultPii: true
expect.objectContaining({
data: expect.objectContaining({
'ai.completion_tokens.used': 20,
'ai.model.id': 'mock-model-id',
'ai.model.provider': 'mock-provider',
'ai.model_id': 'mock-model-id',
'ai.prompt': '{"prompt":"Where is the second span?"}',
'ai.prompt': '{"prompt":"Where is the first span?"}',
'ai.operationId': 'ai.generateText',
'ai.pipeline.name': 'generateText',
'ai.prompt_tokens.used': 10,
'ai.response.finishReason': 'stop',
'ai.input_messages': '{"prompt":"Where is the second span?"}',
'ai.input_messages': '{"prompt":"Where is the first span?"}',
'ai.settings.maxRetries': 2,
'ai.settings.maxSteps': 1,
'ai.streaming': false,
Expand All @@ -92,42 +96,46 @@ describe('ai', () => {
origin: 'auto.vercelai.otel',
status: 'ok',
}),
// Second span - explicitly enabled telemetry, should record inputs/outputs regardless of sendDefaultPii
expect.objectContaining({
data: expect.objectContaining({
'sentry.origin': 'auto.vercelai.otel',
'sentry.op': 'ai.run.doGenerate',
'operation.name': 'ai.generateText.doGenerate',
'ai.operationId': 'ai.generateText.doGenerate',
'ai.model.provider': 'mock-provider',
'ai.completion_tokens.used': 20,
'ai.model.id': 'mock-model-id',
'ai.settings.maxRetries': 2,
'gen_ai.system': 'mock-provider',
'gen_ai.request.model': 'mock-model-id',
'ai.pipeline.name': 'generateText.doGenerate',
'ai.model.provider': 'mock-provider',
'ai.model_id': 'mock-model-id',
'ai.streaming': false,
'ai.response.finishReason': 'stop',
'ai.response.model': 'mock-model-id',
'ai.usage.promptTokens': 10,
'ai.usage.completionTokens': 20,
'gen_ai.response.finish_reasons': ['stop'],
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 20,
'ai.completion_tokens.used': 20,
'ai.prompt': '{"prompt":"Where is the second span?"}',
'ai.operationId': 'ai.generateText',
'ai.pipeline.name': 'generateText',
'ai.prompt_tokens.used': 10,
'ai.response.finishReason': 'stop',
'ai.input_messages': '{"prompt":"Where is the second span?"}',
'ai.settings.maxRetries': 2,
'ai.settings.maxSteps': 1,
'ai.streaming': false,
'ai.total_tokens.used': 30,
'ai.usage.completionTokens': 20,
'ai.usage.promptTokens': 10,
'operation.name': 'ai.generateText',
'sentry.op': 'ai.pipeline.generateText',
'sentry.origin': 'auto.vercelai.otel',
}),
description: 'generateText.doGenerate',
op: 'ai.run.doGenerate',
description: 'generateText',
op: 'ai.pipeline.generateText',
origin: 'auto.vercelai.otel',
status: 'ok',
}),
]),
};

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
test('creates ai related spans ', async () => {
await createRunner().expect({ transaction: EXPECTED_TRANSACTION }).start().completed();
test('creates ai related spans with sendDefaultPii: false', async () => {
await createRunner().expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE }).start().completed();
});
});

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
test('creates ai related spans with sendDefaultPii: true', async () => {
await createRunner().expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE }).start().completed();
});
});
});
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

super-l: Is there a reason for this file? No strong feelings, so feel free to disregard but I'd rather avoid a constants file with one export.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I put this here to avoid circular imports (instrumentation -> index)

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ahh that's of course fine! sorry, didn't notice it

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/**
 * Name under which the Vercel AI integration is registered on the client.
 *
 * NOTE(review): kept in its own module (rather than `index.ts`) to avoid a
 * circular import between the instrumentation and the integration entry
 * point (instrumentation -> index), per the PR discussion.
 */
export const INTEGRATION_NAME = 'VercelAI';
3 changes: 1 addition & 2 deletions packages/node/src/integrations/tracing/vercelai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,9 @@ import type { IntegrationFn } from '@sentry/core';
import { defineIntegration, SEMANTIC_ATTRIBUTE_SENTRY_OP, spanToJSON } from '@sentry/core';
import { generateInstrumentOnce } from '../../../otel/instrument';
import { addOriginToSpan } from '../../../utils/addOriginToSpan';
import { INTEGRATION_NAME } from './constants';
import { SentryVercelAiInstrumentation } from './instrumentation';

const INTEGRATION_NAME = 'VercelAI';

export const instrumentVercelAi = generateInstrumentOnce(INTEGRATION_NAME, () => new SentryVercelAiInstrumentation({}));

const _vercelAIIntegration = (() => {
Expand Down
34 changes: 23 additions & 11 deletions packages/node/src/integrations/tracing/vercelai/instrumentation.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import type { InstrumentationConfig, InstrumentationModuleDefinition } from '@opentelemetry/instrumentation';
import { InstrumentationBase, InstrumentationNodeModuleDefinition } from '@opentelemetry/instrumentation';
import { SDK_VERSION } from '@sentry/core';
import { getCurrentScope, SDK_VERSION } from '@sentry/core';
import { INTEGRATION_NAME } from './constants';
import type { TelemetrySettings } from './types';

// List of patched methods
Expand Down Expand Up @@ -71,16 +72,27 @@ export class SentryVercelAiInstrumentation extends InstrumentationBase {
const existingExperimentalTelemetry = args[0].experimental_telemetry || {};
const isEnabled = existingExperimentalTelemetry.isEnabled;

// if `isEnabled` is not explicitly set to `true` or `false`, enable telemetry
// but disable capturing inputs and outputs by default
if (isEnabled === undefined) {
args[0].experimental_telemetry = {
isEnabled: true,
recordInputs: false,
recordOutputs: false,
...existingExperimentalTelemetry,
};
}
const client = getCurrentScope().getClient();
const shouldRecordInputsAndOutputs = client?.getIntegrationByName(INTEGRATION_NAME)
? client.getOptions().sendDefaultPii
: false;

// Set recordInputs and recordOutputs based on sendDefaultPii if not explicitly set
const recordInputs =
existingExperimentalTelemetry.recordInputs !== undefined
? existingExperimentalTelemetry.recordInputs
: shouldRecordInputsAndOutputs;
const recordOutputs =
existingExperimentalTelemetry.recordOutputs !== undefined
? existingExperimentalTelemetry.recordOutputs
: shouldRecordInputsAndOutputs;

args[0].experimental_telemetry = {
...existingExperimentalTelemetry,
isEnabled: isEnabled !== undefined ? isEnabled : true,
recordInputs,
recordOutputs,
};

// @ts-expect-error we know that the method exists
return originalMethod.apply(this, args);
Expand Down