diff --git a/package-lock.json b/package-lock.json
index 3b9ea2b..0a4f17e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@literalai/client",
-  "version": "0.0.504",
+  "version": "0.0.505",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@literalai/client",
-      "version": "0.0.504",
+      "version": "0.0.505",
       "license": "Apache-2.0",
       "dependencies": {
         "axios": "^1.6.2",
@@ -36,7 +36,9 @@
         "tsup": "^8.0.1",
         "typedoc": "^0.25.13",
         "typedoc-plugin-markdown": "^4.0.0-next.25",
-        "typescript": "^5.3.3"
+        "typescript": "^5.3.3",
+        "zod": "^3.23.8",
+        "zod-to-json-schema": "^3.23.0"
       },
       "peerDependencies": {
         "@ai-sdk/openai": "^0.0.9",
@@ -3349,6 +3351,15 @@
         }
       }
     },
+    "node_modules/ai/node_modules/zod-to-json-schema": {
+      "version": "3.22.5",
+      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.5.tgz",
+      "integrity": "sha512-+akaPo6a0zpVCCseDed504KBJUQpEW5QZw7RMneNmKw+fGaML1Z9tUNLnHHAC8x6dzVRO1eB2oEMyZRnuBZg7Q==",
+      "dev": true,
+      "peerDependencies": {
+        "zod": "^3.22.4"
+      }
+    },
     "node_modules/ajv": {
       "version": "6.12.6",
       "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
@@ -9314,20 +9325,19 @@
       }
     },
     "node_modules/zod": {
-      "version": "3.22.4",
-      "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.4.tgz",
-      "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==",
-      "peer": true,
+      "version": "3.23.8",
+      "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz",
+      "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==",
       "funding": {
         "url": "https://github.com/sponsors/colinhacks"
       }
     },
     "node_modules/zod-to-json-schema": {
-      "version": "3.22.5",
-      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.22.5.tgz",
-      "integrity": "sha512-+akaPo6a0zpVCCseDed504KBJUQpEW5QZw7RMneNmKw+fGaML1Z9tUNLnHHAC8x6dzVRO1eB2oEMyZRnuBZg7Q==",
+      "version": "3.23.0",
+      "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.0.tgz",
+      "integrity": "sha512-az0uJ243PxsRIa2x1WmNE/pnuA05gUq/JB8Lwe1EDCCL/Fz9MgjYQ0fPlyc2Tcv6aF2ZA7WM5TWaRZVEFaAIag==",
       "peerDependencies": {
-        "zod": "^3.22.4"
+        "zod": "^3.23.3"
       }
     }
   }
diff --git a/package.json b/package.json
index b9be91a..2d98012 100644
--- a/package.json
+++ b/package.json
@@ -45,7 +45,9 @@
     "tsup": "^8.0.1",
     "typedoc": "^0.25.13",
     "typedoc-plugin-markdown": "^4.0.0-next.25",
-    "typescript": "^5.3.3"
+    "typescript": "^5.3.3",
+    "zod": "^3.23.8",
+    "zod-to-json-schema": "^3.23.0"
   },
   "dependencies": {
     "axios": "^1.6.2",
@@ -57,6 +59,7 @@
     "@ai-sdk/openai": "^0.0.9",
     "ai": "^3.1.0",
     "langchain": "^0.1.14",
-    "openai": "^4.26.0"
+    "openai": "^4.26.0",
+    "zod-to-json-schema": "^3.23.0"
   }
 }
diff --git a/src/generation.ts b/src/generation.ts
index 55ae918..967527c 100644
--- a/src/generation.ts
+++ b/src/generation.ts
@@ -22,11 +22,12 @@ export interface IImageUrlContent {
 export interface IGenerationMessage {
   uuid?: string;
   templated?: boolean;
-  content: string | (ITextContent | IImageUrlContent)[];
+  content: string | (ITextContent | IImageUrlContent)[] | null;
   role: GenerationMessageRole;
   name?: string;
   function_call?: Record<string, any>;
   tool_calls?: Record<string, any>[];
+  tool_call_id?: string;
 }
 
 export type GenerationType = 'COMPLETION' | 'CHAT';
diff --git a/src/instrumentation/vercel-sdk.ts b/src/instrumentation/vercel-sdk.ts
index 7f708ca..a33a1d5 100644
--- a/src/instrumentation/vercel-sdk.ts
+++ b/src/instrumentation/vercel-sdk.ts
@@ -8,12 +8,14 @@ import type {
   streamObject,
   streamText
 } from 'ai';
+import { zodToJsonSchema } from 'zod-to-json-schema';
 
 import {
   ChatGeneration,
   Generation,
   IGenerationMessage,
   ILLMSettings,
+  ITool,
   LiteralClient,
   Step,
   Thread
@@ -61,13 +63,37 @@ const extractSettings = (options: Options): ILLMSettings => {
   delete settings.model;
   delete settings.prompt;
   delete settings.abortSignal;
+  if ('tools' in settings) {
+    settings.tools = Object.fromEntries(
+      Object.entries(settings.tools).map(([key, tool]) => [
+        key,
+        {
+          description: tool.description,
+          parameters: zodToJsonSchema(tool.parameters)
+        }
+      ])
+    );
+  }
   return settings;
 };
 
+const extractTools = (options: Options): ITool[] | undefined => {
+  if (!('tools' in options) || !options.tools) return undefined;
+  return Object.entries(options.tools).map(([key, tool]) => ({
+    type: 'function',
+    function: {
+      name: key,
+      description: tool.description!,
+      parameters: zodToJsonSchema(tool.parameters) as any
+    }
+  }));
+};
+
 const computeMetricsSync = (
+  options: Options,
   result: Result,
   startTime: number
-): Partial<Generation> => {
+): Partial<ChatGeneration> => {
   const outputTokenCount = result.usage.completionTokens;
   const inputTokenCount = result.usage.promptTokens;
 
@@ -80,20 +106,56 @@ const computeMetricsSync = (
   const completion =
     'text' in result ? result.text : JSON.stringify(result.object);
 
+  const messages = extractMessages(options);
+
+  if (completion) {
+    messages.push({
+      role: 'assistant',
+      content: completion
+    });
+  }
+  if ('toolCalls' in result && result.toolCalls.length) {
+    messages.push({
+      role: 'assistant',
+      content: null,
+      tool_calls: result.toolCalls.map((call) => ({
+        id: call.toolCallId,
+        type: 'function',
+        function: {
+          name: call.toolName,
+          arguments: call.args
+        }
+      }))
+    });
+    for (const toolResult of result.toolResults as any[]) {
+      messages.push({
+        role: 'tool',
+        tool_call_id: toolResult.toolCallId,
+        content: String(toolResult.result)
+      });
+    }
+  }
+
+  const messageCompletion = messages.pop();
+
   return {
     duration,
     tokenThroughputInSeconds,
     outputTokenCount,
     inputTokenCount,
-    messageCompletion: { role: 'assistant', content: completion }
+    messages,
+    messageCompletion
   };
 };
 
 const computeMetricsStream = async (
+  options: Options,
   stream: ReadableStream,
   startTime: number
 ): Promise<Partial<ChatGeneration>> => {
-  const messageCompletion: IGenerationMessage = {
+  const messages = extractMessages(options);
+
+  const textMessage: IGenerationMessage = {
     role: 'assistant',
     content: ''
   };
@@ -102,16 +164,36 @@ let ttFirstToken: number | undefined = undefined;
   for await (const chunk of stream as unknown as AsyncIterable<any>) {
     if (typeof chunk === 'string') {
-      messageCompletion.content += chunk;
+      textMessage.content += chunk;
     } else {
       switch (chunk.type) {
         case 'text-delta': {
-          messageCompletion.content += chunk.textDelta;
+          textMessage.content += chunk.textDelta;
+          break;
+        }
+        case 'tool-call': {
+          messages.push({
+            role: 'assistant',
+            content: null,
+            tool_calls: [
+              {
+                id: chunk.toolCallId,
+                type: 'function',
+                function: {
+                  name: chunk.toolName,
+                  arguments: chunk.args
+                }
+              }
+            ]
+          });
           break;
         }
-        case 'tool-call':
         case 'tool-result': {
-          // TODO: Handle
+          messages.push({
+            role: 'tool',
+            tool_call_id: chunk.toolCallId,
+            content: String(chunk.result)
+          });
           break;
         }
       }
     }
@@ -129,11 +211,15 @@
       ? outputTokenCount / (duration / 1000)
       : undefined;
 
+  if (textMessage.content) messages.push(textMessage);
+  const messageCompletion = messages.pop();
+
   return {
     duration,
     tokenThroughputInSeconds,
     outputTokenCount,
     ttFirstToken,
+    messages,
     messageCompletion
   };
 };
@@ -184,13 +270,17 @@ export const makeInstrumentVercelSDK = (client: LiteralClient) => {
       (async () => {
         if ('fullStream' in result) {
           // streamObject or streamText
-          const metrics = await computeMetricsStream(stream!, startTime);
+          const metrics = await computeMetricsStream(
+            options,
+            stream!,
+            startTime
+          );
 
           const generation = new ChatGeneration({
             provider: options.model.provider,
             model: options.model.modelId,
             settings: extractSettings(options),
-            messages: extractMessages(options),
+            tools: extractTools(options),
             ...metrics
           });
 
@@ -212,13 +302,13 @@ export const makeInstrumentVercelSDK = (client: LiteralClient) => {
           }
         } else {
           // generateObject or generateText
-          const metrics = computeMetricsSync(result, startTime);
+          const metrics = computeMetricsSync(options, result, startTime);
 
           const generation = new ChatGeneration({
             provider: options.model.provider,
             model: options.model.modelId,
             settings: extractSettings(options),
-            messages: extractMessages(options),
+            tools: extractTools(options),
             ...metrics
           });
 
diff --git a/tests/integration/vercel-sdk.test.ts b/tests/integration/vercel-sdk.test.ts
index 4484447..0e2be0d 100644
--- a/tests/integration/vercel-sdk.test.ts
+++ b/tests/integration/vercel-sdk.test.ts
@@ -1,5 +1,6 @@
 import { openai } from '@ai-sdk/openai';
 import { generateText, streamText } from 'ai';
+import { z } from 'zod';
 
 import { LiteralClient } from '../../src';
 
@@ -142,5 +143,193 @@ describe('Vercel SDK Instrumentation', () => {
         })
       ]);
     });
+
+    it('should monitor tools', async () => {
+      const spy = jest.spyOn(client.api, 'createGeneration');
+
+      const generateTextWithLiteralAI =
+        client.instrumentation.vercel.instrument(generateText);
+
+      const { text, toolResults } = await generateTextWithLiteralAI({
+        model,
+        system: 'You are a friendly assistant!',
+        messages: [{ role: 'user', content: 'Convert 20°C to Fahrenheit' }],
+        tools: {
+          celsiusToFahrenheit: {
+            description: 'Converts celsius to fahrenheit',
+            parameters: z.object({
+              value: z.number().describe('The value in celsius')
+            }),
+            execute: async ({ value }) => {
+              const celsius = parseFloat(value);
+              const fahrenheit = celsius * (9 / 5) + 32;
+              return fahrenheit;
+            }
+          }
+        }
+      });
+
+      expect(text).toBe('');
+      expect(toolResults).toEqual([
+        {
+          toolCallId: expect.any(String),
+          toolName: 'celsiusToFahrenheit',
+          args: { value: 20 },
+          result: 68
+        }
+      ]);
+
+      expect(spy).toHaveBeenCalledWith(
+        expect.objectContaining({
+          provider: 'openai.chat',
+          model: 'gpt-3.5-turbo',
+          tools: [
+            {
+              type: 'function',
+              function: {
+                name: 'celsiusToFahrenheit',
+                description: 'Converts celsius to fahrenheit',
+                parameters: {
+                  $schema: 'http://json-schema.org/draft-07/schema#',
+                  type: 'object',
+                  additionalProperties: false,
+                  properties: {
+                    value: {
+                      type: 'number',
+                      description: 'The value in celsius'
+                    }
+                  },
+                  required: ['value']
+                }
+              }
+            }
+          ],
+          messages: [
+            { role: 'system', content: 'You are a friendly assistant!' },
+            { role: 'user', content: 'Convert 20°C to Fahrenheit' },
+            {
+              role: 'assistant',
+              content: null,
+              tool_calls: [
+                {
+                  id: toolResults[0].toolCallId,
+                  type: 'function',
+                  function: {
+                    arguments: { value: 20 },
+                    name: 'celsiusToFahrenheit'
+                  }
+                }
+              ]
+            }
+          ],
+          messageCompletion: {
+            role: 'tool',
+            tool_call_id: toolResults[0].toolCallId,
+            content: String(toolResults[0].result)
+          },
+          duration: expect.any(Number)
+        })
+      );
+    });
+
+    it('should monitor tools in streams', async () => {
+      const spy = jest.spyOn(client.api, 'createGeneration');
+
+      const streamTextWithLiteralAI =
+        client.instrumentation.vercel.instrument(streamText);
+
+      const result = await streamTextWithLiteralAI({
+        model,
+        system: 'You are a friendly assistant!',
+        messages: [{ role: 'user', content: 'Convert 20°C to Fahrenheit' }],
+        tools: {
+          celsiusToFahrenheit: {
+            description: 'Converts celsius to fahrenheit',
+            parameters: z.object({
+              value: z.number().describe('The value in celsius')
+            }),
+            execute: async ({ value }) => {
+              const celsius = parseFloat(value);
+              const fahrenheit = celsius * (9 / 5) + 32;
+              return fahrenheit;
+            }
+          }
+        }
+      });
+
+      // use textStream as an async iterable:
+      const chunks = [];
+      let toolCall, toolResult;
+      for await (const chunk of result.fullStream) {
+        chunks.push(chunk);
+        if (chunk.type === 'tool-call') {
+          toolCall = chunk;
+        }
+        if (chunk.type === 'tool-result') {
+          toolResult = chunk;
+        }
+      }
+
+      expect(toolCall!.toolCallId).toEqual(toolResult!.toolCallId);
+      expect(toolResult).toEqual({
+        type: 'tool-result',
+        toolCallId: expect.any(String),
+        toolName: 'celsiusToFahrenheit',
+        args: { value: 20 },
+        result: 68
+      });
+
+      expect(spy).toHaveBeenCalledWith(
+        expect.objectContaining({
+          provider: 'openai.chat',
+          model: 'gpt-3.5-turbo',
+          tools: [
+            {
+              type: 'function',
+              function: {
+                name: 'celsiusToFahrenheit',
+                description: 'Converts celsius to fahrenheit',
+                parameters: {
+                  $schema: 'http://json-schema.org/draft-07/schema#',
+                  type: 'object',
+                  additionalProperties: false,
+                  properties: {
+                    value: {
+                      type: 'number',
+                      description: 'The value in celsius'
+                    }
+                  },
+                  required: ['value']
+                }
+              }
+            }
+          ],
+          messages: [
+            { role: 'system', content: 'You are a friendly assistant!' },
+            { role: 'user', content: 'Convert 20°C to Fahrenheit' },
+            {
+              role: 'assistant',
+              content: null,
+              tool_calls: [
+                {
+                  id: toolResult!.toolCallId,
+                  type: 'function',
+                  function: {
+                    arguments: { value: 20 },
+                    name: 'celsiusToFahrenheit'
+                  }
+                }
+              ]
+            }
+          ],
+          messageCompletion: {
+            role: 'tool',
+            tool_call_id: toolResult!.toolCallId,
+            content: String(toolResult!.result)
+          },
+          duration: expect.any(Number)
+        })
+      );
+    });
   });
 });