From 669d197509ccce3c84f0feab093035b9fada1024 Mon Sep 17 00:00:00 2001
From: Damien BUTY
Date: Thu, 25 Jul 2024 16:58:35 +0200
Subject: [PATCH 1/2] feat(openai): add tags & metadata at the call level

---
 src/api.ts                       |   2 -
 src/instrumentation/index.ts     |  11 +-
 src/instrumentation/openai.ts    | 128 ++++++++++++++++++------
 tests/integration/openai.test.ts | 166 +++++++++++++++++--------------
 4 files changed, 195 insertions(+), 112 deletions(-)

diff --git a/src/api.ts b/src/api.ts
index 5dc9787..c055432 100644
--- a/src/api.ts
+++ b/src/api.ts
@@ -423,7 +423,6 @@ export class API {
       return response.data;
     } catch (e) {
-      console.error(e);
       if (e instanceof AxiosError) {
         throw new Error(JSON.stringify(e.response?.data.errors));
       } else {
@@ -451,7 +450,6 @@ export class API {
       return response.data;
     } catch (e) {
-      console.error(e);
       if (e instanceof AxiosError) {
         throw new Error(JSON.stringify(e.response?.data));
       } else {
diff --git a/src/instrumentation/index.ts b/src/instrumentation/index.ts
index 526bb2f..d382c0b 100644
--- a/src/instrumentation/index.ts
+++ b/src/instrumentation/index.ts
@@ -1,15 +1,16 @@
-import { LiteralClient } from '..';
+import { LiteralClient, Maybe } from '..';
 import { LiteralCallbackHandler } from './langchain';
 import { instrumentLlamaIndex, withThread } from './llamaindex';
 import instrumentOpenAI from './openai';
-import { InstrumentOpenAIOptions } from './openai';
 import { makeInstrumentVercelSDK } from './vercel-sdk';
 
-export type { InstrumentOpenAIOptions } from './openai';
+export type OpenAIGlobalOptions = {
+  tags?: Maybe<string[]>;
+  metadata?: Maybe<Record<string, any>>;
+};
 
 export default (client: LiteralClient) => ({
-  openai: (options?: InstrumentOpenAIOptions) =>
-    instrumentOpenAI(client, options),
+  openai: (options?: OpenAIGlobalOptions) => instrumentOpenAI(client, options),
   langchain: {
     literalCallback: (threadId?: string) => {
       try {
diff --git a/src/instrumentation/openai.ts b/src/instrumentation/openai.ts
index 786dea0..d1bf864 100644
--- a/src/instrumentation/openai.ts
+++ b/src/instrumentation/openai.ts
@@ -1,4 +1,5 @@
 import OpenAI from 'openai';
+import { RequestOptions } from 'openai/core';
 import type {
   ChatCompletion,
   ChatCompletionChunk,
@@ -13,40 +14,70 @@ import {
   IGenerationMessage,
   LiteralClient,
   Maybe,
+  OpenAIGlobalOptions,
   StepConstructor
 } from '..';
 
 // Define a generic type for the original function to be wrapped
-type OriginalFunction<T extends any[], R> = (...args: T) => Promise<R>;
+type OriginalFunction<Output> = (
+  body: any,
+  options?: RequestOptions
+) => Promise<Output>;
+
+type OpenAICallOptions = {
+  literalaiTags?: Maybe<string[]>;
+  literalaiMetadata?: Maybe<Record<string, any>>;
+};
+
+function cleanOpenAIArgs(
+  body: any,
+  callOptions?: RequestOptions & OpenAICallOptions
+): [any, RequestOptions?] {
+  if (!callOptions) {
+    return [body];
+  }
+
+  const { literalaiTags, literalaiMetadata, ...restCallOptions } = callOptions;
+  return [body, restCallOptions];
+}
 
 // Utility function to wrap a method
-function wrapFunction<T extends any[], R>(
-  originalFunction: OriginalFunction<T, R>,
+function wrapFunction<Output>(
+  originalFunction: OriginalFunction<Output>,
   client: LiteralClient,
-  options: InstrumentOpenAIOptions = {}
-): OriginalFunction<T, R> {
-  return async function (this: any, ...args: T): Promise<R> {
+  globalOptions: OpenAIGlobalOptions = {}
+) {
+  return async function (
+    this: any,
+    body: any,
+    callOptions?: RequestOptions & OpenAICallOptions
+  ): Promise<Output> {
     const start = Date.now();
 
     // Call the original function
-    const result = await originalFunction.apply(this, args);
+    const result = await originalFunction.apply(
+      this,
+      cleanOpenAIArgs(body, callOptions)
+    );
 
     if (result instanceof Stream) {
       const streamResult = result;
       const [returnedResult, processedResult] = streamResult.tee();
 
       await processOpenAIOutput(client, processedResult, {
-        ...options,
+        globalOptions,
+        callOptions,
         start,
-        inputs: args[0]
+        inputs: body
       });
 
-      return returnedResult as R;
+      return returnedResult as Output;
     } else {
       await processOpenAIOutput(client, result as ChatCompletion | Completion, {
-        ...options,
+        globalOptions,
+        callOptions,
         start,
-        inputs: args[0]
+        inputs: body
       });
 
       return result;
@@ -56,32 +87,51 @@ function instrumentOpenAI(
   client: LiteralClient,
-  options: InstrumentOpenAIOptions = {}
+  options: OpenAIGlobalOptions = {}
 ) {
   // Patching the chat.completions.create function
   const originalChatCompletionsCreate =
     OpenAI.Chat.Completions.prototype.create;
-  OpenAI.Chat.Completions.prototype.create = wrapFunction(
+  const wrappedChatCompletionsCreate = wrapFunction(
     originalChatCompletionsCreate,
     client,
     options
-  ) as any;
+  );
 
   // Patching the completions.create function
   const originalCompletionsCreate = OpenAI.Completions.prototype.create;
-  OpenAI.Completions.prototype.create = wrapFunction(
+  const wrappedCompletionsCreate = wrapFunction(
    originalCompletionsCreate,
     client,
     options
-  ) as any;
+  );
 
   // Patching the images.generate function
   const originalImagesGenerate = OpenAI.Images.prototype.generate;
-  OpenAI.Images.prototype.generate = wrapFunction(
+  const wrappedImagesGenerate = wrapFunction(
     originalImagesGenerate,
     client,
     options
-  ) as any;
+  );
+
+  OpenAI.Chat.Completions.prototype.create =
+    wrappedChatCompletionsCreate as any;
+  OpenAI.Completions.prototype.create = wrappedCompletionsCreate as any;
+  OpenAI.Images.prototype.generate = wrappedImagesGenerate as any;
+
+  return {
+    chat: {
+      completions: {
+        create: wrappedChatCompletionsCreate
+      }
+    },
+    completions: {
+      create: wrappedCompletionsCreate
+    },
+    images: {
+      generate: wrappedImagesGenerate
+    }
+  };
 }
 
 function processChatDelta(
@@ -277,11 +327,9 @@ export type OpenAIOutput =
   | Stream<ChatCompletionChunk | Completion>
   | ImagesResponse;
 
-export interface InstrumentOpenAIOptions {
-  tags?: Maybe<string[]>;
-}
-
-export interface ProcessOpenAIOutput extends InstrumentOpenAIOptions {
+export interface ProcessOpenAIOutput {
+  globalOptions: Maybe<OpenAIGlobalOptions>;
+  callOptions: Maybe<OpenAICallOptions>;
   start: number;
   inputs: Record<string, any>;
 }
@@ -299,13 +347,22 @@ function isStream(obj: any): boolean {
 const processOpenAIOutput = async (
   client: LiteralClient,
   output: OpenAIOutput,
-  { start, tags, inputs }: ProcessOpenAIOutput
+  { start, globalOptions, callOptions, inputs }: ProcessOpenAIOutput
 ) => {
+  const tags = [
+    ...(globalOptions?.tags ?? []),
+    ...(callOptions?.literalaiTags ?? [])
+  ];
+  const metadata = {
+    ...globalOptions?.metadata,
+    ...callOptions?.literalaiMetadata
+  };
+
   const baseGeneration = {
     provider: 'openai',
     model: inputs.model,
     settings: getSettings(inputs),
-    tags: tags
+    tags
   };
 
   const threadFromStore = client._currentThread();
@@ -322,7 +379,8 @@ const processOpenAIOutput = async (
       output: output,
       startTime: new Date(start).toISOString(),
       endTime: new Date().toISOString(),
-      tags: tags
+      tags,
+      metadata
     };
 
     const step = parent
@@ -366,7 +424,9 @@ const processOpenAIOutput = async (
         generation,
         output: messageCompletion,
         startTime: new Date(start).toISOString(),
-        endTime: new Date(start + metrics.duration).toISOString()
+        endTime: new Date(start + metrics.duration).toISOString(),
+        tags,
+        metadata
       });
       await step.send();
     } else {
@@ -392,7 +452,9 @@ const processOpenAIOutput = async (
         generation,
         output: { content: completion },
         startTime: new Date(start).toISOString(),
-        endTime: new Date(start + metrics.duration).toISOString()
+        endTime: new Date(start + metrics.duration).toISOString(),
+        tags,
+        metadata
       });
       await step.send();
     } else {
@@ -430,7 +492,9 @@ const processOpenAIOutput = async (
         generation,
         output: messageCompletion,
         startTime: new Date(start).toISOString(),
-        endTime: new Date().toISOString()
+        endTime: new Date().toISOString(),
+        tags,
+        metadata
       });
       await step.send();
     } else {
@@ -454,7 +518,9 @@ const processOpenAIOutput = async (
         generation,
         output: { content: completion },
         startTime: new Date(start).toISOString(),
-        endTime: new Date().toISOString()
+        endTime: new Date().toISOString(),
+        tags,
+        metadata
       });
       await step.send();
     } else {
diff --git a/tests/integration/openai.test.ts b/tests/integration/openai.test.ts
index 4391961..98853b5 100644
--- a/tests/integration/openai.test.ts
+++ b/tests/integration/openai.test.ts
@@ -115,13 +115,7 @@ describe('OpenAI Instrumentation', () => {
     const {
       data: [generation]
     } = await client.api.getGenerations({
-      filters: [
-        {
-          field: 'tags',
-          operator: 'in',
-          value: [testId]
-        }
-      ]
+      filters: [{ field: 'tags', operator: 'in', value: [testId] }]
     });
 
     step = await client.api.getStep(generation.id);
@@ -136,14 +130,8 @@ describe('OpenAI Instrumentation', () => {
 
   it("should log a generation's input & output", async () => {
     expect(generationFromStep.messages).toEqual([
-      {
-        role: 'system',
-        content: 'You are a helpful assistant.'
-      },
-      {
-        role: 'user',
-        content: 'What is the capital of Canada?'
-      }
+      { role: 'system', content: 'You are a helpful assistant.' },
+      { role: 'user', content: 'What is the capital of Canada?' }
     ]);
     expect(generationFromStep.messageCompletion).toEqual({
       role: 'assistant',
@@ -184,13 +172,7 @@ describe('OpenAI Instrumentation', () => {
     const {
       data: [generation]
     } = await client.api.getGenerations({
-      filters: [
-        {
-          field: 'tags',
-          operator: 'in',
-          value: [testId]
-        }
-      ]
+      filters: [{ field: 'tags', operator: 'in', value: [testId] }]
     });
 
     step = await client.api.getStep(generation.id);
@@ -205,14 +187,8 @@ describe('OpenAI Instrumentation', () => {
 
   it("should log a generation's input & output", async () => {
     expect(generationFromStep.messages).toEqual([
-      {
-        role: 'system',
-        content: 'You are a helpful assistant.'
-      },
-      {
-        role: 'user',
-        content: 'What is the capital of Canada?'
-      }
+      { role: 'system', content: 'You are a helpful assistant.' },
+      { role: 'user', content: 'What is the capital of Canada?' }
     ]);
     expect(generationFromStep.messageCompletion).toEqual({
       role: 'assistant',
@@ -247,13 +223,7 @@ describe('OpenAI Instrumentation', () => {
       data: [step]
     } = await client.api.getSteps({
       first: 1,
-      filters: [
-        {
-          field: 'tags',
-          operator: 'in',
-          value: [testId]
-        }
-      ]
+      filters: [{ field: 'tags', operator: 'in', value: [testId] }]
     });
 
     expect(step?.threadId).toBeNull();
@@ -267,10 +237,8 @@ describe('OpenAI Instrumentation', () => {
 
   describe('Inside of a thread or step wrapper', () => {
     it('logs the generation inside its thread and parent', async () => {
-      const testId = uuidv4();
-
       const client = new LiteralClient(apiKey, url);
-      client.instrumentation.openai({ tags: [testId] });
+      client.instrumentation.openai();
 
       let threadId: Maybe<string>;
       let parentId: Maybe<string>;
@@ -296,13 +264,7 @@ describe('OpenAI Instrumentation', () => {
        data: [step]
      } = await client.api.getSteps({
        first: 1,
-       filters: [
-         {
-           field: 'tags',
-           operator: 'in',
-           value: [testId]
-         }
-       ]
+       filters: [{ field: 'parentId', operator: 'eq', value: parentId! }]
      });
 
      expect(step?.threadId).toBe(threadId);
@@ -310,10 +272,8 @@ describe('OpenAI Instrumentation', () => {
      expect(step?.parentId).toBe(parentId);
    }, 30_000);
 
    it("doesn't mix up threads and steps", async () => {
-      const testId = uuidv4();
-
      const client = new LiteralClient(apiKey, url);
-      client.instrumentation.openai({ tags: [testId] });
+      client.instrumentation.openai();
      const firstThreadId = uuidv4();
      const secondThreadId = uuidv4();
@@ -362,36 +322,14 @@ describe('OpenAI Instrumentation', () => {
        data: [firstGeneration]
      } = await client.api.getSteps({
        first: 1,
-       filters: [
-         {
-           field: 'threadId',
-           operator: 'eq',
-           value: firstThreadId!
-         },
-         {
-           field: 'tags',
-           operator: 'in',
-           value: [testId]
-         }
-       ]
+       filters: [{ field: 'threadId', operator: 'eq', value: firstThreadId! }]
      });
 
      const {
        data: [secondGeneration]
      } = await client.api.getSteps({
        first: 1,
-       filters: [
-         {
-           field: 'threadId',
-           operator: 'eq',
-           value: secondThreadId!
-         },
-         {
-           field: 'tags',
-           operator: 'in',
-           value: [testId]
-         }
-       ]
+       filters: [{ field: 'threadId', operator: 'eq', value: secondThreadId! }]
      });
 
      expect(firstStep?.threadId).toEqual(firstThreadId);
@@ -403,4 +341,84 @@ describe('OpenAI Instrumentation', () => {
      expect(secondGeneration?.parentId).toEqual(secondStep?.id);
    }, 30_000);
  });
+
+  describe('Handling tags and metadata', () => {
+    it('handles tags and metadata on the instrumentation call', async () => {
+      const client = new LiteralClient(apiKey, url);
+      client.instrumentation.openai({
+        tags: ['tag1', 'tag2'],
+        metadata: { key: 'value' }
+      });
+
+      let parentId: Maybe<string>;
+
+      await client.thread({ name: 'openai' }).wrap(async () => {
+        return client.run({ name: 'openai' }).wrap(async () => {
+          parentId = client.getCurrentStep().id;
+
+          await openai.chat.completions.create({
+            model: 'gpt-3.5-turbo',
+            messages: [
+              { role: 'system', content: 'You are a helpful assistant.' },
+              { role: 'user', content: 'What is the capital of Canada?' }
+            ]
+          });
+        });
+      });
+
+      const {
+        data: [step]
+      } = await client.api.getSteps({
+        first: 1,
+        filters: [{ field: 'parentId', operator: 'eq', value: parentId! }]
+      });
+
+      expect(step!.tags).toEqual(expect.arrayContaining(['tag1', 'tag2']));
+      expect(step!.metadata).toEqual({ key: 'value' });
+    });
+
+    it('handles tags and metadata on the LLM call', async () => {
+      const client = new LiteralClient(apiKey, url);
+
+      const instrumentedOpenAi = client.instrumentation.openai({
+        tags: ['tag1', 'tag2'],
+        metadata: { key: 'value' }
+      });
+
+      let parentId: Maybe<string>;
+
+      await client.thread({ name: 'openai' }).wrap(async () => {
+        return client.run({ name: 'openai' }).wrap(async () => {
+          parentId = client.getCurrentStep().id;
+
+          await instrumentedOpenAi.chat.completions.create(
+            {
+              model: 'gpt-3.5-turbo',
+              messages: [
+                { role: 'system', content: 'You are a helpful assistant.' },
+                { role: 'user', content: 'What is the capital of Canada?' }
+              ]
+            },
+            {
+              literalaiTags: ['tag3', 'tag4'],
+              literalaiMetadata: { otherKey: 'otherValue' }
+            }
+          );
+        });
+      });
+
+      const {
+        data: [step]
+      } = await client.api.getSteps({
+        first: 1,
+        filters: [{ field: 'parentId', operator: 'eq', value: parentId! }]
+      });
+
+      expect(step!.tags).toEqual(
+        expect.arrayContaining(['tag1', 'tag2', 'tag3', 'tag4'])
+      );
+      expect(step!.metadata!.key).toEqual('value');
+      expect(step!.metadata!.otherKey).toEqual('otherValue');
+    });
+  });
 });

From f51c03d3161b7a780fa84b021f4c768590ffebf2 Mon Sep 17 00:00:00 2001
From: Damien BUTY
Date: Thu, 25 Jul 2024 17:38:56 +0200
Subject: [PATCH 2/2] fix: tests

---
 src/instrumentation/openai.ts    | 2 --
 tests/integration/openai.test.ts | 4 ++--
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/instrumentation/openai.ts b/src/instrumentation/openai.ts
index d1bf864..0f04a68 100644
--- a/src/instrumentation/openai.ts
+++ b/src/instrumentation/openai.ts
@@ -18,7 +18,6 @@ import {
   StepConstructor
 } from '..';
 
-// Define a generic type for the original function to be wrapped
 type OriginalFunction<Output> = (
   body: any,
   options?: RequestOptions
@@ -41,7 +40,6 @@ function cleanOpenAIArgs(
   return [body, restCallOptions];
 }
 
-// Utility function to wrap a method
 function wrapFunction<Output>(
   originalFunction: OriginalFunction<Output>,
   client: LiteralClient,
diff --git a/tests/integration/openai.test.ts b/tests/integration/openai.test.ts
index 98853b5..e88dd77 100644
--- a/tests/integration/openai.test.ts
+++ b/tests/integration/openai.test.ts
@@ -375,7 +375,7 @@ describe('OpenAI Instrumentation', () => {
 
       expect(step!.tags).toEqual(expect.arrayContaining(['tag1', 'tag2']));
       expect(step!.metadata).toEqual({ key: 'value' });
-    });
+    }, 30_000);
 
     it('handles tags and metadata on the LLM call', async () => {
       const client = new LiteralClient(apiKey, url);
@@ -419,6 +419,6 @@ describe('OpenAI Instrumentation', () => {
       );
       expect(step!.metadata!.key).toEqual('value');
       expect(step!.metadata!.otherKey).toEqual('otherValue');
-    });
+    }, 30_000);
   });
 });
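
Editor's note: a minimal usage sketch of the API these two patches introduce, for reviewers. Every identifier below (instrumentation.openai, literalaiTags, literalaiMetadata, the returned wrapped client) comes from the diffs above; the package name, API key, URL, model, and metadata values are assumed placeholders, not part of the patches.

import OpenAI from 'openai';
import { LiteralClient } from '@literalai/client'; // assumed package name

// Placeholder credentials; the integration tests pass (apiKey, url) the same way.
const literalClient = new LiteralClient(
  process.env.LITERAL_API_KEY,
  process.env.LITERAL_API_URL
);
const openai = new OpenAI();

// Global options apply to every instrumented OpenAI call. Because
// instrumentOpenAI now returns the wrapped methods, the result can be
// used directly instead of the monkey-patched `openai` instance.
const instrumentedOpenAi = literalClient.instrumentation.openai({
  tags: ['global-tag'],
  metadata: { env: 'staging' } // placeholder values
});

// Call-level options ride along as the second argument and are stripped
// by cleanOpenAIArgs before the request is forwarded to OpenAI.
// processOpenAIOutput concatenates global and call-level tags and spreads
// both metadata objects, so a call-level key wins on a name conflict.
await instrumentedOpenAi.chat.completions.create(
  {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'What is the capital of Canada?' }]
  },
  {
    literalaiTags: ['call-tag'],
    literalaiMetadata: { requestId: '1234' } // hypothetical key
  }
);

Both entry points converge in processOpenAIOutput, which is why the integration tests can assert the merged tags ['tag1', 'tag2', 'tag3', 'tag4'] and both metadata keys on the same logged step.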