diff --git a/package.json b/package.json
index 4f88ce3..be0a7e9 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@literalai/client",
-  "version": "0.0.513",
+  "version": "0.0.514",
   "description": "",
   "exports": {
     ".": {
diff --git a/src/instrumentation/vercel-sdk.ts b/src/instrumentation/vercel-sdk.ts
index c122abb..2bdde9e 100644
--- a/src/instrumentation/vercel-sdk.ts
+++ b/src/instrumentation/vercel-sdk.ts
@@ -256,12 +256,14 @@ export const makeInstrumentVercelSDK = (
     type TOptions = Options;
     type TResult = Result;
 
-    return async (
-      options: TOptions & { literalAiParent?: Step | Thread }
-    ): Promise<TResult> => {
-      const { literalAiParent: parent, ...originalOptions } = options;
+    return async (options: TOptions): Promise<TResult> => {
       const startTime = Date.now();
-      const result: TResult = await (fn as any)(originalOptions);
+      const result: TResult = await (fn as any)(options);
+
+      const threadFromStore = client._currentThread();
+      const stepFromStore = client._currentStep();
+
+      const parent = stepFromStore || threadFromStore;
 
       // Fork the stream to compute metrics
       let stream: ReadableStream;
diff --git a/tests/integration/vercel-sdk.test.ts b/tests/integration/vercel-sdk.test.ts
index 10274bc..e208dd0 100644
--- a/tests/integration/vercel-sdk.test.ts
+++ b/tests/integration/vercel-sdk.test.ts
@@ -204,44 +204,43 @@ describe('Vercel SDK Instrumentation', () => {
   it('should observe on a given thread', async () => {
     const spy = jest.spyOn(client.api, 'sendSteps');
 
-    const thread = await client.thread({ name: 'VercelSDK Test' }).upsert();
-
-    const generateTextWithLiteralAI =
-      client.instrumentation.vercel.instrument(generateText);
-
-    const result = await generateTextWithLiteralAI({
-      model: openai('gpt-3.5-turbo'),
-      prompt: 'Write a vegetarian lasagna recipe for 4 people.',
-      literalAiParent: thread
+    await client.thread({ name: 'VercelSDK Test' }).wrap(async () => {
+      const generateTextWithLiteralAI =
+        client.instrumentation.vercel.instrument(generateText);
+
+      const result = await generateTextWithLiteralAI({
+        model: openai('gpt-3.5-turbo'),
+        prompt: 'Write a vegetarian lasagna recipe for 4 people.'
+      });
+
+      expect(result.text).toBeTruthy();
+
+      // Sending message is done asynchronously
+      await new Promise((resolve) => setTimeout(resolve, 10));
+
+      expect(spy).toHaveBeenCalledWith([
+        expect.objectContaining({
+          type: 'llm',
+          name: 'gpt-3.5-turbo',
+          threadId: client._currentThread()?.id,
+          generation: expect.any(Object),
+          input: {
+            content: [
+              {
+                role: 'user',
+                content: [
+                  {
+                    text: 'Write a vegetarian lasagna recipe for 4 people.',
+                    type: 'text'
+                  }
+                ]
+              }
+            ]
+          },
+          output: { role: 'assistant', content: result.text }
+        })
+      ]);
     });
-
-    expect(result.text).toBeTruthy();
-
-    // Sending message is done asynchronously
-    await new Promise((resolve) => setTimeout(resolve, 10));
-
-    expect(spy).toHaveBeenCalledWith([
-      expect.objectContaining({
-        type: 'llm',
-        name: 'gpt-3.5-turbo',
-        threadId: thread.id,
-        generation: expect.any(Object),
-        input: {
-          content: [
-            {
-              role: 'user',
-              content: [
-                {
-                  text: 'Write a vegetarian lasagna recipe for 4 people.',
-                  type: 'text'
-                }
-              ]
-            }
-          ]
-        },
-        output: { role: 'assistant', content: result.text }
-      })
-    ]);
   });
 
   it('should monitor tools', async () => {
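
For reference, here is what a caller looks like after this change: the explicit `literalAiParent` option is gone, and the instrumentation resolves its parent from the client's async-local store (`client._currentStep() || client._currentThread()`) when the call runs inside `thread.wrap()`. This is a minimal sketch based on the updated integration test above; the `LiteralClient` constructor, the environment-based API key, and the thread name `'Lasagna Thread'` are illustrative assumptions, not part of this diff.

```ts
import { LiteralClient } from '@literalai/client';
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Assumption: the Literal AI API key is read from the environment.
const client = new LiteralClient();

// Instrument once; the wrapper now takes exactly the same options as
// `generateText` -- there is no `literalAiParent` to thread through call sites.
const generateTextWithLiteralAI =
  client.instrumentation.vercel.instrument(generateText);

async function main() {
  await client.thread({ name: 'Lasagna Thread' }).wrap(async () => {
    // Inside wrap(), the instrumented call picks up its parent from the
    // async store (current step first, then current thread), so the LLM
    // step is attached to this thread without any extra plumbing.
    const result = await generateTextWithLiteralAI({
      model: openai('gpt-3.5-turbo'),
      prompt: 'Write a vegetarian lasagna recipe for 4 people.'
    });
    return result.text;
  });
}

void main();
```

One consequence of this design worth noting: the instrumented function's signature is now identical to the original Vercel SDK function, so instrumentation can be added or removed without touching any call site, at the cost of requiring callers to establish the parent via `wrap()` rather than passing it explicitly.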