
Commit d38e9d6

fix(openai): fix streaming in openai (langchain-ai#8995)
Parent: df2b2fb

4 files changed: +42 -9 lines

.changeset/metal-camels-join.md (5 additions, 0 deletions)

```diff
@@ -0,0 +1,5 @@
+---
+"@langchain/openai": patch
+---
+
+fix(openai): fix streaming in openai
```

libs/langchain-openai/src/chat_models.ts (1 addition, 1 deletion)

```diff
@@ -806,7 +806,7 @@ export abstract class BaseChatOpenAI<
     this.verbosity = fields?.verbosity ?? this.verbosity;
 
     // disable streaming in BaseChatModel if explicitly disabled
-    if (this.streaming === false) this.disableStreaming = true;
+    if (fields?.streaming === false) this.disableStreaming = true;
     if (this.disableStreaming === true) this.streaming = false;
 
     this.streamUsage = fields?.streamUsage ?? this.streamUsage;
```
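Why this one-line change matters, reading the hunk together with the updated unit tests below: `this.streaming` defaults to `false`, so checking the instance field meant the branch fired for every instance constructed without a `streaming` option, setting `disableStreaming = true` and suppressing real streaming at the BaseChatModel level. Checking `fields?.streaming` instead only disables streaming when the caller passes `streaming: false` explicitly. A minimal usage sketch (model name illustrative, behavior as inferred from the diff and tests):

```ts
import { ChatOpenAI } from "@langchain/openai";

const chat = new ChatOpenAI({ model: "gpt-4o-mini" });

// Before the fix: the constructor read the defaulted instance field and
// treated every instance as if the caller had passed `streaming: false`,
// so disableStreaming ended up true and .stream() fell back to a
// non-streaming call. After the fix, only an explicit `streaming: false`
// in the constructor fields disables streaming.
const stream = await chat.stream("What time is it?");
for await (const chunk of stream) {
  console.log(chunk.content);
}
```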

libs/langchain-openai/src/tests/chat_models-extended.int.test.ts (30 additions, 2 deletions)

```diff
@@ -1,7 +1,12 @@
 /* eslint-disable no-promise-executor-return */
 /* eslint-disable @typescript-eslint/no-explicit-any */
 import { test, expect, jest } from "@jest/globals";
-import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";
+import {
+  AIMessage,
+  AIMessageChunk,
+  HumanMessage,
+  ToolMessage,
+} from "@langchain/core/messages";
 import { concat } from "@langchain/core/utils/stream";
 import { InMemoryCache } from "@langchain/core/caches";
 import { ChatOpenAI } from "../chat_models.js";
@@ -746,15 +751,38 @@ test("Test ChatOpenAI tool calling with empty schema in streaming vs non-streami
 
   // Test streaming mode - this should also work but currently doesn't
   const stream = await llmWithTools.stream(dialogs);
-  let finalChunk;
+  let finalChunk: AIMessageChunk | undefined;
+  const chunks = [];
   for await (const chunk of stream) {
+    chunks.push(chunk);
     if (!finalChunk) {
       finalChunk = chunk;
     } else {
       finalChunk = finalChunk.concat(chunk);
     }
   }
 
+  expect(chunks.length).toBeGreaterThanOrEqual(4);
+  expect(finalChunk?.tool_calls).toHaveLength(1);
+  expect(finalChunk?.tool_calls?.[0]).toEqual(
+    expect.objectContaining({
+      name: "get_current_time",
+      args: expect.any(Object),
+      id: expect.any(String),
+      type: "tool_call",
+    })
+  );
+  expect(finalChunk?.tool_call_chunks).toHaveLength(1);
+  expect(finalChunk?.tool_call_chunks?.[0]).toEqual(
+    expect.objectContaining({
+      name: "get_current_time",
+      args: "{}",
+      id: expect.any(String),
+      index: 0,
+      type: "tool_call_chunk",
+    })
+  );
+
   // This should pass but currently fails due to the bug
   expect(finalChunk?.tool_calls).toBeDefined();
   expect(finalChunk?.tool_calls?.length).toBeGreaterThan(0);
```
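As background for the assertions above, here is a minimal sketch of how `AIMessageChunk.concat` accumulates streamed tool-call fragments; the chunk contents and id are invented for illustration, and the merge-by-`index` behavior is as I understand `@langchain/core`, not something stated in this commit:

```ts
import { AIMessageChunk } from "@langchain/core/messages";

// Two fragments of the same tool call, as a streaming API might emit them.
// Both carry index 0, so concat() merges them into one tool_call_chunk.
const first = new AIMessageChunk({
  content: "",
  tool_call_chunks: [
    {
      name: "get_current_time",
      args: "",
      id: "call_abc", // illustrative id
      index: 0,
      type: "tool_call_chunk",
    },
  ],
});
const rest = new AIMessageChunk({
  content: "",
  tool_call_chunks: [{ args: "{}", index: 0, type: "tool_call_chunk" }],
});

const merged = first.concat(rest);
// Once the accumulated args string is complete JSON, the merged chunk also
// exposes a parsed entry in tool_calls, which is what the test checks.
console.log(merged.tool_call_chunks); // one chunk with args "{}"
console.log(merged.tool_calls); // [{ name: "get_current_time", args: {}, ... }]
```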

libs/langchain-openai/src/tests/chat_models.test.ts (6 additions, 6 deletions)

```diff
@@ -12,19 +12,19 @@ describe("ChatOpenAI", () => {
     let chat = new ChatOpenAI({
       model: "gpt-4o-mini",
     });
-    expect(chat.disableStreaming).toBe(true);
+    expect(chat.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     chat = new ChatOpenAI({
       model: "gpt-4o-mini",
       disableStreaming: undefined,
     } as any);
-    expect(chat.disableStreaming).toBe(true);
+    expect(chat.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     chat = new ChatOpenAI({
       model: "gpt-4o-mini",
       disableStreaming: false,
     });
-    expect(chat.disableStreaming).toBe(true);
+    expect(chat.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     chat = new ChatOpenAI({
       model: "gpt-4o-mini",
@@ -36,19 +36,19 @@ describe("ChatOpenAI", () => {
       model: "gpt-4o-mini",
       disableStreaming: null,
     } as any);
-    expect(chatWithNull.disableStreaming).toBe(true);
+    expect(chatWithNull.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     const chatWithZero = new ChatOpenAI({
       model: "gpt-4o-mini",
       disableStreaming: 0,
     } as any);
-    expect(chatWithZero.disableStreaming).toBe(true);
+    expect(chatWithZero.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     const chatWithEmptyString = new ChatOpenAI({
       model: "gpt-4o-mini",
       disableStreaming: "",
     } as any);
-    expect(chatWithEmptyString.disableStreaming).toBe(true);
+    expect(chatWithEmptyString.disableStreaming).toBe(false);
     expect(chat.streaming).toBe(false);
     chat = new ChatOpenAI({
       model: "gpt-4o-mini",
```
