Commit 7b0dd1e

feat(core,openai): Add support for disable_streaming, set for o1 (#7503)
1 parent: f73e33e

3 files changed (+48, -3 lines)


langchain-core/src/language_models/chat_models.ts

Lines changed: 16 additions & 2 deletions
@@ -74,7 +74,18 @@ export type SerializedLLM = {
 /**
  * Represents the parameters for a base chat model.
  */
-export type BaseChatModelParams = BaseLanguageModelParams;
+export type BaseChatModelParams = BaseLanguageModelParams & {
+  /**
+   * Whether to disable streaming.
+   *
+   * If streaming is bypassed, then `stream()` will defer to
+   * `invoke()`.
+   *
+   * - If true, will always bypass streaming case.
+   * - If false (default), will always use streaming case if available.
+   */
+  disableStreaming?: boolean;
+};

 /**
  * Represents the call options for a base chat model.

@@ -152,6 +163,8 @@ export abstract class BaseChatModel<
   // Only ever instantiated in main LangChain
   lc_namespace = ["langchain", "chat_models", this._llmType()];

+  disableStreaming = false;
+
   constructor(fields: BaseChatModelParams) {
     super(fields);
   }

@@ -220,7 +233,8 @@ export abstract class BaseChatModel<
     // Subclass check required to avoid double callbacks with default implementation
     if (
       this._streamResponseChunks ===
-        BaseChatModel.prototype._streamResponseChunks
+        BaseChatModel.prototype._streamResponseChunks ||
+      this.disableStreaming
     ) {
       yield this.invoke(input, options);
     } else {
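
Taken together, the core changes let any chat model built on `BaseChatModel` opt out of token streaming: when `disableStreaming` is true, `stream()` falls back to a single `invoke()` call. A minimal usage sketch (illustrative, not part of the commit; it assumes `@langchain/openai` is installed, `OPENAI_API_KEY` is set, and uses "gpt-4o-mini" purely as a stand-in model):

import { ChatOpenAI } from "@langchain/openai";

async function main() {
  // Any BaseChatModel subclass works; "gpt-4o-mini" is just an example.
  const model = new ChatOpenAI({ model: "gpt-4o-mini" });
  // Public field introduced by this commit; defaults to false.
  model.disableStreaming = true;

  // With streaming bypassed, `stream()` defers to `invoke()`,
  // so the entire response arrives as one chunk.
  let numChunks = 0;
  for await (const chunk of await model.stream("Say hello.")) {
    console.log(chunk.content);
    numChunks += 1;
  }
  console.log(numChunks); // expected: 1
}

main();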

libs/langchain-openai/src/chat_models.ts

Lines changed: 4 additions & 0 deletions
@@ -1223,6 +1223,10 @@ export class ChatOpenAI<
       this.streamUsage = false;
     }

+    if (this.model === "o1") {
+      this.disableStreaming = true;
+    }
+
     this.streaming = fields?.streaming ?? false;
     this.streamUsage = fields?.streamUsage ?? this.streamUsage;
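
On the OpenAI side, the only wiring needed is the constructor guard above: requesting the "o1" model flips the new core flag automatically, so callers get the non-streaming fallback with no extra configuration. A quick sketch of the resulting behavior (illustrative; constructing the client still requires a valid OpenAI API key):

import { ChatOpenAI } from "@langchain/openai";

// The constructor guard sets the flag for "o1" on its own.
const model = new ChatOpenAI({ model: "o1" });
console.log(model.disableStreaming); // true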

libs/langchain-openai/src/tests/chat_models.int.test.ts

Lines changed: 28 additions & 1 deletion
@@ -1165,7 +1165,7 @@ describe("Audio output", () => {
   });
 });

-test("Can stream o1 requests", async () => {
+test("Can stream o1-mini requests", async () => {
   const model = new ChatOpenAI({
     model: "o1-mini",
   });

@@ -1192,6 +1192,33 @@ test("Can stream o1 requests", async () => {
   expect(numChunks).toBeGreaterThan(3);
 });

+test("Doesn't stream o1 requests", async () => {
+  const model = new ChatOpenAI({
+    model: "o1",
+  });
+  const stream = await model.stream(
+    "Write me a very simple hello world program in Python. Ensure it is wrapped in a function called 'hello_world' and has descriptive comments."
+  );
+  let finalMsg: AIMessageChunk | undefined;
+  let numChunks = 0;
+  for await (const chunk of stream) {
+    finalMsg = finalMsg ? concat(finalMsg, chunk) : chunk;
+    numChunks += 1;
+  }
+
+  expect(finalMsg).toBeTruthy();
+  if (!finalMsg) {
+    throw new Error("No final message found");
+  }
+  if (typeof finalMsg.content === "string") {
+    expect(finalMsg.content.length).toBeGreaterThan(10);
+  } else {
+    expect(finalMsg.content.length).toBeGreaterThanOrEqual(1);
+  }
+
+  expect(numChunks).toBe(1);
+});
+
 test("Allows developer messages with o1", async () => {
   const model = new ChatOpenAI({
     model: "o1",
