Skip to content

Commit a014ce4

Browse files
deranga and jacoblee93 authored
openai[minor]: OpenAI - Ability to set stream_options and streamUsage when instantiating ChatOpenAI class. (#5761)
* fix: add streamOptions and cleaning up * Added tests. Included streamUsage flag * Remove unnecessary type * Moving streamUsage to instantiation * Moving streamUsage to instantiation in tests * Altering stream option to be set based on whether streamUsage/include_usage is set to true. * Keep stream_usage defaulting to true * Lint --------- Co-authored-by: jacoblee93 <[email protected]>
1 parent f7bb6d3 commit a014ce4

File tree

3 files changed

+45
-3
lines changed

3 files changed

+45
-3
lines changed

libs/langchain-openai/src/chat_models.ts

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -261,6 +261,7 @@ export interface ChatOpenAICallOptions
261261
seed?: number;
262262
/**
263263
* Additional options to pass to streamed completions.
264+
* If provided takes precedence over "streamUsage" set at initialization time.
264265
*/
265266
stream_options?: {
266267
/**
@@ -385,6 +386,8 @@ export class ChatOpenAI<
385386

386387
streaming = false;
387388

389+
streamUsage = true;
390+
388391
maxTokens?: number;
389392

390393
logprobs?: boolean;
@@ -481,6 +484,7 @@ export class ChatOpenAI<
481484
this.user = fields?.user;
482485

483486
this.streaming = fields?.streaming ?? false;
487+
this.streamUsage = fields?.streamUsage ?? this.streamUsage;
484488

485489
if (this.azureOpenAIApiKey || this.azureADTokenProvider) {
486490
if (!this.azureOpenAIApiInstanceName && !this.azureOpenAIBasePath) {
@@ -549,6 +553,12 @@ export class ChatOpenAI<
549553
)
550554
);
551555
}
556+
let streamOptionsConfig = {};
557+
if (options?.stream_options !== undefined) {
558+
streamOptionsConfig = { stream_options: options.stream_options };
559+
} else if (this.streamUsage && this.streaming) {
560+
streamOptionsConfig = { stream_options: { include_usage: true } };
561+
}
552562
const params: Omit<
553563
OpenAIClient.Chat.ChatCompletionCreateParams,
554564
"messages"
@@ -565,6 +575,7 @@ export class ChatOpenAI<
565575
logit_bias: this.logitBias,
566576
stop: options?.stop ?? this.stopSequences,
567577
user: this.user,
578+
// if include_usage is set or streamUsage then stream must be set to true.
568579
stream: this.streaming,
569580
functions: options?.functions,
570581
function_call: options?.function_call,
@@ -574,9 +585,7 @@ export class ChatOpenAI<
574585
tool_choice: options?.tool_choice,
575586
response_format: options?.response_format,
576587
seed: options?.seed,
577-
...(options?.stream_options !== undefined
578-
? { stream_options: options.stream_options }
579-
: {}),
588+
...streamOptionsConfig,
580589
parallel_tool_calls: options?.parallel_tool_calls,
581590
...this.modelKwargs,
582591
};

libs/langchain-openai/src/tests/chat_models.int.test.ts

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -828,6 +828,33 @@ test("streaming: true tokens can be found in usage_metadata field", async () =>
828828
expect(response?.usage_metadata?.total_tokens).toBeGreaterThan(0);
829829
});
830830

831+
test("streaming: streamUsage will not override stream_options", async () => {
832+
const model = new ChatOpenAI({
833+
streaming: true,
834+
});
835+
const response = await model.invoke("Hello, how are you?", {
836+
stream_options: { include_usage: false },
837+
});
838+
console.log({
839+
usage_metadata: response?.usage_metadata,
840+
});
841+
expect(response).toBeTruthy();
842+
expect(response?.usage_metadata).toBeFalsy();
843+
});
844+
845+
test("streaming: streamUsage default is true", async () => {
846+
const model = new ChatOpenAI();
847+
const response = await model.invoke("Hello, how are you?");
848+
console.log({
849+
usage_metadata: response?.usage_metadata,
850+
});
851+
expect(response).toBeTruthy();
852+
expect(response?.usage_metadata).toBeTruthy();
853+
expect(response?.usage_metadata?.input_tokens).toBeGreaterThan(0);
854+
expect(response?.usage_metadata?.output_tokens).toBeGreaterThan(0);
855+
expect(response?.usage_metadata?.total_tokens).toBeGreaterThan(0);
856+
});
857+
831858
test("populates ID field on AIMessage", async () => {
832859
const model = new ChatOpenAI();
833860
const response = await model.invoke("Hell");

libs/langchain-openai/src/types.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,12 @@ export declare interface OpenAIBaseInput {
3838
/** Whether to stream the results or not. Enabling disables tokenUsage reporting */
3939
streaming: boolean;
4040

41+
/**
42+
* Whether or not to include token usage data in streamed chunks.
43+
* @default true
44+
*/
45+
streamUsage?: boolean;
46+
4147
/**
4248
* Model name to use
4349
* Alias for `model`

0 commit comments

Comments (0)