Skip to content

Commit 79467fb

Browse files
authored
fix(google-genai): Fix Google Genai usage token (#7733)
1 parent 5f805b2 commit 79467fb

File tree

1 file changed

+7
-7
lines changed

1 file changed

+7
-7
lines changed

libs/langchain-google-genai/src/chat_models.ts

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -905,21 +905,21 @@ export class ChatGoogleGenerativeAI
905905
options.streamUsage !== false
906906
) {
907907
const genAIUsageMetadata = response.usageMetadata as {
908-
promptTokenCount: number;
909-
candidatesTokenCount: number;
910-
totalTokenCount: number;
908+
promptTokenCount: number | undefined;
909+
candidatesTokenCount: number | undefined;
910+
totalTokenCount: number | undefined;
911911
};
912912
if (!usageMetadata) {
913913
usageMetadata = {
914-
input_tokens: genAIUsageMetadata.promptTokenCount,
915-
output_tokens: genAIUsageMetadata.candidatesTokenCount,
916-
total_tokens: genAIUsageMetadata.totalTokenCount,
914+
input_tokens: genAIUsageMetadata.promptTokenCount ?? 0,
915+
output_tokens: genAIUsageMetadata.candidatesTokenCount ?? 0,
916+
total_tokens: genAIUsageMetadata.totalTokenCount ?? 0,
917917
};
918918
} else {
919919
// Under the hood, LangChain combines the prompt tokens. Google returns the updated
920920
// total each time, so we need to find the difference between the tokens.
921921
const outputTokenDiff =
922-
genAIUsageMetadata.candidatesTokenCount -
922+
(genAIUsageMetadata.candidatesTokenCount ?? 0) -
923923
usageMetadata.output_tokens;
924924
usageMetadata = {
925925
input_tokens: 0,

0 commit comments

Comments (0)