From 6541a639a0abe6292eb6d5de4d2e039f53d6bf5d Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 14:41:52 -0700 Subject: [PATCH 01/10] core[minor]: Unified model params for LS --- langchain-core/src/language_models/chat_models.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts index 34484da1ae34..770dfd734070 100644 --- a/langchain-core/src/language_models/chat_models.ts +++ b/langchain-core/src/language_models/chat_models.ts @@ -80,6 +80,15 @@ export function createChatMessageChunkEncoderStream() { }); } +type LangSmithParams = { + ls_provider: string; + ls_model_name: string + ls_model_type: "chat"; + ls_temperature?: number; + ls_max_tokens?: number; + ls_stop?: Array; +} + interface ChatModelGenerateCachedParameters< T extends BaseChatModel, CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions From 26bf38a4beae73c5888708cf300dad11117665e8 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 15:34:42 -0700 Subject: [PATCH 02/10] add to openai, anthropic and together --- .../src/language_models/chat_models.ts | 34 +++++++++++++++---- libs/langchain-anthropic/src/chat_models.ts | 13 +++++++ .../src/chat_models/togetherai.ts | 11 +++++- .../langchain-openai/src/azure/chat_models.ts | 11 +++++- libs/langchain-openai/src/chat_models.ts | 13 +++++++ 5 files changed, 73 insertions(+), 9 deletions(-) diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts index 770dfd734070..b61dd5796aa0 100644 --- a/langchain-core/src/language_models/chat_models.ts +++ b/langchain-core/src/language_models/chat_models.ts @@ -80,14 +80,14 @@ export function createChatMessageChunkEncoderStream() { }); } -type LangSmithParams = { - ls_provider: string; - ls_model_name: string +export type LangSmithParams = { + ls_provider?: string; + ls_model_name?: string; ls_model_type: 
"chat"; ls_temperature?: number; ls_max_tokens?: number; ls_stop?: Array; -} +}; interface ChatModelGenerateCachedParameters< T extends BaseChatModel, @@ -194,13 +194,17 @@ export abstract class BaseChatModel< const messages = prompt.toChatMessages(); const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options); + const inheritableMetadata = { + ...this.metadata, + ...this._getLsParams(callOptions), + }; const callbackManager_ = await CallbackManager.configure( runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, - this.metadata, + inheritableMetadata, { verbose: this.verbose } ); const extra = { @@ -255,6 +259,13 @@ export abstract class BaseChatModel< } } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + return { + ls_model_type: "chat", + ls_stop: options.stop, + }; + } + /** @ignore */ async _generateUncached( messages: BaseMessageLike[][], @@ -265,6 +276,11 @@ export abstract class BaseChatModel< messageList.map(coerceMessageLikeToMessage) ); + const inheritableMetadata = { + ...this.metadata, + ...this._getLsParams(parsedOptions), + }; + // create callback manager and start run const callbackManager_ = await CallbackManager.configure( handledOptions.callbacks, @@ -272,7 +288,7 @@ export abstract class BaseChatModel< handledOptions.tags, this.tags, handledOptions.metadata, - this.metadata, + inheritableMetadata, { verbose: this.verbose } ); const extra = { @@ -360,6 +376,10 @@ export abstract class BaseChatModel< const baseMessages = messages.map((messageList) => messageList.map(coerceMessageLikeToMessage) ); + const inheritableMetadata = { + ...this.metadata, + ...this._getLsParams(parsedOptions), + }; // create callback manager and start run const callbackManager_ = await CallbackManager.configure( @@ -368,7 +388,7 @@ export abstract class BaseChatModel< handledOptions.tags, this.tags, handledOptions.metadata, - this.metadata, + inheritableMetadata, { 
verbose: this.verbose } ); const extra = { diff --git a/libs/langchain-anthropic/src/chat_models.ts b/libs/langchain-anthropic/src/chat_models.ts index d39e83316e5c..cfc0270be0c8 100644 --- a/libs/langchain-anthropic/src/chat_models.ts +++ b/libs/langchain-anthropic/src/chat_models.ts @@ -20,6 +20,7 @@ import { import { getEnvironmentVariable } from "@langchain/core/utils/env"; import { BaseChatModel, + LangSmithParams, type BaseChatModelParams, } from "@langchain/core/language_models/chat_models"; import { @@ -503,6 +504,18 @@ export class ChatAnthropicMessages< this.clientOptions = fields?.clientOptions ?? {}; } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); + return { + ls_provider: "anthropic", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: params.temperature ?? undefined, + ls_max_tokens: params.max_tokens ?? undefined, + ls_stop: options.stop, + }; + } + /** * Formats LangChain StructuredTools to AnthropicTools.
* diff --git a/libs/langchain-community/src/chat_models/togetherai.ts b/libs/langchain-community/src/chat_models/togetherai.ts index bec0f1d8b9a3..1bc8bcf6aeec 100644 --- a/libs/langchain-community/src/chat_models/togetherai.ts +++ b/libs/langchain-community/src/chat_models/togetherai.ts @@ -1,4 +1,7 @@ -import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models"; +import type { + BaseChatModelParams, + LangSmithParams, +} from "@langchain/core/language_models/chat_models"; import { type OpenAIClient, type ChatOpenAICallOptions, @@ -113,6 +116,12 @@ export class ChatTogetherAI extends ChatOpenAI { }); } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super._getLsParams(options); + params.ls_provider = "together"; + return params; + } + toJSON() { const result = super.toJSON(); diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts index 49db684d53b0..1ea1e8765aa4 100644 --- a/libs/langchain-openai/src/azure/chat_models.ts +++ b/libs/langchain-openai/src/azure/chat_models.ts @@ -1,5 +1,8 @@ import { type ClientOptions, AzureOpenAI as AzureOpenAIClient } from "openai"; -import { type BaseChatModelParams } from "@langchain/core/language_models/chat_models"; +import { + LangSmithParams, + type BaseChatModelParams, +} from "@langchain/core/language_models/chat_models"; import { ChatOpenAI } from "../chat_models.js"; import { OpenAIEndpointConfig, getEndpoint } from "../utils/azure.js"; import { @@ -43,6 +46,12 @@ export class AzureChatOpenAI extends ChatOpenAI { super(newFields); } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super._getLsParams(options); + params.ls_provider = "azure"; + return params; + } + protected _getClientOptions(options: OpenAICoreRequestOptions | undefined) { if (!this.client) { const openAIEndpointConfig: OpenAIEndpointConfig = { diff --git 
a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts index 8f4d607ebd9f..26683049d4c9 100644 --- a/libs/langchain-openai/src/chat_models.ts +++ b/libs/langchain-openai/src/chat_models.ts @@ -24,6 +24,7 @@ import { type StructuredToolInterface } from "@langchain/core/tools"; import { getEnvironmentVariable } from "@langchain/core/utils/env"; import { BaseChatModel, + LangSmithParams, type BaseChatModelParams, } from "@langchain/core/language_models/chat_models"; import type { @@ -490,6 +491,18 @@ export class ChatOpenAI< }; } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); + return { + ls_provider: "openai", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: params.temperature ?? undefined, + ls_max_tokens: params.max_tokens ?? undefined, + ls_stop: options.stop, + }; + } + override bindTools( tools: (Record | StructuredToolInterface)[], kwargs?: Partial From 7aa7a7f03a11244358f8bc9a29e669e72655b99c Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 15:42:03 -0700 Subject: [PATCH 03/10] mistral --- libs/langchain-mistralai/src/chat_models.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/libs/langchain-mistralai/src/chat_models.ts b/libs/langchain-mistralai/src/chat_models.ts index 95fde9a21e4b..3e8dbd65eafd 100644 --- a/libs/langchain-mistralai/src/chat_models.ts +++ b/libs/langchain-mistralai/src/chat_models.ts @@ -33,6 +33,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; import { type BaseChatModelParams, BaseChatModel, + LangSmithParams, } from "@langchain/core/language_models/chat_models"; import { @@ -417,6 +418,17 @@ export class ChatMistralAI< this.model = this.modelName; } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); + return { + ls_provider: "mistral", + ls_model_name: 
this.model, + ls_model_type: "chat", + ls_temperature: params.temperature ?? undefined, + ls_max_tokens: params.maxTokens ?? undefined, + }; + } + _llmType() { return "mistral_ai"; } From c6a7ccf55e18c5ebf456bdb09d3ed173f1f6fdd8 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 15:53:54 -0700 Subject: [PATCH 04/10] cohere fireworks ollama --- libs/langchain-cohere/src/chat_models.ts | 10 ++++++++++ libs/langchain-community/src/chat_models/fireworks.ts | 11 ++++++++++- libs/langchain-community/src/chat_models/ollama.ts | 11 +++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/libs/langchain-cohere/src/chat_models.ts b/libs/langchain-cohere/src/chat_models.ts index 37253653db2f..42cdc619c8a5 100644 --- a/libs/langchain-cohere/src/chat_models.ts +++ b/libs/langchain-cohere/src/chat_models.ts @@ -11,6 +11,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; import { type BaseChatModelParams, BaseChatModel, + LangSmithParams, } from "@langchain/core/language_models/chat_models"; import { ChatGeneration, @@ -145,6 +146,15 @@ export class ChatCohere< this.streaming = fields?.streaming ?? this.streaming; } + protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + return { + ls_provider: "cohere", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: this.temperature ?? 
undefined, + }; + } + _llmType() { return "cohere"; } diff --git a/libs/langchain-community/src/chat_models/fireworks.ts b/libs/langchain-community/src/chat_models/fireworks.ts index 8349da0debfb..9cc6106d3b70 100644 --- a/libs/langchain-community/src/chat_models/fireworks.ts +++ b/libs/langchain-community/src/chat_models/fireworks.ts @@ -1,4 +1,7 @@ -import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models"; +import type { + BaseChatModelParams, + LangSmithParams, +} from "@langchain/core/language_models/chat_models"; import { type OpenAIClient, type ChatOpenAICallOptions, @@ -101,6 +104,12 @@ export class ChatFireworks extends ChatOpenAI { this.apiKey = fireworksApiKey; } + protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super._getLsParams(options); + params.ls_provider = "fireworks"; + return params; + } + toJSON() { const result = super.toJSON(); diff --git a/libs/langchain-community/src/chat_models/ollama.ts b/libs/langchain-community/src/chat_models/ollama.ts index efd63a16536e..10ed4adcb0db 100644 --- a/libs/langchain-community/src/chat_models/ollama.ts +++ b/libs/langchain-community/src/chat_models/ollama.ts @@ -2,6 +2,7 @@ import type { BaseLanguageModelCallOptions } from "@langchain/core/language_mode import { SimpleChatModel, type BaseChatModelParams, + LangSmithParams, } from "@langchain/core/language_models/chat_models"; import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; import { @@ -176,6 +177,16 @@ export class ChatOllama this.format = fields.format; } + protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + return { + ls_provider: "ollama", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: this.temperature ?? 
undefined, + ls_stop: this.stop, + }; + } + _llmType() { return "ollama"; } From 0a13e601259dc68deb5d0e5a2212e63105035071 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 16:06:36 -0700 Subject: [PATCH 05/10] fix tests --- .../runnables/tests/runnable_stream_events.test.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/langchain-core/src/runnables/tests/runnable_stream_events.test.ts b/langchain-core/src/runnables/tests/runnable_stream_events.test.ts index 2f1f96aae969..adaf0905baac 100644 --- a/langchain-core/src/runnables/tests/runnable_stream_events.test.ts +++ b/langchain-core/src/runnables/tests/runnable_stream_events.test.ts @@ -546,6 +546,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { foo: "bar", a: "b", + ls_model_type: "chat", + ls_stop: undefined, }, data: { input: { @@ -562,6 +564,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { a: "b", foo: "bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("R") }, @@ -583,6 +587,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { a: "b", foo: "bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("O") }, @@ -604,6 +610,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { a: "b", foo: "bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("A") }, @@ -625,6 +633,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { a: "b", foo: "bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("R") }, @@ -647,6 +657,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { foo: "bar", a: "b", + ls_model_type: "chat", + ls_stop: undefined, }, data: { input: { 
From bbd8dbcc8b15ffd996b63079f8f5dfb2ac2af875 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 16:08:44 -0700 Subject: [PATCH 06/10] remove underscore prefix --- langchain-core/src/language_models/chat_models.ts | 8 ++++---- libs/langchain-anthropic/src/chat_models.ts | 2 +- libs/langchain-cohere/src/chat_models.ts | 2 +- libs/langchain-community/src/chat_models/fireworks.ts | 4 ++-- libs/langchain-community/src/chat_models/ollama.ts | 2 +- libs/langchain-community/src/chat_models/togetherai.ts | 4 ++-- libs/langchain-mistralai/src/chat_models.ts | 2 +- libs/langchain-openai/src/azure/chat_models.ts | 4 ++-- libs/langchain-openai/src/chat_models.ts | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts index b61dd5796aa0..d20df6fa67c6 100644 --- a/langchain-core/src/language_models/chat_models.ts +++ b/langchain-core/src/language_models/chat_models.ts @@ -196,7 +196,7 @@ export abstract class BaseChatModel< this._separateRunnableConfigFromCallOptions(options); const inheritableMetadata = { ...this.metadata, - ...this._getLsParams(callOptions), + ...this.getLsParams(callOptions), }; const callbackManager_ = await CallbackManager.configure( runnableConfig.callbacks, @@ -259,7 +259,7 @@ export abstract class BaseChatModel< } } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { return { ls_model_type: "chat", ls_stop: options.stop, @@ -278,7 +278,7 @@ export abstract class BaseChatModel< const inheritableMetadata = { ...this.metadata, - ...this._getLsParams(parsedOptions), + ...this.getLsParams(parsedOptions), }; // create callback manager and start run @@ -378,7 +378,7 @@ export abstract class BaseChatModel< ); const inheritableMetadata = { ...this.metadata, - ...this._getLsParams(parsedOptions), + 
...this.getLsParams(parsedOptions), }; // create callback manager and start run diff --git a/libs/langchain-anthropic/src/chat_models.ts b/libs/langchain-anthropic/src/chat_models.ts index cfc0270be0c8..149292d6f361 100644 --- a/libs/langchain-anthropic/src/chat_models.ts +++ b/libs/langchain-anthropic/src/chat_models.ts @@ -504,7 +504,7 @@ export class ChatAnthropicMessages< this.clientOptions = fields?.clientOptions ?? {}; } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { const params = this.invocationParams(options); return { ls_provider: "anthropic", diff --git a/libs/langchain-cohere/src/chat_models.ts b/libs/langchain-cohere/src/chat_models.ts index 42cdc619c8a5..ad9cd38e90d2 100644 --- a/libs/langchain-cohere/src/chat_models.ts +++ b/libs/langchain-cohere/src/chat_models.ts @@ -146,7 +146,7 @@ export class ChatCohere< this.streaming = fields?.streaming ?? this.streaming; } - protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { return { ls_provider: "cohere", ls_model_name: this.model, diff --git a/libs/langchain-community/src/chat_models/fireworks.ts b/libs/langchain-community/src/chat_models/fireworks.ts index 9cc6106d3b70..65d6a6588c31 100644 --- a/libs/langchain-community/src/chat_models/fireworks.ts +++ b/libs/langchain-community/src/chat_models/fireworks.ts @@ -104,8 +104,8 @@ export class ChatFireworks extends ChatOpenAI { this.apiKey = fireworksApiKey; } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { - const params = super._getLsParams(options); + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super.getLsParams(options); params.ls_provider = "fireworks"; return params; } diff --git a/libs/langchain-community/src/chat_models/ollama.ts
b/libs/langchain-community/src/chat_models/ollama.ts index 10ed4adcb0db..c2bff0fb609b 100644 --- a/libs/langchain-community/src/chat_models/ollama.ts +++ b/libs/langchain-community/src/chat_models/ollama.ts @@ -177,7 +177,7 @@ export class ChatOllama this.format = fields.format; } - protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { return { ls_provider: "ollama", ls_model_name: this.model, diff --git a/libs/langchain-community/src/chat_models/togetherai.ts b/libs/langchain-community/src/chat_models/togetherai.ts index 1bc8bcf6aeec..5488e6cf61fe 100644 --- a/libs/langchain-community/src/chat_models/togetherai.ts +++ b/libs/langchain-community/src/chat_models/togetherai.ts @@ -116,8 +116,8 @@ export class ChatTogetherAI extends ChatOpenAI { }); } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { - const params = super._getLsParams(options); + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super.getLsParams(options); params.ls_provider = "together"; return params; } diff --git a/libs/langchain-mistralai/src/chat_models.ts b/libs/langchain-mistralai/src/chat_models.ts index 3e8dbd65eafd..a1075518a8e4 100644 --- a/libs/langchain-mistralai/src/chat_models.ts +++ b/libs/langchain-mistralai/src/chat_models.ts @@ -418,7 +418,7 @@ export class ChatMistralAI< this.model = this.modelName; } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { const params = this.invocationParams(options); return { ls_provider: "mistral", diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts index 1ea1e8765aa4..3cf6952cad6e 100644 --- a/libs/langchain-openai/src/azure/chat_models.ts +++ b/libs/langchain-openai/src/azure/chat_models.ts @@ -46,8 +46,8 @@ export class 
AzureChatOpenAI extends ChatOpenAI { super(newFields); } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { - const params = super._getLsParams(options); + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = super.getLsParams(options); params.ls_provider = "azure"; return params; } diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts index 26683049d4c9..4d96fd1a3ff5 100644 --- a/libs/langchain-openai/src/chat_models.ts +++ b/libs/langchain-openai/src/chat_models.ts @@ -491,7 +491,7 @@ export class ChatOpenAI< }; } - protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { const params = this.invocationParams(options); return { ls_provider: "openai", From 3b74bbcd878fb2762b6effa643bbc62b2a42a8a8 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 16:12:00 -0700 Subject: [PATCH 07/10] fix metadata placement --- .../src/language_models/chat_models.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts index d20df6fa67c6..cf7dbc91af95 100644 --- a/langchain-core/src/language_models/chat_models.ts +++ b/langchain-core/src/language_models/chat_models.ts @@ -194,8 +194,9 @@ export abstract class BaseChatModel< const messages = prompt.toChatMessages(); const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options); + const inheritableMetadata = { - ...this.metadata, + ...runnableConfig.metadata, ...this.getLsParams(callOptions), }; const callbackManager_ = await CallbackManager.configure( @@ -203,8 +204,8 @@ export abstract class BaseChatModel< this.callbacks, runnableConfig.tags, this.tags, - runnableConfig.metadata, inheritableMetadata, + this.metadata, { verbose: this.verbose } ); const 
extra = { @@ -277,18 +278,17 @@ export abstract class BaseChatModel< ); const inheritableMetadata = { - ...this.metadata, + ...handledOptions.metadata, ...this.getLsParams(parsedOptions), }; - // create callback manager and start run const callbackManager_ = await CallbackManager.configure( handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, - handledOptions.metadata, inheritableMetadata, + this.metadata, { verbose: this.verbose } ); const extra = { @@ -376,19 +376,19 @@ export abstract class BaseChatModel< const baseMessages = messages.map((messageList) => messageList.map(coerceMessageLikeToMessage) ); + const inheritableMetadata = { - ...this.metadata, + ...handledOptions.metadata, ...this.getLsParams(parsedOptions), }; - // create callback manager and start run const callbackManager_ = await CallbackManager.configure( handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, - handledOptions.metadata, inheritableMetadata, + this.metadata, { verbose: this.verbose } ); const extra = { From 2f899888bd0bd0515019ac27ff74c11519b06fa8 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 16 May 2024 16:14:57 -0700 Subject: [PATCH 08/10] format --- langchain-core/src/language_models/chat_models.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts index cf7dbc91af95..82b83056ddd6 100644 --- a/langchain-core/src/language_models/chat_models.ts +++ b/langchain-core/src/language_models/chat_models.ts @@ -194,7 +194,7 @@ export abstract class BaseChatModel< const messages = prompt.toChatMessages(); const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options); - + const inheritableMetadata = { ...runnableConfig.metadata, ...this.getLsParams(callOptions), From 25c2c8ac5c8d5b0f7cb11174603f65a124a50072 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Fri, 17 May 2024 10:48:42 -0700 Subject: [PATCH 
09/10] fixes --- libs/langchain-cohere/src/chat_models.ts | 5 ++++- libs/langchain-community/src/chat_models/ollama.ts | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/libs/langchain-cohere/src/chat_models.ts b/libs/langchain-cohere/src/chat_models.ts index ad9cd38e90d2..b4bd240c3b99 100644 --- a/libs/langchain-cohere/src/chat_models.ts +++ b/libs/langchain-cohere/src/chat_models.ts @@ -146,12 +146,15 @@ export class ChatCohere< this.streaming = fields?.streaming ?? this.streaming; } - protected getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); return { ls_provider: "cohere", ls_model_name: this.model, ls_model_type: "chat", ls_temperature: this.temperature ?? undefined, + ls_max_tokens: typeof params.maxTokens === "number" ? params.maxTokens : undefined, + ls_stop: Array.isArray(params.stopSequences) ? params.stopSequences as unknown as string[] : undefined, }; } diff --git a/libs/langchain-community/src/chat_models/ollama.ts b/libs/langchain-community/src/chat_models/ollama.ts index c2bff0fb609b..3fa78fa9c6b2 100644 --- a/libs/langchain-community/src/chat_models/ollama.ts +++ b/libs/langchain-community/src/chat_models/ollama.ts @@ -177,13 +177,15 @@ export class ChatOllama this.format = fields.format; } - protected getLsParams(_: this["ParsedCallOptions"]): LangSmithParams { + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); return { ls_provider: "ollama", ls_model_name: this.model, ls_model_type: "chat", ls_temperature: this.temperature ?? 
undefined, ls_stop: this.stop, + ls_max_tokens: params.options.num_predict, }; } From 8074943d23ca292fde7cd3ef468c2a0be868cd1c Mon Sep 17 00:00:00 2001 From: bracesproul Date: Fri, 17 May 2024 10:54:46 -0700 Subject: [PATCH 10/10] chore: lint files --- libs/langchain-cohere/src/chat_models.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/libs/langchain-cohere/src/chat_models.ts b/libs/langchain-cohere/src/chat_models.ts index b4bd240c3b99..1aeca0175581 100644 --- a/libs/langchain-cohere/src/chat_models.ts +++ b/libs/langchain-cohere/src/chat_models.ts @@ -153,8 +153,11 @@ export class ChatCohere< ls_model_name: this.model, ls_model_type: "chat", ls_temperature: this.temperature ?? undefined, - ls_max_tokens: typeof params.maxTokens === "number" ? params.maxTokens : undefined, - ls_stop: Array.isArray(params.stopSequences) ? params.stopSequences as unknown as string[] : undefined, + ls_max_tokens: + typeof params.maxTokens === "number" ? params.maxTokens : undefined, + ls_stop: Array.isArray(params.stopSequences) + ? (params.stopSequences as unknown as string[]) + : undefined, }; }