diff --git a/langchain-core/src/language_models/chat_models.ts b/langchain-core/src/language_models/chat_models.ts
index 34484da1ae34..82b83056ddd6 100644
--- a/langchain-core/src/language_models/chat_models.ts
+++ b/langchain-core/src/language_models/chat_models.ts
@@ -80,6 +80,15 @@ export function createChatMessageChunkEncoderStream() {
   });
 }
 
+export type LangSmithParams = {
+  ls_provider?: string;
+  ls_model_name?: string;
+  ls_model_type: "chat";
+  ls_temperature?: number;
+  ls_max_tokens?: number;
+  ls_stop?: Array<string>;
+};
+
 interface ChatModelGenerateCachedParameters<
   T extends BaseChatModel,
   CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions
@@ -185,12 +194,17 @@ export abstract class BaseChatModel<
     const messages = prompt.toChatMessages();
     const [runnableConfig, callOptions] =
       this._separateRunnableConfigFromCallOptions(options);
+
+    const inheritableMetadata = {
+      ...runnableConfig.metadata,
+      ...this.getLsParams(callOptions),
+    };
     const callbackManager_ = await CallbackManager.configure(
       runnableConfig.callbacks,
       this.callbacks,
       runnableConfig.tags,
       this.tags,
-      runnableConfig.metadata,
+      inheritableMetadata,
       this.metadata,
       { verbose: this.verbose }
     );
@@ -246,6 +260,13 @@ export abstract class BaseChatModel<
     }
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    return {
+      ls_model_type: "chat",
+      ls_stop: options.stop,
+    };
+  }
+
   /** @ignore */
   async _generateUncached(
     messages: BaseMessageLike[][],
@@ -256,13 +277,17 @@ export abstract class BaseChatModel<
       messageList.map(coerceMessageLikeToMessage)
     );
 
+    const inheritableMetadata = {
+      ...handledOptions.metadata,
+      ...this.getLsParams(parsedOptions),
+    };
     // create callback manager and start run
     const callbackManager_ = await CallbackManager.configure(
       handledOptions.callbacks,
       this.callbacks,
       handledOptions.tags,
       this.tags,
-      handledOptions.metadata,
+      inheritableMetadata,
       this.metadata,
       { verbose: this.verbose }
     );
@@ -352,13 +377,17 @@ export abstract class BaseChatModel<
       messageList.map(coerceMessageLikeToMessage)
     );
 
+    const inheritableMetadata = {
+      ...handledOptions.metadata,
+      ...this.getLsParams(parsedOptions),
+    };
     // create callback manager and start run
     const callbackManager_ = await CallbackManager.configure(
       handledOptions.callbacks,
       this.callbacks,
       handledOptions.tags,
       this.tags,
-      handledOptions.metadata,
+      inheritableMetadata,
       this.metadata,
       { verbose: this.verbose }
     );
diff --git a/langchain-core/src/runnables/tests/runnable_stream_events.test.ts b/langchain-core/src/runnables/tests/runnable_stream_events.test.ts
index 2f1f96aae969..adaf0905baac 100644
--- a/langchain-core/src/runnables/tests/runnable_stream_events.test.ts
+++ b/langchain-core/src/runnables/tests/runnable_stream_events.test.ts
@@ -546,6 +546,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         foo: "bar",
         a: "b",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       data: {
         input: {
@@ -562,6 +564,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         a: "b",
         foo: "bar",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       name: "my_model",
       data: { chunk: new AIMessageChunk("R") },
@@ -583,6 +587,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         a: "b",
         foo: "bar",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       name: "my_model",
       data: { chunk: new AIMessageChunk("O") },
@@ -604,6 +610,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         a: "b",
         foo: "bar",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       name: "my_model",
       data: { chunk: new AIMessageChunk("A") },
@@ -625,6 +633,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         a: "b",
         foo: "bar",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       name: "my_model",
       data: { chunk: new AIMessageChunk("R") },
@@ -647,6 +657,8 @@ test("Runnable streamEvents method with chat model chain", async () => {
       metadata: {
         foo: "bar",
         a: "b",
+        ls_model_type: "chat",
+        ls_stop: undefined,
       },
       data: {
         input: {
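Note on the core hunks above: all three call sites build `inheritableMetadata` the same way, spreading the model's `ls_*` params last so they win on key collisions with caller-supplied run metadata. A minimal sketch of that precedence (the helper name is assumed for illustration, not part of the diff):

```ts
// Sketch only: the merge semantics the hunks above introduce.
type Metadata = Record<string, unknown>;

function mergeInheritableMetadata(
  runMetadata: Metadata | undefined,
  lsParams: Metadata
): Metadata {
  // Mirrors `{ ...runnableConfig.metadata, ...this.getLsParams(callOptions) }`:
  // ls_* params are spread second, so they overwrite colliding keys.
  return { ...runMetadata, ...lsParams };
}

// mergeInheritableMetadata({ foo: "bar" }, { ls_model_type: "chat" })
// => { foo: "bar", ls_model_type: "chat" }
```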
"bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("A") }, @@ -625,6 +633,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { a: "b", foo: "bar", + ls_model_type: "chat", + ls_stop: undefined, }, name: "my_model", data: { chunk: new AIMessageChunk("R") }, @@ -647,6 +657,8 @@ test("Runnable streamEvents method with chat model chain", async () => { metadata: { foo: "bar", a: "b", + ls_model_type: "chat", + ls_stop: undefined, }, data: { input: { diff --git a/libs/langchain-anthropic/src/chat_models.ts b/libs/langchain-anthropic/src/chat_models.ts index d39e83316e5c..149292d6f361 100644 --- a/libs/langchain-anthropic/src/chat_models.ts +++ b/libs/langchain-anthropic/src/chat_models.ts @@ -20,6 +20,7 @@ import { import { getEnvironmentVariable } from "@langchain/core/utils/env"; import { BaseChatModel, + LangSmithParams, type BaseChatModelParams, } from "@langchain/core/language_models/chat_models"; import { @@ -503,6 +504,18 @@ export class ChatAnthropicMessages< this.clientOptions = fields?.clientOptions ?? {}; } + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); + return { + ls_provider: "openai", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: params.temperature ?? undefined, + ls_max_tokens: params.max_tokens ?? undefined, + ls_stop: options.stop, + }; + } + /** * Formats LangChain StructuredTools to AnthropicTools. * diff --git a/libs/langchain-cohere/src/chat_models.ts b/libs/langchain-cohere/src/chat_models.ts index 37253653db2f..1aeca0175581 100644 --- a/libs/langchain-cohere/src/chat_models.ts +++ b/libs/langchain-cohere/src/chat_models.ts @@ -11,6 +11,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; import { type BaseChatModelParams, BaseChatModel, + LangSmithParams, } from "@langchain/core/language_models/chat_models"; import { ChatGeneration, @@ -145,6 +146,21 @@ export class ChatCohere< this.streaming = fields?.streaming ?? this.streaming; } + protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams { + const params = this.invocationParams(options); + return { + ls_provider: "cohere", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: this.temperature ?? undefined, + ls_max_tokens: + typeof params.maxTokens === "number" ? params.maxTokens : undefined, + ls_stop: Array.isArray(params.stopSequences) + ? 
diff --git a/libs/langchain-community/src/chat_models/fireworks.ts b/libs/langchain-community/src/chat_models/fireworks.ts
index 8349da0debfb..65d6a6588c31 100644
--- a/libs/langchain-community/src/chat_models/fireworks.ts
+++ b/libs/langchain-community/src/chat_models/fireworks.ts
@@ -1,4 +1,7 @@
-import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models";
+import type {
+  BaseChatModelParams,
+  LangSmithParams,
+} from "@langchain/core/language_models/chat_models";
 import {
   type OpenAIClient,
   type ChatOpenAICallOptions,
@@ -101,6 +104,12 @@ export class ChatFireworks extends ChatOpenAI {
     this.apiKey = fireworksApiKey;
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = super.getLsParams(options);
+    params.ls_provider = "fireworks";
+    return params;
+  }
+
   toJSON() {
     const result = super.toJSON();
diff --git a/libs/langchain-community/src/chat_models/ollama.ts b/libs/langchain-community/src/chat_models/ollama.ts
index efd63a16536e..3fa78fa9c6b2 100644
--- a/libs/langchain-community/src/chat_models/ollama.ts
+++ b/libs/langchain-community/src/chat_models/ollama.ts
@@ -2,6 +2,7 @@ import type { BaseLanguageModelCallOptions } from "@langchain/core/language_mode
 import {
   SimpleChatModel,
   type BaseChatModelParams,
+  LangSmithParams,
 } from "@langchain/core/language_models/chat_models";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import {
@@ -176,6 +177,18 @@ export class ChatOllama
     this.format = fields.format;
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = this.invocationParams(options);
+    return {
+      ls_provider: "ollama",
+      ls_model_name: this.model,
+      ls_model_type: "chat",
+      ls_temperature: this.temperature ?? undefined,
+      ls_stop: this.stop,
+      ls_max_tokens: params.options.num_predict,
+    };
+  }
+
   _llmType() {
     return "ollama";
   }
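ChatFireworks (and ChatTogetherAI below) take the cheaper route available to OpenAI-compatible hosts: call `super.getLsParams(options)` and rebrand only `ls_provider`, so temperature, max tokens, and stop come along for free. That idiom reduced to plain TypeScript (class names assumed):

```ts
import type { LangSmithParams } from "@langchain/core/language_models/chat_models";

class OpenAIStyleModel {
  protected getLsParams(stop?: string[]): LangSmithParams {
    return { ls_provider: "openai", ls_model_type: "chat", ls_stop: stop };
  }
}

class CompatibleWrapper extends OpenAIStyleModel {
  protected getLsParams(stop?: string[]): LangSmithParams {
    const params = super.getLsParams(stop);
    params.ls_provider = "my-compatible-host"; // the only field that changes
    return params;
  }
}
```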
diff --git a/libs/langchain-community/src/chat_models/togetherai.ts b/libs/langchain-community/src/chat_models/togetherai.ts
index bec0f1d8b9a3..5488e6cf61fe 100644
--- a/libs/langchain-community/src/chat_models/togetherai.ts
+++ b/libs/langchain-community/src/chat_models/togetherai.ts
@@ -1,4 +1,7 @@
-import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models";
+import type {
+  BaseChatModelParams,
+  LangSmithParams,
+} from "@langchain/core/language_models/chat_models";
 import {
   type OpenAIClient,
   type ChatOpenAICallOptions,
@@ -113,6 +116,12 @@ export class ChatTogetherAI extends ChatOpenAI {
     });
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = super.getLsParams(options);
+    params.ls_provider = "together";
+    return params;
+  }
+
   toJSON() {
     const result = super.toJSON();
diff --git a/libs/langchain-mistralai/src/chat_models.ts b/libs/langchain-mistralai/src/chat_models.ts
index 95fde9a21e4b..a1075518a8e4 100644
--- a/libs/langchain-mistralai/src/chat_models.ts
+++ b/libs/langchain-mistralai/src/chat_models.ts
@@ -33,6 +33,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import {
   type BaseChatModelParams,
   BaseChatModel,
+  LangSmithParams,
 } from "@langchain/core/language_models/chat_models";
 
 import {
@@ -417,6 +418,17 @@ export class ChatMistralAI<
     this.model = this.modelName;
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = this.invocationParams(options);
+    return {
+      ls_provider: "mistral",
+      ls_model_name: this.model,
+      ls_model_type: "chat",
+      ls_temperature: params.temperature ?? undefined,
+      ls_max_tokens: params.maxTokens ?? undefined,
+    };
+  }
+
   _llmType() {
     return "mistral_ai";
   }
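The Mistral override omits `ls_stop` entirely, which works because every field of `LangSmithParams` except `ls_model_type` is optional: providers report only what they know. A quick type-level check (values illustrative):

```ts
import type { LangSmithParams } from "@langchain/core/language_models/chat_models";

const minimal: LangSmithParams = { ls_model_type: "chat" }; // type-checks

const fuller: LangSmithParams = {
  ls_model_type: "chat",
  ls_provider: "mistral",
  ls_model_name: "mistral-small",
  ls_temperature: 0.2,
  ls_max_tokens: 1024,
};
```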
diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts
index 49db684d53b0..3cf6952cad6e 100644
--- a/libs/langchain-openai/src/azure/chat_models.ts
+++ b/libs/langchain-openai/src/azure/chat_models.ts
@@ -1,5 +1,8 @@
 import { type ClientOptions, AzureOpenAI as AzureOpenAIClient } from "openai";
-import { type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
+import {
+  LangSmithParams,
+  type BaseChatModelParams,
+} from "@langchain/core/language_models/chat_models";
 import { ChatOpenAI } from "../chat_models.js";
 import { OpenAIEndpointConfig, getEndpoint } from "../utils/azure.js";
 import {
@@ -43,6 +46,12 @@ export class AzureChatOpenAI extends ChatOpenAI {
     super(newFields);
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = super.getLsParams(options);
+    params.ls_provider = "azure";
+    return params;
+  }
+
   protected _getClientOptions(options: OpenAICoreRequestOptions | undefined) {
     if (!this.client) {
       const openAIEndpointConfig: OpenAIEndpointConfig = {
diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts
index 8f4d607ebd9f..4d96fd1a3ff5 100644
--- a/libs/langchain-openai/src/chat_models.ts
+++ b/libs/langchain-openai/src/chat_models.ts
@@ -24,6 +24,7 @@ import { type StructuredToolInterface } from "@langchain/core/tools";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
 import {
   BaseChatModel,
+  LangSmithParams,
   type BaseChatModelParams,
 } from "@langchain/core/language_models/chat_models";
 import type {
@@ -490,6 +491,18 @@ export class ChatOpenAI<
     };
   }
 
+  protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
+    const params = this.invocationParams(options);
+    return {
+      ls_provider: "openai",
+      ls_model_name: this.model,
+      ls_model_type: "chat",
+      ls_temperature: params.temperature ?? undefined,
+      ls_max_tokens: params.max_tokens ?? undefined,
+      ls_stop: options.stop,
+    };
+  }
+
   override bindTools(
     tools: (Record<string, any> | StructuredToolInterface)[],
     kwargs?: Partial
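Finally, a sketch of how the new metadata reaches callers, mirroring the updated `streamEvents` test expectations above (`FakeListChatModel` ships with @langchain/core's testing utilities; the event filtering is ordinary v1 `streamEvents` usage, shown here as an assumption about typical wiring rather than code from this PR):

```ts
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["hello"] });

for await (const event of model.streamEvents("hi", { version: "v1" })) {
  if (event.event === "on_llm_start") {
    // metadata now carries ls_model_type: "chat"
    // (plus ls_stop and provider-specific ls_* fields when set).
    console.log(event.metadata);
  }
}
```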