Skip to content

core[minor]: Unified model params for LS #5427

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 12 commits into from
May 17, 2024
35 changes: 32 additions & 3 deletions langchain-core/src/language_models/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,15 @@ export function createChatMessageChunkEncoderStream() {
});
}

/**
 * Standardized metadata fields reported to LangSmith that describe the
 * parameters of a single chat model invocation.
 */
export type LangSmithParams = {
  /** Name of the model provider (e.g. "openai", "anthropic"). */
  ls_provider?: string;
  /** Name of the model being invoked. */
  ls_model_name?: string;
  /** Discriminator — always "chat" for chat models. */
  ls_model_type: "chat";
  /** Sampling temperature for the call, when set. */
  ls_temperature?: number;
  /** Maximum number of tokens to generate, when set. */
  ls_max_tokens?: number;
  /** Stop sequences for the call, when provided. */
  ls_stop?: string[];
};

interface ChatModelGenerateCachedParameters<
T extends BaseChatModel<CallOptions>,
CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions
Expand Down Expand Up @@ -185,13 +194,17 @@ export abstract class BaseChatModel<
const messages = prompt.toChatMessages();
const [runnableConfig, callOptions] =
this._separateRunnableConfigFromCallOptions(options);
const inheritableMetadata = {
...this.metadata,
...this._getLsParams(callOptions),
};
const callbackManager_ = await CallbackManager.configure(
runnableConfig.callbacks,
this.callbacks,
runnableConfig.tags,
this.tags,
runnableConfig.metadata,
this.metadata,
inheritableMetadata,
{ verbose: this.verbose }
);
const extra = {
Expand Down Expand Up @@ -246,6 +259,13 @@ export abstract class BaseChatModel<
}
}

/**
 * Builds the default LangSmith tracing metadata for a call.
 * Subclasses override this to report provider- and model-specific fields.
 *
 * @param options Parsed call options for this invocation.
 * @returns Baseline LangSmith parameters (model type and stop sequences).
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  const { stop } = options;
  return {
    ls_model_type: "chat",
    ls_stop: stop,
  };
}

/** @ignore */
async _generateUncached(
messages: BaseMessageLike[][],
Expand All @@ -256,14 +276,19 @@ export abstract class BaseChatModel<
messageList.map(coerceMessageLikeToMessage)
);

const inheritableMetadata = {
...this.metadata,
...this._getLsParams(parsedOptions),
};

// create callback manager and start run
const callbackManager_ = await CallbackManager.configure(
handledOptions.callbacks,
this.callbacks,
handledOptions.tags,
this.tags,
handledOptions.metadata,
this.metadata,
inheritableMetadata,
{ verbose: this.verbose }
);
const extra = {
Expand Down Expand Up @@ -351,6 +376,10 @@ export abstract class BaseChatModel<
const baseMessages = messages.map((messageList) =>
messageList.map(coerceMessageLikeToMessage)
);
const inheritableMetadata = {
...this.metadata,
...this._getLsParams(parsedOptions),
};

// create callback manager and start run
const callbackManager_ = await CallbackManager.configure(
Expand All @@ -359,7 +388,7 @@ export abstract class BaseChatModel<
handledOptions.tags,
this.tags,
handledOptions.metadata,
this.metadata,
inheritableMetadata,
{ verbose: this.verbose }
);
const extra = {
Expand Down
13 changes: 13 additions & 0 deletions libs/langchain-anthropic/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import {
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import {
BaseChatModel,
LangSmithParams,
type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import {
Expand Down Expand Up @@ -503,6 +504,18 @@ export class ChatAnthropicMessages<
this.clientOptions = fields?.clientOptions ?? {};
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this Anthropic call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  const params = this.invocationParams(options);
  return {
    // Fix: this is the Anthropic integration, not OpenAI — the provider
    // was copy-pasted as "openai", which misattributes traces in LangSmith.
    ls_provider: "anthropic",
    ls_model_name: this.model,
    ls_model_type: "chat",
    ls_temperature: params.temperature ?? undefined,
    ls_max_tokens: params.max_tokens ?? undefined,
    ls_stop: options.stop,
  };
}

/**
* Formats LangChain StructuredTools to AnthropicTools.
*
Expand Down
10 changes: 10 additions & 0 deletions libs/langchain-cohere/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
type BaseChatModelParams,
BaseChatModel,
LangSmithParams,
} from "@langchain/core/language_models/chat_models";
import {
ChatGeneration,
Expand Down Expand Up @@ -145,6 +146,15 @@ export class ChatCohere<
this.streaming = fields?.streaming ?? this.streaming;
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 * Call options are unused: Cohere reports model-level settings only.
 *
 * @returns LangSmith parameters describing this Cohere call.
 */
protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams {
  const temperature = this.temperature ?? undefined;
  return {
    ls_provider: "cohere",
    ls_model_name: this.model,
    ls_model_type: "chat",
    ls_temperature: temperature,
  };
}

_llmType() {
return "cohere";
}
Expand Down
11 changes: 10 additions & 1 deletion libs/langchain-community/src/chat_models/fireworks.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import type {
BaseChatModelParams,
LangSmithParams,
} from "@langchain/core/language_models/chat_models";
import {
type OpenAIClient,
type ChatOpenAICallOptions,
Expand Down Expand Up @@ -101,6 +104,12 @@ export class ChatFireworks extends ChatOpenAI<ChatFireworksCallOptions> {
this.apiKey = fireworksApiKey;
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 * Reuses the OpenAI-compatible defaults, overriding only the provider.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this Fireworks call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  return {
    ...super._getLsParams(options),
    ls_provider: "fireworks",
  };
}

toJSON() {
const result = super.toJSON();

Expand Down
11 changes: 11 additions & 0 deletions libs/langchain-community/src/chat_models/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import type { BaseLanguageModelCallOptions } from "@langchain/core/language_mode
import {
SimpleChatModel,
type BaseChatModelParams,
LangSmithParams,
} from "@langchain/core/language_models/chat_models";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
Expand Down Expand Up @@ -176,6 +177,16 @@ export class ChatOllama
this.format = fields.format;
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 * Call options are unused: Ollama reports instance-level settings only.
 *
 * @returns LangSmith parameters describing this Ollama call.
 */
protected _getLsParams(_: this["ParsedCallOptions"]): LangSmithParams {
  const { model, temperature, stop } = this;
  return {
    ls_provider: "ollama",
    ls_model_name: model,
    ls_model_type: "chat",
    ls_temperature: temperature ?? undefined,
    ls_stop: stop,
  };
}

_llmType() {
return "ollama";
}
Expand Down
11 changes: 10 additions & 1 deletion libs/langchain-community/src/chat_models/togetherai.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import type {
BaseChatModelParams,
LangSmithParams,
} from "@langchain/core/language_models/chat_models";
import {
type OpenAIClient,
type ChatOpenAICallOptions,
Expand Down Expand Up @@ -113,6 +116,12 @@ export class ChatTogetherAI extends ChatOpenAI<ChatTogetherAICallOptions> {
});
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 * Reuses the OpenAI-compatible defaults, overriding only the provider.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this TogetherAI call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  return {
    ...super._getLsParams(options),
    ls_provider: "together",
  };
}

toJSON() {
const result = super.toJSON();

Expand Down
12 changes: 12 additions & 0 deletions libs/langchain-mistralai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
type BaseChatModelParams,
BaseChatModel,
LangSmithParams,
} from "@langchain/core/language_models/chat_models";

import {
Expand Down Expand Up @@ -417,6 +418,17 @@ export class ChatMistralAI<
this.model = this.modelName;
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this Mistral call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  const invocationParams = this.invocationParams(options);
  return {
    ls_provider: "mistral",
    ls_model_name: this.model,
    ls_model_type: "chat",
    ls_temperature: invocationParams.temperature ?? undefined,
    ls_max_tokens: invocationParams.maxTokens ?? undefined,
    // ls_stop intentionally omitted: stop sequences are not supported here.
  };
}

_llmType() {
return "mistral_ai";
}
Expand Down
11 changes: 10 additions & 1 deletion libs/langchain-openai/src/azure/chat_models.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
import { type ClientOptions, AzureOpenAI as AzureOpenAIClient } from "openai";
import { type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import {
LangSmithParams,
type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import { ChatOpenAI } from "../chat_models.js";
import { OpenAIEndpointConfig, getEndpoint } from "../utils/azure.js";
import {
Expand Down Expand Up @@ -43,6 +46,12 @@ export class AzureChatOpenAI extends ChatOpenAI {
super(newFields);
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 * Reuses the OpenAI defaults, overriding only the provider.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this Azure OpenAI call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  return {
    ...super._getLsParams(options),
    ls_provider: "azure",
  };
}

protected _getClientOptions(options: OpenAICoreRequestOptions | undefined) {
if (!this.client) {
const openAIEndpointConfig: OpenAIEndpointConfig = {
Expand Down
13 changes: 13 additions & 0 deletions libs/langchain-openai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import { type StructuredToolInterface } from "@langchain/core/tools";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
import {
BaseChatModel,
LangSmithParams,
type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import type {
Expand Down Expand Up @@ -490,6 +491,18 @@ export class ChatOpenAI<
};
}

/**
 * Builds LangSmith tracing metadata for a call to this model.
 *
 * @param options Parsed call options for this invocation.
 * @returns LangSmith parameters describing this OpenAI call.
 */
protected _getLsParams(options: this["ParsedCallOptions"]): LangSmithParams {
  const { temperature, max_tokens: maxTokens } = this.invocationParams(options);
  return {
    ls_provider: "openai",
    ls_model_name: this.model,
    ls_model_type: "chat",
    ls_temperature: temperature ?? undefined,
    ls_max_tokens: maxTokens ?? undefined,
    ls_stop: options.stop,
  };
}

override bindTools(
tools: (Record<string, unknown> | StructuredToolInterface)[],
kwargs?: Partial<CallOptions>
Expand Down
Loading