diff --git a/core/index.d.ts b/core/index.d.ts
index 7cbf0e25bc..b923703954 100644
--- a/core/index.d.ts
+++ b/core/index.d.ts
@@ -543,7 +543,7 @@ export interface CustomLLMWithOptionals {
     signal: AbortSignal,
     options: CompletionOptions,
     fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>,
-  ) => AsyncGenerator<string>;
+  ) => AsyncGenerator<ChatMessage | string>;
   listModels?: (
     fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>,
   ) => Promise<string[]>;
diff --git a/core/llm/llms/CustomLLM.ts b/core/llm/llms/CustomLLM.ts
index 062f141f6a..e04ea0ab73 100644
--- a/core/llm/llms/CustomLLM.ts
+++ b/core/llm/llms/CustomLLM.ts
@@ -1,4 +1,5 @@
 import { ChatMessage, CompletionOptions, CustomLLM } from "../../index.js";
+import { renderChatMessage } from "../../util/messageContent.js";
 import { BaseLLM } from "../index.js";
 
 class CustomLLMClass extends BaseLLM {
@@ -18,7 +19,7 @@ class CustomLLMClass extends BaseLLM {
     signal: AbortSignal,
     options: CompletionOptions,
     fetch: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>,
-  ) => AsyncGenerator<string>;
+  ) => AsyncGenerator<ChatMessage | string>;
 
   constructor(custom: CustomLLM) {
     super(custom.options || { model: "custom" });
@@ -38,7 +39,11 @@ class CustomLLMClass extends BaseLLM {
         options,
         (...args) => this.fetch(...args),
       )) {
-        yield { role: "assistant", content };
+        if (typeof content === "string") {
+          yield { role: "assistant", content };
+        } else {
+          yield content;
+        }
       }
     } else {
       for await (const update of super._streamChat(messages, signal, options)) {
@@ -68,7 +73,11 @@ class CustomLLMClass extends BaseLLM {
         options,
         (...args) => this.fetch(...args),
       )) {
-        yield content;
+        if (typeof content === "string") {
+          yield content;
+        } else {
+          yield renderChatMessage(content);
+        }
       }
     } else {
       throw new Error(