diff --git a/examples/src/models/chat/integration_webllm.ts b/examples/src/models/chat/integration_webllm.ts index 684e29f4fa13..d0d8b2b4de12 100644 --- a/examples/src/models/chat/integration_webllm.ts +++ b/examples/src/models/chat/integration_webllm.ts @@ -19,6 +19,10 @@ const model = new ChatWebLLM({ }, }); +await model.initialize((progress: Record) => { + console.log(progress); +}); + // Call the model with a message and await the response. const response = await model.invoke([ new HumanMessage({ content: "What is 1 + 1?" }), diff --git a/libs/langchain-community/package.json b/libs/langchain-community/package.json index a7bce52ab393..9432a52c6830 100644 --- a/libs/langchain-community/package.json +++ b/libs/langchain-community/package.json @@ -82,7 +82,7 @@ "@langchain/standard-tests": "0.0.0", "@layerup/layerup-security": "^1.5.12", "@mendable/firecrawl-js": "^0.0.13", - "@mlc-ai/web-llm": "^0.2.40", + "@mlc-ai/web-llm": "0.2.46", "@mozilla/readability": "^0.4.4", "@neondatabase/serverless": "^0.9.1", "@notionhq/client": "^2.2.10", @@ -241,7 +241,7 @@ "@huggingface/inference": "^2.6.4", "@layerup/layerup-security": "^1.5.12", "@mendable/firecrawl-js": "^0.0.13", - "@mlc-ai/web-llm": "^0.2.40", + "@mlc-ai/web-llm": "0.2.46", "@mozilla/readability": "*", "@neondatabase/serverless": "*", "@notionhq/client": "^2.2.10", diff --git a/libs/langchain-community/src/chat_models/webllm.ts b/libs/langchain-community/src/chat_models/webllm.ts index 4edf6cc46375..cf2cec8f9094 100644 --- a/libs/langchain-community/src/chat_models/webllm.ts +++ b/libs/langchain-community/src/chat_models/webllm.ts @@ -63,7 +63,9 @@ export class ChatWebLLM extends SimpleChatModel { this.chatOptions = inputs.chatOptions; this.model = inputs.model; this.temperature = inputs.temperature; - this.engine = new webllm.MLCEngine(); + this.engine = new webllm.MLCEngine({ + appConfig: this.appConfig, + }); } _llmType() { @@ -74,15 +76,11 @@ export class ChatWebLLM extends SimpleChatModel { if (progressCallback !== undefined) { this.engine.setInitProgressCallback(progressCallback); } - await this.reload(this.model, this.chatOptions, this.appConfig); + await this.reload(this.model, this.chatOptions); } - async reload( - modelId: string, - newChatOpts?: webllm.ChatOptions, - newAppConfig?: webllm.AppConfig - ) { - await this.engine.reload(modelId, newChatOpts, newAppConfig); + async reload(modelId: string, newChatOpts?: webllm.ChatOptions) { + await this.engine.reload(modelId, newChatOpts); } async *_streamResponseChunks( diff --git a/libs/langchain-community/src/memory/tests/motorhead_memory.test.ts b/libs/langchain-community/src/memory/tests/motorhead_memory.test.ts index 1ab77a1a318e..b8b630339568 100644 --- a/libs/langchain-community/src/memory/tests/motorhead_memory.test.ts +++ b/libs/langchain-community/src/memory/tests/motorhead_memory.test.ts @@ -2,7 +2,7 @@ import { test, expect, jest } from "@jest/globals"; import { HumanMessage, AIMessage } from "@langchain/core/messages"; import { MotorheadMemory } from "../motorhead_memory.js"; -test("Test motörhead memory", async () => { +test.skip("Test motörhead memory", async () => { global.fetch = jest.fn(() => Promise.resolve({ json: () => @@ -28,7 +28,8 @@ test("Test motörhead memory", async () => { expect(result2).toStrictEqual({ history: expectedString }); }); -test("Test motörhead memory with pre-loaded history", async () => { +// Flaky +test.skip("Test motörhead memory with pre-loaded history", async () => { const pastMessages = [ new HumanMessage("My name is Ozzy"), new AIMessage("Nice to meet you, Ozzy!"), diff --git a/yarn.lock b/yarn.lock index 9932c040b9f8..3d1d999e0e10 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9603,7 +9603,7 @@ __metadata: "@langchain/standard-tests": 0.0.0 "@layerup/layerup-security": ^1.5.12 "@mendable/firecrawl-js": ^0.0.13 - "@mlc-ai/web-llm": ^0.2.40 + "@mlc-ai/web-llm": 0.2.46 "@mozilla/readability": ^0.4.4 "@neondatabase/serverless": ^0.9.1 "@notionhq/client": ^2.2.10 @@ -9770,7 +9770,7 @@ __metadata: "@huggingface/inference": ^2.6.4 "@layerup/layerup-security": ^1.5.12 "@mendable/firecrawl-js": ^0.0.13 - "@mlc-ai/web-llm": ^0.2.40 + "@mlc-ai/web-llm": 0.2.46 "@mozilla/readability": "*" "@neondatabase/serverless": "*" "@notionhq/client": ^2.2.10 @@ -10998,12 +10998,12 @@ __metadata: languageName: node linkType: hard -"@mlc-ai/web-llm@npm:^0.2.40": - version: 0.2.40 - resolution: "@mlc-ai/web-llm@npm:0.2.40" +"@mlc-ai/web-llm@npm:0.2.46": + version: 0.2.46 + resolution: "@mlc-ai/web-llm@npm:0.2.46" dependencies: loglevel: ^1.9.1 - checksum: 44d46178f7b7f899893ee8096fd4188b8c343589a10428c52f87b1b7e708f7a94b2b6315c8a6f8075f14d6d92aebfd8afc7f6d049a2ef60f8b8dc950b98a82e2 + checksum: 09c83a45d7f9351ae492d8704fe580868d0b46b640eca232ebc76d552f2ffad031c9c504a0d29f69122029478af270eeeda0800e7fb032b00c11dc1632e1ae11 languageName: node linkType: hard