Skip to content

Commit 055c628

Browse files
committed
feat(puterai): add mistral
1 parent fd86934 commit 055c628

File tree

6 files changed

+113
-1
lines changed

6 files changed

+113
-1
lines changed

package-lock.json

+19
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/backend/package.json

+1
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
"@heyputer/kv.js": "^0.1.3",
1414
"@heyputer/multest": "^0.0.2",
1515
"@heyputer/puter-js-common": "^1.0.0",
16+
"@mistralai/mistralai": "^1.0.3",
1617
"@opentelemetry/api": "^1.4.1",
1718
"@opentelemetry/auto-instrumentations-node": "^0.43.0",
1819
"@opentelemetry/exporter-trace-otlp-grpc": "^0.40.0",
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
const { PassThrough } = require("stream");
2+
const BaseService = require("../../services/BaseService");
3+
const { TypedValue } = require("../../services/drivers/meta/Runtime");
4+
const { nou } = require("../../util/langutil");
5+
6+
/**
 * MistralAIService - Puter driver for the Mistral AI chat-completion API.
 *
 * Implements the `puter-chat-completion` driver interface using the
 * official `@mistralai/mistralai` SDK. Supports both buffered completions
 * and ndjson streaming via a TypedValue-wrapped PassThrough stream.
 */
class MistralAIService extends BaseService {
    // SDK modules resolved through BaseService's module system (this.require).
    static MODULES = {
        '@mistralai/mistralai': require('@mistralai/mistralai'),
    }

    /**
     * Service initializer: constructs the Mistral API client using the
     * `apiKey` value from this service's configuration.
     */
    async _init () {
        const require = this.require;
        const { Mistral } = require('@mistralai/mistralai');
        this.client = new Mistral({
            apiKey: this.config.apiKey,
        });
    }

    static IMPLEMENTS = {
        'puter-chat-completion': {
            /**
             * Lists the models available on the Mistral API.
             * @returns {Promise<Array>} array of model descriptors
             */
            async list () {
                // They send: { "object": "list", data }
                const funny_wrapper = await this.client.models.list();
                return funny_wrapper.data;
            },

            /**
             * Performs a chat completion.
             * @param {Object} params
             * @param {Array}  params.messages - chat messages; any message
             *   without a role is defaulted to 'user'
             * @param {boolean} params.stream - when true, returns a TypedValue
             *   wrapping an `application/x-ndjson` stream of `{ text }` chunks
             * @param {string} [params.model] - model id; defaults to
             *   'mistral-large-latest'
             * @returns {Promise<TypedValue|Object>} stream TypedValue, or the
             *   first completion choice
             */
            async complete ({ messages, stream, model }) {
                // Mistral requires a role on every message; default to 'user'.
                for ( let i = 0; i < messages.length; i++ ) {
                    const message = messages[i];
                    if ( ! message.role ) message.role = 'user';
                }

                if ( stream ) {
                    // Named `out_stream` to avoid shadowing the `stream` flag.
                    const out_stream = new PassThrough();
                    const retval = new TypedValue({
                        $: 'stream',
                        content_type: 'application/x-ndjson',
                        chunked: true,
                    }, out_stream);
                    const completion = await this.client.chat.stream({
                        model: model ?? 'mistral-large-latest',
                        messages,
                    });
                    (async () => {
                        try {
                            for await ( let chunk of completion ) {
                                // just because Mistral wants to be different
                                chunk = chunk.data;

                                if ( chunk.choices.length < 1 ) continue;
                                if ( chunk.choices[0].finish_reason ) break;
                                if ( nou(chunk.choices[0].delta.content) ) continue;
                                const str = JSON.stringify({
                                    text: chunk.choices[0].delta.content
                                });
                                out_stream.write(str + '\n');
                            }
                            // FIX: always terminate the stream, even when the
                            // iterator ends without emitting a finish_reason;
                            // previously the PassThrough could stay open forever.
                            out_stream.end();
                        } catch (e) {
                            // FIX: propagate upstream failures to the consumer
                            // instead of leaving an unhandled promise rejection
                            // and a dangling, never-ended stream.
                            out_stream.destroy(e);
                        }
                    })();
                    return retval;
                }

                try {
                    const completion = await this.client.chat.complete({
                        model: model ?? 'mistral-large-latest',
                        messages,
                    });
                    // Expected case when mistralai/client-ts#23 is fixed
                    return completion.choices[0];
                } catch (e) {
                    // FIX: optional-chain `choices` too — the original
                    // `e?.rawValue?.choices[0]` threw a TypeError whenever
                    // rawValue existed without a `choices` array, masking
                    // the real error being handled here.
                    if ( ! e?.rawValue?.choices?.[0] ) {
                        throw e;
                    }
                    // The SDK attempts to validate the API's response and throws
                    // an exception, even if the response was successful
                    // https://github.com/mistralai/client-ts/issues/23
                    return e.rawValue.choices[0];
                }
            }
        }
    }
}

module.exports = { MistralAIService };

src/backend/src/modules/puterai/PuterAIModule.js

+5
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,11 @@ class PuterAIModule extends AdvancedBase {
3838
const { TogetherAIService } = require('./TogetherAIService');
3939
services.registerService('together-ai', TogetherAIService);
4040
}
41+
42+
if ( !! config?.services?.['mistral'] ) {
43+
const { MistralAIService } = require('./MistralAIService');
44+
services.registerService('mistral', MistralAIService);
45+
}
4146
}
4247
}
4348

src/backend/src/modules/puterai/TogetherAIService.js

-1
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,6 @@ class TogetherAIService extends BaseService {
4848
}, stream);
4949
(async () => {
5050
for await ( const chunk of completion ) {
51-
console.log('IT IS THIS STRING', chunk);
5251
if ( chunk.choices.length < 1 ) continue;
5352
if ( chunk.choices[0].finish_reason ) {
5453
stream.end();

src/puter-js/src/modules/AI.js

+5
Original file line numberDiff line numberDiff line change
@@ -226,6 +226,9 @@ class AI{
226226
if( options.model === 'claude-3-5-sonnet' || options.model === 'claude'){
227227
options.model = 'claude-3-5-sonnet-20240620';
228228
}
229+
if ( options.model === 'mistral' ) {
230+
options.model = 'mistral-large-latest';
231+
}
229232

230233
// map model to the appropriate driver
231234
if (!options.model || options.model === 'gpt-4o' || options.model === 'gpt-4o-mini') {
@@ -234,6 +237,8 @@ class AI{
234237
driver = 'claude';
235238
}else if(options.model === 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' || options.model === 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' || options.model === 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' || options.model === `google/gemma-2-27b-it`){
236239
driver = 'together-ai';
240+
}else if(options.model === 'mistral-large-latest' || options.model === 'codestral-latest'){
241+
driver = 'mistral';
237242
}
238243

239244
// stream flag from settings

0 commit comments

Comments
 (0)