diff --git a/package-lock.json b/package-lock.json
index 97b1540c..6f7ca745 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3401,6 +3401,14 @@
         "node-pre-gyp": "bin/node-pre-gyp"
       }
     },
+    "node_modules/@mistralai/mistralai": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/@mistralai/mistralai/-/mistralai-1.0.3.tgz",
+      "integrity": "sha512-161lmlaMrQvQeC97LG3GVpQi+LKKmGb6VweEFq6otc4J4kEVaJu6RzmH5UuLCt2eSes1Q5faY6YAPTkGOo0atw==",
+      "peerDependencies": {
+        "zod": ">= 3"
+      }
+    },
     "node_modules/@nodelib/fs.scandir": {
       "version": "2.1.5",
       "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@@ -16390,6 +16398,16 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/zod": {
+      "version": "3.23.8",
+      "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz",
+      "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==",
+      "license": "MIT",
+      "peer": true,
+      "funding": {
+        "url": "https://github.com/sponsors/colinhacks"
+      }
+    },
     "packages/backend": {
       "name": "@heyputer/backend",
       "version": "2.1.0",
@@ -16619,6 +16637,7 @@
         "@heyputer/kv.js": "^0.1.3",
         "@heyputer/multest": "^0.0.2",
         "@heyputer/puter-js-common": "^1.0.0",
+        "@mistralai/mistralai": "^1.0.3",
         "@opentelemetry/api": "^1.4.1",
         "@opentelemetry/auto-instrumentations-node": "^0.43.0",
         "@opentelemetry/exporter-trace-otlp-grpc": "^0.40.0",
diff --git a/src/backend/package.json b/src/backend/package.json
index 5d09800e..2bd8e544 100644
--- a/src/backend/package.json
+++ b/src/backend/package.json
@@ -13,6 +13,7 @@
     "@heyputer/kv.js": "^0.1.3",
     "@heyputer/multest": "^0.0.2",
     "@heyputer/puter-js-common": "^1.0.0",
+    "@mistralai/mistralai": "^1.0.3",
     "@opentelemetry/api": "^1.4.1",
     "@opentelemetry/auto-instrumentations-node": "^0.43.0",
     "@opentelemetry/exporter-trace-otlp-grpc": "^0.40.0",
diff --git a/src/backend/src/modules/puterai/MistralAIService.js b/src/backend/src/modules/puterai/MistralAIService.js
new file mode 100644
index 00000000..730dca0d
--- /dev/null
+++ b/src/backend/src/modules/puterai/MistralAIService.js
@@ -0,0 +1,87 @@
+const { PassThrough } = require("stream");
+const BaseService = require("../../services/BaseService");
+const { TypedValue } = require("../../services/drivers/meta/Runtime");
+const { nou } = require("../../util/langutil");
+
+class MistralAIService extends BaseService {
+    static MODULES = {
+        '@mistralai/mistralai': require('@mistralai/mistralai'),
+    }
+    async _init () {
+        const require = this.require;
+        const { Mistral } = require('@mistralai/mistralai');
+        this.client = new Mistral({
+            apiKey: this.config.apiKey,
+        });
+    }
+    static IMPLEMENTS = {
+        'puter-chat-completion': {
+            async list () {
+                // They send: { "object": "list", data }
+                const funny_wrapper = await this.client.models.list();
+                return funny_wrapper.data;
+            },
+            async complete ({ messages, stream, model }) {
+
+                // Default the role to 'user' on any message that lacks one.
+                for ( let i = 0; i < messages.length; i++ ) {
+                    const message = messages[i];
+                    if ( ! message.role ) message.role = 'user';
+                }
+
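+                // Streaming case: hand the caller a TypedValue wrapping a
+                // PassThrough stream, then forward SDK chunks onto it as
+                // NDJSON lines from the detached async task below.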
+                if ( stream ) {
+                    const stream = new PassThrough();
+                    const retval = new TypedValue({
+                        $: 'stream',
+                        content_type: 'application/x-ndjson',
+                        chunked: true,
+                    }, stream);
+                    const completion = await this.client.chat.stream({
+                        model: model ?? 'mistral-large-latest',
+                        messages,
+                    });
+                    (async () => {
+                        for await ( let chunk of completion ) {
+                            // just because Mistral wants to be different
+                            chunk = chunk.data;
+
+                            if ( chunk.choices.length < 1 ) continue;
+                            if ( chunk.choices[0].finish_reason ) {
+                                stream.end();
+                                break;
+                            }
+                            if ( nou(chunk.choices[0].delta.content) ) continue;
+                            const str = JSON.stringify({
+                                text: chunk.choices[0].delta.content
+                            });
+                            stream.write(str + '\n');
+                        }
+                    })();
+                    return retval;
+                }
+
+                try {
+                    const completion = await this.client.chat.complete({
+                        model: model ?? 'mistral-large-latest',
+                        messages,
+                    });
+                    // Expected case when mistralai/client-ts#23 is fixed
+                    return completion.choices[0];
+                } catch (e) {
+                    if ( ! e?.rawValue?.choices?.[0] ) {
+                        throw e;
+                    }
+                    // The SDK attempts to validate the API's response and throws
+                    // an exception, even if the response was successful
+                    // https://github.com/mistralai/client-ts/issues/23
+                    return e.rawValue.choices[0];
+                }
+            }
+        }
+    }
+}
+
+module.exports = { MistralAIService };
diff --git a/src/backend/src/modules/puterai/PuterAIModule.js b/src/backend/src/modules/puterai/PuterAIModule.js
index fdb67a41..2e0d2a48 100644
--- a/src/backend/src/modules/puterai/PuterAIModule.js
+++ b/src/backend/src/modules/puterai/PuterAIModule.js
@@ -38,6 +38,11 @@
             const { TogetherAIService } = require('./TogetherAIService');
             services.registerService('together-ai', TogetherAIService);
         }
+
+        if ( !! config?.services?.['mistral'] ) {
+            const { MistralAIService } = require('./MistralAIService');
+            services.registerService('mistral', MistralAIService);
+        }
     }
 }
 
diff --git a/src/backend/src/modules/puterai/TogetherAIService.js b/src/backend/src/modules/puterai/TogetherAIService.js
index bf6b8c83..ab0abf95 100644
--- a/src/backend/src/modules/puterai/TogetherAIService.js
+++ b/src/backend/src/modules/puterai/TogetherAIService.js
@@ -48,7 +48,6 @@ class TogetherAIService extends BaseService {
             }, stream);
             (async () => {
                 for await ( const chunk of completion ) {
-                    console.log('IT IS THIS STRING', chunk);
                     if ( chunk.choices.length < 1 ) continue;
                     if ( chunk.choices[0].finish_reason ) {
                         stream.end();
diff --git a/src/puter-js/src/modules/AI.js b/src/puter-js/src/modules/AI.js
index 36bf4756..59388318 100644
--- a/src/puter-js/src/modules/AI.js
+++ b/src/puter-js/src/modules/AI.js
@@ -226,6 +226,9 @@ class AI{
         if( options.model === 'claude-3-5-sonnet' || options.model === 'claude'){
             options.model = 'claude-3-5-sonnet-20240620';
         }
+        if ( options.model === 'mistral' ) {
+            options.model = 'mistral-large-latest';
+        }
 
         // map model to the appropriate driver
         if (!options.model || options.model === 'gpt-4o' || options.model === 'gpt-4o-mini') {
@@ -234,6 +237,8 @@
             driver = 'claude';
         }else if(options.model === 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' || options.model === 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' || options.model === 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' || options.model === `google/gemma-2-27b-it`){
             driver = 'together-ai';
+        }else if(options.model === 'mistral-large-latest' || options.model === 'codestral-latest'){
+            driver = 'mistral';
         }
 
         // stream flag from settings
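
Usage sketch for the new driver, assuming a backend services config that
includes a `mistral` entry with a valid `apiKey` (the config key is read in
PuterAIModule.js and MistralAIService._init above); the prompts and the
streaming call shape follow the existing puter.js conventions and are
illustrative, not part of the patch:

    // 'mistral' is rewritten to 'mistral-large-latest' by AI.js above,
    // then routed to the 'mistral' driver.
    const reply = await puter.ai.chat('Say hello in French.', { model: 'mistral' });
    console.log(reply);

    // Streaming: the driver emits NDJSON lines of { text }, which puter.js
    // surfaces to the caller as an async-iterable of parts.
    const response = await puter.ai.chat('Write a short poem.', {
        model: 'codestral-latest',
        stream: true,
    });
    for await ( const part of response ) {
        console.log(part?.text);
    }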