diff --git a/src/backend/src/modules/puterai/PuterAIModule.js b/src/backend/src/modules/puterai/PuterAIModule.js
index 5552b390..df20aea0 100644
--- a/src/backend/src/modules/puterai/PuterAIModule.js
+++ b/src/backend/src/modules/puterai/PuterAIModule.js
@@ -48,6 +48,11 @@ class PuterAIModule extends AdvancedBase {
             const { GroqAIService } = require('./GroqAIService');
             services.registerService('groq', GroqAIService);
         }
+
+        if ( !! config?.services?.['xai'] ) {
+            const { XAIService } = require('./XAIService');
+            services.registerService('xai', XAIService);
+        }
     }
 }
 
diff --git a/src/backend/src/modules/puterai/XAIService.js b/src/backend/src/modules/puterai/XAIService.js
new file mode 100644
index 00000000..8846da11
--- /dev/null
+++ b/src/backend/src/modules/puterai/XAIService.js
@@ -0,0 +1,117 @@
+const { default: Anthropic } = require("@anthropic-ai/sdk");
+const BaseService = require("../../services/BaseService");
+const { whatis } = require("../../util/langutil");
+const { PassThrough } = require("stream");
+const { TypedValue } = require("../../services/drivers/meta/Runtime");
+
+const PUTER_PROMPT = `
+    You are running on an open-source platform called Puter,
+    as the xAI implementation for a driver interface
+    called puter-chat-completion.
+
+    The following JSON contains system messages from the
+    user of the driver interface (typically an app on Puter):
+`.replace('\n', ' ').trim();
+
+class XAIService extends BaseService {
+    static MODULES = {
+        Anthropic: require('@anthropic-ai/sdk'),
+    }
+
+    async _init () {
+        this.anthropic = new Anthropic({
+            apiKey: this.config.apiKey,
+            baseURL: 'https://api.x.ai'
+        });
+    }
+
+    static IMPLEMENTS = {
+        ['puter-chat-completion']: {
+            async list () {
+                return [
+                    'grok-beta',
+                ];
+            },
+            async complete ({ messages, stream, model }) {
+                const adapted_messages = [];
+
+                const system_prompts = [];
+                let previous_was_user = false;
+                for ( const message of messages ) {
+                    if ( typeof message.content === 'string' ) {
+                        message.content = {
+                            type: 'text',
+                            text: message.content,
+                        };
+                    }
+                    if ( whatis(message.content) !== 'array' ) {
+                        message.content = [message.content];
+                    }
+                    if ( ! message.role ) message.role = 'user';
+                    if ( message.role === 'user' && previous_was_user ) {
+                        const last_msg = adapted_messages[adapted_messages.length-1];
+                        last_msg.content.push(
+                            ...(Array.isArray(message.content) ? message.content : [message.content])
+                        );
+                        continue;
+                    }
+                    if ( message.role === 'system' ) {
+                        system_prompts.push(...message.content);
+                        continue;
+                    }
+                    adapted_messages.push(message);
+                    if ( message.role === 'user' ) {
+                        previous_was_user = true;
+                    }
+                }
+
+                if ( stream ) {
+                    const stream = new PassThrough();
+                    const retval = new TypedValue({
+                        $: 'stream',
+                        content_type: 'application/x-ndjson',
+                        chunked: true,
+                    }, stream);
+                    (async () => {
+                        const completion = await this.anthropic.messages.stream({
+                            model: model ?? 'grok-beta',
+                            max_tokens: 1000,
+                            temperature: 0,
+                            system: PUTER_PROMPT + JSON.stringify(system_prompts),
+                            messages: adapted_messages,
+                        });
+                        for await ( const event of completion ) {
+                            if (
+                                event.type !== 'content_block_delta' ||
+                                event.delta.type !== 'text_delta'
+                            ) continue;
+                            const str = JSON.stringify({
+                                text: event.delta.text,
+                            });
+                            stream.write(str + '\n');
+                        }
+                        stream.end();
+                    })();
+
+                    return retval;
+                }
+
+                const msg = await this.anthropic.messages.create({
+                    model: model ?? 'grok-beta',
+                    max_tokens: 1000,
+                    temperature: 0,
+                    system: PUTER_PROMPT + JSON.stringify(system_prompts),
+                    messages: adapted_messages,
+                });
+                return {
+                    message: msg,
+                    finish_reason: 'stop'
+                };
+            }
+        }
+    }
+}
+
+module.exports = {
+    XAIService,
+};
diff --git a/src/puter-js/src/modules/AI.js b/src/puter-js/src/modules/AI.js
index 84d9b3e4..56f409f9 100644
--- a/src/puter-js/src/modules/AI.js
+++ b/src/puter-js/src/modules/AI.js
@@ -267,6 +267,8 @@ class AI{
             "whisper-large-v3"
         ].includes(options.model)) {
            driver = 'groq';
+        }else if(options.model === 'grok-beta') {
+            driver = 'xai';
         }
 
        // stream flag from settings
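
Usage sketch (not part of the diff above): the backend registers the xai driver only when the configuration contains a services.xai entry, and XAIService reads its API key from this.config.apiKey. The snippet below is a minimal illustration under those assumptions; the exact config file layout is assumed rather than taken from the diff, while the puter.ai.chat(prompt, options) call matches the routing added to AI.js, which selects the xai driver whenever options.model is 'grok-beta'.

    // Backend config (assumed layout): enable the xAI service with an API key.
    // "services": { "xai": { "apiKey": "<your xAI API key>" } }

    // From an app using puter.js, inside an async context:
    // requesting the grok-beta model routes the call through the new xai driver.
    const response = await puter.ai.chat('Explain Puter in one sentence.', {
        model: 'grok-beta',
    });
    console.log(response);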