
tweak(puterai): add model parameter

KernelDeimos · 9 months ago
commit 7b254e8676

+ 1 - 0
src/backend/src/modules/puterai/AIInterfaceService.js

@@ -34,6 +34,7 @@ class AIInterfaceService extends BaseService {
                         messages: { type: 'json' },
                         vision: { type: 'flag' },
                         stream: { type: 'flag' },
+                        model: { type: 'string' },
                     },
                     result: { type: 'json' },
                 }
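
With `model` declared as a string argument on the `puter-chat-completion` interface, callers can choose a model per request instead of always getting each service's hard-coded default. A minimal sketch of what such a call might look like from a client, assuming a generic driver-call endpoint and request shape (the URL, wrapper fields, and headers are assumptions; only the argument names `messages`, `vision`, `stream`, and `model` come from this diff):

// Hypothetical driver call passing the new `model` argument.
// Endpoint path and envelope shape are assumptions about the backend.
const response = await fetch('/drivers/call', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        interface: 'puter-chat-completion',
        method: 'complete',
        args: {
            messages: [{ role: 'user', content: 'Hello!' }],
            model: 'claude-3-5-sonnet-20240620', // optional; services fall back to their own default
        },
    }),
});
const result = await response.json();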

+ 2 - 2
src/backend/src/modules/puterai/ClaudeService.js

@@ -26,7 +26,7 @@ class ClaudeService extends BaseService {
     
     static IMPLEMENTS = {
         ['puter-chat-completion']: {
-            async complete ({ messages, stream }) {
+            async complete ({ messages, stream, model }) {
                 const adapted_messages = [];
                 
                 const system_prompts = [];
@@ -67,7 +67,7 @@ class ClaudeService extends BaseService {
                     }, stream);
                     (async () => {
                         const completion = await this.anthropic.messages.stream({
-                            model: 'claude-3-5-sonnet-20240620',
+                            model: model ?? 'claude-3-5-sonnet-20240620',
                             max_tokens: 1000,
                             temperature: 0,
                             system: PUTER_PROMPT + JSON.stringify(system_prompts),
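
The nullish-coalescing fallback keeps `claude-3-5-sonnet-20240620` as the default when no model is supplied, while letting callers pick another Claude snapshot. A standalone sketch of the same pattern against the Anthropic SDK; the client construction, message contents, and event handling here are illustrative assumptions, and only the fallback expression and call arguments mirror this diff:

// Minimal sketch of the default-model fallback used in ClaudeService.
const Anthropic = require('@anthropic-ai/sdk');
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

async function chat (messages, model) {
    // `model` is optional; a missing value falls back to the snapshot
    // that was hard-coded before this change.
    const stream = await anthropic.messages.stream({
        model: model ?? 'claude-3-5-sonnet-20240620',
        max_tokens: 1000,
        temperature: 0,
        messages,
    });
    // Forward text chunks as they arrive (illustrative handling).
    stream.on('text', text => process.stdout.write(text));
    await stream.finalMessage();
}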

+ 2 - 3
src/backend/src/modules/puterai/OpenAICompletionService.js

@@ -23,7 +23,7 @@ class OpenAICompletionService extends BaseService {
 
     static IMPLEMENTS = {
         ['puter-chat-completion']: {
-            async complete ({ messages, test_mode, stream }) {
+            async complete ({ messages, test_mode, stream, model }) {
                 if ( test_mode ) {
                     const { LoremIpsum } = require('lorem-ipsum');
                     const li = new LoremIpsum({
@@ -49,9 +49,8 @@ class OpenAICompletionService extends BaseService {
                     }
                 }
 
-                const model = 'gpt-4o';
                 return await this.complete(messages, {
-                    model,
+                    model: model ?? 'gpt-4o',
                     moderation: true,
                     stream,
                 });
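
Here the hard-coded `const model = 'gpt-4o'` is removed and the same fallback pattern is applied, so the inner `complete()` call receives either the caller-supplied model or `gpt-4o`. A hedged sketch of how a caller might now invoke the implementation; how the service handle is obtained is an assumption, and only the argument names (`messages`, `test_mode`, `stream`, `model`) come from this diff:

// Hypothetical caller of the 'puter-chat-completion' implementation above.
// `openAICompletionService` stands in for however the backend resolves
// the service; 'gpt-4o-mini' is just an example override.
const result = await openAICompletionService.complete({
    messages: [{ role: 'user', content: 'Summarize this commit.' }],
    test_mode: false,
    stream: false,
    model: 'gpt-4o-mini', // omit to fall back to 'gpt-4o'
});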