diff --git a/settings.js b/settings.js
index 855fea7..c0f7705 100644
--- a/settings.js
+++ b/settings.js
@@ -7,7 +7,7 @@ const settings = {
     // the mindserver manages all agents and hosts the UI
     "mindserver_port": 8080,
 
-    "base_profile": "survival", // survival, creative, assistant, or god_mode
+    "base_profile": "survival", // survival, assistant, creative, or god_mode
     "profiles": [
         "./andy.json",
         // "./profiles/gpt.json",
diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js
index 470f507..9b9d0d2 100644
--- a/src/agent/action_manager.js
+++ b/src/agent/action_manager.js
@@ -69,7 +69,7 @@ export class ActionManager {
         else {
             this.recent_action_counter = 0;
         }
-        if (this.recent_action_counter > 2) {
+        if (this.recent_action_counter > 3) {
             console.warn('Fast action loop detected, cancelling resume.');
             this.cancelResume(); // likely cause of repetition
         }
diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js
index 68cd4d0..4dbb6b5 100644
--- a/src/agent/library/skills.js
+++ b/src/agent/library/skills.js
@@ -228,28 +228,33 @@ export async function smeltItem(bot, itemName, num=1) {
     await furnace.putInput(mc.getItemId(itemName), null, num);
     // wait for the items to smelt
     let total = 0;
-    let collected_last = true;
     let smelted_item = null;
     await new Promise(resolve => setTimeout(resolve, 200));
+    let last_collected = Date.now();
     while (total < num) {
-        await new Promise(resolve => setTimeout(resolve, 10000));
-        console.log('checking...');
-        let collected = false;
+        await new Promise(resolve => setTimeout(resolve, 1000));
         if (furnace.outputItem()) {
             smelted_item = await furnace.takeOutput();
             if (smelted_item) {
                 total += smelted_item.count;
-                collected = true;
+                last_collected = Date.now();
             }
         }
-        if (!collected && !collected_last) {
-            break; // if nothing was collected this time or last time
+        if (Date.now() - last_collected > 11000) {
+            break; // if nothing has been collected in 11 seconds, stop
         }
-        collected_last = collected;
         if (bot.interrupt_code) {
             break;
         }
     }
+    // take all remaining in input/fuel slots
+    if (furnace.inputItem()) {
+        await furnace.takeInput();
+    }
+    if (furnace.fuelItem()) {
+        await furnace.takeFuel();
+    }
+
     await bot.closeWindow(furnace);
 
     if (placedFurnace) {
@@ -1040,7 +1045,7 @@ export async function goToGoal(bot, goal) {
         log(bot, `Found destructive path.`);
     }
     else {
-        log(bot, `Could not find a path to goal, attempting to navigate anyway using destructive movements.`);
+        log(bot, `Path not found, but attempting to navigate anyway using destructive movements.`);
     }
     const doorCheckInterval = startDoorInterval(bot);
 
@@ -1288,11 +1293,29 @@ export async function followPlayer(bot, username, distance=4) {
     while (!bot.interrupt_code) {
         await new Promise(resolve => setTimeout(resolve, 500));
         // in cheat mode, if the distance is too far, teleport to the player
-        if (bot.modes.isOn('cheat') && bot.entity.position.distanceTo(player.position) > 100 && player.isOnGround) {
+        const distance_from_player = bot.entity.position.distanceTo(player.position);
+
+        const teleport_distance = 100;
+        const ignore_modes_distance = 30;
+        const nearby_distance = distance + 2;
+
+        if (distance_from_player > teleport_distance && bot.modes.isOn('cheat')) {
+            // teleport with cheat mode
             await goToPlayer(bot, username);
         }
-        const is_nearby = bot.entity.position.distanceTo(player.position) <= distance + 2;
-        if (is_nearby) {
+        else if (distance_from_player > ignore_modes_distance) {
+            // these modes slow down the bot, and we want to catch up
+            bot.modes.pause('item_collecting');
+            bot.modes.pause('hunting');
+            bot.modes.pause('torch_placing');
+        }
+        else if (distance_from_player <= ignore_modes_distance) {
+            bot.modes.unpause('item_collecting');
+            bot.modes.unpause('hunting');
+            bot.modes.unpause('torch_placing');
+        }
+
+        if (distance_from_player <= nearby_distance) {
             clearInterval(doorCheckInterval);
             doorCheckInterval = null;
             bot.modes.pause('unstuck');
diff --git a/src/agent/modes.js b/src/agent/modes.js
index cde53fc..ce196af 100644
--- a/src/agent/modes.js
+++ b/src/agent/modes.js
@@ -156,7 +156,7 @@ const modes_list = [
     {
         name: 'hunting',
         description: 'Hunt nearby animals when idle.',
-        interrupts: [],
+        interrupts: ['action:followPlayer'],
        on: true,
        active: false,
        update: async function (agent) {
diff --git a/src/models/_model_map.js b/src/models/_model_map.js
new file mode 100644
index 0000000..be43893
--- /dev/null
+++ b/src/models/_model_map.js
@@ -0,0 +1,89 @@
+import { promises as fs } from 'fs';
+import path from 'path';
+import { fileURLToPath, pathToFileURL } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+// Dynamically discover model classes in this directory.
+// Each model class must export a static `prefix` string.
+const apiMap = await (async () => {
+    const map = {};
+    const files = (await fs.readdir(__dirname))
+        .filter(f => f.endsWith('.js') && f !== '_model_map.js' && f !== 'prompter.js');
+    for (const file of files) {
+        try {
+            const moduleUrl = pathToFileURL(path.join(__dirname, file)).href;
+            const mod = await import(moduleUrl);
+            for (const exported of Object.values(mod)) {
+                if (typeof exported === 'function' && Object.prototype.hasOwnProperty.call(exported, 'prefix')) {
+                    const prefix = exported.prefix;
+                    if (typeof prefix === 'string' && prefix.length > 0) {
+                        map[prefix] = exported;
+                    }
+                }
+            }
+        } catch (e) {
+            console.warn('Failed to load model module:', file, e?.message || e);
+        }
+    }
+    return map;
+})();
+
+export function selectAPI(profile) {
+    if (typeof profile === 'string' || profile instanceof String) {
+        profile = {model: profile};
+    }
+    // backwards compatibility with local->ollama
+    if (profile.api?.includes('local') || profile.model?.includes('local')) {
+        profile.api = 'ollama';
+        if (profile.model) {
+            profile.model = profile.model.replace('local', 'ollama');
+        }
+    }
+    if (!profile.api) {
+        const api = Object.keys(apiMap).find(key => profile.model?.startsWith(key));
+        if (api) {
+            profile.api = api;
+        }
+        else {
+            // check for some common models that do not require prefixes
+            if (profile.model.includes('gpt') || profile.model.includes('o1') || profile.model.includes('o3'))
+                profile.api = 'openai';
+            else if (profile.model.includes('claude'))
+                profile.api = 'anthropic';
+            else if (profile.model.includes('gemini'))
+                profile.api = "google";
+            else if (profile.model.includes('grok'))
+                profile.api = 'grok';
+            else if (profile.model.includes('mistral'))
+                profile.api = 'mistral';
+            else if (profile.model.includes('deepseek'))
+                profile.api = 'deepseek';
+            else if (profile.model.includes('qwen'))
+                profile.api = 'qwen';
+        }
+        if (!profile.api) {
+            throw new Error('Unknown model:', profile.model);
+        }
+    }
+    if (!apiMap[profile.api]) {
+        throw new Error('Unknown api:', profile.api);
+    }
+    let model_name = profile.model.replace(profile.api + '/', ''); // remove prefix
+    profile.model = model_name === "" ? null : model_name; // if model is empty, set to null
+    return profile;
+}
+
+export function createModel(profile) {
+    if (!!apiMap[profile.model]) {
+        // if the model value is an api (instead of a specific model name)
+        // then set model to null so it uses the default model for that api
+        profile.model = null;
+    }
+    if (!apiMap[profile.api]) {
+        throw new Error('Unknown api:', profile.api);
+    }
+    const model = new apiMap[profile.api](profile.model, profile.url, profile.params);
+    return model;
+}
\ No newline at end of file
diff --git a/src/models/claude.js b/src/models/claude.js
index d6e48bc..c42d2e6 100644
--- a/src/models/claude.js
+++ b/src/models/claude.js
@@ -3,6 +3,7 @@ import { strictFormat } from '../utils/text.js';
 import { getKey } from '../utils/keys.js';
 
 export class Claude {
+    static prefix = 'anthropic';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params || {};
diff --git a/src/models/deepseek.js b/src/models/deepseek.js
index da98ba2..5596fa8 100644
--- a/src/models/deepseek.js
+++ b/src/models/deepseek.js
@@ -3,6 +3,7 @@ import { getKey, hasKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class DeepSeek {
+    static prefix = 'deepseek';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params;
diff --git a/src/models/gemini.js b/src/models/gemini.js
index 4d24c93..75a20e0 100644
--- a/src/models/gemini.js
+++ b/src/models/gemini.js
@@ -3,6 +3,7 @@ import { toSinglePrompt, strictFormat } from '../utils/text.js';
 import { getKey } from '../utils/keys.js';
 
 export class Gemini {
+    static prefix = 'google';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params;
@@ -142,15 +143,15 @@ export class Gemini {
     }
 
     async embed(text) {
-        let model;
+        let model = this.model_name || "text-embedding-004";
         if (this.url) {
             model = this.genAI.getGenerativeModel(
-                { model: "text-embedding-004" },
+                { model },
                 { baseUrl: this.url }
             );
         } else {
             model = this.genAI.getGenerativeModel(
-                { model: "text-embedding-004" }
+                { model }
             );
         }
 
diff --git a/src/models/glhf.js b/src/models/glhf.js
index d41b843..b237c8d 100644
--- a/src/models/glhf.js
+++ b/src/models/glhf.js
@@ -2,6 +2,7 @@ import OpenAIApi from 'openai';
 import { getKey } from '../utils/keys.js';
 
 export class GLHF {
+    static prefix = 'glhf';
     constructor(model_name, url) {
         this.model_name = model_name;
         const apiKey = getKey('GHLF_API_KEY');
diff --git a/src/models/gpt.js b/src/models/gpt.js
index e8e5c5c..ea7d600 100644
--- a/src/models/gpt.js
+++ b/src/models/gpt.js
@@ -3,6 +3,7 @@ import { getKey, hasKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class GPT {
+    static prefix = 'openai';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params;
@@ -22,20 +23,21 @@
     async sendRequest(turns, systemMessage, stop_seq='***') {
         let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
         messages = strictFormat(messages);
+        let model = this.model_name || "gpt-4o-mini";
         const pack = {
-            model: this.model_name || "gpt-3.5-turbo",
+            model: model,
             messages,
             stop: stop_seq,
             ...(this.params || {})
         };
-        if (this.model_name.includes('o1') || this.model_name.includes('o3') || this.model_name.includes('5')) {
+        if (model.includes('o1') || model.includes('o3') || model.includes('5')) {
             delete pack.stop;
         }

         let res = null;

         try {
-            console.log('Awaiting openai api response from model', this.model_name)
+            console.log('Awaiting openai api response from model', model)
             // console.log('Messages:', messages);
             let completion = await this.openai.chat.completions.create(pack);
             if (completion.choices[0].finish_reason == 'length')
@@ -88,6 +90,3 @@
     }
 }
 
-
-
-
diff --git a/src/models/grok.js b/src/models/grok.js
index 2878a10..0753f10 100644
--- a/src/models/grok.js
+++ b/src/models/grok.js
@@ -3,6 +3,7 @@ import { getKey } from '../utils/keys.js';
 
 // xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs
 export class Grok {
+    static prefix = 'grok';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.url = url;
diff --git a/src/models/groq.js b/src/models/groq.js
index e4e8f3b..9da88c7 100644
--- a/src/models/groq.js
+++ b/src/models/groq.js
@@ -6,6 +6,7 @@ import { getKey } from '../utils/keys.js';
 
 // Umbrella class for everything under the sun... That GroqCloud provides, that is.
 export class GroqCloudAPI {
+    static prefix = 'groq';
 
     constructor(model_name, url, params) {
 
@@ -63,7 +64,6 @@ export class GroqCloudAPI {
             if (err.message.includes("content must be a string")) {
                 res = "Vision is only supported by certain models.";
             } else {
-                console.log(this.model_name);
                 res = "My brain disconnected, try again.";
             }
             console.log(err);
diff --git a/src/models/huggingface.js b/src/models/huggingface.js
index 80c36e8..91fbdfd 100644
--- a/src/models/huggingface.js
+++ b/src/models/huggingface.js
@@ -3,6 +3,7 @@ import { getKey } from '../utils/keys.js';
 import { HfInference } from "@huggingface/inference";
 
 export class HuggingFace {
+    static prefix = 'huggingface';
     constructor(model_name, url, params) {
         // Remove 'huggingface/' prefix if present
         this.model_name = model_name.replace('huggingface/', '');
diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js
index a2ccc48..f483b69 100644
--- a/src/models/hyperbolic.js
+++ b/src/models/hyperbolic.js
@@ -1,6 +1,7 @@
 import { getKey } from '../utils/keys.js';
 
 export class Hyperbolic {
+    static prefix = 'hyperbolic';
     constructor(modelName, apiUrl) {
         this.modelName = modelName || "deepseek-ai/DeepSeek-V3";
         this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions";
diff --git a/src/models/mistral.js b/src/models/mistral.js
index 72448f1..536b386 100644
--- a/src/models/mistral.js
+++ b/src/models/mistral.js
@@ -3,6 +3,7 @@ import { getKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class Mistral {
+    static prefix = 'mistral';
     #client;
 
     constructor(model_name, url, params) {
diff --git a/src/models/novita.js b/src/models/novita.js
index 8f2dd08..46140f0 100644
--- a/src/models/novita.js
+++ b/src/models/novita.js
@@ -4,8 +4,9 @@ import { strictFormat } from '../utils/text.js';
 
 // llama, mistral
 export class Novita {
+    static prefix = 'novita';
     constructor(model_name, url, params) {
-        this.model_name = model_name.replace('novita/', '');
+        this.model_name = model_name;
         this.url = url || 'https://api.novita.ai/v3/openai';
         this.params = params;
 
diff --git a/src/models/local.js b/src/models/ollama.js
similarity index 60%
rename from src/models/local.js
rename to src/models/ollama.js
index e51bcf8..49a80e0 100644
--- a/src/models/local.js
+++ b/src/models/ollama.js
@@ -1,6 +1,7 @@
 import { strictFormat } from '../utils/text.js';
 
-export class Local {
+export class Ollama {
+    static prefix = 'ollama';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params;
@@ -10,11 +11,9 @@ export class Local {
     }
 
     async sendRequest(turns, systemMessage) {
-        let model = this.model_name || 'llama3.1'; // Updated to llama3.1, as it is more performant than llama3
+        let model = this.model_name || 'sweaterdog/andy-4:micro-q8_0';
         let messages = strictFormat(turns);
         messages.unshift({ role: 'system', content: systemMessage });
-
-        // We'll attempt up to 5 times for models with deepseek-r1-esk reasoning if the <think> tags are mismatched.
         const maxAttempts = 5;
         let attempt = 0;
         let finalRes = null;
@@ -24,14 +23,14 @@ export class Local {
             console.log(`Awaiting local response... (model: ${model}, attempt: ${attempt})`);
             let res = null;
             try {
-                res = await this.send(this.chat_endpoint, {
+                let apiResponse = await this.send(this.chat_endpoint, {
                     model: model,
                     messages: messages,
                     stream: false,
                     ...(this.params || {})
                 });
-                if (res) {
-                    res = res['message']['content'];
+                if (apiResponse) {
+                    res = apiResponse['message']['content'];
                 } else {
                     res = 'No response data.';
                 }
@@ -43,36 +42,27 @@ export class Local {
                 console.log(err);
                 res = 'My brain disconnected, try again.';
             }
-        }
 
-        // If the model name includes "deepseek-r1" or "Andy-3.5-reasoning", then handle the <think> block.
-        const hasOpenTag = res.includes("<think>");
-        const hasCloseTag = res.includes("</think>");
-
-        // If there's a partial mismatch, retry to get a complete response.
-        if ((hasOpenTag && !hasCloseTag)) {
-            console.warn("Partial <think> block detected. Re-generating...");
-            continue;
-        }
-
-        // If </think> is present but <think> is not, prepend <think>
-        if (hasCloseTag && !hasOpenTag) {
-            res = '<think>' + res;
-        }
-        // Changed this so if the model reasons, using <think> and </think> but doesn't start the message with <think>, <think> ges prepended to the message so no error occur.
-
-        // If both tags appear, remove them (and everything inside).
-        if (hasOpenTag && hasCloseTag) {
-            res = res.replace(/<think>[\s\S]*?<\/think>/g, '');
-        }
+            const hasOpenTag = res.includes("<think>");
+            const hasCloseTag = res.includes("</think>");
+            if ((hasOpenTag && !hasCloseTag)) {
+                console.warn("Partial <think> block detected. Re-generating...");
+                if (attempt < maxAttempts) continue;
+            }
+            if (hasCloseTag && !hasOpenTag) {
+                res = '<think>' + res;
+            }
+            if (hasOpenTag && hasCloseTag) {
+                res = res.replace(/<think>[\s\S]*?<\/think>/g, '').trim();
+            }
             finalRes = res;
-            break; // Exit the loop if we got a valid response.
+            break;
         }
 
         if (finalRes == null) {
-            console.warn("Could not get a valid <think> block or normal response after max attempts.");
+            console.warn("Could not get a valid response after max attempts.");
             finalRes = 'I thought too hard, sorry, try again.';
         }
         return finalRes;
@@ -104,4 +94,22 @@ export class Local {
         }
         return data;
     }
+
+    async sendVisionRequest(messages, systemMessage, imageBuffer) {
+        const imageMessages = [...messages];
+        imageMessages.push({
+            role: "user",
+            content: [
+                { type: "text", text: systemMessage },
+                {
+                    type: "image_url",
+                    image_url: {
+                        url: `data:image/jpeg;base64,${imageBuffer.toString('base64')}`
+                    }
+                }
+            ]
+        });
+
+        return this.sendRequest(imageMessages, systemMessage);
+    }
 }
diff --git a/src/models/openrouter.js b/src/models/openrouter.js
index 5cbc090..ca0782b 100644
--- a/src/models/openrouter.js
+++ b/src/models/openrouter.js
@@ -3,6 +3,7 @@ import { getKey, hasKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class OpenRouter {
+    static prefix = 'openrouter';
 
     constructor(model_name, url) {
         this.model_name = model_name;
diff --git a/src/models/prompter.js b/src/models/prompter.js
index 89d5fe9..a8c4db7 100644
--- a/src/models/prompter.js
+++ b/src/models/prompter.js
@@ -5,26 +5,10 @@ import { SkillLibrary } from "../agent/library/skill_library.js";
 import { stringifyTurns } from '../utils/text.js';
 import { getCommand } from '../agent/commands/index.js';
 import settings from '../agent/settings.js';
-
-import { Gemini } from './gemini.js';
-import { GPT } from './gpt.js';
-import { Claude } from './claude.js';
-import { Mistral } from './mistral.js';
-import { ReplicateAPI } from './replicate.js';
-import { Local } from './local.js';
-import { Novita } from './novita.js';
-import { GroqCloudAPI } from './groq.js';
-import { HuggingFace } from './huggingface.js';
-import { Qwen } from "./qwen.js";
-import { Grok } from "./grok.js";
-import { DeepSeek } from './deepseek.js';
-import { Hyperbolic } from './hyperbolic.js';
-import { GLHF } from './glhf.js';
-import { OpenRouter } from './openrouter.js';
-import { VLLM } from './vllm.js';
 import { promises as fs } from 'fs';
 import path from 'path';
 import { fileURLToPath } from 'url';
+import { selectAPI, createModel } from './_model_map.js';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
@@ -66,70 +50,46 @@ export class Prompter {
         this.last_prompt_time = 0;
         this.awaiting_coding = false;
 
-        // try to get "max_tokens" parameter, else null
+        // for backwards compatibility, move max_tokens to params
         let max_tokens = null;
         if (this.profile.max_tokens)
             max_tokens = this.profile.max_tokens;
 
-        let chat_model_profile = this._selectAPI(this.profile.model);
-        this.chat_model = this._createModel(chat_model_profile);
+        let chat_model_profile = selectAPI(this.profile.model);
+        this.chat_model = createModel(chat_model_profile);
 
         if (this.profile.code_model) {
-            let code_model_profile = this._selectAPI(this.profile.code_model);
-            this.code_model = this._createModel(code_model_profile);
+            let code_model_profile = selectAPI(this.profile.code_model);
+            this.code_model = createModel(code_model_profile);
         }
         else {
             this.code_model = this.chat_model;
         }
 
         if (this.profile.vision_model) {
-            let vision_model_profile = this._selectAPI(this.profile.vision_model);
-            this.vision_model = this._createModel(vision_model_profile);
+            let vision_model_profile = selectAPI(this.profile.vision_model);
+            this.vision_model = createModel(vision_model_profile);
         }
         else {
             this.vision_model = this.chat_model;
         }
 
-        let embedding = this.profile.embedding;
-        if (embedding === undefined) {
-            if (chat_model_profile.api !== 'ollama')
-                embedding = {api: chat_model_profile.api};
-            else
-                embedding = {api: 'none'};
-        }
-        else if (typeof embedding === 'string' || embedding instanceof String)
-            embedding = {api: embedding};
-
-        console.log('Using embedding settings:', embedding);
-
-        try {
-            if (embedding.api === 'google')
-                this.embedding_model = new Gemini(embedding.model, embedding.url);
-            else if (embedding.api === 'openai')
-                this.embedding_model = new GPT(embedding.model, embedding.url);
-            else if (embedding.api === 'replicate')
-                this.embedding_model = new ReplicateAPI(embedding.model, embedding.url);
-            else if (embedding.api === 'ollama')
-                this.embedding_model = new Local(embedding.model, embedding.url);
-            else if (embedding.api === 'qwen')
-                this.embedding_model = new Qwen(embedding.model, embedding.url);
-            else if (embedding.api === 'mistral')
-                this.embedding_model = new Mistral(embedding.model, embedding.url);
-            else if (embedding.api === 'huggingface')
-                this.embedding_model = new HuggingFace(embedding.model, embedding.url);
-            else if (embedding.api === 'novita')
-                this.embedding_model = new Novita(embedding.model, embedding.url);
-            else {
-                this.embedding_model = null;
-                let embedding_name = embedding ? embedding.api : '[NOT SPECIFIED]'
-                console.warn('Unsupported embedding: ' + embedding_name + '. Using word-overlap instead, expect reduced performance. Recommend using a supported embedding model. See Readme.');
+
+        let embedding_model_profile = null;
+        if (this.profile.embedding) {
+            try {
+                embedding_model_profile = selectAPI(this.profile.embedding);
+            } catch (e) {
+                embedding_model_profile = null;
             }
         }
-        catch (err) {
-            console.warn('Warning: Failed to initialize embedding model:', err.message);
-            console.log('Continuing anyway, using word-overlap instead.');
-            this.embedding_model = null;
+        if (embedding_model_profile) {
+            this.embedding_model = createModel(embedding_model_profile);
         }
+        else {
+            this.embedding_model = createModel({api: chat_model_profile.api});
+        }
+
         this.skill_libary = new SkillLibrary(agent, this.embedding_model);
         mkdirSync(`./bots/${name}`, { recursive: true });
         writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => {
@@ -140,88 +100,6 @@ export class Prompter {
         });
     }
 
-    _selectAPI(profile) {
-        if (typeof profile === 'string' || profile instanceof String) {
-            profile = {model: profile};
-        }
-        if (!profile.api) {
-            if (profile.model.includes('openrouter/'))
-                profile.api = 'openrouter'; // must do first because shares names with other models
-            else if (profile.model.includes('ollama/'))
-                profile.api = 'ollama'; // also must do early because shares names with other models
-            else if (profile.model.includes('gemini'))
-                profile.api = 'google';
-            else if (profile.model.includes('vllm/'))
-                profile.api = 'vllm';
-            else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3'))
-                profile.api = 'openai';
-            else if (profile.model.includes('claude'))
-                profile.api = 'anthropic';
-            else if (profile.model.includes('huggingface/'))
-                profile.api = "huggingface";
-            else if (profile.model.includes('replicate/'))
-                profile.api = 'replicate';
-            else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/"))
-                model_profile.api = 'mistral';
-            else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/"))
-                profile.api = 'groq';
-            else if (profile.model.includes("glhf/"))
-                profile.api = 'glhf';
-            else if (profile.model.includes("hyperbolic/"))
-                profile.api = 'hyperbolic';
-            else if (profile.model.includes('novita/'))
-                profile.api = 'novita';
-            else if (profile.model.includes('qwen'))
-                profile.api = 'qwen';
-            else if (profile.model.includes('grok'))
-                profile.api = 'xai';
-            else if (profile.model.includes('deepseek'))
-                profile.api = 'deepseek';
-            else if (profile.model.includes('mistral'))
-                profile.api = 'mistral';
-            else
-                throw new Error('Unknown model:', profile.model);
-        }
-        return profile;
-    }
-    _createModel(profile) {
-        let model = null;
-        if (profile.api === 'google')
-            model = new Gemini(profile.model, profile.url, profile.params);
-        else if (profile.api === 'openai')
-            model = new GPT(profile.model, profile.url, profile.params);
-        else if (profile.api === 'anthropic')
-            model = new Claude(profile.model, profile.url, profile.params);
-        else if (profile.api === 'replicate')
-            model = new ReplicateAPI(profile.model.replace('replicate/', ''), profile.url, profile.params);
-        else if (profile.api === 'ollama')
-            model = new Local(profile.model.replace('ollama/', ''), profile.url, profile.params);
-        else if (profile.api === 'mistral')
-            model = new Mistral(profile.model, profile.url, profile.params);
-        else if (profile.api === 'groq')
-            model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params);
-        else if (profile.api === 'huggingface')
-            model = new HuggingFace(profile.model, profile.url, profile.params);
-        else if (profile.api === 'glhf')
-            model = new GLHF(profile.model.replace('glhf/', ''), profile.url, profile.params);
-        else if (profile.api === 'hyperbolic')
-            model = new Hyperbolic(profile.model.replace('hyperbolic/', ''), profile.url, profile.params);
-        else if (profile.api === 'novita')
-            model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params);
-        else if (profile.api === 'qwen')
-            model = new Qwen(profile.model, profile.url, profile.params);
-        else if (profile.api === 'xai')
-            model = new Grok(profile.model, profile.url, profile.params);
-        else if (profile.api === 'deepseek')
-            model = new DeepSeek(profile.model, profile.url, profile.params);
-        else if (profile.api === 'openrouter')
-            model = new OpenRouter(profile.model.replace('openrouter/', ''), profile.url, profile.params);
-        else if (profile.api === 'vllm')
-            model = new VLLM(profile.model.replace('vllm/', ''), profile.url, profile.params);
-        else
-            throw new Error('Unknown API:', profile.api);
-        return model;
-    }
     getName() {
         return this.profile.name;
     }
@@ -482,6 +360,4 @@ export class Prompter {
         logFile = path.join(logDir, logFile);
         await fs.appendFile(logFile, String(logEntry), 'utf-8');
     }
-
-
 }
diff --git a/src/models/qwen.js b/src/models/qwen.js
index 4dfacfe..a768b5b 100644
--- a/src/models/qwen.js
+++ b/src/models/qwen.js
@@ -3,6 +3,7 @@ import { getKey, hasKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class Qwen {
+    static prefix = 'qwen';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.params = params;
diff --git a/src/models/replicate.js b/src/models/replicate.js
index c8c3ba3..aa296c5 100644
--- a/src/models/replicate.js
+++ b/src/models/replicate.js
@@ -4,6 +4,7 @@ import { getKey } from '../utils/keys.js';
 
 // llama, mistral
 export class ReplicateAPI {
+    static prefix = 'replicate';
     constructor(model_name, url, params) {
         this.model_name = model_name;
         this.url = url;
diff --git a/src/models/vllm.js b/src/models/vllm.js
index e9116ef..d821983 100644
--- a/src/models/vllm.js
+++ b/src/models/vllm.js
@@ -6,6 +6,7 @@ import { getKey, hasKey } from '../utils/keys.js';
 import { strictFormat } from '../utils/text.js';
 
 export class VLLM {
+    static prefix = 'vllm';
     constructor(model_name, url) {
         this.model_name = model_name;
 
@@ -23,13 +24,14 @@ export class VLLM {
 
     async sendRequest(turns, systemMessage, stop_seq = '***') {
         let messages = [{ 'role': 'system', 'content': systemMessage }].concat(turns);
+        let model = this.model_name || "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B";
 
-        if (this.model_name.includes('deepseek') || this.model_name.includes('qwen')) {
+        if (model.includes('deepseek') || model.includes('qwen')) {
             messages = strictFormat(messages);
         }
 
         const pack = {
-            model: this.model_name || "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+            model: model,
             messages,
             stop: stop_seq,
         };