mirror of
https://github.com/kolbytn/mindcraft.git
synced 2025-03-28 14:56:24 +01:00
Merge branch 'main' into main
This commit is contained in:
commit
4b19a21a1c
6 changed files with 95 additions and 17 deletions
|
@ -49,6 +49,8 @@ You can configure the agent's name, model, and prompts in their profile like `an
|
|||
| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) |
|
||||
| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) |
|
||||
| xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) |
|
||||
| Deepseek | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) |
|
||||
| Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) |
|
||||
| glhf.chat | `GHLF_API_KEY` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) |
|
||||
| Hyperbolic | `HYPERBOLIC_API_KEY` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) |
|
||||
|
||||
|
@ -133,7 +135,7 @@ All apis have default models and urls, so those fields are optional. The `params
|
|||
|
||||
Embedding models are used to embed text and efficiently select relevant examples for conversation and coding.
|
||||
|
||||
Supported Embedding APIs: `openai`, `google`, `replicate`, `huggingface`, `groq`, `novita`
|
||||
Supported Embedding APIs: `openai`, `google`, `replicate`, `huggingface`, `novita`
|
||||
|
||||
If you try to use an unsupported model, it will default to a simple word-overlap method. Expect reduced performance; we recommend mixing APIs to ensure embedding support.
|
||||
|
||||
|
|
|
@ -11,5 +11,7 @@
|
|||
"MISTRAL_API_KEY": "",
|
||||
"DEEPSEEK_API_KEY": "",
|
||||
"GHLF_API_KEY": "",
|
||||
"HYPERBOLIC_API_KEY": ""
|
||||
"HYPERBOLIC_API_KEY": "",
|
||||
"NOVITA_API_KEY": "",
|
||||
"OPENROUTER_API_KEY": ""
|
||||
}
|
||||
|
|
|
@ -13,13 +13,18 @@ export class SkillLibrary {
|
|||
const skillDocs = getSkillDocs();
|
||||
this.skill_docs = skillDocs;
|
||||
if (this.embedding_model) {
|
||||
const embeddingPromises = skillDocs.map((doc) => {
|
||||
return (async () => {
|
||||
let func_name_desc = doc.split('\n').slice(0, 2).join('');
|
||||
this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc);
|
||||
})();
|
||||
});
|
||||
await Promise.all(embeddingPromises);
|
||||
try {
|
||||
const embeddingPromises = skillDocs.map((doc) => {
|
||||
return (async () => {
|
||||
let func_name_desc = doc.split('\n').slice(0, 2).join('');
|
||||
this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc);
|
||||
})();
|
||||
});
|
||||
await Promise.all(embeddingPromises);
|
||||
} catch (error) {
|
||||
console.warn('Error with embedding model, using word-overlap instead.');
|
||||
this.embedding_model = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
58
src/models/openrouter.js
Normal file
58
src/models/openrouter.js
Normal file
|
@ -0,0 +1,58 @@
|
|||
import OpenAIApi from 'openai';
|
||||
import { getKey, hasKey } from '../utils/keys.js';
|
||||
import { strictFormat } from '../utils/text.js';
|
||||
|
||||
// Chat-model wrapper for OpenRouter (https://openrouter.ai), which exposes an
// OpenAI-compatible API, so we reuse the OpenAI client with a different baseURL.
export class OpenRouter {
    /**
     * @param {string} model_name - OpenRouter model id, e.g. "anthropic/claude-3.5-sonnet".
     * @param {string} [url] - Optional API base URL; defaults to the public OpenRouter endpoint.
     * @param {object} [params] - Optional extra request parameters (temperature, max_tokens, ...)
     *     merged into each completion request. Callers (Prompter) already pass profile.params;
     *     previously this argument was silently dropped.
     */
    constructor(model_name, url, params) {
        this.model_name = model_name;
        this.params = params || {};

        const config = {};
        config.baseURL = url || 'https://openrouter.ai/api/v1';

        const apiKey = getKey('OPENROUTER_API_KEY');
        if (!apiKey) {
            // Warn but don't throw: the request itself will surface the auth failure.
            console.error('Error: OPENROUTER_API_KEY not found. Make sure it is set properly.');
        }

        // Pass the API key to the OpenAI-compatible client.
        config.apiKey = apiKey;

        this.openai = new OpenAIApi(config);
    }

    /**
     * Send a chat completion request.
     * @param {Array<{role: string, content: string}>} turns - Prior conversation turns.
     * @param {string} systemMessage - System prompt prepended to the turns.
     * @param {string} [stop_seq='*'] - Stop sequence forwarded to the API.
     * @returns {Promise<string>} The model's reply, or a fallback message on error.
     */
    async sendRequest(turns, systemMessage, stop_seq='*') {
        let messages = [{ role: 'system', content: systemMessage }, ...turns];
        messages = strictFormat(messages);

        // Choose a valid model from openrouter.ai (for example, "openai/gpt-4o")
        const pack = {
            model: this.model_name,
            messages,
            stop: stop_seq,
            // Forward user-configured params (temperature, max_tokens, ...).
            ...(this.params),
        };

        let res = null;
        try {
            console.log('Awaiting openrouter api response...');
            const completion = await this.openai.chat.completions.create(pack);
            if (!completion?.choices?.[0]) {
                console.error('No completion or choices returned:', completion);
                return 'No response received.';
            }
            if (completion.choices[0].finish_reason === 'length') {
                // Treat truncation as an error so the caller can retry/trim context.
                throw new Error('Context length exceeded');
            }
            console.log('Received.');
            res = completion.choices[0].message.content;
        } catch (err) {
            console.error('Error while awaiting response:', err);
            // If the error indicates a context-length problem, we can slice the turns array, etc.
            res = 'My brain disconnected, try again.';
        }
        return res;
    }

    /**
     * OpenRouter does not provide an embeddings endpoint in this client.
     * @throws {Error} Always.
     */
    async embed(text) {
        throw new Error('Embeddings are not supported by Openrouter.');
    }
}
|
|
@ -21,6 +21,7 @@ import { Grok } from "./grok.js";
|
|||
import { DeepSeek } from './deepseek.js';
|
||||
import { hyperbolic } from './hyperbolic.js';
|
||||
import { glhf } from './glhf.js';
|
||||
import { OpenRouter } from './openrouter.js';
|
||||
|
||||
export class Prompter {
|
||||
constructor(agent, fp) {
|
||||
|
@ -94,8 +95,6 @@ export class Prompter {
|
|||
this.embedding_model = new Mistral(embedding.model, embedding.url);
|
||||
else if (embedding.api === 'huggingface')
|
||||
this.embedding_model = new HuggingFace(embedding.model, embedding.url);
|
||||
else if (embedding.api === 'groq')
|
||||
this.embedding_model = new GroqCloudAPI(embedding.model, embedding.url);
|
||||
else if (embedding.api === 'novita')
|
||||
this.embedding_model = new Novita(embedding.model, embedding.url);
|
||||
else {
|
||||
|
@ -126,6 +125,8 @@ export class Prompter {
|
|||
if (!profile.api) {
|
||||
if (profile.model.includes('gemini'))
|
||||
profile.api = 'google';
|
||||
else if (profile.model.includes('openrouter/'))
|
||||
profile.api = 'openrouter'; // must do before others bc shares model names
|
||||
else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3'))
|
||||
profile.api = 'openai';
|
||||
else if (profile.model.includes('claude'))
|
||||
|
@ -150,8 +151,10 @@ export class Prompter {
|
|||
profile.api = 'xai';
|
||||
else if (profile.model.includes('deepseek'))
|
||||
profile.api = 'deepseek';
|
||||
else
|
||||
profile.api = 'ollama';
|
||||
else if (profile.model.includes('llama3'))
|
||||
profile.api = 'ollama';
|
||||
else
|
||||
throw new Error('Unknown model:', profile.model);
|
||||
}
|
||||
return profile;
|
||||
}
|
||||
|
@ -165,7 +168,7 @@ export class Prompter {
|
|||
else if (profile.api === 'anthropic')
|
||||
model = new Claude(profile.model, profile.url, profile.params);
|
||||
else if (profile.api === 'replicate')
|
||||
model = new ReplicateAPI(profile.model, profile.url, profile.params);
|
||||
model = new ReplicateAPI(profile.model.replace('replicate/', ''), profile.url, profile.params);
|
||||
else if (profile.api === 'ollama')
|
||||
model = new Local(profile.model, profile.url, profile.params);
|
||||
else if (profile.api === 'mistral')
|
||||
|
@ -186,6 +189,8 @@ export class Prompter {
|
|||
model = new Grok(profile.model, profile.url, profile.params);
|
||||
else if (profile.api === 'deepseek')
|
||||
model = new DeepSeek(profile.model, profile.url, profile.params);
|
||||
else if (profile.api === 'openrouter')
|
||||
model = new OpenRouter(profile.model.replace('openrouter/', ''), profile.url, profile.params);
|
||||
else
|
||||
throw new Error('Unknown API:', profile.api);
|
||||
return model;
|
||||
|
@ -209,12 +214,18 @@ export class Prompter {
|
|||
this.convo_examples.load(this.profile.conversation_examples),
|
||||
this.coding_examples.load(this.profile.coding_examples),
|
||||
this.skill_libary.initSkillLibrary()
|
||||
]);
|
||||
]).catch(error => {
|
||||
// Preserve error details
|
||||
console.error('Failed to initialize examples. Error details:', error);
|
||||
console.error('Stack trace:', error.stack);
|
||||
throw error;
|
||||
});
|
||||
|
||||
console.log('Examples initialized.');
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize examples:', error);
|
||||
throw error;
|
||||
console.error('Stack trace:', error.stack);
|
||||
throw error; // Re-throw with preserved details
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ export class Examples {
|
|||
// Wait for all embeddings to complete
|
||||
await Promise.all(embeddingPromises);
|
||||
} catch (err) {
|
||||
console.warn('Error with embedding model, using word overlap instead:', err);
|
||||
console.warn('Error with embedding model, using word-overlap instead.');
|
||||
this.model = null;
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
Reference in a new issue