Mirror of https://github.com/kolbytn/mindcraft.git (synced 2025-03-28 14:56:24 +01:00)
added openrouter
commit 0c8620fb3c (parent b59996fc1d)
4 changed files with 68 additions and 2 deletions
README.md

@@ -49,6 +49,7 @@ You can configure the agent's name, model, and prompts in their profile like `andy.json`
 | `qwen` | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) |
 | `mistral` | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) |
 | `xai` | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) |
 | `deepseek` | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) |
+| `openrouter` | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) |
 If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command:
 `ollama pull llama3 && ollama pull nomic-embed-text`
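
To use the new row, an agent profile just needs its `model` field to carry the `openrouter/` prefix; the prefix is what routes it to the new backend (see the prompter.js hunks below). A minimal sketch, written as a JavaScript object for illustration (in the repo this would live in a JSON profile file such as `andy.json`, and every field other than `model` here is only an assumption):

```js
// Hypothetical agent profile selecting a model via OpenRouter.
const profile = {
    name: 'andy',                                     // illustrative only
    model: 'openrouter/anthropic/claude-3.5-sonnet'
    // No explicit `api` field: the 'openrouter/' prefix lets the Prompter infer api = 'openrouter'.
};
```

The matching `OPENROUTER_API_KEY` entry is added to `keys.example.json` in the next hunk.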
keys.example.json

@@ -9,5 +9,7 @@
     "QWEN_API_KEY": "",
     "XAI_API_KEY": "",
     "MISTRAL_API_KEY": "",
-    "DEEPSEEK_API_KEY": ""
+    "DEEPSEEK_API_KEY": "",
+    "NOVITA_API_KEY": "",
+    "OPENROUTER_API_KEY": ""
 }
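
A quick way to confirm the new key is actually picked up at runtime is sketched below. It reuses the `hasKey` helper that `openrouter.js` imports further down; the import path (running from the repository root) and the exact behaviour of `hasKey` (reporting whether a non-empty value is configured) are assumptions about `src/utils/keys.js`, not something this diff shows.

```js
import { hasKey } from './src/utils/keys.js';

// Assumed semantics: true only when OPENROUTER_API_KEY has a non-empty value configured.
if (!hasKey('OPENROUTER_API_KEY')) {
    console.warn('OPENROUTER_API_KEY is not set; OpenRouter requests will fail to authenticate.');
}
```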
src/models/openrouter.js (new file, 58 lines)
@@ -0,0 +1,58 @@
+import OpenAIApi from 'openai';
+import { getKey, hasKey } from '../utils/keys.js';
+import { strictFormat } from '../utils/text.js';
+
+export class OpenRouter {
+    constructor(model_name, url) {
+        this.model_name = model_name;
+
+        let config = {};
+        config.baseURL = url || 'https://openrouter.ai/api/v1';
+
+        const apiKey = getKey('OPENROUTER_API_KEY');
+        if (!apiKey) {
+            console.error('Error: OPENROUTER_API_KEY not found. Make sure it is set properly.');
+        }
+
+        // Pass the API key to OpenAI compatible Api
+        config.apiKey = apiKey;
+
+        this.openai = new OpenAIApi(config);
+    }
+
+    async sendRequest(turns, systemMessage, stop_seq='*') {
+        let messages = [{ role: 'system', content: systemMessage }, ...turns];
+        messages = strictFormat(messages);
+
+        // Choose a valid model from openrouter.ai (for example, "openai/gpt-4o")
+        const pack = {
+            model: this.model_name,
+            messages,
+            stop: stop_seq
+        };
+
+        let res = null;
+        try {
+            console.log('Awaiting openrouter api response...');
+            let completion = await this.openai.chat.completions.create(pack);
+            if (!completion?.choices?.[0]) {
+                console.error('No completion or choices returned:', completion);
+                return 'No response received.';
+            }
+            if (completion.choices[0].finish_reason === 'length') {
+                throw new Error('Context length exceeded');
+            }
+            console.log('Received.');
+            res = completion.choices[0].message.content;
+        } catch (err) {
+            console.error('Error while awaiting response:', err);
+            // If the error indicates a context-length problem, we can slice the turns array, etc.
+            res = 'My brain disconnected, try again.';
+        }
+        return res;
+    }
+
+    async embed(text) {
+        throw new Error('Embeddings are not supported by Openrouter.');
+    }
+}
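
For reference, a minimal sketch of how the class above could be driven directly, outside the Prompter. The model id and messages are placeholders, it assumes `OPENROUTER_API_KEY` is configured, and the relative import assumes the script runs from the repository root:

```js
import { OpenRouter } from './src/models/openrouter.js';

// The Prompter strips the 'openrouter/' prefix before constructing the client,
// so the bare OpenRouter model id is passed here.
const model = new OpenRouter('anthropic/claude-3.5-sonnet');

const turns = [
    { role: 'user', content: 'Say hello in one short sentence.' }
];

// sendRequest prepends the system message and returns the assistant text,
// or the fallback string if the request errors out.
const reply = await model.sendRequest(turns, 'You are a helpful Minecraft assistant.');
console.log(reply);
```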
src/models/prompter.js

@@ -19,6 +19,7 @@ import { HuggingFace } from './huggingface.js';
 import { Qwen } from "./qwen.js";
 import { Grok } from "./grok.js";
 import { DeepSeek } from './deepseek.js';
+import { OpenRouter } from './openrouter.js';
 
 export class Prompter {
     constructor(agent, fp) {
@@ -124,6 +125,8 @@ export class Prompter {
         if (!profile.api) {
             if (profile.model.includes('gemini'))
                 profile.api = 'google';
+            else if (profile.model.includes('openrouter/'))
+                profile.api = 'openrouter'; // must do before others bc shares model names
             else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3'))
                 profile.api = 'openai';
             else if (profile.model.includes('claude'))
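
The inline comment about ordering is the important detail in this hunk: an OpenRouter model id usually embeds another provider's model name, so the `openrouter/` check has to run before the `gpt`/`claude` checks. A standalone illustration (not code from the repo):

```js
const name = 'openrouter/anthropic/claude-3.5-sonnet';

console.log(name.includes('claude'));      // true  -> would otherwise match the anthropic branch
console.log(name.includes('openrouter/')); // true  -> checked first, so profile.api becomes 'openrouter'
```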
@@ -159,7 +162,7 @@ export class Prompter {
         else if (profile.api === 'anthropic')
             model = new Claude(profile.model, profile.url, profile.params);
         else if (profile.api === 'replicate')
-            model = new ReplicateAPI(profile.model, profile.url, profile.params);
+            model = new ReplicateAPI(profile.model.replace('replicate/', ''), profile.url, profile.params);
         else if (profile.api === 'ollama')
             model = new Local(profile.model, profile.url, profile.params);
         else if (profile.api === 'mistral')
@@ -176,6 +179,8 @@ export class Prompter {
             model = new Grok(profile.model, profile.url, profile.params);
         else if (profile.api === 'deepseek')
             model = new DeepSeek(profile.model, profile.url, profile.params);
+        else if (profile.api === 'openrouter')
+            model = new OpenRouter(profile.model.replace('openrouter/', ''), profile.url, profile.params);
         else
             throw new Error('Unknown API:', profile.api);
         return model;
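
Putting the two prompter.js changes together, the README's example model name is routed like this (a trace of the logic above with hypothetical values, not additional repo code):

```js
const profile = { model: 'openrouter/anthropic/claude-3.5-sonnet' };

// API inference hunk: the 'openrouter/' prefix matches first, so profile.api becomes 'openrouter'.
// Model construction hunk: the prefix is stripped before the client sees the model id.
console.log(profile.model.replace('openrouter/', '')); // 'anthropic/claude-3.5-sonnet'
```

One small observation: the `OpenRouter` constructor added above only accepts `(model_name, url)`, so the `profile.params` argument passed here is accepted at the call site but not used by the class.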