Merge pull request #403 from kolbytn/mistral

Mistral
Max Robinson 2025-01-07 13:48:49 -06:00 committed by GitHub
commit 8282b94996
9 changed files with 105 additions and 17 deletions

View file

@@ -13,7 +13,7 @@ Do not connect this bot to public servers with coding enabled. This project allo
- [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) (up to v1.21.1, recommend v1.20.4)
- [Node.js Installed](https://nodejs.org/) (at least v14)
- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | [Novita AI API Key](https://novita.ai/settings?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link#key-management) |
- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). | [Mistral API Key](https://docs.mistral.ai/getting-started/models/models_overview/) | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | [Novita AI API Key](https://novita.ai/settings?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link#key-management) |
## Install and Run
@@ -48,6 +48,7 @@ You can configure the agent's name, model, and prompts in their profile like `an
| Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) |
| Novita AI | `NOVITA_API_KEY` | `gryphe/mythomax-l2-13b` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) |
| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) |
| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) |
| xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) |
If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command:

View file

@@ -8,5 +8,6 @@
"HUGGINGFACE_API_KEY": "",
"QWEN_API_KEY": "",
"XAI_API_KEY": "",
"MISTRAL_API_KEY": "",
"DEEPSEEK_API_KEY": ""
}
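Once a key is obtained, the new entry is filled in like any of the others. A minimal sketch (the value is a placeholder and the surrounding entries are abbreviated):

```json
{
    "XAI_API_KEY": "",
    "MISTRAL_API_KEY": "your-mistral-api-key-here",
    "DEEPSEEK_API_KEY": ""
}
```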

View file

@@ -4,6 +4,7 @@
"@anthropic-ai/sdk": "^0.17.1",
"@google/generative-ai": "^0.2.1",
"@huggingface/inference": "^2.8.1",
"@mistralai/mistralai": "^1.1.0",
"google-translate-api-x": "^10.7.1",
"groq-sdk": "^0.5.0",
"minecraft-data": "^3.78.0",

profiles/mistral.json (new file, 5 lines)
View file

@@ -0,0 +1,5 @@
{
"name": "Mistral",
"model": "mistral/mistral-large-latest"
}
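The `mistral/` prefix in the model field is what the prompter changes below key on to route the bot to the new backend. As a sketch, assuming another Mistral model alias such as `mistral-small-latest` is available on your account, only that one field needs to change:

```json
{
    "name": "Mistral",
    "model": "mistral/mistral-small-latest"
}
```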

View file

@@ -17,9 +17,11 @@ export default
// "./profiles/gemini.json",
// "./profiles/llama.json",
// "./profiles/qwen.json",
// "./profiles/mistral.json",
// "./profiles/grok.json",
// "./profiles/mistral.json",
// "./profiles/deepseek.json",
// using more than 1 profile requires you to /msg each bot individually
],
"load_memory": false, // load memory from previous session

View file

@@ -406,7 +406,7 @@ export const actionsList = [
convoManager.endConversation(player_name);
return `Conversation with ${player_name} ended.`;
}
}
},
// { // commented for now, causes confusion with goal command
// name: '!npcGoal',
// description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.',
@@ -420,16 +420,4 @@ export const actionsList = [
// return 'Set npc goal: ' + agent.npc.data.curr_goal.name;
// }
// },
{
name: '!help',
description: 'Lists all available commands and their descriptions.',
perform: async function (agent) {
const commandList = actionsList.map(action => {
return `${action.name.padEnd(15)} - ${action.description}`; // Ensure consistent spacing
}).join('\n');
console.log(commandList);
return `Available Commands:\n${commandList}`;
}
},
];

View file

@@ -176,5 +176,17 @@ export const queryList = [
perform: async function (agent) {
return "Saved place names: " + agent.memory_bank.getKeys();
}
}
},
{
name: '!help',
description: 'Lists all available commands and their descriptions.',
perform: async function (agent) {
const commandList = actionsList.map(action => {
return `${action.name.padEnd(15)} - ${action.description}`; // Ensure consistent spacing
}).join('\n');
console.log(commandList);
return `Available Commands:\n${commandList}`;
}
},
];

View file

@@ -8,6 +8,7 @@ import { getCommand } from './commands/index.js';
import { Gemini } from '../models/gemini.js';
import { GPT } from '../models/gpt.js';
import { Claude } from '../models/claude.js';
import { Mistral } from '../models/mistral.js';
import { ReplicateAPI } from '../models/replicate.js';
import { Local } from '../models/local.js';
import { Novita } from '../models/novita.js';
@@ -51,8 +52,10 @@ export class Prompter {
chat.api = 'anthropic';
else if (chat.model.includes('huggingface/'))
chat.api = "huggingface";
else if (chat.model.includes('meta/') || chat.model.includes('mistralai/') || chat.model.includes('replicate/'))
else if (chat.model.includes('meta/') || chat.model.includes('replicate/'))
chat.api = 'replicate';
else if (chat.model.includes('mistralai/') || chat.model.includes("mistral/"))
chat.api = 'mistral';
else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/"))
chat.api = 'groq';
else if (chat.model.includes('novita/'))
@@ -79,6 +82,8 @@ export class Prompter {
this.chat_model = new ReplicateAPI(chat.model, chat.url);
else if (chat.api === 'ollama')
this.chat_model = new Local(chat.model, chat.url);
else if (chat.api === 'mistral')
this.chat_model = new Mistral(chat.model, chat.url);
else if (chat.api === 'groq') {
this.chat_model = new GroqCloudAPI(chat.model.replace('groq/', '').replace('groqcloud/', ''), chat.url, max_tokens ? max_tokens : 8192);
}
@@ -118,6 +123,8 @@
this.embedding_model = new Local(embedding.model, embedding.url);
else if (embedding.api === 'qwen')
this.embedding_model = new Qwen(embedding.model, embedding.url);
else if (embedding.api === 'mistral')
this.embedding_model = new Mistral(embedding.model, embedding.url);
else {
this.embedding_model = null;
console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. Using word overlap.');
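Taken together, these prompter changes route any model string containing `mistralai/` or `mistral/` to the new Mistral class (previously `mistralai/` fell through to Replicate), and select the same class for embeddings when the embedding API resolves to `mistral`. As a sketch, assuming the profile's embedding entry is passed through with the same `api`/`model` shape the code above reads, a profile using Mistral for both chat and embeddings could look like:

```json
{
    "name": "Mistral",
    "model": "mistral/mistral-large-latest",
    "embedding": { "api": "mistral" }
}
```

Note that `embed()` in the new class (below) always calls `mistral-embed`, regardless of any embedding model named in the profile.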

src/models/mistral.js (new file, 71 lines)
View file

@@ -0,0 +1,71 @@
import { Mistral as MistralClient } from '@mistralai/mistralai';
import { getKey } from '../utils/keys.js';
import { strictFormat } from '../utils/text.js';

export class Mistral {
    #client;

    constructor(model_name, url) {
        if (typeof url === "string") {
            console.warn("Mistral does not support custom URLs, ignoring!");
        }

        if (!getKey("MISTRAL_API_KEY")) {
            throw new Error("Mistral API Key missing, make sure to set MISTRAL_API_KEY in settings.json");
        }

        this.#client = new MistralClient({
            apiKey: getKey("MISTRAL_API_KEY")
        });

        this.model_name = model_name;

        // Prevents the following code from running when model not specified
        if (typeof this.model_name === "undefined") return;

        // Get the model name without the "mistral" or "mistralai" prefix,
        // e.g. "mistral/mistral-large-latest" -> "mistral-large-latest"
        if (typeof model_name.split("/")[1] !== "undefined") {
            this.model_name = model_name.split("/")[1];
        }
    }

    async sendRequest(turns, systemMessage) {
        let result;

        try {
            const model = this.model_name || "mistral-large-latest";

            const messages = [
                { role: "system", content: systemMessage }
            ];
            messages.push(...strictFormat(turns));

            const response = await this.#client.chat.complete({
                model,
                messages,
            });

            result = response.choices[0].message.content;
        } catch (err) {
            console.log(err);
            result = "My brain disconnected, try again.";
        }

        return result;
    }

    async embed(text) {
        const embedding = await this.#client.embeddings.create({
            model: "mistral-embed",
            inputs: text
        });

        return embedding.data[0].embedding;
    }
}
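For illustration, the new class can be exercised on its own roughly as follows — a sketch that assumes `MISTRAL_API_KEY` has been added to the keys file shown earlier and that conversation turns use the same `{ role, content }` objects the rest of the codebase passes through `strictFormat`:

```js
import { Mistral } from './src/models/mistral.js';

const model = new Mistral('mistral/mistral-large-latest');

// Chat completion: prior turns plus a system prompt.
const reply = await model.sendRequest(
    [{ role: 'user', content: 'What block should I use for a starter shelter?' }],
    'You are a helpful Minecraft assistant.'
);
console.log(reply);

// Embedding: the class always uses mistral-embed internally.
const vector = await model.embed('cobblestone shelter');
console.log(vector.length);
```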