From ac4f10210bdb06a0e40629c497ef76201424136d Mon Sep 17 00:00:00 2001 From: Saintdoggie Date: Sun, 3 Nov 2024 19:22:05 -0800 Subject: [PATCH 01/21] feat: add mistral provider --- README.md | 3 +- package.json | 1 + profiles/mistral.json | 203 ++++++++++++++++++++++++++++++++++++++++++ src/agent/prompter.js | 9 +- src/models/mistral.js | 71 +++++++++++++++ 5 files changed, 285 insertions(+), 2 deletions(-) create mode 100644 profiles/mistral.json create mode 100644 src/models/mistral.js diff --git a/README.md b/README.md index 7e4c047..b4ca226 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ Do not connect this bot to public servers with coding enabled. This project allo - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) (up to v1.21.1, recommend v1.20.4) - [Node.js Installed](https://nodejs.org/) (at least v14) -- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | +- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). 
| [Mistral API Key](https://docs.mistral.ai/getting-started/models/models_overview/) | [Ollama Installed](https://ollama.com/download). | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | ## Install and Run @@ -49,6 +49,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | | Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | +| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: `ollama pull llama3 && ollama pull nomic-embed-text` diff --git a/package.json b/package.json index 97aa04a..375b900 100644 --- a/package.json +++ b/package.json @@ -4,6 +4,7 @@ "@anthropic-ai/sdk": "^0.17.1", "@google/generative-ai": "^0.2.1", "@huggingface/inference": "^2.8.1", + "@mistralai/mistralai": "^1.1.0", "google-translate-api-x": "^10.7.1", "groq-sdk": "^0.5.0", "minecraft-data": "^3.78.0", diff --git a/profiles/mistral.json b/profiles/mistral.json new file mode 100644 index 0000000..dde9702 --- /dev/null +++ b/profiles/mistral.json @@ -0,0 +1,203 @@ +{ + "name": "Mistral", + + "model": "mistral/mistral-large-latest", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if 
you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! 
\n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", + + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "idle_staring": true, + "cheat": false + }, + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. 
!collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some cobblestone"}, + {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks('stone', 10)"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! 
What next?"} + ], + + [ + {"role": "system", "content": "Code output: Action failed."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction('Try again to complete the task.')"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction('Build a cobblestone house with a door, furnishings, and window.')"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction('Build a dirt tower 5 tall.')"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ], + + [ + {"role": "system", "content": "dorfus: set a goal to build whatever you want endlessly"}, + {"role": "assistant", "content": "Sounds fun! !goal('Build something creative, then move over, and build again. Use separate newActions. 
Repeat forever.')"} + ], + + [ + {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, + {"role": "assistant", "content": "Sure, let me set a goal. !goal('Survive the game forever. Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.')"} + ], + + [ + {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, + {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal('1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.')"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: 'Get a wooden pickaxe'. Respond:"}, + {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock('oak_log', 10)"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem('oak_plank', 9)"}, + {"role": "system", "content": "Successfully crafted 9 oak_planks."}, + {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem('stick', 2)"}, + {"role": "system", "content": "Successfully crafted 2 sticks."}, + {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. !craftItem('wooden_pickaxe', 1)"}, + {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, + {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. 
!endGoal"} + ], + + [ + {"role": "user", "content": "gorbotron: do whatever you want forever endlessly"}, + {"role": "assistant", "content": "Okay, I'll set a goal for that. !goal('Do whatever I want forever. Follow whatever is most interesting. Be curious, creative, and open ended.')"} + ], + + [ + {"role": "user", "content": "zeeber: set a goal to build a castle"}, + {"role": "assistant", "content": "Sure thing. !goal('Build a castle with a moat, towers, and a drawbridge with a series of newactions. Include lighting, decorations, and furnishings. Use ladders. When satisfied, end the goal.')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 03fd6a1..a63cec4 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -8,6 +8,7 @@ import { getCommand } from './commands/index.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; import { Claude } from '../models/claude.js'; +import { Mistral } from '../models/mistral.js'; import { ReplicateAPI } from '../models/replicate.js'; import { Local } from '../models/local.js'; import { GroqCloudAPI } from '../models/groq.js'; 
@@ -40,8 +41,10 @@ export class Prompter { chat.api = 'anthropic'; else if (chat.model.includes('huggingface/')) chat.api = "huggingface"; - else if (chat.model.includes('meta/') || chat.model.includes('mistralai/') || chat.model.includes('replicate/')) + else if (chat.model.includes('meta/') || chat.model.includes('replicate/')) chat.api = 'replicate'; + else if (chat.model.includes('mistralai/') || chat.model.includes("mistral/")) + chat.api = 'mistral'; else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/")) chat.api = 'groq'; else if (chat.model.includes('qwen')) @@ -62,6 +65,8 @@ export class Prompter { this.chat_model = new ReplicateAPI(chat.model, chat.url); else if (chat.api === 'ollama') this.chat_model = new Local(chat.model, chat.url); + else if (chat.api === 'mistral') + this.chat_model = new Mistral(chat.model, chat.url); else if (chat.api === 'groq') { this.chat_model = new GroqCloudAPI(chat.model.replace('groq/', '').replace('groqcloud/', ''), chat.url, max_tokens ? max_tokens : 8192); } @@ -94,6 +99,8 @@ export class Prompter { this.embedding_model = new Local(embedding.model, embedding.url); else if (embedding.api === 'qwen') this.embedding_model = new Qwen(embedding.model, embedding.url); + else if (embedding.api === 'mistral') + this.embedding_model = new Mistral(embedding.model, embedding.url); else { this.embedding_model = null; console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. 
Using word overlap.'); diff --git a/src/models/mistral.js b/src/models/mistral.js new file mode 100644 index 0000000..3b41f78 --- /dev/null +++ b/src/models/mistral.js @@ -0,0 +1,71 @@ +import { Mistral as MistralClient } from '@mistralai/mistralai'; +import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +export class Mistral { + #client; + + constructor(model_name, url) { + + if (typeof url === "string") { + console.warn("Mistral does not support custom URL's, ignoring!"); + } + + if (!getKey("MISTRAL_API_KEY")) { + throw new Error("Mistral API Key missing, make sure to set MISTRAL_API_KEY in settings.json") + } + + this.#client = new MistralClient( + { + apiKey: getKey("MISTRAL_API_KEY") + } + ); + + + this.model_name = model_name; + + // Prevents the following code from running when model not specified + if (typeof this.model_name === "undefined") return; + + // get the model name without the "mistral" or "mistralai" prefix + // e.g "mistral/mistral-large-latest" -> "mistral-large-latest" + if (typeof model_name.split("/")[1] !== "undefined") { + this.model_name = model_name.split("/")[1]; + } + } + + async sendRequest(turns, systemMessage) { + + let result; + + try { + const model = this.model_name || "mistral-large-latest"; + + const messages = [ + { role: "system", content: systemMessage } + ]; + messages.push(...strictFormat(turns)); + + const response = await this.#client.chat.complete({ + model, + messages, + }); + + result = response.choices[0].message.content; + } catch (err) { + console.log(err) + + result = "My brain disconnected, try again."; + } + + return result; + } + + async embed(text) { + const embedding = await this.#client.embeddings.create({ + model: "mistral-embed", + inputs: text + }); + return embedding.data[0].embedding; + } +} \ No newline at end of file From 4c63188533f570dac21d7b2eda6d68c31e172b5a Mon Sep 17 00:00:00 2001 From: Saintdoggie Date: Sun, 3 Nov 2024 19:27:59 -0800 Subject: [PATCH 
02/21] docs: add mistral profile comment to settings.js --- settings.js | 1 + 1 file changed, 1 insertion(+) diff --git a/settings.js b/settings.js index b38dede..1b8e27f 100644 --- a/settings.js +++ b/settings.js @@ -12,6 +12,7 @@ export default // "./profiles/gemini.json", // "./profiles/llama.json", // "./profiles/qwen.json", + // "./profiles/mistral.json", // using more than 1 profile requires you to /msg each bot indivually ], From 5954c0a177ab5bdf8a081aa0e5f1e0f9d1c429b4 Mon Sep 17 00:00:00 2001 From: RainbowBird Date: Mon, 6 Jan 2025 14:59:51 +0800 Subject: [PATCH 03/21] fix: typo fix --- profiles/_default.json | 6 +++--- profiles/andy_npc.json | 4 ++-- src/agent/agent.js | 4 ++-- src/agent/agent_proxy.js | 2 +- src/agent/conversation.js | 28 ++++++++++++++-------------- src/agent/library/skills.js | 2 +- src/agent/prompter.js | 2 +- 7 files changed, 24 insertions(+), 24 deletions(-) diff --git a/profiles/_default.json b/profiles/_default.json index 2a67725..ac244fc 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -3,11 +3,11 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. 
This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. 
If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", - "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have recieved a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. 
Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", + "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. 
Your decision:", "modes": { "self_preservation": true, @@ -242,4 +242,4 @@ ] ] -} \ No newline at end of file +} diff --git a/profiles/andy_npc.json b/profiles/andy_npc.json index 8134c5e..c1f8291 100644 --- a/profiles/andy_npc.json +++ b/profiles/andy_npc.json @@ -7,7 +7,7 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$SELF_PROMPT\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! 
DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$SELF_PROMPT\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$SELF_PROMPT\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. 
Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", @@ -210,4 +210,4 @@ ] ] -} \ No newline at end of file +} diff --git a/src/agent/agent.js b/src/agent/agent.js index 79e1d29..9a35cf0 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -129,7 +129,7 @@ export class Agent { console.log(this.name, 'received message from', username, ':', message); if (convoManager.isOtherAgent(username)) { - console.warn('recieved whisper from other bot??') + console.warn('received whisper from other bot??') } else { let translation = await handleEnglishTranslation(message); @@ -164,7 +164,7 @@ export class Agent { message: `You have restarted and this message is auto-generated. 
Continue the conversation with me.`, start: true }; - convoManager.recieveFromBot(this.last_sender, msg_package); + convoManager.receiveFromBot(this.last_sender, msg_package); } } else if (init_message) { diff --git a/src/agent/agent_proxy.js b/src/agent/agent_proxy.js index feeba37..500dcab 100644 --- a/src/agent/agent_proxy.js +++ b/src/agent/agent_proxy.js @@ -31,7 +31,7 @@ class AgentServerProxy { }); this.socket.on('chat-message', (agentName, json) => { - convoManager.recieveFromBot(agentName, json); + convoManager.receiveFromBot(agentName, json); }); this.socket.on('agents-update', (agents) => { diff --git a/src/agent/conversation.js b/src/agent/conversation.js index 3d0f759..1061ea8 100644 --- a/src/agent/conversation.js +++ b/src/agent/conversation.js @@ -169,10 +169,10 @@ class ConversationManager { sendBotChatToServer(send_to, json); } - async recieveFromBot(sender, recieved) { + async receiveFromBot(sender, received) { const convo = this._getConvo(sender); - if (convo.ignore_until_start && !recieved.start) + if (convo.ignore_until_start && !received.start) return; // check if any convo is active besides the sender @@ -182,13 +182,13 @@ class ConversationManager { return; } - if (recieved.start) { + if (received.start) { convo.reset(); this.startConversationFromOtherBot(sender); } this._clearMonitorTimeouts(); - convo.queue(recieved); + convo.queue(received); // responding to conversation takes priority over self prompting if (agent.self_prompter.on){ @@ -196,7 +196,7 @@ class ConversationManager { self_prompter_paused = true; } - _scheduleProcessInMessage(sender, recieved, convo); + _scheduleProcessInMessage(sender, received, convo); } responseScheduledFor(sender) { @@ -278,15 +278,15 @@ The logic is as follows: - If only the other bot is busy, respond with a long delay to allow it to finish short actions (ex check inventory) - If I'm busy but other bot isn't, let LLM decide whether to respond - If both bots are busy, don't respond until someone is done, 
excluding a few actions that allow fast responses -- New messages recieved during the delay will reset the delay following this logic, and be queued to respond in bulk +- New messages received during the delay will reset the delay following this logic, and be queued to respond in bulk */ const talkOverActions = ['stay', 'followPlayer', 'mode:']; // all mode actions const fastDelay = 200; const longDelay = 5000; -async function _scheduleProcessInMessage(sender, recieved, convo) { +async function _scheduleProcessInMessage(sender, received, convo) { if (convo.inMessageTimer) clearTimeout(convo.inMessageTimer); - let otherAgentBusy = containsCommand(recieved.message); + let otherAgentBusy = containsCommand(received.message); const scheduleResponse = (delay) => convo.inMessageTimer = setTimeout(() => _processInMessageQueue(sender), delay); @@ -307,7 +307,7 @@ async function _scheduleProcessInMessage(sender, recieved, convo) { scheduleResponse(fastDelay); } else { - let shouldRespond = await agent.prompter.promptShouldRespondToBot(recieved.message); + let shouldRespond = await agent.prompter.promptShouldRespondToBot(received.message); console.log(`${agent.name} decided to ${shouldRespond?'respond':'not respond'} to ${sender}`); if (shouldRespond) scheduleResponse(fastDelay); @@ -335,19 +335,19 @@ function _compileInMessages(convo) { return pack; } -function _handleFullInMessage(sender, recieved) { - console.log(`${agent.name} responding to "${recieved.message}" from ${sender}`); +function _handleFullInMessage(sender, received) { + console.log(`${agent.name} responding to "${received.message}" from ${sender}`); const convo = convoManager._getConvo(sender); convo.active = true; - let message = _tagMessage(recieved.message); - if (recieved.end) { + let message = _tagMessage(received.message); + if (received.end) { convoManager.endConversation(sender); message = `Conversation with ${sender} ended with message: "${message}"`; sender = 'system'; // bot will respond to system 
instead of the other bot } - else if (recieved.start) + else if (received.start) agent.shut_up = false; convo.inMessageTimer = null; agent.handleMessage(sender, message); diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 23f30ad..c0d448f 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -905,7 +905,7 @@ export async function giveToPlayer(bot, itemType, username, num=1) { bot.once('playerCollect', (collector, collected) => { console.log(collected.name); if (collector.username === username) { - log(bot, `${username} recieved ${itemType}.`); + log(bot, `${username} received ${itemType}.`); given = true; } }); diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 3679fcf..732abae 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -248,7 +248,7 @@ export class Prompter { continue; } if (current_msg_time !== this.most_recent_msg_time) { - console.warn(this.agent.name + ' recieved new message while generating, discarding old response.'); + console.warn(this.agent.name + ' received new message while generating, discarding old response.'); return ''; } return generation; From a9a55b63e57a39236ffa5ee4db76dfe7b20f3f6e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 7 Jan 2025 13:41:14 -0600 Subject: [PATCH 04/21] added key/model to example, fixed !help --- keys.example.json | 1 + profiles/mistral.json | 200 +--------------------------------- settings.js | 1 + src/agent/commands/actions.js | 14 +-- src/agent/commands/queries.js | 14 ++- 5 files changed, 17 insertions(+), 213 deletions(-) diff --git a/keys.example.json b/keys.example.json index 0e3d7dd..407f6f5 100644 --- a/keys.example.json +++ b/keys.example.json @@ -8,5 +8,6 @@ "HUGGINGFACE_API_KEY": "", "QWEN_API_KEY": "", "XAI_API_KEY": "", + "MISTRAL_API_KEY": "", "DEEPSEEK_API_KEY": "" } diff --git a/profiles/mistral.json b/profiles/mistral.json index dde9702..3486924 100644 --- a/profiles/mistral.json +++ 
b/profiles/mistral.json @@ -1,203 +1,5 @@ { "name": "Mistral", - "model": "mistral/mistral-large-latest", - - "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! 
Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", - - "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", - - "modes": { - "self_preservation": true, - "unstuck": true, - "cowardice": false, - "self_defense": true, - "hunting": true, - "item_collecting": true, - "torch_placing": true, - "idle_staring": true, - "cheat": false - }, - - "conversation_examples": [ - [ - {"role": "user", "content": "miner_32: Hey! What are you up to?"}, - {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} - ], - - [ - {"role": "user", "content": "grombo_Xx: What do you see?"}, - {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} - ], - - [ - {"role": "user", "content": "greg: Collect 10 wood"}, - {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. 
I'll collect oak logs. !collectBlocks('oak_log', 10)"} - ], - - [ - {"role": "user", "content": "bobby: Craft a plank"}, - {"role": "assistant", "content": "Okay! !craftable"}, - {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, - {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, - {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, - {"role": "assistant", "content": "I've crafted 4 spruce planks!"} - ], - - [ - {"role": "user", "content": "zZZn98: come here"}, - {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, - {"role": "system", "content": "Arrived at player."}, - {"role": "assistant", "content": "Here!"}, - {"role": "user", "content": "zZZn98: no come right where I am"}, - {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} - ], - - [ - {"role": "user", "content": "hanky: collect some cobblestone"}, - {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks('stone', 10)"} - ], - - [ - {"role": "user", "content": "joe: Follow me!"}, - {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, - {"role": "user", "content": "joe: Not so close!"}, - {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} - ], - - [ - {"role": "user", "content": "hanky: kill that zombie!"}, - {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, - {"role": "system", "content": "Code Output:\nNo zombie nearby"}, - {"role": "assistant", "content": "I could not find a zombie nearby."} - ], - - [ - {"role": "user", "content": "abc: stop"}, - {"role": "assistant", "content": "Sure. !stop"}, - {"role": "system", "content": "Agent action stopped."}, - {"role": "assistant", "content": "I've stopped! 
What next?"} - ], - - [ - {"role": "system", "content": "Code output: Action failed."}, - {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction('Try again to complete the task.')"} - ], - - [ - {"role": "user", "content": "trixy88: craft some sticks"}, - {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, - {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, - {"role": "assistant", "content": "I've crafted 16 sticks!"} - ], - - [ - {"role": "user", "content": "poppy: build a cobblestone house"}, - {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction('Build a cobblestone house with a door, furnishings, and window.')"} - ], - - [ - {"role": "user", "content": "zib: build a dirt tower"}, - {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction('Build a dirt tower 5 tall.')"} - ], - - [ - {"role": "user", "content": "umpire_man: follow me without hunting"}, - {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, - {"role": "system", "content": "'hunting' mode disabled."}, - {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} - ], - - [ - {"role": "user", "content": "brug: Remember that your base is here."}, - {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, - {"role": "user", "content": "brug: Go to your base."}, - {"role": "assistant", "content": "On my way! !goToPlace('base')"} - ], - - [ - {"role": "system", "content": "dorfus: set a goal to build whatever you want endlessly"}, - {"role": "assistant", "content": "Sounds fun! !goal('Build something creative, then move over, and build again. Use separate newActions. 
Repeat forever.')"} - ], - - [ - {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, - {"role": "assistant", "content": "Sure, let me set a goal. !goal('Survive the game forever. Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.')"} - ], - - [ - {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, - {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal('1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.')"} - ], - [ - {"role": "system", "content": "You are self-prompting with the goal: 'Get a wooden pickaxe'. Respond:"}, - {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock('oak_log', 10)"}, - {"role": "system", "content": "Successfully collected 9 wood."}, - {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem('oak_plank', 9)"}, - {"role": "system", "content": "Successfully crafted 9 oak_planks."}, - {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem('stick', 2)"}, - {"role": "system", "content": "Successfully crafted 2 sticks."}, - {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. !craftItem('wooden_pickaxe', 1)"}, - {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, - {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. 
!endGoal"} - ], - - [ - {"role": "user", "content": "gorbotron: do whatever you want forever endlessly"}, - {"role": "assistant", "content": "Okay, I'll set a goal for that. !goal('Do whatever I want forever. Follow whatever is most interesting. Be curious, creative, and open ended.')"} - ], - - [ - {"role": "user", "content": "zeeber: set a goal to build a castle"}, - {"role": "assistant", "content": "Sure thing. !goal('Build a castle with a moat, towers, and a drawbridge with a series of newactions. Include lighting, decorations, and furnishings. Use ladders. When satisfied, end the goal.')"} - ] - ], - - "coding_examples": [ - [ - {"role": "user", "content": "greg: Collect 10 wood"}, - {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, - {"role": "system", "content": "Successfully collected 9 wood."}, - {"role": "assistant", "content": "I collected 9 oak logs, what next?"} - ], - [ - {"role": "user", "content": "bobby: cook some chicken"}, - {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, - {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, - {"role": "assistant", "content": "I have cooked 8 chicken."} - ], - [ - {"role": "user", "content": "zZZn98: come here"}, - {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, - {"role": "system", "content": "Code execution finished successfully."}, - {"role": "assistant", "content": "Here!"} - ], - [ - {"role": "user", "content": "maya: go to the nearest oak log"}, - {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, - {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, - {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, - {"role": "system", "content": "Arrived at location."}, - {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} - ], - [ - {"role": "user", "content": "234jeb: build a little tower"}, - {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, - {"role": "system", "content": "Successfully placed 5 dirt."}, - {"role": "assistant", "content": "I built a little tower!"} - ], - [ - {"role": "user", "content": "brug: build a dirt house"}, - {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} - ] - ] - + "model": "mistral/mistral-large-latest" } \ No newline at end of file diff --git a/settings.js b/settings.js index f5c1ddd..add5297 100644 --- a/settings.js +++ b/settings.js @@ -19,6 +19,7 @@ export default // "./profiles/qwen.json", // "./profiles/mistral.json", // "./profiles/grok.json", + // "./profiles/mistral.json", // "./profiles/deepseek.json", // using more than 1 profile requires you to /msg each bot indivually diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 6ee481b..34e6693 100644 --- a/src/agent/commands/actions.js +++ 
b/src/agent/commands/actions.js @@ -406,7 +406,7 @@ export const actionsList = [ convoManager.endConversation(player_name); return `Converstaion with ${player_name} ended.`; } - } + }, // { // commented for now, causes confusion with goal command // name: '!npcGoal', // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', @@ -420,16 +420,4 @@ export const actionsList = [ // return 'Set npc goal: ' + agent.npc.data.curr_goal.name; // } // }, - { - name: '!help', - description: 'Lists all available commands and their descriptions.', - perform: async function (agent) { - const commandList = actionsList.map(action => { - return `${action.name.padEnd(15)} - ${action.description}`; // Ensure consistent spacing - }).join('\n'); - - console.log(commandList); - return `Available Commands:\n${commandList}`; - } - }, ]; diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index b4dc74a..ad02af5 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -176,5 +176,17 @@ export const queryList = [ perform: async function (agent) { return "Saved place names: " + agent.memory_bank.getKeys(); } - } + }, + { + name: '!help', + description: 'Lists all available commands and their descriptions.', + perform: async function (agent) { + const commandList = actionsList.map(action => { + return `${action.name.padEnd(15)} - ${action.description}`; // Ensure consistent spacing + }).join('\n'); + + console.log(commandList); + return `Available Commands:\n${commandList}`; + } + }, ]; From 85ef0e8103a743d617b88fd4b7a47e71a72edc30 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 9 Jan 2025 12:43:29 -0600 Subject: [PATCH 05/21] fix help command --- src/agent/commands/queries.js | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index ad02af5..ef65b4d 100644 --- 
a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -1,5 +1,6 @@ import * as world from '../library/world.js'; import * as mc from '../../utils/mcdata.js'; +import { getCommandDocs } from './index.js'; import convoManager from '../conversation.js'; const pad = (str) => { @@ -181,12 +182,7 @@ export const queryList = [ name: '!help', description: 'Lists all available commands and their descriptions.', perform: async function (agent) { - const commandList = actionsList.map(action => { - return `${action.name.padEnd(15)} - ${action.description}`; // Ensure consistent spacing - }).join('\n'); - - console.log(commandList); - return `Available Commands:\n${commandList}`; + return getCommandDocs(); } }, ]; From 6167aeeec49486c3911dc23b312ceeb075c2ea0a Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 9 Jan 2025 15:15:25 -0600 Subject: [PATCH 06/21] added restrict to inventory and better blacklist --- example_tasks.json | 8 ++++++++ src/agent/agent.js | 5 +++-- src/agent/commands/index.js | 17 +++++++++++++---- src/agent/library/skills.js | 10 +++++++++- src/agent/modes.js | 3 +++ src/agent/prompter.js | 2 +- src/agent/tasks.js | 1 + 7 files changed, 38 insertions(+), 8 deletions(-) diff --git a/example_tasks.json b/example_tasks.json index b579233..ef81f9b 100644 --- a/example_tasks.json +++ b/example_tasks.json @@ -17,6 +17,14 @@ }, "type": "debug" }, + "debug_inventory_restriction": { + "goal": "Place 1 oak plank, then place 1 stone brick", + "initial_inventory": { + "oak_planks": 20 + }, + "type": "debug", + "restrict_to_inventory": true + }, "construction": { "type": "construction", "goal": "Build a house", diff --git a/src/agent/agent.js b/src/agent/agent.js index 9a35cf0..4691079 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -3,7 +3,7 @@ import { Coder } from './coder.js'; import { Prompter } from './prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; -import { 
containsCommand, commandExists, executeCommand, truncCommandMessage, isAction } from './commands/index.js'; +import { containsCommand, commandExists, executeCommand, truncCommandMessage, isAction, blacklistCommands } from './commands/index.js'; import { ActionManager } from './action_manager.js'; import { NPCContoller } from './npc/controller.js'; import { MemoryBank } from './memory_bank.js'; @@ -47,7 +47,8 @@ export class Agent { await this.prompter.initExamples(); console.log('Initializing task...'); this.task = new Task(this, task_path, task_id); - this.blocked_actions = this.task.blocked_actions || []; + const blocked_actions = this.task.blocked_actions || []; + blacklistCommands(blocked_actions); serverProxy.connect(this); diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js index a8d09db..3f3f967 100644 --- a/src/agent/commands/index.js +++ b/src/agent/commands/index.js @@ -14,6 +14,18 @@ export function getCommand(name) { return commandMap[name]; } +export function blacklistCommands(commands) { + const unblockable = ['!stop', '!stats', '!goal', '!endGoal', '!endConversation']; + for (let command_name of commands) { + if (unblockable.includes(command_name)){ + console.warn(`Command ${command_name} is unblockable`); + continue; + } + delete commandMap[command_name]; + delete commandList.find(command => command.name === command_name); + } +} + const commandRegex = /!(\w+)(?:\(((?:-?\d+(?:\.\d+)?|true|false|"[^"]*")(?:\s*,\s*(?:-?\d+(?:\.\d+)?|true|false|"[^"]*"))*)\))?/ const argRegex = /-?\d+(?:\.\d+)?|true|false|"[^"]*"/g; @@ -214,7 +226,7 @@ export async function executeCommand(agent, message) { } } -export function getCommandDocs(blacklist=null) { +export function getCommandDocs() { const typeTranslations = { //This was added to keep the prompt the same as before type checks were implemented. //If the language model is giving invalid inputs changing this might help. 
@@ -228,9 +240,6 @@ export function getCommandDocs(blacklist=null) { Use the commands with the syntax: !commandName or !commandName("arg1", 1.2, ...) if the command takes arguments.\n Do not use codeblocks. Use double quotes for strings. Only use one command in each response, trailing commands and comments will be ignored.\n`; for (let command of commandList) { - if (blacklist && blacklist.includes(command.name)) { - continue; - } docs += command.name + ': ' + command.description + '\n'; if (command.params) { docs += 'Params:\n'; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index c0d448f..be5882f 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -558,6 +558,14 @@ export async function placeBlock(bot, blockType, x, y, z, placeOn='bottom', dont const target_dest = new Vec3(Math.floor(x), Math.floor(y), Math.floor(z)); if (bot.modes.isOn('cheat') && !dontCheat) { + if (bot.restrict_to_inventory) { + let block = bot.inventory.items().find(item => item.name === blockType); + if (!block) { + log(bot, `Cannot place ${blockType}, you are restricted to your current inventory.`); + return false; + } + } + // invert the facing direction let face = placeOn === 'north' ? 'south' : placeOn === 'south' ? 'north' : placeOn === 'east' ? 
'west' : 'east'; if (blockType.includes('torch') && placeOn !== 'bottom') { @@ -599,7 +607,7 @@ export async function placeBlock(bot, blockType, x, y, z, placeOn='bottom', dont if (item_name == "redstone_wire") item_name = "redstone"; let block = bot.inventory.items().find(item => item.name === item_name); - if (!block && bot.game.gameMode === 'creative') { + if (!block && bot.game.gameMode === 'creative' && !bot.restrict_to_inventory) { await bot.creative.setInventorySlot(36, mc.makeItem(item_name, 1)); // 36 is first hotbar slot block = bot.inventory.items().find(item => item.name === item_name); } diff --git a/src/agent/modes.js b/src/agent/modes.js index d9ec75f..8bf1594 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -404,6 +404,9 @@ export function initModes(agent) { _agent = agent; // the mode controller is added to the bot object so it is accessible from anywhere the bot is used agent.bot.modes = new ModeController(); + if (agent.task) { + agent.bot.restrict_to_inventory = agent.task.restrict_to_inventory; + } let modes_json = agent.prompter.getInitModes(); if (modes_json) { agent.bot.modes.loadJson(modes_json); diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 8feda0c..bc05860 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -186,7 +186,7 @@ export class Prompter { prompt = prompt.replaceAll('$ACTION', this.agent.actions.currentActionLabel); } if (prompt.includes('$COMMAND_DOCS')) - prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs(this.agent.blocked_actions)); + prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); if (prompt.includes('$CODE_DOCS')) prompt = prompt.replaceAll('$CODE_DOCS', getSkillDocs()); if (prompt.includes('$EXAMPLES') && examples !== null) diff --git a/src/agent/tasks.js b/src/agent/tasks.js index f7527f1..6d968a9 100644 --- a/src/agent/tasks.js +++ b/src/agent/tasks.js @@ -51,6 +51,7 @@ export class Task { this.taskStartTime = Date.now(); this.validator = new 
TaskValidator(this.data, this.agent); this.blocked_actions = this.data.blocked_actions || []; + this.restrict_to_inventory = !!this.data.restrict_to_inventory; if (this.data.goal) this.blocked_actions.push('!endGoal'); if (this.data.conversation) From 7eef80539519208b9b6fc3af57a89ce6439106aa Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 9 Jan 2025 15:16:35 -0600 Subject: [PATCH 07/21] added num_examples --- settings.js | 3 ++- src/agent/prompter.js | 5 +++-- src/utils/examples.js | 6 ++++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/settings.js b/settings.js index add5297..a4681fa 100644 --- a/settings.js +++ b/settings.js @@ -35,7 +35,8 @@ export default "code_timeout_mins": 10, // minutes code is allowed to run. -1 for no timeout "max_messages": 15, // max number of messages to keep in context - "max_commands": -1, // max number of commands to use in a response. -1 for no limit + "num_examples": 2, // number of examples to give to the model + "max_commands": -1, // max number of commands that can be used in consecutive responses. 
-1 for no limit "verbose_commands": true, // show full command syntax "narrate_behavior": true, // chat simple automatic actions ('Picking up item!') "chat_bot_messages": true, // publicly chat messages to other bots diff --git a/src/agent/prompter.js b/src/agent/prompter.js index bc05860..310ca3e 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -4,6 +4,7 @@ import { getCommandDocs } from './commands/index.js'; import { getSkillDocs } from './library/index.js'; import { stringifyTurns } from '../utils/text.js'; import { getCommand } from './commands/index.js'; +import settings from '../../settings.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; @@ -155,8 +156,8 @@ export class Prompter { async initExamples() { try { - this.convo_examples = new Examples(this.embedding_model); - this.coding_examples = new Examples(this.embedding_model); + this.convo_examples = new Examples(this.embedding_model, settings.num_examples); + this.coding_examples = new Examples(this.embedding_model, settings.num_examples); // Wait for both examples to load before proceeding await Promise.all([ diff --git a/src/utils/examples.js b/src/utils/examples.js index 31ef3ab..ca6de79 100644 --- a/src/utils/examples.js +++ b/src/utils/examples.js @@ -33,6 +33,9 @@ export class Examples { this.examples = examples; if (!this.model) return; // Early return if no embedding model + if (this.select_num === 0) + return; + try { // Create array of promises first const embeddingPromises = examples.map(example => { @@ -52,6 +55,9 @@ export class Examples { } async getRelevant(turns) { + if (this.select_num === 0) + return []; + let turn_text = this.turnsToText(turns); if (this.model !== null) { let embedding = await this.model.embed(turn_text); From eeaf33c4d290a4ffeab5d59e8a671a55bb6ba0f4 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 10 Jan 2025 12:41:00 -0600 Subject: [PATCH 08/21] changed unblockables --- src/agent/commands/index.js 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js index 3f3f967..008c1d0 100644 --- a/src/agent/commands/index.js +++ b/src/agent/commands/index.js @@ -15,7 +15,7 @@ export function getCommand(name) { } export function blacklistCommands(commands) { - const unblockable = ['!stop', '!stats', '!goal', '!endGoal', '!endConversation']; + const unblockable = ['!stop', '!stats', '!inventory', '!goal']; for (let command_name of commands) { if (unblockable.includes(command_name)){ console.warn(`Command ${command_name} is unblockable`); From 66a03bf893e4b3de25ab0108d9eb0a957e9b6e17 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 21 Jan 2025 13:41:48 -0600 Subject: [PATCH 09/21] embed max tokens, fix shutdown race condition --- src/models/gpt.js | 2 ++ src/server/mind_server.js | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/models/gpt.js b/src/models/gpt.js index da29ef1..dfd5e22 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -54,6 +54,8 @@ export class GPT { } async embed(text) { + if (text.length > 8191) + text = text.slice(0, 8191); const embedding = await this.openai.embeddings.create({ model: this.model_name || "text-embedding-3-small", input: text, diff --git a/src/server/mind_server.js b/src/server/mind_server.js index 5d99290..b94cccf 100644 --- a/src/server/mind_server.js +++ b/src/server/mind_server.js @@ -111,7 +111,9 @@ export function createMindServer(port = 8080) { for (let manager of Object.values(agentManagers)) { manager.emit('shutdown'); } - process.exit(0); + setTimeout(() => { + process.exit(0); + }, 2000); }); }); From c5b37dc2c7265c589e3ed2f21db36eeeb41c72b0 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 21 Jan 2025 14:05:50 -0600 Subject: [PATCH 10/21] use cheats for farming, default no code timeout --- settings.js | 2 +- src/agent/library/skills.js | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) 
diff --git a/settings.js b/settings.js index a4681fa..761e3cb 100644 --- a/settings.js +++ b/settings.js @@ -32,7 +32,7 @@ export default "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk - "code_timeout_mins": 10, // minutes code is allowed to run. -1 for no timeout + "code_timeout_mins": -1, // minutes code is allowed to run. -1 for no timeout "max_messages": 15, // max number of messages to keep in context "num_examples": 2, // number of examples to give to the model diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index be5882f..726ef18 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1269,12 +1269,17 @@ export async function tillAndSow(bot, x, y, z, seedType=null) { * let position = world.getPosition(bot); * await skills.till(bot, position.x, position.y - 1, position.x); **/ - console.log(x, y, z) x = Math.round(x); y = Math.round(y); z = Math.round(z); let block = bot.blockAt(new Vec3(x, y, z)); - console.log(x, y, z) + + if (bot.modes.isOn('cheat')) { + placeBlock(bot, x, y, z, 'farmland'); + placeBlock(bot, x, y+1, z, seedType); + return true; + } + if (block.name !== 'grass_block' && block.name !== 'dirt' && block.name !== 'farmland') { log(bot, `Cannot till ${block.name}, must be grass_block or dirt.`); return false; From fbde286931661f36c21861e2977a8f83beee8645 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 24 Jan 2025 11:31:26 -0600 Subject: [PATCH 11/21] better auto-prompt --- src/agent/self_prompter.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js index 2c2f63c..439b6c6 100644 --- a/src/agent/self_prompter.js +++ b/src/agent/self_prompter.js @@ -38,7 +38,7 @@ export class SelfPrompter { let no_command_count = 0; const MAX_NO_COMMAND = 3; while 
(!this.interrupt) { - const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command !withThisSyntax. Respond:`; + const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command with this syntax: !commandName. Respond:`; let used_command = await this.agent.handleMessage('system', msg, -1); if (!used_command) { From 42f805cd160994af1734876f29e38601e707ffbc Mon Sep 17 00:00:00 2001 From: Pixel Date: Sat, 25 Jan 2025 10:53:22 -0600 Subject: [PATCH 12/21] Update README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d165045..369cca2 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ # Mindcraft 🧠⛏️ -Crafting minds for Minecraft with LLMs and Mineflayer! +Crafting minds for Minecraft with LLMs and [Mineflayer!](https://prismarinejs.github.io/mineflayer/#/) [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) | [Discord Support](https://discord.gg/mp73p35dzC) | [Blog Post](https://kolbynottingham.com/mindcraft/) | [Contributor TODO](https://github.com/users/kolbytn/projects/1) -#### ‼️Warning‼️ - +> [!WARNING] Do not connect this bot to public servers with coding enabled. This project allows an LLM to write/execute code on your computer. While the code is sandboxed, it is still vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. We strongly recommend running with additional layers of security such as docker containers. Ye be warned. ## Requirements @@ -63,7 +62,8 @@ To connect to online servers your bot will need an official Microsoft/Minecraft // rest is same... ``` -‼️ The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. 
+> [!WARNING] +> The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. From 1e6ee45f0135a5194cefc11be39533ff3cb99b5a Mon Sep 17 00:00:00 2001 From: Pixel Date: Sat, 25 Jan 2025 10:55:39 -0600 Subject: [PATCH 13/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 369cca2..bf72fb6 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ To connect to online servers your bot will need an official Microsoft/Minecraft // rest is same... ``` -> [!WARNING] +> [!CAUTION] > The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. From 063d42176447c0274fcaa9b1fe98091be3e2f137 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 25 Jan 2025 12:26:22 -0600 Subject: [PATCH 14/21] added second model for coding --- src/agent/prompter.js | 132 +++++++++++++++++++++++------------------- 1 file changed, 74 insertions(+), 58 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 310ca3e..5b09719 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -34,7 +34,6 @@ export class Prompter { this.coding_examples = null; let name = this.profile.name; - let chat = this.profile.model; this.cooldown = this.profile.cooldown ? 
this.profile.cooldown : 0; this.last_prompt_time = 0; this.awaiting_coding = false; @@ -43,68 +42,22 @@ export class Prompter { let max_tokens = null; if (this.profile.max_tokens) max_tokens = this.profile.max_tokens; - if (typeof chat === 'string' || chat instanceof String) { - chat = {model: chat}; - if (chat.model.includes('gemini')) - chat.api = 'google'; - else if (chat.model.includes('gpt') || chat.model.includes('o1')) - chat.api = 'openai'; - else if (chat.model.includes('claude')) - chat.api = 'anthropic'; - else if (chat.model.includes('huggingface/')) - chat.api = "huggingface"; - else if (chat.model.includes('meta/') || chat.model.includes('replicate/')) - chat.api = 'replicate'; - else if (chat.model.includes('mistralai/') || chat.model.includes("mistral/")) - chat.api = 'mistral'; - else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/")) - chat.api = 'groq'; - else if (chat.model.includes('novita/')) - chat.api = 'novita'; - else if (chat.model.includes('qwen')) - chat.api = 'qwen'; - else if (chat.model.includes('grok')) - chat.api = 'xai'; - else if (chat.model.includes('deepseek')) - chat.api = 'deepseek'; - else - chat.api = 'ollama'; - } - console.log('Using chat settings:', chat); + let chat_model_profile = this._selectAPI(this.profile.model); + this.chat_model = this._createModel(chat_model_profile); - if (chat.api === 'google') - this.chat_model = new Gemini(chat.model, chat.url); - else if (chat.api === 'openai') - this.chat_model = new GPT(chat.model, chat.url); - else if (chat.api === 'anthropic') - this.chat_model = new Claude(chat.model, chat.url); - else if (chat.api === 'replicate') - this.chat_model = new ReplicateAPI(chat.model, chat.url); - else if (chat.api === 'ollama') - this.chat_model = new Local(chat.model, chat.url); - else if (chat.api === 'mistral') - this.chat_model = new Mistral(chat.model, chat.url); - else if (chat.api === 'groq') { - this.chat_model = new GroqCloudAPI(chat.model.replace('groq/', 
'').replace('groqcloud/', ''), chat.url, max_tokens ? max_tokens : 8192); + if (this.profile.code_model) { + let code_model_profile = this._selectAPI(this.profile.code_model); + this.code_model = this._createModel(code_model_profile); + } + else { + this.code_model = this.chat_model; } - else if (chat.api === 'huggingface') - this.chat_model = new HuggingFace(chat.model, chat.url); - else if (chat.api === 'novita') - this.chat_model = new Novita(chat.model.replace('novita/', ''), chat.url); - else if (chat.api === 'qwen') - this.chat_model = new Qwen(chat.model, chat.url); - else if (chat.api === 'xai') - this.chat_model = new Grok(chat.model, chat.url); - else if (chat.api === 'deepseek') - this.chat_model = new DeepSeek(chat.model, chat.url); - else - throw new Error('Unknown API:', api); let embedding = this.profile.embedding; if (embedding === undefined) { - if (chat.api !== 'ollama') - embedding = {api: chat.api}; + if (chat_model_profile.api !== 'ollama') + embedding = {api: chat_model_profile.api}; else embedding = {api: 'none'}; } @@ -146,6 +99,69 @@ export class Prompter { }); } + _selectAPI(profile) { + if (typeof profile === 'string' || profile instanceof String) { + profile = {model: profile}; + if (profile.model.includes('gemini')) + profile.api = 'google'; + else if (profile.model.includes('gpt') || profile.model.includes('o1')) + profile.api = 'openai'; + else if (profile.model.includes('claude')) + profile.api = 'anthropic'; + else if (profile.model.includes('huggingface/')) + profile.api = "huggingface"; + else if (profile.model.includes('meta/') || profile.model.includes('replicate/')) + profile.api = 'replicate'; + else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) + model_profile.api = 'mistral'; + else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) + profile.api = 'groq'; + else if (profile.model.includes('novita/')) + profile.api = 'novita'; + else if 
(profile.model.includes('qwen')) + profile.api = 'qwen'; + else if (profile.model.includes('grok')) + profile.api = 'xai'; + else if (profile.model.includes('deepseek')) + profile.api = 'deepseek'; + else + profile.api = 'ollama'; + } + return profile; + } + + _createModel(profile) { + let model = null; + if (profile.api === 'google') + model = new Gemini(profile.model, profile.url); + else if (profile.api === 'openai') + model = new GPT(profile.model, profile.url); + else if (profile.api === 'anthropic') + model = new Claude(profile.model, profile.url); + else if (profile.api === 'replicate') + model = new ReplicateAPI(profile.model, profile.url); + else if (profile.api === 'ollama') + model = new Local(profile.model, profile.url); + else if (profile.api === 'mistral') + model = new Mistral(profile.model, profile.url); + else if (profile.api === 'groq') { + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, max_tokens ? max_tokens : 8192); + } + else if (profile.api === 'huggingface') + model = new HuggingFace(profile.model, profile.url); + else if (profile.api === 'novita') + model = new Novita(profile.model.replace('novita/', ''), profile.url); + else if (profile.api === 'qwen') + model = new Qwen(profile.model, profile.url); + else if (profile.api === 'xai') + model = new Grok(profile.model, profile.url); + else if (profile.api === 'deepseek') + model = new DeepSeek(profile.model, profile.url); + else + throw new Error('Unknown API:', api); + return model; + } + getName() { return this.profile.name; } @@ -273,7 +289,7 @@ export class Prompter { await this.checkCooldown(); let prompt = this.profile.coding; prompt = await this.replaceStrings(prompt, messages, this.coding_examples); - let resp = await this.chat_model.sendRequest(messages, prompt); + let resp = await this.code_model.sendRequest(messages, prompt); this.awaiting_coding = false; return resp; } From e1a9ed811b6690a60d6f14908e0ba1609c397e99 Mon Sep 17 
00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 25 Jan 2025 12:26:49 -0600 Subject: [PATCH 15/21] small fix to block placing/farming --- src/agent/commands/index.js | 2 +- src/agent/library/skills.js | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js index 008c1d0..f40c5c2 100644 --- a/src/agent/commands/index.js +++ b/src/agent/commands/index.js @@ -160,7 +160,7 @@ export function parseCommandMessage(message) { suppressNoDomainWarning = true; //Don't spam console. Only give the warning once. } } else if(param.type === 'BlockName') { //Check that there is a block with this name - if(getBlockId(arg) == null) return `Invalid block type: ${arg}.` + if(getBlockId(arg) == null && arg !== 'air') return `Invalid block type: ${arg}.` } else if(param.type === 'ItemName') { //Check that there is an item with this name if(getItemId(arg) == null) return `Invalid item type: ${arg}.` } diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 726ef18..78f1ad3 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1275,8 +1275,14 @@ export async function tillAndSow(bot, x, y, z, seedType=null) { let block = bot.blockAt(new Vec3(x, y, z)); if (bot.modes.isOn('cheat')) { - placeBlock(bot, x, y, z, 'farmland'); - placeBlock(bot, x, y+1, z, seedType); + let to_remove = ['_seed', '_seeds']; + for (let remove of to_remove) { + if (seedType.endsWith(remove)) { + seedType = seedType.replace(remove, ''); + } + } + placeBlock(bot, 'farmland', x, y, z); + placeBlock(bot, seedType, x, y+1, z); return true; } From 9b387649a17b8a75c5f06e121053b1308180dd79 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Feb 2025 18:35:58 -0600 Subject: [PATCH 16/21] enable o3, improve novita --- src/agent/prompter.js | 2 +- src/models/gpt.js | 2 +- src/models/novita.js | 17 +++++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git 
a/src/agent/prompter.js b/src/agent/prompter.js index 5b09719..b88001f 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -104,7 +104,7 @@ export class Prompter { profile = {model: profile}; if (profile.model.includes('gemini')) profile.api = 'google'; - else if (profile.model.includes('gpt') || profile.model.includes('o1')) + else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) profile.api = 'openai'; else if (profile.model.includes('claude')) profile.api = 'anthropic'; diff --git a/src/models/gpt.js b/src/models/gpt.js index dfd5e22..49be3a6 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -33,7 +33,7 @@ export class GPT { let res = null; try { - console.log('Awaiting openai api response...') + console.log('Awaiting openai api response from model', this.model_name) // console.log('Messages:', messages); let completion = await this.openai.chat.completions.create(pack); if (completion.choices[0].finish_reason == 'length') diff --git a/src/models/novita.js b/src/models/novita.js index d84aee7..33c05cc 100644 --- a/src/models/novita.js +++ b/src/models/novita.js @@ -1,5 +1,6 @@ import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; // llama, mistral export class Novita { @@ -17,6 +18,10 @@ export class Novita { async sendRequest(turns, systemMessage, stop_seq='***') { let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + + messages = strictFormat(messages); + const pack = { model: this.model_name || "meta-llama/llama-3.1-70b-instruct", messages, @@ -41,6 +46,18 @@ export class Novita { res = 'My brain disconnected, try again.'; } } + if (res.includes('<think>')) { + let start = res.indexOf('<think>'); + let end = res.indexOf('</think>') + 8; + if (start != -1) { + if (end != -1) { + res = res.substring(0, start) + res.substring(end); + } else { + res = res.substring(0, start+7); + } + } + res = res.trim(); + } return
res; } From 23c54279ded2aaca6a33c2c83df788cc805470fe Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Feb 2025 18:42:47 -0600 Subject: [PATCH 17/21] add code_model to readme --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d165045..1eaa1bd 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,7 @@ LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, } ``` -The model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. +The model or code_model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. 
@@ -137,6 +137,7 @@ Thus, all the below specifications are equivalent to the above example: ```json "model": "gpt-3.5-turbo", "embedding": "openai" +"code_model": "gpt-3.5-turbo" ``` ## Patches From 60187e23171a58c8c991ddc5d48af24f2f415691 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 13:02:57 -0600 Subject: [PATCH 18/21] added model parameters obj to profile --- README.md | 2 +- profiles/gpt.json | 8 +++- src/agent/agent.js | 10 ++--- src/models/claude.js | 10 +++-- src/models/deepseek.js | 4 +- src/models/gemini.js | 29 ++++++++++++-- src/models/gpt.js | 5 ++- src/models/grok.js | 7 +++- src/models/groq.js | 14 ++++--- src/models/huggingface.js | 6 ++- src/models/local.js | 10 ++++- src/models/mistral.js | 8 ++-- src/models/novita.js | 5 ++- src/{agent => models}/prompter.js | 63 ++++++++++++++++--------------- src/models/qwen.js | 11 ++++-- src/models/replicate.js | 9 ++++- src/process/init_agent.js | 7 +--- 17 files changed, 134 insertions(+), 74 deletions(-) rename src/{agent => models}/prompter.js (88%) diff --git a/README.md b/README.md index 1eaa1bd..d18fe65 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | | Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | -| Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | | Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | 
[docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | diff --git a/profiles/gpt.json b/profiles/gpt.json index 32d99c1..a5effe1 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -1,5 +1,11 @@ { "name": "gpt", - "model": "gpt-4o" + "model": { + "model": "gpt-4o-mini", + "params": { + "temperature": 1, + "not_real": true + } + } } \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 4691079..8e211ef 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -1,6 +1,6 @@ import { History } from './history.js'; import { Coder } from './coder.js'; -import { Prompter } from './prompter.js'; +import { Prompter } from '../models/prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage, isAction, blacklistCommands } from './commands/index.js'; @@ -100,11 +100,9 @@ export class Agent { }); } catch (error) { // Ensure we're not losing error details - console.error('Agent start failed with error:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Agent start failed with error') + console.error(error) + throw error; // Re-throw with preserved details } } diff --git a/src/models/claude.js b/src/models/claude.js index c97ecb2..f8d2a90 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -3,8 +3,9 @@ import { strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Claude { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; if (url) @@ -20,13 +21,16 @@ export class Claude { let res = null; try { console.log('Awaiting anthropic api response...') - // console.log('Messages:', 
messages); + if (!this.params.max_tokens) { + this.params.max_tokens = 4096; + } const resp = await this.anthropic.messages.create({ model: this.model_name || "claude-3-sonnet-20240229", system: systemMessage, - max_tokens: 2048, messages: messages, + ...(this.params || {}) }); + console.log('Received.') res = resp.content[0].text; } diff --git a/src/models/deepseek.js b/src/models/deepseek.js index 395aa8c..da98ba2 100644 --- a/src/models/deepseek.js +++ b/src/models/deepseek.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class DeepSeek { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; @@ -23,6 +24,7 @@ export class DeepSeek { model: this.model_name || "deepseek-chat", messages, stop: stop_seq, + ...(this.params || {}) }; let res = null; diff --git a/src/models/gemini.js b/src/models/gemini.js index 1536d66..de71a66 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -3,8 +3,9 @@ import { toSinglePrompt } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Gemini { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url; this.safetySettings = [ { @@ -34,15 +35,20 @@ export class Gemini { async sendRequest(turns, systemMessage) { let model; + const modelConfig = { + model: this.model_name || "gemini-1.5-flash", + ...(this.params || {}) + }; + if (this.url) { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { baseUrl: this.url }, { safetySettings: this.safetySettings } ); } else { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { safetySettings: this.safetySettings } ); } @@ -50,12 +56,27 @@ export class Gemini { const stop_seq = '***'; const prompt = 
toSinglePrompt(turns, systemMessage, stop_seq, 'model'); console.log('Awaiting Google API response...'); - const result = await model.generateContent(prompt); + const result = await model.generateContent({ + contents: [ + { + role: 'user', + parts: [ + { + text: "Explain how AI works", + } + ], + } + ], + generateConfig: { + ...(this.params || {}) + } + }); const response = await result.response; const text = response.text(); console.log('Received.'); if (!text.includes(stop_seq)) return text; const idx = text.indexOf(stop_seq); + return text.slice(0, idx); } diff --git a/src/models/gpt.js b/src/models/gpt.js index 49be3a6..1a88bf4 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class GPT { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; if (url) @@ -25,6 +26,7 @@ export class GPT { model: this.model_name || "gpt-3.5-turbo", messages, stop: stop_seq, + ...(this.params || {}) }; if (this.model_name.includes('o1')) { pack.messages = strictFormat(messages); @@ -32,6 +34,7 @@ export class GPT { } let res = null; + try { console.log('Awaiting openai api response from model', this.model_name) // console.log('Messages:', messages); diff --git a/src/models/grok.js b/src/models/grok.js index 19a3b38..a8c6672 100644 --- a/src/models/grok.js +++ b/src/models/grok.js @@ -3,8 +3,10 @@ import { getKey } from '../utils/keys.js'; // xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs export class Grok { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.url = url; + this.params = params; let config = {}; if (url) @@ -23,7 +25,8 @@ export class Grok { const pack = { model: this.model_name || "grok-beta", messages, - stop: [stop_seq] + stop: [stop_seq], + 
...(this.params || {}) }; let res = null; diff --git a/src/models/groq.js b/src/models/groq.js index e17f13d..6911534 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -4,12 +4,13 @@ import { getKey } from '../utils/keys.js'; // Umbrella class for Mixtral, LLama, Gemma... export class GroqCloudAPI { - constructor(model_name, url, max_tokens=16384) { + constructor(model_name, url, params) { this.model_name = model_name; this.url = url; - this.max_tokens = max_tokens; + this.params = params; // ReplicateAPI theft :3 if (this.url) { + console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); } this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); @@ -20,14 +21,15 @@ export class GroqCloudAPI { let res = null; try { console.log("Awaiting Groq response..."); + if (!this.params.max_tokens) { + this.params.max_tokens = 16384; + } let completion = await this.groq.chat.completions.create({ "messages": messages, "model": this.model_name || "mixtral-8x7b-32768", - "temperature": 0.2, - "max_tokens": this.max_tokens, // maximum token limit, differs from model to model - "top_p": 1, "stream": true, - "stop": stop_seq // "***" + "stop": stop_seq, + ...(this.params || {}) }); let temp_res = ""; diff --git a/src/models/huggingface.js b/src/models/huggingface.js index 56f9d55..dd5c89d 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -3,9 +3,10 @@ import {getKey} from '../utils/keys.js'; import {HfInference} from "@huggingface/inference"; export class HuggingFace { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name.replace('huggingface/',''); this.url = url; + this.params = params; if (this.url) { console.warn("Hugging Face doesn't support custom urls!"); @@ -25,7 +26,8 @@ export class HuggingFace { console.log('Awaiting Hugging Face API response...'); for await (const chunk of this.huggingface.chatCompletionStream({ model: model_name, - messages: [{ role: 
"user", content: input }] + messages: [{ role: "user", content: input }], + ...(this.params || {}) })) { res += (chunk.choices[0]?.delta?.content || ""); } diff --git a/src/models/local.js b/src/models/local.js index 18d06e0..23d7e0e 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,8 +1,9 @@ import { strictFormat } from '../utils/text.js'; export class Local { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url || 'http://127.0.0.1:11434'; this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; @@ -15,7 +16,12 @@ export class Local { let res = null; try { console.log(`Awaiting local response... (model: ${model})`) - res = await this.send(this.chat_endpoint, {model: model, messages: messages, stream: false}); + res = await this.send(this.chat_endpoint, { + model: model, + messages: messages, + stream: false, + ...(this.params || {}) + }); if (res) res = res['message']['content']; } diff --git a/src/models/mistral.js b/src/models/mistral.js index 3b41f78..b33d1de 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -5,10 +5,13 @@ import { strictFormat } from '../utils/text.js'; export class Mistral { #client; - constructor(model_name, url) { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; if (typeof url === "string") { console.warn("Mistral does not support custom URL's, ignoring!"); + } if (!getKey("MISTRAL_API_KEY")) { @@ -22,8 +25,6 @@ export class Mistral { ); - this.model_name = model_name; - // Prevents the following code from running when model not specified if (typeof this.model_name === "undefined") return; @@ -49,6 +50,7 @@ export class Mistral { const response = await this.#client.chat.complete({ model, messages, + ...(this.params || {}) }); result = response.choices[0].message.content; diff --git a/src/models/novita.js b/src/models/novita.js index 33c05cc..8f2dd08 
100644 --- a/src/models/novita.js +++ b/src/models/novita.js @@ -4,9 +4,11 @@ import { strictFormat } from '../utils/text.js'; // llama, mistral export class Novita { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name.replace('novita/', ''); this.url = url || 'https://api.novita.ai/v3/openai'; + this.params = params; + let config = { baseURL: this.url @@ -26,6 +28,7 @@ export class Novita { model: this.model_name || "meta-llama/llama-3.1-70b-instruct", messages, stop: [stop_seq], + ...(this.params || {}) }; let res = null; diff --git a/src/agent/prompter.js b/src/models/prompter.js similarity index 88% rename from src/agent/prompter.js rename to src/models/prompter.js index b88001f..91fcf3f 100644 --- a/src/agent/prompter.js +++ b/src/models/prompter.js @@ -1,23 +1,23 @@ import { readFileSync, mkdirSync, writeFileSync} from 'fs'; import { Examples } from '../utils/examples.js'; -import { getCommandDocs } from './commands/index.js'; -import { getSkillDocs } from './library/index.js'; +import { getCommandDocs } from '../agent/commands/index.js'; +import { getSkillDocs } from '../agent/library/index.js'; import { stringifyTurns } from '../utils/text.js'; -import { getCommand } from './commands/index.js'; +import { getCommand } from '../agent/commands/index.js'; import settings from '../../settings.js'; -import { Gemini } from '../models/gemini.js'; -import { GPT } from '../models/gpt.js'; -import { Claude } from '../models/claude.js'; -import { Mistral } from '../models/mistral.js'; -import { ReplicateAPI } from '../models/replicate.js'; -import { Local } from '../models/local.js'; -import { Novita } from '../models/novita.js'; -import { GroqCloudAPI } from '../models/groq.js'; -import { HuggingFace } from '../models/huggingface.js'; -import { Qwen } from "../models/qwen.js"; -import { Grok } from "../models/grok.js"; -import { DeepSeek } from '../models/deepseek.js'; +import { Gemini } from './gemini.js'; +import { 
GPT } from './gpt.js'; +import { Claude } from './claude.js'; +import { Mistral } from './mistral.js'; +import { ReplicateAPI } from './replicate.js'; +import { Local } from './local.js'; +import { Novita } from './novita.js'; +import { GroqCloudAPI } from './groq.js'; +import { HuggingFace } from './huggingface.js'; +import { Qwen } from "./qwen.js"; +import { Grok } from "./grok.js"; +import { DeepSeek } from './deepseek.js'; export class Prompter { constructor(agent, fp) { @@ -102,6 +102,8 @@ export class Prompter { _selectAPI(profile) { if (typeof profile === 'string' || profile instanceof String) { profile = {model: profile}; + } + if (!profile.api) { if (profile.model.includes('gemini')) profile.api = 'google'; else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) @@ -110,7 +112,7 @@ export class Prompter { profile.api = 'anthropic'; else if (profile.model.includes('huggingface/')) profile.api = "huggingface"; - else if (profile.model.includes('meta/') || profile.model.includes('replicate/')) + else if (profile.model.includes('replicate/')) profile.api = 'replicate'; else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) model_profile.api = 'mistral'; @@ -133,32 +135,31 @@ export class Prompter { _createModel(profile) { let model = null; if (profile.api === 'google') - model = new Gemini(profile.model, profile.url); + model = new Gemini(profile.model, profile.url, profile.params); else if (profile.api === 'openai') - model = new GPT(profile.model, profile.url); + model = new GPT(profile.model, profile.url, profile.params); else if (profile.api === 'anthropic') - model = new Claude(profile.model, profile.url); + model = new Claude(profile.model, profile.url, profile.params); else if (profile.api === 'replicate') - model = new ReplicateAPI(profile.model, profile.url); + model = new ReplicateAPI(profile.model, profile.url, profile.params); else if (profile.api === 'ollama') - model = 
new Local(profile.model, profile.url); + model = new Local(profile.model, profile.url, profile.params); else if (profile.api === 'mistral') - model = new Mistral(profile.model, profile.url); - else if (profile.api === 'groq') { - model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, max_tokens ? max_tokens : 8192); - } + model = new Mistral(profile.model, profile.url, profile.params); + else if (profile.api === 'groq') + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); else if (profile.api === 'huggingface') - model = new HuggingFace(profile.model, profile.url); + model = new HuggingFace(profile.model, profile.url, profile.params); else if (profile.api === 'novita') - model = new Novita(profile.model.replace('novita/', ''), profile.url); + model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); else if (profile.api === 'qwen') - model = new Qwen(profile.model, profile.url); + model = new Qwen(profile.model, profile.url, profile.params); else if (profile.api === 'xai') - model = new Grok(profile.model, profile.url); + model = new Grok(profile.model, profile.url, profile.params); else if (profile.api === 'deepseek') - model = new DeepSeek(profile.model, profile.url); + model = new DeepSeek(profile.model, profile.url, profile.params); else - throw new Error('Unknown API:', api); + throw new Error('Unknown API:', profile.api); return model; } diff --git a/src/models/qwen.js b/src/models/qwen.js index d3d7bec..5f3eafa 100644 --- a/src/models/qwen.js +++ b/src/models/qwen.js @@ -4,8 +4,9 @@ import { getKey } from '../utils/keys.js'; export class Qwen { - constructor(modelName, url) { - this.modelName = modelName; + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'; this.apiKey = 
getKey('QWEN_API_KEY'); } @@ -19,7 +20,11 @@ export class Qwen { const data = { model: this.modelName || 'qwen-plus', input: { messages: [{ role: 'system', content: systemMessage }, ...turns] }, - parameters: { result_format: 'message', stop: stopSeq }, + parameters: { + result_format: 'message', + stop: stopSeq, + ...(this.params || {}) + }, }; // Add default user message if all messages are 'system' role diff --git a/src/models/replicate.js b/src/models/replicate.js index e0c7d6c..c8c3ba3 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -4,9 +4,10 @@ import { getKey } from '../utils/keys.js'; // llama, mistral export class ReplicateAPI { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; this.url = url; + this.params = params; if (this.url) { console.warn('Replicate API does not support custom URLs. Ignoring provided URL.'); @@ -22,7 +23,11 @@ export class ReplicateAPI { const prompt = toSinglePrompt(turns, null, stop_seq); let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; - const input = { prompt, system_prompt: systemMessage }; + const input = { + prompt, + system_prompt: systemMessage, + ...(this.params || {}) + }; let res = null; try { console.log('Awaiting Replicate API response...'); diff --git a/src/process/init_agent.js b/src/process/init_agent.js index 88c99b9..15b08e0 100644 --- a/src/process/init_agent.js +++ b/src/process/init_agent.js @@ -57,11 +57,8 @@ const argv = yargs(args) const agent = new Agent(); await agent.start(argv.profile, argv.load_memory, argv.init_message, argv.count_id, argv.task_path, argv.task_id); } catch (error) { - console.error('Failed to start agent process:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Failed to start agent process:'); + console.error(error); process.exit(1); } })(); From be780cba27bb77bf44f5dbdd875778d1007fa7d3 Mon Sep 17 00:00:00 
2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 13:28:32 -0600 Subject: [PATCH 19/21] remove obsolete collectblock patche --- patches/mineflayer-collectblock+1.4.1.patch | 35 --------------------- 1 file changed, 35 deletions(-) delete mode 100644 patches/mineflayer-collectblock+1.4.1.patch diff --git a/patches/mineflayer-collectblock+1.4.1.patch b/patches/mineflayer-collectblock+1.4.1.patch deleted file mode 100644 index 1df504b..0000000 --- a/patches/mineflayer-collectblock+1.4.1.patch +++ /dev/null @@ -1,35 +0,0 @@ -diff --git a/node_modules/mineflayer-collectblock/lib/CollectBlock.js b/node_modules/mineflayer-collectblock/lib/CollectBlock.js -index 2c11e8c..bb49c11 100644 ---- a/node_modules/mineflayer-collectblock/lib/CollectBlock.js -+++ b/node_modules/mineflayer-collectblock/lib/CollectBlock.js -@@ -77,10 +77,11 @@ function mineBlock(bot, block, options) { - } - yield bot.tool.equipForBlock(block, equipToolOptions); - // @ts-expect-error -- if (!block.canHarvest(bot.heldItem)) { -+ if (bot.heldItem !== null && !block.canHarvest(bot.heldItem.type)) { - options.targets.removeTarget(block); - return; - } -+ - const tempEvents = new TemporarySubscriber_1.TemporarySubscriber(bot); - tempEvents.subscribeTo('itemDrop', (entity) => { - if (entity.position.distanceTo(block.position.offset(0.5, 0.5, 0.5)) <= 0.5) { -@@ -92,7 +93,7 @@ function mineBlock(bot, block, options) { - // Waiting for items to drop - yield new Promise(resolve => { - let remainingTicks = 10; -- tempEvents.subscribeTo('physicTick', () => { -+ tempEvents.subscribeTo('physicsTick', () => { - remainingTicks--; - if (remainingTicks <= 0) { - tempEvents.cleanup(); -@@ -195,6 +196,8 @@ class CollectBlock { - throw (0, Util_1.error)('UnresolvedDependency', 'The mineflayer-collectblock plugin relies on the mineflayer-tool plugin to run!'); - } - if (this.movements != null) { -+ this.movements.dontMineUnderFallingBlock = false; -+ this.movements.dontCreateFlow = false; - 
this.bot.pathfinder.setMovements(this.movements); - } - if (!optionsFull.append) From 0c3ba9a3830673e9ebe3c91ed262ab2421189dfb Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 14:41:57 -0600 Subject: [PATCH 20/21] updated gemini, cleaned gpt profile --- profiles/gpt.json | 5 ++--- src/models/gemini.js | 32 +++++++++++++++----------------- src/utils/text.js | 6 ++++-- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/profiles/gpt.json b/profiles/gpt.json index a5effe1..ad7097e 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -2,10 +2,9 @@ "name": "gpt", "model": { - "model": "gpt-4o-mini", + "model": "gpt-4o", "params": { - "temperature": 1, - "not_real": true + "temperature": 0.5 } } } \ No newline at end of file diff --git a/src/models/gemini.js b/src/models/gemini.js index de71a66..bcc1895 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,5 +1,5 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; -import { toSinglePrompt } from '../utils/text.js'; +import { toSinglePrompt, strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Gemini { @@ -37,7 +37,7 @@ export class Gemini { let model; const modelConfig = { model: this.model_name || "gemini-1.5-flash", - ...(this.params || {}) + // systemInstruction does not work bc google is trash }; if (this.url) { @@ -53,29 +53,27 @@ export class Gemini { ); } - const stop_seq = '***'; - const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); console.log('Awaiting Google API response...'); + + turns.unshift({ role: 'system', content: systemMessage }); + turns = strictFormat(turns); + let contents = []; + for (let turn of turns) { + contents.push({ + role: turn.role === 'assistant' ? 
'model' : 'user', + parts: [{ text: turn.content }] + }); + } + const result = await model.generateContent({ - contents: [ - { - role: 'user', - parts: [ - { - text: "Explain how AI works", - } - ], - } - ], - generateConfig: { + contents, + generationConfig: { ...(this.params || {}) } }); const response = await result.response; const text = response.text(); console.log('Received.'); - if (!text.includes(stop_seq)) return text; - const idx = text.indexOf(stop_seq); return text.slice(0, idx); } diff --git a/src/utils/text.js b/src/utils/text.js index 1e93667..f500199 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -26,8 +26,10 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam return prompt; } -// ensures stricter turn order for anthropic/llama models -// combines repeated messages from the same role, separates repeat assistant messages with filler user messages +// ensures stricter turn order and roles: +// - system messages are treated as user messages and prefixed with SYSTEM: +// - combines repeated messages from users +// - separates repeat assistant messages with filler user messages export function strictFormat(turns) { let prev_role = null; let messages = []; From 402e09f03795036b8b49fbdde10c90f479f64b35 Mon Sep 17 00:00:00 2001 From: Max Robinson Date: Tue, 4 Feb 2025 15:00:55 -0600 Subject: [PATCH 21/21] better readme --- README.md | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index d18fe65..e735412 100644 --- a/README.md +++ b/README.md @@ -105,40 +105,37 @@ node main.js --profiles ./profiles/andy.json ./profiles/jill.json ### Model Specifications -LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, for both the chat model and the embedding model, the bot profile can specify the below attributes: +LLM models can be specified as simply as `"model": "gpt-4o"`. 
However, you can specify different models for chat, coding, and embeddings. +You can pass a string or an object for these fields. A model object must specify an `api`, and optionally a `model`, `url`, and additional `params`. ```json "model": { "api": "openai", + "model": "gpt-4o", "url": "https://api.openai.com/v1/", - "model": "gpt-3.5-turbo" + "params": { + "max_tokens": 1000, + "temperature": 1 + } +}, +"code_model": { + "api": "openai", + "model": "gpt-4", + "url": "https://api.openai.com/v1/" }, "embedding": { "api": "openai", "url": "https://api.openai.com/v1/", "model": "text-embedding-ada-002" } + ``` -The model or code_model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. +`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. -If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. +All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. 
-Thus, all the below specifications are equivalent to the above example: - -```json -"model": "gpt-3.5-turbo" -``` -```json -"model": { - "api": "openai" -} -``` -```json -"model": "gpt-3.5-turbo", -"embedding": "openai" -"code_model": "gpt-3.5-turbo" -``` +The `params` field is optional and can be used to specify additional parameters for the model. It accepts any key-value pairs supported by the api. Is not supported for embedding models. ## Patches