From 17c0ce3c251583db638ffdf1f2b183648fb57c2d Mon Sep 17 00:00:00 2001
From: Radnos
Date: Fri, 5 Apr 2024 23:41:28 +0200
Subject: [PATCH 01/78] added ollama

---
 README.md             | 9 ++++++++-
 src/agent/prompter.js | 3 +++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 17c5c02..9670932 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ This project allows an AI model to write/execute code on your computer that may
 
 ## Requirements
 
-- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), or [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude)
+- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or [Ollama](https://ollama.com/download)
 - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc)
 - [Node.js](https://nodejs.org/) (at least v14)
 
@@ -19,6 +19,10 @@ Add one of these environment variables:
 - `GEMINI_API_KEY`
 - `ANTHROPIC_API_KEY` (and optionally `OPENAI_API_KEY` for embeddings. not necessary, but without embeddings performance will suffer)
 
+ You can also use Ollama instead.
+ To install the models used by default (generation and embedding), execute the following script:
+ `ollama pull mistral && ollama pull nomic-embed-text`
+
 Clone/Download this repository
 
 Run `npm install`
@@ -33,6 +37,7 @@ Run `node main.js`
 
 You can configure the agent's name, model, and prompts in their profile like `andy.json`.
 
+You can configure ollama in `ollama-config.json`.
 You can configure project details in `settings.json`.
 Here is an example settings for connecting to a non-local server:
 ```
@@ -45,6 +50,8 @@
 }
 ```
 
+
+
 ## Patches
 Some of the node modules that we depend on have bugs in them. 
To add a patch, change your local node module file and run `npx patch-package [package-name]` diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 0794247..797ac56 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -8,6 +8,7 @@ import { getCommand } from './commands/index.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; import { Claude } from '../models/claude.js'; +import { Ollama } from '../models/ollama.js'; export class Prompter { @@ -30,6 +31,8 @@ export class Prompter { this.model = new GPT(model_name); else if (model_name.includes('claude')) this.model = new Claude(model_name); + else if (model_name.includes('ollama')) + this.model = new Ollama(model_name); else throw new Error('Unknown model ' + model_name); } From 7c9de487e0f35bcae3e477039ac2209c9512334f Mon Sep 17 00:00:00 2001 From: Radnos Date: Fri, 5 Apr 2024 23:44:22 +0200 Subject: [PATCH 02/78] improved error msg --- ollama-config.json | 4 ++ src/models/ollama.js | 107 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 111 insertions(+) create mode 100644 ollama-config.json create mode 100644 src/models/ollama.js diff --git a/ollama-config.json b/ollama-config.json new file mode 100644 index 0000000..44d682a --- /dev/null +++ b/ollama-config.json @@ -0,0 +1,4 @@ +{ + "url": "http://localhost:11434", + "embedding_model": "nomic-embed-text" +} \ No newline at end of file diff --git a/src/models/ollama.js b/src/models/ollama.js new file mode 100644 index 0000000..b41bf5a --- /dev/null +++ b/src/models/ollama.js @@ -0,0 +1,107 @@ +import OpenAIApi from 'openai'; +import axios from 'axios'; +import { readFileSync } from 'fs'; + + +let ollamaSettings = JSON.parse(readFileSync('./ollama-config.json', 'utf8')); + +function getContentInBrackets(str) { + const startIndex = str.indexOf("["); + const endIndex = str.indexOf("]"); + + if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) { + return str.substring(startIndex + 1, endIndex); + } else { + return ""; + } + } + +export class Ollama { + + constructor(model_name) { + this.model_name = getContentInBrackets(model_name); + + if (this.model_name = "") { + throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. 
For example, for Mistral, use: ollama[mistral]'); + } + + let ollamaConfig = null; + + ollamaConfig = { + baseURL: ollamaSettings["url"] + '/v1', + apiKey: 'ollama', // required but unused + }; + + + this.openai = new OpenAIApi(ollamaConfig); + + } + + + async sendRequest(turns, systemMessage, stop_seq='***') { + + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + let res = null; + try { + console.log('Awaiting ollama response...') + console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create({ + //model: this.model_name, + model: "mistral", + messages: messages, + stop: stop_seq, + }); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; + } + + async embed(text) { + + + // Will implement this when Ollama will support embeddings in OpenAI format + /* + const embedding = await this.openai.embeddings.create({ + model: "nomic-embed-text", + input: text, + encoding_format: "float", + }); + + return embedding.data[0].embedding; + */ + + // For now, I'll do http request using axios: + + try { + const response = await axios.post(ollamaSettings["url"] + '/api/embeddings', { + model: ollamaSettings["embedding_model"], + prompt: text + }); + return response.data.embedding; + } catch (error) { + console.error('Error embedding text:', error.response ? error.response.data : error.message); + throw error; + } + } + + + + + +} + + + From 6acc53bd341715ccfeae4f7b8c7d051df9d52323 Mon Sep 17 00:00:00 2001 From: Radnos Date: Fri, 5 Apr 2024 23:53:05 +0200 Subject: [PATCH 03/78] added ollama profile --- main.js | 2 +- radley.json | 149 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 radley.json diff --git a/main.js b/main.js index 76b223c..e25aeb0 100644 --- a/main.js +++ b/main.js @@ -1,6 +1,6 @@ import { AgentProcess } from './src/process/agent-process.js'; -let profile = './andy.json'; +let profile = './radley.json'; let load_memory = false; let init_message = 'Say hello world and your name.'; diff --git a/radley.json b/radley.json new file mode 100644 index 0000000..a9eab5a --- /dev/null +++ b/radley.json @@ -0,0 +1,149 @@ +{ + "name": "radley", + + "model": "ollama[mistral]", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. 
This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. Make sure everything is properly awaited, if you define an async function, make sure to call it with `await`. Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$CODE_DOCS\n$EXAMPLES\nBegin coding:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! 
!goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some sand and give it to me please"}, + {"role": "assistant", "content": "Collecting sand... !newAction"}, + {"role": "system", "content": "Code Output:\nYou have reached player hanky.\nCode execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "!newAction`"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Would you like me to try again?"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. 
!newAction"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```\nawait skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. 
What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file From 1970c1edb06c63ab4006954b84ed174e814142ee Mon Sep 17 00:00:00 2001 From: Radnos Date: Fri, 5 Apr 2024 23:54:41 +0200 Subject: [PATCH 04/78] put back andy as default profile --- main.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.js b/main.js index e25aeb0..76b223c 100644 --- a/main.js +++ b/main.js @@ -1,6 +1,6 @@ import { AgentProcess } from './src/process/agent-process.js'; -let profile = './radley.json'; +let profile = './andy.json'; let load_memory = false; let init_message = 'Say hello world and your name.'; From 352dbf1e67b9a9cfdab9b4aef731a578fbc7de32 Mon Sep 17 00:00:00 2001 From: Radnos Date: Sat, 6 Apr 2024 12:22:27 +0200 Subject: [PATCH 05/78] clearer error --- src/models/ollama.js | 38 +++++++++++++++++--------------------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git a/src/models/ollama.js b/src/models/ollama.js index b41bf5a..eafa001 100644 --- a/src/models/ollama.js +++ b/src/models/ollama.js @@ -8,7 +8,7 @@ let ollamaSettings = JSON.parse(readFileSync('./ollama-config.json', 'utf8')); function getContentInBrackets(str) { const startIndex = str.indexOf("["); const endIndex = str.indexOf("]"); - + if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) { return str.substring(startIndex + 1, endIndex); } else { @@ -26,15 +26,20 @@ export class Ollama { } let ollamaConfig = null; - - ollamaConfig = { - baseURL: ollamaSettings["url"] + '/v1', - apiKey: 'ollama', // required but unused - }; + axios.get(ollamaSettings["url"]).then(response => { + if (response.status === 200) { + ollamaConfig = { + baseURL: `${ollamaSettings["url"]}/v1`, + apiKey: 'ollama', // required but unused + }; - this.openai = new OpenAIApi(ollamaConfig); - + this.openai = new OpenAIApi(ollamaConfig); + } + else { + throw new Error(`Error relating the endpoint: ${response.status}.`); + } + }); } @@ -70,8 +75,7 @@ export class Ollama { } async embed(text) { - - + // Will implement this when Ollama will support embeddings in OpenAI format /* const embedding = await this.openai.embeddings.create({ @@ -79,29 +83,21 @@ export class Ollama { input: text, encoding_format: "float", }); - return embedding.data[0].embedding; */ // For now, I'll do http request using axios: try { - const response = await axios.post(ollamaSettings["url"] + '/api/embeddings', { + const response = await axios.post(`${ollamaSettings["url"]}/api/embeddings`, { model: 
ollamaSettings["embedding_model"], prompt: text }); return response.data.embedding; } catch (error) { console.error('Error embedding text:', error.response ? error.response.data : error.message); - throw error; + return Array(1).fill().map(() => Math.random()); } } - - - - -} - - - +} \ No newline at end of file From 54a45c3dfee0ed0d2af0067185125e96fa079737 Mon Sep 17 00:00:00 2001 From: Radnos Date: Mon, 8 Apr 2024 13:31:20 +0200 Subject: [PATCH 06/78] updated readme and set default config --- README.md | 9 +++++++-- ollama-config.json | 2 +- radley.json | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d44ad91..75356b7 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ This project allows an AI model to write/execute code on your computer that may ## Requirements -- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), or [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude) +- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or [Ollama](https://ollama.com/download) - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) - [Node.js](https://nodejs.org/) (at least v14) @@ -21,8 +21,11 @@ Add one of these environment variables: ⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐ + Or you can also use Ollama. + To install the models used by default (generation and embedding), execute the following script: + `ollama pull mistral && ollama pull nomic-embed-text` -Clone/Download this repository +Then, clone/download this repository Run `npm install` @@ -36,6 +39,8 @@ Run `node main.js` You can configure the agent's name, model, and prompts in their profile like `andy.json`. +You can configure ollama in `ollama-config.json`. + You can configure project details in `settings.json`. diff --git a/ollama-config.json b/ollama-config.json index e85b6a9..44d682a 100644 --- a/ollama-config.json +++ b/ollama-config.json @@ -1,4 +1,4 @@ { - "url": "http://10.0.0.26:11434", + "url": "http://localhost:11434", "embedding_model": "nomic-embed-text" } \ No newline at end of file diff --git a/radley.json b/radley.json index 56b3e38..a9eab5a 100644 --- a/radley.json +++ b/radley.json @@ -1,7 +1,7 @@ { "name": "radley", - "model": "ollama[mistral:instruct]", + "model": "ollama[mistral]", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. 
This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", From 5951fd02e586fe734bd19bcca1e32c78b110debf Mon Sep 17 00:00:00 2001 From: Radnos Date: Mon, 8 Apr 2024 14:14:31 +0200 Subject: [PATCH 07/78] set back default value, put instruct variant of mistral as default and fixed model set to none --- README.md | 2 +- radley.json | 2 +- src/models/ollama.js | 17 ++++++++--------- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 75356b7..3551771 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Add one of these environment variables: Or you can also use Ollama. To install the models used by default (generation and embedding), execute the following script: - `ollama pull mistral && ollama pull nomic-embed-text` + `ollama pull mistral:instruct && ollama pull nomic-embed-text` Then, clone/download this repository diff --git a/radley.json b/radley.json index a9eab5a..56b3e38 100644 --- a/radley.json +++ b/radley.json @@ -1,7 +1,7 @@ { "name": "radley", - "model": "ollama[mistral]", + "model": "ollama[mistral:instruct]", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", diff --git a/src/models/ollama.js b/src/models/ollama.js index 6fd4e22..d323fb7 100644 --- a/src/models/ollama.js +++ b/src/models/ollama.js @@ -17,8 +17,13 @@ function getContentInBrackets(str) { export class Ollama { constructor(model_name) { + this.model_name = getContentInBrackets(model_name); let ollamaConfig = null; + if (this.model_name == "") { + throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]'); + } + axios.get(ollamaSettings["url"]).then(response => { if (response.status === 200) { @@ -27,12 +32,6 @@ export class Ollama { apiKey: 'ollama', // required but unused }; - this.model_name = getContentInBrackets(model_name); - - if (this.model_name = "") { - throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]'); - } - this.openai = new OpenAIApi(ollamaConfig); } else { @@ -46,15 +45,15 @@ export class Ollama { async sendRequest(turns, systemMessage, stop_seq='***') { - let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - console.log(this.model_name) + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); let res = null; try { - console.log('Awaiting openai api response...') + console.log(`Awaiting ollama response... 
(model: ${this.model_name})`) console.log('Messages:', messages); let completion = await this.openai.chat.completions.create({ + model: this.model_name, messages: messages, stop: stop_seq, });

From fe8324b03496fd14bf6e9d9f8b816071820a364b Mon Sep 17 00:00:00 2001
From: Radnos
Date: Wed, 10 Apr 2024 15:54:40 +0200
Subject: [PATCH 08/78] made it compatible with other programs that support the
 OpenAI API format (such as oobabooga's Text generation web UI)

---
 README.md             |   9 ++--
 main.js               |   2 +-
 ollama-config.json    |   4 --
 radley.json           |   2 +-
 src/agent/prompter.js |   6 +--
 src/models/ollama.js  | 107 ------------------------------------------
 6 files changed, 9 insertions(+), 121 deletions(-)
 delete mode 100644 ollama-config.json
 delete mode 100644 src/models/ollama.js

diff --git a/README.md b/README.md
index 3551771..3f33c09 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ This project allows an AI model to write/execute code on your computer that may
 
 ## Requirements
 
-- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or [Ollama](https://ollama.com/download)
+- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or any service compatible with the OpenAI API format (such as [Ollama](https://ollama.com/download))
 - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc)
 - [Node.js](https://nodejs.org/) (at least v14)
 
@@ -21,9 +21,8 @@ Add one of these environment variables:
 ⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐
 
- Or you can also use Ollama.
- To install the models used by default (generation and embedding), execute the following script:
- `ollama pull mistral:instruct && ollama pull nomic-embed-text`
+If you use Ollama, to install the models used by default (generation and embedding), execute the following script:
+`ollama pull mistral:instruct && ollama pull nomic-embed-text`
 
 Then, clone/download this repository
 
 Run `npm install`
@@ -39,7 +38,7 @@ Run `node main.js`
 
 You can configure the agent's name, model, and prompts in their profile like `andy.json`.
 
-You can configure ollama in `ollama-config.json`.
+You can configure local service in `local-config.json`.
 
 You can configure project details in `settings.json`.
 
diff --git a/main.js b/main.js
index f95b19b..d381dc9 100644
--- a/main.js
+++ b/main.js
@@ -1,6 +1,6 @@
 import { AgentProcess } from './src/process/agent-process.js';
 
-let profile = './andy.json';
+let profile = './radley.json';
 let load_memory = false;
 let init_message = 'Say hello world and your name. 
Do NOT use any command yet, nor make any comment about that fact.'; diff --git a/ollama-config.json b/ollama-config.json deleted file mode 100644 index 44d682a..0000000 --- a/ollama-config.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "url": "http://localhost:11434", - "embedding_model": "nomic-embed-text" -} \ No newline at end of file diff --git a/radley.json b/radley.json index 56b3e38..ffef8af 100644 --- a/radley.json +++ b/radley.json @@ -1,7 +1,7 @@ { "name": "radley", - "model": "ollama[mistral:instruct]", + "model": "local[mistral:instruct]", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 26da680..3fbce5d 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -8,7 +8,7 @@ import { getCommand } from './commands/index.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; import { Claude } from '../models/claude.js'; -import { Ollama } from '../models/ollama.js'; +import { Local } from '../models/local.js'; export class Prompter { @@ -31,8 +31,8 @@ export class Prompter { this.model = new GPT(model_name); else if (model_name.includes('claude')) this.model = new Claude(model_name); - else if (model_name.includes('ollama')) - this.model = new Ollama(model_name); + else if (model_name.includes('local')) + this.model = new Local(model_name); else throw new Error('Unknown model ' + model_name); } diff --git a/src/models/ollama.js b/src/models/ollama.js deleted file mode 100644 index d323fb7..0000000 --- a/src/models/ollama.js +++ /dev/null @@ -1,107 +0,0 @@ -import OpenAIApi from 'openai'; -import axios from 'axios'; -import { readFileSync } from 'fs'; - -let ollamaSettings = JSON.parse(readFileSync('./ollama-config.json', 'utf8')); - -function getContentInBrackets(str) { - const startIndex = str.indexOf("["); - const endIndex = str.indexOf("]"); - - if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) { - return str.substring(startIndex + 1, endIndex); - } else { - return ""; - } -} - -export class Ollama { - constructor(model_name) { - this.model_name = getContentInBrackets(model_name); - let ollamaConfig = null; - - if (this.model_name == "") { - throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. 
For example, for Mistral, use: ollama[mistral]'); - } - - axios.get(ollamaSettings["url"]).then(response => { - - if (response.status === 200) { - ollamaConfig = { - baseURL: `${ollamaSettings["url"]}/v1`, - apiKey: 'ollama', // required but unused - }; - - this.openai = new OpenAIApi(ollamaConfig); - } - else { - throw new Error(`Error relating the endpoint: ${response.status}.`); - } - - }); - - - } - - async sendRequest(turns, systemMessage, stop_seq='***') { - - console.log(this.model_name) - let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - - let res = null; - try { - console.log(`Awaiting ollama response... (model: ${this.model_name})`) - console.log('Messages:', messages); - let completion = await this.openai.chat.completions.create({ - - model: this.model_name, - messages: messages, - stop: stop_seq, - }); - if (completion.choices[0].finish_reason == 'length') - throw new Error('Context length exceeded'); - console.log('Received.') - res = completion.choices[0].message.content; - } - catch (err) { - if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { - console.log('Context length exceeded, trying again with shorter context.'); - return await sendRequest(turns.slice(1), systemMessage, stop_seq); - } else { - console.log(err); - res = 'My brain disconnected, try again.'; - } - } - return res; - } - - async embed(text) { - - // Will implement this when Ollama will support embeddings in OpenAI format - /* - const embedding = await this.openai.embeddings.create({ - model: "nomic-embed-text", - input: text, - encoding_format: "float", - }); - return embedding.data[0].embedding; - */ - - // For now, I'll do http request using axios: - - try { - const response = await axios.post(`${ollamaSettings["url"]}/api/embeddings`, { - model: ollamaSettings["embedding_model"], - prompt: text - }); - return response.data.embedding; - } catch (error) { - console.error('Error embedding text:', error.response ? 
error.response.data : error.message); - return Array(1).fill().map(() => Math.random()); - } - } - -} - - - From 71a3d081c28c8e7bba255d139793292ee4a0ba2a Mon Sep 17 00:00:00 2001 From: Radnos Date: Wed, 10 Apr 2024 15:56:36 +0200 Subject: [PATCH 09/78] added local config and model --- local-config.json | 5 +++ src/models/local.js | 105 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 local-config.json create mode 100644 src/models/local.js diff --git a/local-config.json b/local-config.json new file mode 100644 index 0000000..7d12196 --- /dev/null +++ b/local-config.json @@ -0,0 +1,5 @@ +{ + "url": "http://localhost:11434", + "api_key": "ollama", + "embedding_model": "nomic-embed-text" +} \ No newline at end of file diff --git a/src/models/local.js b/src/models/local.js new file mode 100644 index 0000000..c9caed9 --- /dev/null +++ b/src/models/local.js @@ -0,0 +1,105 @@ +import OpenAIApi from 'openai'; +import axios from 'axios'; +import { readFileSync } from 'fs'; + +let localSettings = JSON.parse(readFileSync('./local-config.json', 'utf8')); + +function getContentInBrackets(str) { + const startIndex = str.indexOf("["); + const endIndex = str.indexOf("]"); + + if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) { + return str.substring(startIndex + 1, endIndex); + } else { + return ""; + } +} + +export class Local { + constructor(model_name) { + this.model_name = getContentInBrackets(model_name); + let localConfig = null; + let endpoint = localSettings["url"].replace("/v1", ""); + + if (this.model_name == "") { + throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral instruct, use: ollama[mistral:instruct]'); + } + + axios.get(endpoint).then(response => { + + if (response.status === 200) { + localConfig = { + baseURL: `${endpoint}/v1`, + apiKey: localSettings["api_key"], + }; + + this.openai = new OpenAIApi(localConfig); + } + else { + throw new Error(`Error relating the endpoint: ${response.status}.`); + } + + }); + + + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + let res = null; + try { + console.log(`Awaiting local response... 
(model: ${this.model_name})`) + console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create({ + model: this.model_name, + messages: messages, + stop: stop_seq, + }); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; + } + + async embed(text) { + + try { + if (localSettings["api_key"] == "ollama") { //Embedding if it is Ollama (temporary) + const response = await axios.post(`${endpoint}/api/embeddings`, { + model: localSettings["embedding_model"], + prompt: text + }); + return response.data.embedding; + } + + const embedding = await this.openai.embeddings.create({ + model: localSettings["embedding_model"], + input: text, + encoding_format: "float", + }); + return embedding.data[0].embedding; + + } catch (error) { + console.log('Error embedding text:', error.response ? error.response.data : error.message); + return Array(1).fill().map(() => Math.random()); + } + + } + +} + + + From 65865530b6020aa4563591a9baeeee012d794eea Mon Sep 17 00:00:00 2001 From: Radnos Date: Wed, 10 Apr 2024 16:07:37 +0200 Subject: [PATCH 10/78] fixed endpoint --- src/models/local.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/models/local.js b/src/models/local.js index c9caed9..5ddc83e 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -19,17 +19,17 @@ export class Local { constructor(model_name) { this.model_name = getContentInBrackets(model_name); let localConfig = null; - let endpoint = localSettings["url"].replace("/v1", ""); + localSettings["url"] = localSettings["url"].replace("/v1", ""); if (this.model_name == "") { throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. 
For example, for Mistral instruct, use: ollama[mistral:instruct]'); } - axios.get(endpoint).then(response => { + axios.get(localSettings["url"]).then(response => { if (response.status === 200) { localConfig = { - baseURL: `${endpoint}/v1`, + baseURL: `${localSettings["url"]}/v1`, apiKey: localSettings["api_key"], }; @@ -78,7 +78,7 @@ export class Local { try { if (localSettings["api_key"] == "ollama") { //Embedding if it is Ollama (temporary) - const response = await axios.post(`${endpoint}/api/embeddings`, { + const response = await axios.post(`${localSettings["url"]}/api/embeddings`, { model: localSettings["embedding_model"], prompt: text }); From bf4163fcaa948534c7020a4fb0c20c967ea58cb9 Mon Sep 17 00:00:00 2001 From: Maximus Date: Sat, 13 Apr 2024 22:56:18 -0500 Subject: [PATCH 11/78] added coward mode --- andy.json | 6 +++--- src/agent/coder.js | 4 ++-- src/agent/library/skills.js | 29 +++++++++++++++++++++++++++++ src/agent/modes.js | 19 ++++++++++++++++++- 4 files changed, 52 insertions(+), 6 deletions(-) diff --git a/andy.json b/andy.json index f1584f5..9c08916 100644 --- a/andy.json +++ b/andy.json @@ -1,11 +1,11 @@ { "name": "andy", - "model": "gpt-3.5-turbo", + "model": "gpt-4-0125-preview", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. Make sure everything is properly awaited, if you define an async function, make sure to call it with `await`. Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nBegin coding:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. 
If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nBegin coding:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", @@ -116,7 +116,7 @@ ], [ {"role": "user", "content": "bobby: cook some chicken"}, - {"role": "assistant", "content": "```\nawait skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, {"role": "assistant", "content": "I have cooked 8 chicken."} ], diff --git a/src/agent/coder.js b/src/agent/coder.js index 0551032..0a97144 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -26,6 +26,8 @@ export class Coder { code = code.replaceAll('console.log(', 'log(bot,'); code = code.replaceAll('log("', 'log(bot,"'); + console.log(`Generated code: """${code}"""`); + // this may cause problems in callback functions code = code.replaceAll(';\n', '; if(bot.interrupt_code) {log(bot, "Code interrupted.");return;}\n'); for (let line of code.split('\n')) { @@ -33,8 +35,6 @@ export class Coder { } src = this.code_template.replace('/* CODE HERE */', src); - console.log("writing to file...", src) - let filename = this.file_counter + '.js'; // if (this.file_counter > 0) { // let prev_filename = this.fp + (this.file_counter-1) + '.js'; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 829ccbe..e2df4f4 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -256,6 +256,7 @@ export async function attackNearest(bot, mobType, kill=true) { * @example * await skills.attackNearest(bot, "zombie", true); **/ + bot.modes.pause('cowardice'); const mob = bot.nearestEntity(entity => entity.name && entity.name.toLowerCase() === mobType.toLowerCase()); if (mob) { return await attackEntity(bot, mob, kill); @@ -312,6 +313,7 @@ export async function defendSelf(bot, range=9) { * await skills.defendSelf(bot); * **/ bot.modes.pause('self_defense'); + bot.modes.pause('cowardice'); let attacked = false; let enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), range); while (enemy) { @@ -759,6 +761,32 @@ export async function 
moveAway(bot, distance) { return true; } +export async function avoidEnemies(bot, distance=16) { + /** + * Move a given distance away from all nearby enemy mobs. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} distance, the distance to move away. + * @returns {Promise} true if the bot moved away, false otherwise. + * @example + * await skills.avoidEnemies(bot, 8); + **/ + + let enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); + while (enemy) { + const follow = new pf.goals.GoalFollow(enemy, distance+1); // move a little further away + const inverted_goal = new pf.goals.GoalInvert(follow); + bot.pathfinder.setMovements(new pf.Movements(bot)); + bot.pathfinder.setGoal(inverted_goal, true); + await new Promise(resolve => setTimeout(resolve, 500)); + enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); + if (bot.interrupt_code) { + return false; + } + } + log(bot, `Moved ${distance} away from enemies.`); + return true; +} + export async function stay(bot) { /** * Stay in the current position until interrupted. Disables all modes. @@ -767,6 +795,7 @@ export async function stay(bot) { * @example * await skills.stay(bot); **/ + bot.modes.pause('cowardice'); bot.modes.pause('self_defense'); bot.modes.pause('hunting'); bot.modes.pause('torch_placing'); diff --git a/src/agent/modes.js b/src/agent/modes.js index 78b62a6..88c8d7c 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -15,6 +15,23 @@ import * as mc from '../utils/mcdata.js'; // while update functions are async, they should *not* be awaited longer than ~100ms as it will block the update loop // to perform longer actions, use the execute function which won't block the update loop const modes = [ + { + name: 'cowardice', + description: 'Automatically run away from enemies. Interrupts other actions.', + interrupts: ['all'], // Todo: don't interrupt attack actions + dont_interrupt: ['followPlayer'], + on: true, + active: false, + update: async function (agent) { + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); + if (enemy && await world.isClearPath(agent.bot, enemy)) { + agent.bot.chat(`Aaa! A ${enemy.name}!`); + execute(this, agent, async () => { + await skills.avoidEnemies(agent.bot, 16); + }); + } + } + }, { name: 'self_defense', description: 'Automatically attack nearby enemies. 
Interrupts other actions.', @@ -87,7 +104,7 @@ const modes = [ // TODO: check light level instead of nearby torches, block.light is broken const near_torch = world.getNearestBlock(agent.bot, 'torch', 6); if (!near_torch) { - let torches = agent.bot.inventory.items().filter(item => item.name.includes('torch')); + let torches = agent.bot.inventory.items().filter(item => item.name === 'torch'); if (torches.length > 0) { const torch = torches[0]; const pos = agent.bot.entity.position; From a3c8ed7c85a62fe8e7a52b05a0c7602600406707 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 20 Apr 2024 12:25:49 -0500 Subject: [PATCH 12/78] fixed merge conflicts --- andy.json | 2 +- src/agent/modes.js | 45 +++++++++++++++++++++++++++++++++++++++------ 2 files changed, 40 insertions(+), 7 deletions(-) diff --git a/andy.json b/andy.json index 9c08916..147211e 100644 --- a/andy.json +++ b/andy.json @@ -1,7 +1,7 @@ { "name": "andy", - "model": "gpt-4-0125-preview", + "model": "gpt-3.5-turbo", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", diff --git a/src/agent/modes.js b/src/agent/modes.js index 88c8d7c..4633c6b 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -15,9 +15,42 @@ import * as mc from '../utils/mcdata.js'; // while update functions are async, they should *not* be awaited longer than ~100ms as it will block the update loop // to perform longer actions, use the execute function which won't block the update loop const modes = [ + { + name: 'self_preservation', + description: 'Respond to drowning, burning, and damage at low health.', + interrupts: ['all'], + on: true, + active: false, + update: async function (agent) { + let block = agent.bot.blockAt(agent.bot.entity.position); + let blockAbove = agent.bot.blockAt(agent.bot.entity.position.offset(0, 1, 0)); + if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { + // does not call execute so does not interrupt other actions + agent.bot.setControlState('jump', true); + } + if (block.name === 'lava' || block.name === 'flowing_lava' || block.name === 'fire') { + execute(this, agent, async () => { + let nearestWater = world.getNearestBlock(agent.bot, 'water', 20); + if (nearestWater) { + let pos = nearestWater.position; + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); + } + else { + await skills.moveAway(agent.bot, 10); + } + }); + } + else if (agent.bot.health < 5 && agent.bot.lastDamageTime < Date.now() - 3000) { + execute(this, agent, async () => { + await skills.moveAway(agent.bot, 20); + }); + } + } + }, { name: 'cowardice', - description: 'Automatically run away from enemies. Interrupts other actions.', + description: 'Run away from enemies. 
Interrupts other actions.', interrupts: ['all'], // Todo: don't interrupt attack actions dont_interrupt: ['followPlayer'], on: true, @@ -34,7 +67,7 @@ const modes = [ }, { name: 'self_defense', - description: 'Automatically attack nearby enemies. Interrupts other actions.', + description: 'Attack nearby enemies. Interrupts other actions.', interrupts: ['all'], on: true, active: false, @@ -50,7 +83,7 @@ const modes = [ }, { name: 'hunting', - description: 'Automatically hunt nearby animals when idle.', + description: 'Hunt nearby animals when idle.', interrupts: ['defaults'], on: true, active: false, @@ -66,7 +99,7 @@ const modes = [ }, { name: 'item_collecting', - description: 'Automatically collect nearby items when idle.', + description: 'Collect nearby items when idle.', interrupts: ['followPlayer'], on: true, active: false, @@ -96,7 +129,7 @@ const modes = [ }, { name: 'torch_placing', - description: 'Automatically place torches when idle and there are no torches nearby.', + description: 'Place torches when idle and there are no torches nearby.', interrupts: ['followPlayer'], on: true, active: false, @@ -117,7 +150,7 @@ const modes = [ }, { name: 'idle_staring', - description: 'Non-functional animation to look around at entities when idle.', + description: 'Animation to look around at entities when idle.', interrupts: [], on: true, active: false, From 89f25498c36fd7bf8de8f00c8c60d1bcf9e428b3 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 20 Apr 2024 22:16:00 -0500 Subject: [PATCH 13/78] fix claude bug --- src/models/claude.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/models/claude.js b/src/models/claude.js index 7a03855..948eba9 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -44,6 +44,9 @@ export class Claude { prev_role = msg.role; } + if (messages.length > 0 && messages[0].role !== 'user') { + messages.unshift(filler); // anthropic requires user message to start + } if (messages.length === 0) { messages.push(filler); } From 2aeebac0cd7b49e0f25db8bc21c3526999dad6c6 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 20 Apr 2024 22:18:26 -0500 Subject: [PATCH 14/78] save/load modes to memory.json --- src/agent/agent.js | 24 +++++++++++++++++------- src/agent/commands/actions.js | 1 + src/agent/history.js | 7 ++++++- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index efa1c12..0736d8a 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -17,15 +17,18 @@ export class Agent { await this.prompter.initExamples(); - if (load_mem) - this.history.load(); - console.log('Logging in...'); this.bot = initBot(this.name); initModes(this); + if (load_mem) + this.history.load(); + this.bot.once('spawn', async () => { + // wait for a bit so stats are not undefined + await new Promise((resolve) => setTimeout(resolve, 1000)); + console.log(`${this.name} spawned.`); this.coder.clear(); @@ -148,11 +151,17 @@ export class Agent { else if (this.bot.time.timeOfDay == 18000) this.bot.emit('midnight'); }); - this.bot.on('health', () => { - if (this.bot.health < 20) - this.bot.emit('damaged'); - }); + let prev_health = this.bot.health; + this.bot.lastDamageTime = 0; + this.bot.lastDamageTaken = 0; + this.bot.on('health', () => { + if (this.bot.health < prev_health) { + this.bot.lastDamageTime = Date.now(); + this.bot.lastDamageTaken = prev_health - this.bot.health; + } + prev_health = this.bot.health; + }); // Logging callbacks this.bot.on('error' , (err) => { console.error('Error event!', 
err); @@ -176,6 +185,7 @@ export class Agent { } }); this.bot.on('idle', () => { + this.bot.clearControlStates(); this.bot.modes.unPauseAll(); this.coder.executeResume(); }); diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index a57a464..8abe13c 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -43,6 +43,7 @@ export const actionsList = [ name: '!restart', description: 'Restart the agent process.', perform: async function (agent) { + await agent.history.save(); process.exit(1); } }, diff --git a/src/agent/history.js b/src/agent/history.js index 09479a5..b79ac80 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -55,6 +55,9 @@ export class History { }; if (this.agent.npc.data !== null) data.npc = this.agent.npc.data.toObject(); + const modes = this.agent.bot.modes.getJson(); + if (modes !== null) + data.modes = modes; const json_data = JSON.stringify(data, null, 4); writeFileSync(this.memory_fp, json_data, (err) => { if (err) { @@ -71,9 +74,11 @@ export class History { const obj = JSON.parse(data); this.memory = obj.memory; this.agent.npc.data = NPCData.fromObject(obj.npc); + if (obj.modes) + this.agent.bot.modes.loadJson(obj.modes); this.turns = obj.turns; } catch (err) { - console.error(`No memory file '${this.memory_fp}' for agent ${this.name}.`); + console.error(`Error reading ${this.name}'s memory file: ${err.message}`); } } From b33c70e8e10b7374ba257b28b6ace641b07f85f9 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 20 Apr 2024 22:22:26 -0500 Subject: [PATCH 15/78] expanded self_preservation, fixed pausing/interrupting --- src/agent/library/skills.js | 6 ++-- src/agent/modes.js | 58 ++++++++++++++++++++++++++++--------- 2 files changed, 48 insertions(+), 16 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 0e51d89..d62ebb2 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -257,7 +257,7 @@ export async function attackNearest(bot, mobType, kill=true) { * await skills.attackNearest(bot, "zombie", true); **/ bot.modes.pause('cowardice'); - const mob = bot.nearestEntity(entity => entity.name && entity.name.toLowerCase() === mobType.toLowerCase()); + const mob = world.getNearbyEntities(bot, 24).find(entity => entity.name === mobType); if (mob) { return await attackEntity(bot, mob, kill); } @@ -290,7 +290,7 @@ export async function attackEntity(bot, entity, kill=true) { } else { bot.pvp.attack(entity); - while (world.getNearbyEntities(bot, 16).includes(entity)) { + while (world.getNearbyEntities(bot, 24).includes(entity)) { await new Promise(resolve => setTimeout(resolve, 1000)); if (bot.interrupt_code) { bot.pvp.stop(); @@ -703,6 +703,7 @@ export async function goToPlayer(bot, username, distance=3) { * await skills.goToPlayer(bot, "player"); **/ bot.modes.pause('self_defense'); + bot.modes.pause('cowardice'); let player = bot.players[username].entity if (!player) { log(bot, `Could not find ${username}.`); @@ -795,6 +796,7 @@ export async function stay(bot) { * @example * await skills.stay(bot); **/ + bot.modes.pause('self_preservation'); bot.modes.pause('cowardice'); bot.modes.pause('self_defense'); bot.modes.pause('hunting'); diff --git a/src/agent/modes.js b/src/agent/modes.js index 4633c6b..0081682 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -17,42 +17,56 @@ import * as mc from '../utils/mcdata.js'; const modes = [ { name: 'self_preservation', - description: 'Respond to drowning, burning, and damage at low health.', + 
description: 'Respond to drowning, burning, and damage at low health. Interrupts other actions.', interrupts: ['all'], on: true, active: false, + fall_blocks: ['sand', 'gravel', 'concrete_powder'], // includes matching substrings like 'sandstone' and 'red_sand' update: async function (agent) { - let block = agent.bot.blockAt(agent.bot.entity.position); - let blockAbove = agent.bot.blockAt(agent.bot.entity.position.offset(0, 1, 0)); + const bot = agent.bot; + const block = bot.blockAt(bot.entity.position); + const blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { // does not call execute so does not interrupt other actions - agent.bot.setControlState('jump', true); + if (!bot.pathfinder.goal) { + bot.setControlState('jump', true); + } } - if (block.name === 'lava' || block.name === 'flowing_lava' || block.name === 'fire') { + else if (this.fall_blocks.some(name => blockAbove.name.includes(name))) { execute(this, agent, async () => { - let nearestWater = world.getNearestBlock(agent.bot, 'water', 20); + await skills.moveAway(bot, 2); + }); + } + else if (block.name === 'lava' || block.name === 'flowing_lava' || block.name === 'fire' || + blockAbove.name === 'lava' || blockAbove.name === 'flowing_lava' || blockAbove.name === 'fire') { + bot.chat('I\'m on fire!'); // TODO: gets stuck in lava + execute(this, agent, async () => { + let nearestWater = world.getNearestBlock(bot, 'water', 20); if (nearestWater) { - let pos = nearestWater.position; - bot.pathfinder.setMovements(new pf.Movements(bot)); - await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); + const pos = nearestWater.position; + await skills.goToPosition(bot, pos.x, pos.y, pos.z, 0.2); + bot.chat('Ahhhh that\'s better!'); } else { - await skills.moveAway(agent.bot, 10); + await skills.moveAway(bot, 5); } }); } - else if (agent.bot.health < 5 && agent.bot.lastDamageTime < Date.now() - 3000) { + else if (Date.now() - bot.lastDamageTime < 3000 && (bot.health < 5 || bot.lastDamageTaken >= bot.health)) { + bot.chat('I\'m dying!'); execute(this, agent, async () => { - await skills.moveAway(agent.bot, 20); + await skills.moveAway(bot, 20); }); } + else if (agent.isIdle()) { + bot.clearControlStates(); // clear jump if not in danger or doing anything else + } } }, { name: 'cowardice', description: 'Run away from enemies. 
Interrupts other actions.', - interrupts: ['all'], // Todo: don't interrupt attack actions - dont_interrupt: ['followPlayer'], + interrupts: ['all'], on: true, active: false, update: async function (agent) { @@ -251,6 +265,22 @@ class ModeController { if (mode.active) break; } } + + getJson() { + let res = {}; + for (let mode of this.modes_list) { + res[mode.name] = mode.on; + } + return res; + } + + loadJson(json) { + for (let mode of this.modes_list) { + if (json[mode.name] != undefined) { + mode.on = json[mode.name]; + } + } + } } export function initModes(agent) { From ed617c2304725d011a34df60ff0a12acd10fafb3 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Tue, 23 Apr 2024 14:58:31 -0700 Subject: [PATCH 16/78] direct ollama requests --- README.md | 8 ++- local-config.json | 5 -- main.js | 4 +- radley.json | 2 +- src/agent/prompter.js | 4 +- src/models/local.js | 112 +++++++++++++----------------------------- 6 files changed, 40 insertions(+), 95 deletions(-) delete mode 100644 local-config.json diff --git a/README.md b/README.md index 3f33c09..9cc8c0d 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ This project allows an AI model to write/execute code on your computer that may ## Requirements -- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or any service compatible with the OpenAI API format (such as [Ollama](https://ollama.com/download)) +- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or [Ollama Installed](https://ollama.com/download) - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) - [Node.js](https://nodejs.org/) (at least v14) @@ -21,8 +21,8 @@ Add one of these environment variables: ⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐ -If you use Ollama, to install the models used by default (generation and embedding), execute the following script: -`ollama pull mistral:instruct && ollama pull nomic-embed-text` +If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: +`ollama pull phi3 && ollama pull nomic-embed-text` Then, clone/download this repository @@ -38,8 +38,6 @@ Run `node main.js` You can configure the agent's name, model, and prompts in their profile like `andy.json`. -You can configure local service in `local-config.json`. - You can configure project details in `settings.json`. diff --git a/local-config.json b/local-config.json deleted file mode 100644 index 7d12196..0000000 --- a/local-config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "url": "http://localhost:11434", - "api_key": "ollama", - "embedding_model": "nomic-embed-text" -} \ No newline at end of file diff --git a/main.js b/main.js index d381dc9..76b223c 100644 --- a/main.js +++ b/main.js @@ -1,7 +1,7 @@ import { AgentProcess } from './src/process/agent-process.js'; -let profile = './radley.json'; +let profile = './andy.json'; let load_memory = false; -let init_message = 'Say hello world and your name. 
Do NOT use any command yet, nor make any comment about that fact.'; +let init_message = 'Say hello world and your name.'; new AgentProcess().start(profile, load_memory, init_message); \ No newline at end of file diff --git a/radley.json b/radley.json index ffef8af..a5dae28 100644 --- a/radley.json +++ b/radley.json @@ -1,7 +1,7 @@ { "name": "radley", - "model": "local[mistral:instruct]", + "model": "llama3", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 3fbce5d..03679ad 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -31,10 +31,8 @@ export class Prompter { this.model = new GPT(model_name); else if (model_name.includes('claude')) this.model = new Claude(model_name); - else if (model_name.includes('local')) - this.model = new Local(model_name); else - throw new Error('Unknown model ' + model_name); + this.model = new Local(model_name); } getName() { diff --git a/src/models/local.js b/src/models/local.js index 5ddc83e..52967c1 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,69 +1,25 @@ -import OpenAIApi from 'openai'; -import axios from 'axios'; -import { readFileSync } from 'fs'; - -let localSettings = JSON.parse(readFileSync('./local-config.json', 'utf8')); - -function getContentInBrackets(str) { - const startIndex = str.indexOf("["); - const endIndex = str.indexOf("]"); - - if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) { - return str.substring(startIndex + 1, endIndex); - } else { - return ""; - } -} - export class Local { constructor(model_name) { - this.model_name = getContentInBrackets(model_name); - let localConfig = null; - localSettings["url"] = localSettings["url"].replace("/v1", ""); - - if (this.model_name == "") { - throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral instruct, use: ollama[mistral:instruct]'); - } - - axios.get(localSettings["url"]).then(response => { - - if (response.status === 200) { - localConfig = { - baseURL: `${localSettings["url"]}/v1`, - apiKey: localSettings["api_key"], - }; - - this.openai = new OpenAIApi(localConfig); - } - else { - throw new Error(`Error relating the endpoint: ${response.status}.`); - } - - }); - - + this.model_name = model_name; + this.embedding_model = 'nomic-embed-text'; + this.url = 'http://localhost:11434'; + this.chat_endpoint = '/api/chat'; + this.embedding_endpoint = '/api/embeddings'; } - async sendRequest(turns, systemMessage, stop_seq='***') { - + async sendRequest(turns, systemMessage) { let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); let res = null; try { console.log(`Awaiting local response... 
(model: ${this.model_name})`) console.log('Messages:', messages); - let completion = await this.openai.chat.completions.create({ - model: this.model_name, - messages: messages, - stop: stop_seq, - }); - if (completion.choices[0].finish_reason == 'length') - throw new Error('Context length exceeded'); - console.log('Received.') - res = completion.choices[0].message.content; + res = await this.send(this.chat_endpoint, {model: this.model_name, messages: messages, stream: false}); + if (res) + res = res['message']['content']; } catch (err) { - if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + if (err.message.toLowerCase().includes('context length') && turns.length > 1) { console.log('Context length exceeded, trying again with shorter context.'); return await sendRequest(turns.slice(1), systemMessage, stop_seq); } else { @@ -75,31 +31,29 @@ export class Local { } async embed(text) { - - try { - if (localSettings["api_key"] == "ollama") { //Embedding if it is Ollama (temporary) - const response = await axios.post(`${localSettings["url"]}/api/embeddings`, { - model: localSettings["embedding_model"], - prompt: text - }); - return response.data.embedding; - } - - const embedding = await this.openai.embeddings.create({ - model: localSettings["embedding_model"], - input: text, - encoding_format: "float", - }); - return embedding.data[0].embedding; - - } catch (error) { - console.log('Error embedding text:', error.response ? error.response.data : error.message); - return Array(1).fill().map(() => Math.random()); - } - + let body = {model: this.embedding_model, prompt: text}; + let res = await this.send(this.embedding_endpoint, body); + return res['embedding'] } + + async send(endpoint, body) { + const url = new URL(endpoint, this.url); + let method = 'POST'; + let headers = new Headers(); + const request = new Request(url, {method, headers, body: JSON.stringify(body)}); + let data = null; + try { + const res = await fetch(request); + if (res.ok) { + data = await res.json(); + } else { + throw new Error(`Ollama Status: ${res.status}`); + } + } catch (err) { + console.error('Failed to send Ollama request.'); + console.error(err); + } + return data; + } } - - - From 4fd6aa2021ea5fa7b51f4ca263cb09102424536a Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Tue, 23 Apr 2024 20:47:01 -0700 Subject: [PATCH 17/78] prompted goals init --- andy_npc.json | 2 ++ src/agent/commands/actions.js | 14 ++++++++++++++ src/agent/npc/controller.js | 36 +++++++++++++++++++++++++++++++---- src/agent/npc/data.js | 9 +++++++++ src/agent/npc/item_goal.js | 7 ++++--- src/agent/prompter.js | 5 +++++ 6 files changed, 66 insertions(+), 7 deletions(-) diff --git a/andy_npc.json b/andy_npc.json index a392812..a8be5b0 100644 --- a/andy_npc.json +++ b/andy_npc.json @@ -9,6 +9,8 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. 
Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + "goal_setting": "You are a Minecraft bot named $NAME that has the ability to set in-game goals that are then executed programmatically. Goals must be either an item or block name or a blueprint of a specific building. Any minecraft item or block name is valid. However, only names from the following list are valid blueprints: $BLUEPRINTS. Given any recent conversation and the most recently attempted goals, set a new goal to achieve. Format your response as a json object with the fields \"name\" and \"quantity\". Note that the quantity for a blueprint should always be one. Example:\n```json\n{\"name\": \"iron_pickaxe\", \"quantity\": 1}\n```", + "npc": { "goals": [ "wooden_pickaxe", "hole", diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index a57a464..4cd0a1e 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -36,6 +36,7 @@ export const actionsList = [ await agent.coder.stop(); agent.coder.clear(); agent.coder.cancelResume(); + agent.bot.emit('idle'); return 'Agent stopped.'; } }, @@ -196,5 +197,18 @@ export const actionsList = [ perform: wrapExecution(async (agent) => { await skills.stay(agent.bot); }) + }, + { + name: '!goal', + description: 'Set a goal to automatically work towards.', + params: { + 'name': '(string) The name of the goal to set. Can be item or building name. If empty will automatically choose a goal.', + 'quantity': '(number) The quantity of the goal to set. Default is 1.' + }, + perform: async function (agent, name=null, quantity=1) { + if (!agent.npc.data) return 'NPC module is not loaded.'; + await agent.npc.setGoal(name, quantity); + return 'Set goal: ' + agent.npc.data.curr_goal.name; + } } ]; diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index c474efd..ecb831a 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -16,6 +16,7 @@ export class NPCContoller { this.item_goal = new ItemGoal(agent, this.data); this.build_goal = new BuildGoal(agent); this.constructions = {}; + this.last_goals = {}; } getBuiltPositions() { @@ -79,13 +80,29 @@ export class NPCContoller { }); } + setGoal(name=null, quantity=1) { + this.last_goals = {}; + if (name) { + this.data.curr_goal = {name: name, quantity: quantity}; + return; + } + + let res = this.agent.prompter.promptGoalSetting(this.agent.history.getHistory(), this.last_goals); + if (res) { + this.data.curr_goal = res; + console.log('Set new goal: ', res.name, ' x', res.quantity); + } else { + console.log('Error setting new goal.'); + } + } + async executeNext() { if (!this.agent.isIdle()) return; await this.agent.coder.execute(async () => { await skills.moveAway(this.agent.bot, 2); }); - if (this.agent.bot.time.timeOfDay < 13000) { + if (!this.data.do_routine || this.agent.bot.time.timeOfDay < 13000) { // Exit any buildings let building = this.currentBuilding(); if (building == this.data.home) { @@ -123,15 +140,18 @@ export class NPCContoller { async executeGoal() { // If we need more blocks to complete a building, get those first - let goals = this.temp_goals.concat(this.data.goals); + let goals = this.temp_goals.concat(this.data.goals).concat([this.data.curr_goal]); this.temp_goals = []; + let acted = false; for (let goal of goals) { // Obtain goal item or block if (this.constructions[goal.name] === undefined)
{ if (!itemSatisfied(this.agent.bot, goal.name, goal.quantity)) { - await this.item_goal.executeNext(goal.name, goal.quantity); + let res = await this.item_goal.executeNext(goal.name, goal.quantity); + this.last_goals[goal.name] = res; + acted = true; break; } } @@ -162,9 +182,17 @@ export class NPCContoller { quantity: res.missing[block_name] }) } - if (res.acted) break; + if (res.acted) { + acted = true; + this.last_goals[goal.name] = Object.keys(res.missing).length === 0; + break; + } } } + + if (!acted) { + this.setGoal(); + } } currentBuilding() { diff --git a/src/agent/npc/data.js b/src/agent/npc/data.js index 70b59a4..f6b382f 100644 --- a/src/agent/npc/data.js +++ b/src/agent/npc/data.js @@ -1,18 +1,23 @@ export class NPCData { constructor() { this.goals = []; + this.curr_goal = null; this.built = {}; this.home = null; + this.do_routine = true; } toObject() { let obj = {}; if (this.goals.length > 0) obj.goals = this.goals; + if (this.curr_goal) + obj.curr_goal = this.curr_goal; if (Object.keys(this.built).length > 0) obj.built = this.built; if (this.home) obj.home = this.home; + obj.do_routine = this.do_routine; return obj; } @@ -28,10 +33,14 @@ export class NPCData { npc.goals.push({name: goal.name, quantity: goal.quantity}); } } + if (obj.curr_goal) + npc.curr_goal = obj.curr_goal; if (obj.built) npc.built = obj.built; if (obj.home) npc.home = obj.home; + if (obj.do_routine !== undefined) + npc.do_routine = obj.do_routine; return npc; } } \ No newline at end of file diff --git a/src/agent/npc/item_goal.js b/src/agent/npc/item_goal.js index db19dc9..0550657 100644 --- a/src/agent/npc/item_goal.js +++ b/src/agent/npc/item_goal.js @@ -309,7 +309,7 @@ export class ItemGoal { let next_info = this.goal.getNext(item_quantity); if (!next_info) { console.log(`Invalid item goal ${this.goal.name}`); - return; + return false; } let next = next_info.node; let quantity = next_info.quantity; @@ -330,12 +330,12 @@ export class ItemGoal { await new Promise((resolve) => setTimeout(resolve, 500)); this.agent.bot.emit('idle'); } - return; + return false; } // Wait for the bot to be idle before attempting to execute the next goal if (!this.agent.isIdle()) - return; + return false; // Execute the next goal let init_quantity = world.getInventoryCounts(this.agent.bot)[next.name] || 0; @@ -350,5 +350,6 @@ export class ItemGoal { } else { console.log(`Failed to obtain ${next.name} for goal ${this.goal.name}`); } + return final_quantity > init_quantity; } } diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 03679ad..31e8270 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -95,4 +95,9 @@ export class Prompter { prompt = await this.replaceStrings(prompt, null, null, prev_mem, to_summarize); return await this.model.sendRequest([], prompt); } + + async promptGoalSetting(messages, last_goals) { + // TODO + return {name: '', quantity: 0}; + } } \ No newline at end of file From 40e067903e2391c272619c804de42443a496aa1b Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Wed, 24 Apr 2024 11:28:04 -0700 Subject: [PATCH 18/78] model refactor --- README.md | 45 ++++++++++++++++++++++++- src/agent/prompter.js | 72 ++++++++++++++++++++++++++++++---------- src/models/claude.js | 32 +++++++----------- src/models/gemini.js | 36 ++++++++++++++++---- src/models/gpt.js | 38 ++++++++++----------- src/models/local.js | 14 ++++---- src/utils/examples.js | 77 +++++++++++++++++++++++++++++-------------- 7 files changed, 217 insertions(+), 97 deletions(-) diff --git a/README.md b/README.md index 
9cc8c0d..1ffe81e 100644 --- a/README.md +++ b/README.md @@ -38,8 +38,51 @@ Run `node main.js` You can configure the agent's name, model, and prompts in their profile like `andy.json`. -You can configure project details in `settings.json`. +You can configure project details in `settings.json`. +## Bot Profiles + +Bot profiles are json files (such as `andy.json`) that define a bot's behavior in three ways: + +1. Bot backend LLMs to use for chat and embeddings. +2. Prompts used to influence the bot's behavior. +3. Examples retrieved and provided to the bot to help it better perform tasks. + +### Model Specifications + +LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, for both the chat model and the embedding model, the bot profile can specify the below attributes: + +``` +"model": { + "api": "openai", + "url": "https://api.openai.com/v1/", + "model": "gpt-3.5-turbo" +}, +"embedding": { + "api": "openai", + "url": "https://api.openai.com/v1/", + "model": "text-embedding-ada-002" +} +``` + +The model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. + +If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. + +Thus, all the below specifications are equivalent to the above example: + +``` +"model": "gpt-3.5-turbo" +``` +``` +"model": { + "api": "openai" +} +``` +``` +"model": "gpt-3.5-turbo", +"embedding": "openai" +``` ## Online Servers To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. 
Here is an example settings for this: diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 03679ad..1f777e7 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -13,10 +13,57 @@ import { Local } from '../models/local.js'; export class Prompter { constructor(agent, fp) { - this.prompts = JSON.parse(readFileSync(fp, 'utf8')); - let name = this.prompts.name; this.agent = agent; - let model_name = this.prompts.model; + this.prompts = JSON.parse(readFileSync(fp, 'utf8')); + this.convo_examples = null; + this.coding_examples = null; + + let name = this.prompts.name; + let chat = this.prompts.model; + if (typeof chat === 'string' || chat instanceof String) { + chat = {model: chat}; + if (chat.model.includes('gemini')) + chat.api = 'google'; + else if (chat.model.includes('gpt')) + chat.api = 'openai'; + else if (chat.model.includes('claude')) + chat.api = 'anthropic'; + else + chat.api = 'ollama'; + } + + console.log('Using chat settings:', chat); + + if (chat.api == 'google') + this.chat_model = new Gemini(chat.model, chat.url); + else if (chat.api == 'openai') + this.chat_model = new GPT(chat.model, chat.url); + else if (chat.api == 'anthropic') + this.chat_model = new Claude(chat.model, chat.url); + else if (chat.api == 'ollama') + this.chat_model = new Local(chat.model, chat.url); + else + throw new Error('Unknown API:', api); + + let embedding = this.prompts.embedding; + if (embedding === undefined) + embedding = {api: chat.api}; + else if (typeof embedding === 'string' || embedding instanceof String) + embedding = {api: embedding}; + + console.log('Using embedding settings:', embedding); + + if (embedding.api == 'google') + this.embedding_model = new Gemini(embedding.model, embedding.url); + else if (embedding.api == 'openai') + this.embedding_model = new GPT(embedding.model, embedding.url); + else if (embedding.api == 'ollama') + this.embedding_model = new Local(embedding.model, embedding.url); + else { + this.embedding_model = null; + console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. 
Using word overlap.'); + } + mkdirSync(`./bots/${name}`, { recursive: true }); writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.prompts, null, 4), (err) => { if (err) { @@ -24,15 +71,6 @@ export class Prompter { } console.log("Copy profile saved."); }); - - if (model_name.includes('gemini')) - this.model = new Gemini(model_name); - else if (model_name.includes('gpt')) - this.model = new GPT(model_name); - else if (model_name.includes('claude')) - this.model = new Claude(model_name); - else - this.model = new Local(model_name); } getName() { @@ -41,9 +79,9 @@ export class Prompter { async initExamples() { console.log('Loading examples...') - this.convo_examples = new Examples(this.model); + this.convo_examples = new Examples(this.embedding_model); await this.convo_examples.load(this.prompts.conversation_examples); - this.coding_examples = new Examples(this.model); + this.coding_examples = new Examples(this.embedding_model); await this.coding_examples.load(this.prompts.coding_examples); console.log('Examples loaded.'); } @@ -81,18 +119,18 @@ export class Prompter { async promptConvo(messages) { let prompt = this.prompts.conversing; prompt = await this.replaceStrings(prompt, messages, this.convo_examples); - return await this.model.sendRequest(messages, prompt); + return await this.chat_model.sendRequest(messages, prompt); } async promptCoding(messages) { let prompt = this.prompts.coding; prompt = await this.replaceStrings(prompt, messages, this.coding_examples); - return await this.model.sendRequest(messages, prompt); + return await this.chat_model.sendRequest(messages, prompt); } async promptMemSaving(prev_mem, to_summarize) { let prompt = this.prompts.saving_memory; prompt = await this.replaceStrings(prompt, null, null, prev_mem, to_summarize); - return await this.model.sendRequest([], prompt); + return await this.chat_model.sendRequest([], prompt); } } \ No newline at end of file diff --git a/src/models/claude.js b/src/models/claude.js index 7a03855..d75354c 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -1,23 +1,19 @@ import Anthropic from '@anthropic-ai/sdk'; -import { GPT } from './gpt.js'; + export class Claude { - constructor(model_name) { + constructor(model_name, url) { this.model_name = model_name; - if (!process.env.ANTHROPIC_API_KEY) { + + let config = {}; + if (url) + config.baseURL = url; + if (process.env.ANTHROPIC_API_KEY) + config.apiKey = process.env["ANTHROPIC_API_KEY"]; + else throw new Error('Anthropic API key missing! Make sure you set your ANTHROPIC_API_KEY environment variable.'); - } - this.anthropic = new Anthropic({ - apiKey: process.env["ANTHROPIC_API_KEY"] - }); - - this.gpt = undefined; - try { - this.gpt = new GPT(); // use for embeddings, ignore model - } catch (err) { - console.warn('Claude uses the OpenAI API for embeddings, but no OPENAI_API_KEY env variable was found. 
Claude will still work, but performance will suffer.'); - } + this.anthropic = new Anthropic(config); } async sendRequest(turns, systemMessage) { @@ -53,7 +49,7 @@ export class Claude { console.log('Awaiting anthropic api response...') console.log('Messages:', messages); const resp = await this.anthropic.messages.create({ - model: this.model_name, + model: this.model_name || "claude-3-sonnet-20240229", system: systemMessage, max_tokens: 2048, messages: messages, @@ -69,11 +65,7 @@ export class Claude { } async embed(text) { - if (this.gpt) { - return await this.gpt.embed(text); - } - // if no gpt, just return random embedding - return Array(1).fill().map(() => Math.random()); + throw new Error('Embeddings are not supported by Claude.'); } } diff --git a/src/models/gemini.js b/src/models/gemini.js index 7bebc49..c27d34e 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,18 +1,29 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; + export class Gemini { - constructor(model_name) { + constructor(model_name, url) { + this.model_name = model_name; + this.url = url; + if (!process.env.GEMINI_API_KEY) { throw new Error('Gemini API key missing! Make sure you set your GEMINI_API_KEY environment variable.'); } this.genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY); - - - this.llmModel = this.genAI.getGenerativeModel({ model: model_name }); - this.embedModel = this.genAI.getGenerativeModel({ model: "embedding-001"}); } async sendRequest(turns, systemMessage) { + if (this.url) { + model = this.genAI.getGenerativeModel( + {model: this.model_name || "gemini-pro"}, + {baseUrl: this.url} + ); + } else { + model = this.genAI.getGenerativeModel( + {model: this.model_name || "gemini-pro"} + ); + } + const messages = [{'role': 'system', 'content': systemMessage}].concat(turns); let prompt = ""; let role = ""; @@ -24,13 +35,24 @@ export class Gemini { if (role !== "model") // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message prompt += "model: "; console.log(prompt) - const result = await this.llmModel.generateContent(prompt); + const result = await model.generateContent(prompt); const response = await result.response; return response.text(); } async embed(text) { - const result = await this.embedModel.embedContent(text); + if (this.url) { + model = this.genAI.getGenerativeModel( + {model: this.model_name || "embedding-001"}, + {baseUrl: this.url} + ); + } else { + model = this.genAI.getGenerativeModel( + {model: this.model_name || "embedding-001"} + ); + } + + const result = await model.embedContent(text); return result.embedding; } } \ No newline at end of file diff --git a/src/models/gpt.js b/src/models/gpt.js index 64b898b..0889c31 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,25 +1,21 @@ import OpenAIApi from 'openai'; -export class GPT { - constructor(model_name) { - this.model_name = model_name; - let openAiConfig = null; - if (process.env.OPENAI_ORG_ID) { - openAiConfig = { - organization: process.env.OPENAI_ORG_ID, - apiKey: process.env.OPENAI_API_KEY, - }; - } - else if (process.env.OPENAI_API_KEY) { - openAiConfig = { - apiKey: process.env.OPENAI_API_KEY, - }; - } - else { - throw new Error('OpenAI API key missing! 
Make sure you set your OPENAI_API_KEY environment variable.'); - } - this.openai = new OpenAIApi(openAiConfig); +export class GPT { + constructor(model_name, url) { + this.model_name = model_name; + + let config = {}; + if (url) + config.baseURL = url; + if (process.env.OPENAI_ORG_ID) + config.organization = process.env.OPENAI_ORG_ID + if (process.env.OPENAI_API_KEY) + config.apiKey = process.env.OPENAI_API_KEY + else + throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY environment variable.'); + + this.openai = new OpenAIApi(config); } async sendRequest(turns, systemMessage, stop_seq='***') { @@ -31,7 +27,7 @@ export class GPT { console.log('Awaiting openai api response...') console.log('Messages:', messages); let completion = await this.openai.chat.completions.create({ - model: this.model_name, + model: this.model_name || "gpt-3.5-turbo", messages: messages, stop: stop_seq, }); @@ -54,7 +50,7 @@ export class GPT { async embed(text) { const embedding = await this.openai.embeddings.create({ - model: "text-embedding-ada-002", + model: this.model_name || "text-embedding-ada-002", input: text, encoding_format: "float", }); diff --git a/src/models/local.js b/src/models/local.js index 52967c1..f56c043 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,20 +1,20 @@ export class Local { - constructor(model_name) { + constructor(model_name, url) { this.model_name = model_name; - this.embedding_model = 'nomic-embed-text'; - this.url = 'http://localhost:11434'; + this.url = url || 'http://localhost:11434'; this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; } async sendRequest(turns, systemMessage) { + let model = this.model_name || 'llama3'; let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); let res = null; try { - console.log(`Awaiting local response... (model: ${this.model_name})`) + console.log(`Awaiting local response... 
(model: ${model})`) console.log('Messages:', messages); - res = await this.send(this.chat_endpoint, {model: this.model_name, messages: messages, stream: false}); + res = await this.send(this.chat_endpoint, {model: model, messages: messages, stream: false}); if (res) res = res['message']['content']; } @@ -31,12 +31,12 @@ export class Local { } async embed(text) { - let body = {model: this.embedding_model, prompt: text}; + let model = this.model_name || 'nomic-embed-text'; + let body = {model: model, prompt: text}; let res = await this.send(this.embedding_endpoint, body); return res['embedding'] } - async send(endpoint, body) { const url = new URL(endpoint, this.url); let method = 'POST'; diff --git a/src/utils/examples.js b/src/utils/examples.js index 386369c..fdeac43 100644 --- a/src/utils/examples.js +++ b/src/utils/examples.js @@ -6,35 +6,64 @@ export class Examples { this.examples = []; this.model = model; this.select_num = select_num; + this.embeddings = {}; + } + + turnsToText(turns) { + let messages = ''; + for (let turn of turns) { + if (turn.role === 'user') + messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; + } + return messages.trim(); + } + + getWords(text) { + return text.replace(/[^a-zA-Z ]/g, '').toLowerCase().split(' '); + } + + async getSimilarity(text1, text2) { + if (this.model !== null) { + let embeddings1 = null; + let embeddings2 = null; + + if (this.embeddings[text1]) + embeddings1 = this.embeddings[text1]; + else + embeddings1 = await this.model.embed(text1); + + if (this.embeddings[text2]) + embeddings2 = this.embeddings[text2]; + else + embeddings2 = await this.model.embed(text2); + + return cosineSimilarity(embeddings1, embeddings2); + + } else { + const words1 = this.getWords(text1); + const words2 = this.getWords(text2); + const intersection = words1.filter(word => words2.includes(word)); + return intersection.length / (words1.length + words2.length - intersection.length); + } } async load(examples) { - this.examples = []; - let promises = examples.map(async (example) => { - let messages = ''; - for (let turn of example) { - if (turn.role === 'user') - messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; + this.examples = examples; + if (this.model !== null) { + for (let example of this.examples) { + let turn_text = this.turnsToText(example); + this.embeddings[turn_text] = await this.model.embed(turn_text); } - messages = messages.trim(); - const embedding = await this.model.embed(messages); - return {'embedding': embedding, 'turns': example}; - }); - this.examples = await Promise.all(promises); + } } async getRelevant(turns) { - let messages = ''; - for (let turn of turns) { - if (turn.role != 'assistant') - messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; - } - messages = messages.trim(); - const embedding = await this.model.embed(messages); - this.examples.sort((a, b) => { - return cosineSimilarity(b.embedding, embedding) - cosineSimilarity(a.embedding, embedding); - }); - let selected = this.examples.slice(0, this.select_num); + let turn_text = this.turnsToText(turns); + this.examples.sort((a, b) => + this.getSimilarity(turn_text, this.turnsToText(a)) - + this.getSimilarity(turn_text, this.turnsToText(b)) + ); + let selected = this.examples.slice(-this.select_num); return JSON.parse(JSON.stringify(selected)); // deep copy } @@ -43,13 +72,13 @@ export class Examples { console.log('selected examples:'); for (let example of selected_examples) { - 
console.log(example.turns[0].content) + console.log(example[0].content) } let msg = 'Examples of how to respond:\n'; for (let i=0; i Date: Wed, 24 Apr 2024 13:34:09 -0700 Subject: [PATCH 19/78] set goal prompt --- andy_npc.json | 2 +- src/agent/npc/controller.js | 17 ++++++++----- src/agent/prompter.js | 48 ++++++++++++++++++++++++++++++++++--- 3 files changed, 57 insertions(+), 10 deletions(-) diff --git a/andy_npc.json b/andy_npc.json index a8be5b0..0fad11c 100644 --- a/andy_npc.json +++ b/andy_npc.json @@ -9,7 +9,7 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", - "goal_setting": "You are a Minecraft bot named $NAME that has the ability to set in-game goals that are then executed programatically. Goals must be either and item or block name or a blueprint of a specific building. Any minecraft item or block name is valid. However, only names from the following list are valid blueprints: $BLUEPRINTS. Given any recent conversation and the most recently attempted goals, set a new goal to achieve. Fromat your response as a json object with the fields \"name\" and \"quantity\". Note that the quantity for a blueprint should always be one. Example:\n```json\n{\"name\": \"iron_pickaxe\", \"quantity\": 1}\n```", + "goal_setting": "You are a Minecraft bot named $NAME that has the ability to set in-game goals that are then executed programatically. Goals must be either and item or block name or a blueprint of a specific building. Any minecraft item or block name is valid. However, only names from the following list are valid blueprints: $BLUEPRINTS. Given any recent conversation and the most recently attempted goals, set a new goal to achieve. Fromat your response as a json object with the fields \"name\" and \"quantity\". Note that the quantity for a blueprint should always be one. 
Example:\n```json\n{\"name\": \"iron_pickaxe\", \"quantity\": 1}```", "npc": { "goals": [ diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index ecb831a..cbd234f 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -80,14 +80,18 @@ export class NPCContoller { }); } - setGoal(name=null, quantity=1) { + async setGoal(name=null, quantity=1) { this.last_goals = {}; if (name) { this.data.curr_goal = {name: name, quantity: quantity}; return; } - let res = this.agent.prompter.promptGoalSetting(this.agent.history.getHistory(), this.last_goals); + let past_goals = {...this.last_goals}; + for (let goal in this.data.goals) { + if (past_goals[goal.name] === undefined) past_goals[goal.name] = true; + } + let res = await this.agent.prompter.promptGoalSetting(this.agent.history.getHistory(), past_goals); if (res) { this.data.curr_goal = res; console.log('Set new goal: ', res.name, ' x', res.quantity); @@ -140,7 +144,9 @@ export class NPCContoller { async executeGoal() { // If we need more blocks to complete a building, get those first - let goals = this.temp_goals.concat(this.data.goals).concat([this.data.curr_goal]); + let goals = this.temp_goals.concat(this.data.goals); + if (this.data.curr_goal) + goals = goals.concat([this.data.curr_goal]) this.temp_goals = []; let acted = false; @@ -190,9 +196,8 @@ export class NPCContoller { } } - if (!acted) { - this.setGoal(); - } + if (!acted) + await this.setGoal(); } currentBuilding() { diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 31e8270..9b43d6a 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -48,7 +48,7 @@ export class Prompter { console.log('Examples loaded.'); } - async replaceStrings(prompt, messages, examples=null, prev_memory=null, to_summarize=[]) { + async replaceStrings(prompt, messages, examples=null, prev_memory=null, to_summarize=[], last_goals=null) { prompt = prompt.replaceAll('$NAME', this.agent.name); if (prompt.includes('$STATS')) { @@ -69,6 +69,27 @@ export class Prompter { prompt = prompt.replaceAll('$MEMORY', prev_memory ? 
prev_memory : 'None.'); if (prompt.includes('$TO_SUMMARIZE')) prompt = prompt.replaceAll('$TO_SUMMARIZE', stringifyTurns(to_summarize)); + if (prompt.includes('$CONVO')) + prompt = prompt.replaceAll('$CONVO', 'Recent conversation:\n' + stringifyTurns(messages)); + if (prompt.includes('$LAST_GOALS')) { + let goal_text = ''; + for (let goal in last_goals) { + if (last_goals[goal]) + goal_text += `You recently successfully completed the goal ${goal}.\n` + else + goal_text += `You recently failed to complete the goal ${goal}.\n` + } + prompt = prompt.replaceAll('$LAST_GOALS', goal_text.trim()); + } + if (prompt.includes('$BLUEPRINTS')) { + if (this.agent.npc.constructions) { + let blueprints = ''; + for (let blueprint in this.agent.npc.constructions) { + blueprints += blueprint + ', '; + } + prompt = prompt.replaceAll('$BLUEPRINTS', blueprints.slice(0, -2)); + } + } // check if there are any remaining placeholders with syntax $ let remaining = prompt.match(/\$[A-Z_]+/g); @@ -97,7 +118,28 @@ export class Prompter { } async promptGoalSetting(messages, last_goals) { - // TODO - return {name: '', quantity: 0}; + let system_message = this.prompts.goal_setting; + system_message = await this.replaceStrings(system_message, messages); + + let user_message = 'Use the below info to determine what goal to target next\n\n'; + user_message += '$LAST_GOALS\n$STATS\n$INVENTORY\n$CONVO' + user_message = await this.replaceStrings(user_message, messages, null, null, null, last_goals); + let user_messages = [{role: 'user', content: user_message}]; + + let res = await this.model.sendRequest(user_messages, system_message); + + let goal = null; + try { + let data = res.split('```')[1].replace('json', '').trim(); + goal = JSON.parse(data); + } catch (err) { + console.log('Failed to parse goal:', res, err); + } + if (!goal || !goal.name || !goal.quantity || isNaN(parseInt(goal.quantity))) { + console.log('Failed to set goal:', res); + return null; + } + goal.quantity = parseInt(goal.quantity); + return goal; } } \ No newline at end of file From deade1aef71eba51df86703a8913d351d329f2d0 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Wed, 24 Apr 2024 13:36:26 -0700 Subject: [PATCH 20/78] set goal option --- andy_npc.json | 2 ++ src/agent/npc/controller.js | 2 +- src/agent/npc/data.js | 4 ++++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/andy_npc.json b/andy_npc.json index 0fad11c..4a13ea7 100644 --- a/andy_npc.json +++ b/andy_npc.json @@ -12,6 +12,8 @@ "goal_setting": "You are a Minecraft bot named $NAME that has the ability to set in-game goals that are then executed programatically. Goals must be either and item or block name or a blueprint of a specific building. Any minecraft item or block name is valid. However, only names from the following list are valid blueprints: $BLUEPRINTS. Given any recent conversation and the most recently attempted goals, set a new goal to achieve. Fromat your response as a json object with the fields \"name\" and \"quantity\". Note that the quantity for a blueprint should always be one. 
Example:\n```json\n{\"name\": \"iron_pickaxe\", \"quantity\": 1}```", "npc": { + "do_routine": true, + "do_set_goal": true, "goals": [ "wooden_pickaxe", "hole", diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index cbd234f..bb650ea 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -196,7 +196,7 @@ export class NPCContoller { } } - if (!acted) + if (!acted && this.data.do_set_goal) await this.setGoal(); } diff --git a/src/agent/npc/data.js b/src/agent/npc/data.js index f6b382f..b590d15 100644 --- a/src/agent/npc/data.js +++ b/src/agent/npc/data.js @@ -5,6 +5,7 @@ export class NPCData { this.built = {}; this.home = null; this.do_routine = true; + this.do_set_goal = true; } toObject() { @@ -18,6 +19,7 @@ export class NPCData { if (this.home) obj.home = this.home; obj.do_routine = this.do_routine; + obj.do_set_goal = this.do_set_goal; return obj; } @@ -41,6 +43,8 @@ export class NPCData { npc.home = obj.home; if (obj.do_routine !== undefined) npc.do_routine = obj.do_routine; + if (obj.do_set_goal !== undefined) + npc.do_set_goal = obj.do_set_goal; return npc; } } \ No newline at end of file From 1f2273c0287f4ccb81d0935df74f823ca923751e Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Wed, 24 Apr 2024 15:34:08 -0700 Subject: [PATCH 21/78] more buildings --- andy_npc.json | 9 +- .../{hole.json => dirt_shelter.json} | 2 +- src/agent/npc/construction/large_house.json | 230 ++++++++++++++++++ .../npc/construction/small_stone_house.json | 42 ++++ .../{house.json => small_wood_house.json} | 2 +- src/agent/npc/controller.js | 3 + 6 files changed, 280 insertions(+), 8 deletions(-) rename src/agent/npc/construction/{hole.json => dirt_shelter.json} (97%) create mode 100644 src/agent/npc/construction/large_house.json create mode 100644 src/agent/npc/construction/small_stone_house.json rename src/agent/npc/construction/{house.json => small_wood_house.json} (98%) diff --git a/andy_npc.json b/andy_npc.json index 4a13ea7..561f302 100644 --- a/andy_npc.json +++ b/andy_npc.json @@ -16,16 +16,13 @@ "do_set_goal": true, "goals": [ "wooden_pickaxe", - "hole", - "stone_axe", + "dirt_shelter", "stone_pickaxe", "stone_axe", - "house", + "small_wood_house", "furnace", "iron_pickaxe", - "iron_axe", - "iron_sword", - "iron_armor" + "iron_sword" ] }, diff --git a/src/agent/npc/construction/hole.json b/src/agent/npc/construction/dirt_shelter.json similarity index 97% rename from src/agent/npc/construction/hole.json rename to src/agent/npc/construction/dirt_shelter.json index 7a2c1f2..d5e8b28 100644 --- a/src/agent/npc/construction/hole.json +++ b/src/agent/npc/construction/dirt_shelter.json @@ -1,5 +1,5 @@ { - "name": "hole", + "name": "dirt_shelter", "offset": -2, "blocks": [ [ diff --git a/src/agent/npc/construction/large_house.json b/src/agent/npc/construction/large_house.json new file mode 100644 index 0000000..f40e0dd --- /dev/null +++ b/src/agent/npc/construction/large_house.json @@ -0,0 +1,230 @@ +{ + "name": "large_house", + "offset": -4, + "blocks": [ + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", 
"cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "air", "torch", "air", "air", "air", "torch", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "torch", "air", "air", "air", "torch", "air", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + 
["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "dirt"], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "furnace", "air", "crafting_table", "planks", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "planks", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "door", "air"], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "air", "air", "", "air", "planks", ""], + ["", "", "", "planks", "chest", "air", "air", "bed", "", "planks", ""], + ["", "", "", "planks", "chest", "air", "air", "", "air", "planks", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "door", "air"], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", 
"air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "log", "planks", "glass", "glass", "glass", "planks", "log", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["", "", "", "planks", "torch", "air", "torch", "glass", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "air", "air", "torch", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "planks", "air", "torch", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "torch", "air", "torch", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "log", "planks", "glass", "glass", "glass", "planks", "log", ""] + ], + [ + ["", "", "", "log", "log", "log", "log", "log", "", "", ""], + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "log", "log", "log", "log", "log", "log", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "", "", "", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["planks", "air", "bookshelf", "bookshelf", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", 
"planks", "planks", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["glass", "air", "bookshelf", "bookshelf", "air", "air", "air", "air", "air", "planks", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["glass", "air", "air", "torch", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "torch", "air", "air", "air", "air", "air", "glass", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", 
"", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ] + ] +} \ No newline at end of file diff --git a/src/agent/npc/construction/small_stone_house.json b/src/agent/npc/construction/small_stone_house.json new file mode 100644 index 0000000..baf4f65 --- /dev/null +++ b/src/agent/npc/construction/small_stone_house.json @@ -0,0 +1,42 @@ +{ + "name": "small_stone_house", + "offset": -1, + "blocks": [ + [ + ["", "", "", "", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "", "planks", "", ""], + ["", "", "", "", ""] + ], + [ + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "chest", "bed", "air", "cobblestone"], + ["cobblestone", "air", "bed", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "cobblestone", "door", "cobblestone", "cobblestone"], + ["", "air", "air", "air", ""] + ], + [ + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "torch", "air", "torch", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "torch", "air", "torch", "cobblestone"], + ["cobblestone", "cobblestone", "door", "cobblestone", "cobblestone"], + ["", "air", "air", "air", ""] + ], + [ + ["air", "air", "air", "air", "air"], + ["air", "cobblestone", "cobblestone", "cobblestone", "air"], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["air", "cobblestone", "cobblestone", "cobblestone", "air"], + ["air", "air", "air", "air", "air"], + ["", "air", "air", "air", ""] + ] + ] +} \ No newline at end of file diff --git a/src/agent/npc/construction/house.json b/src/agent/npc/construction/small_wood_house.json similarity index 98% rename from src/agent/npc/construction/house.json rename to src/agent/npc/construction/small_wood_house.json index 0a3677d..9661ae5 100644 --- a/src/agent/npc/construction/house.json +++ b/src/agent/npc/construction/small_wood_house.json @@ -1,5 +1,5 @@ { - "name": "shelter", + "name": "small_wood_house", "offset": -1, "blocks": [ [ diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index bb650ea..7636dcc 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -123,6 +123,9 @@ export class NPCContoller { await 
this.executeGoal(); } else { + // Reset goal at the end of the day + this.data.curr_goal = null; + // Return to home let building = this.currentBuilding(); if (this.data.home !== null && (building === null || building != this.data.home)) { From 75a5072cdc9518fa8bff00bce88c1b8787794959 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 27 Apr 2024 18:48:53 -0500 Subject: [PATCH 22/78] fixed example sorting and empty embedding --- src/utils/examples.js | 49 ++++++++++++++++++------------------------- 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/src/utils/examples.js b/src/utils/examples.js index fdeac43..3bba0a0 100644 --- a/src/utils/examples.js +++ b/src/utils/examples.js @@ -12,7 +12,7 @@ export class Examples { turnsToText(turns) { let messages = ''; for (let turn of turns) { - if (turn.role === 'user') + if (turn.role !== 'assistant') messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; } return messages.trim(); @@ -22,29 +22,11 @@ export class Examples { return text.replace(/[^a-zA-Z ]/g, '').toLowerCase().split(' '); } - async getSimilarity(text1, text2) { - if (this.model !== null) { - let embeddings1 = null; - let embeddings2 = null; - - if (this.embeddings[text1]) - embeddings1 = this.embeddings[text1]; - else - embeddings1 = await this.model.embed(text1); - - if (this.embeddings[text2]) - embeddings2 = this.embeddings[text2]; - else - embeddings2 = await this.model.embed(text2); - - return cosineSimilarity(embeddings1, embeddings2); - - } else { - const words1 = this.getWords(text1); - const words2 = this.getWords(text2); - const intersection = words1.filter(word => words2.includes(word)); - return intersection.length / (words1.length + words2.length - intersection.length); - } + wordOverlapScore(text1, text2) { + const words1 = this.getWords(text1); + const words2 = this.getWords(text2); + const intersection = words1.filter(word => words2.includes(word)); + return intersection.length / (words1.length + words2.length - intersection.length); } async load(examples) { @@ -59,11 +41,20 @@ export class Examples { async getRelevant(turns) { let turn_text = this.turnsToText(turns); - this.examples.sort((a, b) => - this.getSimilarity(turn_text, this.turnsToText(a)) - - this.getSimilarity(turn_text, this.turnsToText(b)) - ); - let selected = this.examples.slice(-this.select_num); + if (this.model !== null) { + let embedding = await this.model.embed(turn_text); + this.examples.sort((a, b) => + cosineSimilarity(embedding, this.embeddings[this.turnsToText(b)]) - + cosineSimilarity(embedding, this.embeddings[this.turnsToText(a)]) + ); + } + else { + this.examples.sort((a, b) => + this.wordOverlapScore(turn_text, this.turnsToText(b)) - + this.wordOverlapScore(turn_text, this.turnsToText(a)) + ); + } + let selected = this.examples.slice(0, this.select_num); return JSON.parse(JSON.stringify(selected)); // deep copy } From 81cdc718bd1a3ad2bd3b734055b70189c488cc91 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 27 Apr 2024 23:28:34 -0500 Subject: [PATCH 23/78] added memory bank and commands for saving places --- src/agent/agent.js | 2 ++ src/agent/commands/actions.js | 22 ++++++++++++++++++++++ src/agent/commands/queries.js | 7 +++++++ src/agent/history.js | 5 +++++ src/agent/memory_bank.js | 21 +++++++++++++++++++++ 5 files changed, 57 insertions(+) create mode 100644 src/agent/memory_bank.js diff --git a/src/agent/agent.js b/src/agent/agent.js index 0736d8a..042cc26 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ 
-5,6 +5,7 @@ import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage } from './commands/index.js'; import { NPCContoller } from './npc/controller.js'; +import { MemoryBank } from './memory_bank.js'; export class Agent { @@ -14,6 +15,7 @@ export class Agent { this.history = new History(this); this.coder = new Coder(this); this.npc = new NPCContoller(this); + this.memory_bank = new MemoryBank(); await this.prompter.initExamples(); diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 2a84982..f409211 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -103,6 +103,28 @@ export const actionsList = [ await skills.moveAway(agent.bot, distance); }) }, + { + name: '!rememberHere', + description: 'Save the current location with a given name.', + params: {'name': '(string) The name to remember the location as.'}, + perform: async function (agent, name) { + const pos = agent.bot.entity.position; + agent.memory_bank.rememberPlace(name, pos.x, pos.y, pos.z); + } + }, + { + name: '!goToPlace', + description: 'Go to a saved location.', + params: {'name': '(string) The name of the location to go to.'}, + perform: wrapExecution(async (agent, name) => { + const pos = agent.memory_bank.recallPlace(name); + if (!pos) { + skills.log(agent.bot, `No location named "${name}" saved.`); + return; + } + await skills.goToPosition(agent.bot, pos[0], pos[1], pos[2], 1); + }) + }, { name: '!givePlayer', description: 'Give the specified item to the given player.', diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index 19260b4..b6f2449 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -122,5 +122,12 @@ export const queryList = [ perform: function (agent) { return agent.bot.modes.getStr(); } + }, + { + name: '!savedPlaces', + description: 'List all saved locations.', + perform: async function (agent) { + return "Saved place names: " + Object.keys(agent.memory_bank.places).join(', '); + } } ]; diff --git a/src/agent/history.js b/src/agent/history.js index b79ac80..659113b 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -58,6 +58,9 @@ export class History { const modes = this.agent.bot.modes.getJson(); if (modes !== null) data.modes = modes; + const memory_bank = this.agent.memory_bank.getJson(); + if (memory_bank !== null) + data.memory_bank = memory_bank; const json_data = JSON.stringify(data, null, 4); writeFileSync(this.memory_fp, json_data, (err) => { if (err) { @@ -76,6 +79,8 @@ export class History { this.agent.npc.data = NPCData.fromObject(obj.npc); if (obj.modes) this.agent.bot.modes.loadJson(obj.modes); + if (obj.memory_bank) + this.agent.memory_bank.loadJson(obj.memory_bank); this.turns = obj.turns; } catch (err) { console.error(`Error reading ${this.name}'s memory file: ${err.message}`); diff --git a/src/agent/memory_bank.js b/src/agent/memory_bank.js new file mode 100644 index 0000000..3ddcc73 --- /dev/null +++ b/src/agent/memory_bank.js @@ -0,0 +1,21 @@ +export class MemoryBank { + constructor() { + this.memory = {}; + } + + rememberPlace(name, x, y, z) { + this.memory[name] = [x, y, z]; + } + + recallPlace(name) { + return this.memory[name]; + } + + getJson() { + return this.memory + } + + loadJson(json) { + this.memory = json; + } +} \ No newline at end of file From 04eb3d07f58809f0a08a82a94a68a8d244bf4c53 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 27 Apr 
2024 23:36:58 -0500 Subject: [PATCH 24/78] fixed memory bank query --- src/agent/commands/queries.js | 2 +- src/agent/memory_bank.js | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index b6f2449..99f5fa9 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -127,7 +127,7 @@ export const queryList = [ name: '!savedPlaces', description: 'List all saved locations.', perform: async function (agent) { - return "Saved place names: " + Object.keys(agent.memory_bank.places).join(', '); + return "Saved place names: " + agent.memory_bank.getKeys(); } } ]; diff --git a/src/agent/memory_bank.js b/src/agent/memory_bank.js index 3ddcc73..a32ab78 100644 --- a/src/agent/memory_bank.js +++ b/src/agent/memory_bank.js @@ -18,4 +18,8 @@ export class MemoryBank { loadJson(json) { this.memory = json; } + + getKeys() { + return Object.keys(this.memory).join(', ') + } } \ No newline at end of file From f0542833f26e46b555af7ea8b5354b4f4fac9535 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 27 Apr 2024 23:37:13 -0500 Subject: [PATCH 25/78] fixed null block crash --- src/agent/modes.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index 0081682..ef99b59 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -26,13 +26,13 @@ const modes = [ const bot = agent.bot; const block = bot.blockAt(bot.entity.position); const blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); - if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { + if (blockAbove && (blockAbove.name === 'water' || blockAbove.name === 'flowing_water')) { // does not call execute so does not interrupt other actions if (!bot.pathfinder.goal) { bot.setControlState('jump', true); } } - else if (this.fall_blocks.some(name => blockAbove.name.includes(name))) { + else if (blockAbove && this.fall_blocks.some(name => blockAbove.name.includes(name))) { execute(this, agent, async () => { await skills.moveAway(bot, 2); }); From 57c4f479d8e8e0fcca9b277b1baae8dfb952bdef Mon Sep 17 00:00:00 2001 From: Radnos Date: Mon, 29 Apr 2024 13:47:03 +0200 Subject: [PATCH 26/78] update README.md, phi3 to llama3 In the default profile for local models (Radley.json) llama3 was set, but the readme specified phi3 as default instead. 
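To make the mismatch concrete, here is a minimal sketch of how a bare profile model name such as `llama3` ends up routed to the local Ollama backend; it is condensed from the `Prompter` and `Local` code elsewhere in this series, and `resolveChatModel`, its fallback rule, and the usage at the bottom are illustrative assumptions rather than repository code:

```
// Hedged sketch: mirrors the model routing in src/agent/prompter.js and src/models/local.js.
// resolveChatModel is a hypothetical helper used only for illustration.
import { Local } from '../models/local.js';

function resolveChatModel(profile) {
    // Profiles may declare a bare string ('llama3') or an object ({api, model, url}).
    let chat = typeof profile.model === 'string' ? {model: profile.model} : {...profile.model};
    if (!chat.api)
        chat.api = 'ollama'; // names that match no hosted API fall back to the local backend
    if (chat.api === 'ollama')
        return new Local(chat.model, chat.url); // url is optional; Local supplies its own default
    throw new Error('This sketch only handles the ollama case: ' + chat.api);
}

// A local profile like radley.json resolves to Local('llama3'), which is why the
// README example must pull llama3 rather than phi3.
const chat_model = resolveChatModel({model: 'llama3'});
console.log(chat_model.model_name, chat_model.url);
```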
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1ffe81e..7d6cf78 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Add one of these environment variables: ⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐ If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: -`ollama pull phi3 && ollama pull nomic-embed-text` +`ollama pull llama3 && ollama pull nomic-embed-text` Then, clone/download this repository From e27aa7e8aa4df3b641086da9c922a75c0e287c37 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Mon, 29 Apr 2024 14:23:38 -0700 Subject: [PATCH 27/78] memory bank example --- andy.json | 7 +++++++ andy_npc.json | 7 +++++++ pollux.json | 7 +++++++ radley.json | 7 +++++++ 4 files changed, 28 insertions(+) diff --git a/andy.json b/andy.json index 147211e..70d7355 100644 --- a/andy.json +++ b/andy.json @@ -104,6 +104,13 @@ {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, {"role": "system", "content": "'hunting' mode disabled."}, {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} ] ], diff --git a/andy_npc.json b/andy_npc.json index 561f302..c47497d 100644 --- a/andy_npc.json +++ b/andy_npc.json @@ -121,6 +121,13 @@ {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, {"role": "system", "content": "'hunting' mode disabled."}, {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} ] ], diff --git a/pollux.json b/pollux.json index e2de8eb..e94e5eb 100644 --- a/pollux.json +++ b/pollux.json @@ -102,6 +102,13 @@ {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, {"role": "system", "content": "'hunting' mode disabled."}, {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} ] ], diff --git a/radley.json b/radley.json index a5dae28..a9bceda 100644 --- a/radley.json +++ b/radley.json @@ -104,6 +104,13 @@ {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, {"role": "system", "content": "'hunting' mode disabled."}, {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. 
!rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} ] ], From fed2696f0696794342f1bc394a1aae9c9d70e044 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Tue, 30 Apr 2024 13:31:51 -0700 Subject: [PATCH 28/78] model name fix --- src/agent/prompter.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 67a96ac..b895727 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -164,7 +164,7 @@ export class Prompter { user_message = await this.replaceStrings(user_message, messages, null, null, null, last_goals); let user_messages = [{role: 'user', content: user_message}]; - let res = await this.model.sendRequest(user_messages, system_message); + let res = await this.chat_model.sendRequest(user_messages, system_message); let goal = null; try { @@ -180,4 +180,4 @@ export class Prompter { goal.quantity = parseInt(goal.quantity); return goal; } -} \ No newline at end of file +} From 51f401e62d570dd67a00f4cfa1978f3294ca88cf Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Wed, 1 May 2024 12:19:42 -0700 Subject: [PATCH 29/78] mode fixes --- src/agent/modes.js | 15 +++++++++------ src/utils/mcdata.js | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index 0081682..56ecbf3 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -70,7 +70,7 @@ const modes = [ on: true, active: false, update: async function (agent) { - const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); if (enemy && await world.isClearPath(agent.bot, enemy)) { agent.bot.chat(`Aaa! 
A ${enemy.name}!`); execute(this, agent, async () => { @@ -86,11 +86,11 @@ const modes = [ on: true, active: false, update: async function (agent) { - const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 9); + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); if (enemy && await world.isClearPath(agent.bot, enemy)) { agent.bot.chat(`Fighting ${enemy.name}!`); execute(this, agent, async () => { - await skills.defendSelf(agent.bot, 9); + await skills.defendSelf(agent.bot, 8); }); } } @@ -155,9 +155,12 @@ const modes = [ if (torches.length > 0) { const torch = torches[0]; const pos = agent.bot.entity.position; - execute(this, agent, async () => { - await skills.placeBlock(agent.bot, torch.name, pos.x, pos.y, pos.z); - }); + const curr_block = agent.bot.blockAt(pos); + if (curr_block.name === 'air') { + execute(this, agent, async () => { + await skills.placeBlock(agent.bot, torch.name, pos.x, pos.y, pos.z); + }); + } } } } diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index eeeb957..384a211 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -68,7 +68,7 @@ export function initBot(username) { export function isHuntable(mob) { if (!mob || !mob.name) return false; - const animals = ['chicken', 'cod', 'cow', 'llama', 'mooshroom', 'pig', 'pufferfish', 'rabbit', 'salmon', 'sheep', 'squid', 'tropical_fish', 'turtle']; + const animals = ['chicken', 'cow', 'llama', 'mooshroom', 'pig', 'rabbit', 'sheep']; return animals.includes(mob.name.toLowerCase()) && !mob.metadata[16]; // metadata 16 is not baby } From 0f18dd9f128f754fbd3400edcf95d32845f8830e Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Wed, 1 May 2024 12:38:33 -0700 Subject: [PATCH 30/78] change cowardice range --- src/agent/modes.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index 56ecbf3..5550054 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -70,11 +70,11 @@ const modes = [ on: true, active: false, update: async function (agent) { - const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); if (enemy && await world.isClearPath(agent.bot, enemy)) { agent.bot.chat(`Aaa! A ${enemy.name}!`); execute(this, agent, async () => { - await skills.avoidEnemies(agent.bot, 16); + await skills.avoidEnemies(agent.bot, 24); }); } } From 825c369e8d000b52e2b4e1d4abd6a4d9180558b2 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Thu, 2 May 2024 22:32:16 -0700 Subject: [PATCH 31/78] always load npc --- src/agent/commands/actions.js | 1 - src/agent/npc/controller.js | 3 +-- src/agent/npc/data.js | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 2a84982..23b401b 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -207,7 +207,6 @@ export const actionsList = [ 'quantity': '(number) The quantity of the goal to set. Default is 1.' 
}, perform: async function (agent, name=null, quantity=1) { - if (!agent.npc.data) return 'NPC module is not loaded.'; await agent.npc.setGoal(name, quantity); return 'Set goal: ' + agent.npc.data.curr_goal.name; } diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index 7636dcc..84770cd 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -39,8 +39,6 @@ export class NPCContoller { } init() { - if (this.data === null) return; - for (let file of readdirSync('src/agent/npc/construction')) { if (file.endsWith('.json')) { try { @@ -68,6 +66,7 @@ export class NPCContoller { } this.agent.bot.on('idle', async () => { + if (this.data.goals.length === 0 && !this.data.curr_goal) return; // Wait a while for inputs before acting independently await new Promise((resolve) => setTimeout(resolve, 5000)); if (!this.agent.isIdle()) return; diff --git a/src/agent/npc/data.js b/src/agent/npc/data.js index b590d15..6d12f0a 100644 --- a/src/agent/npc/data.js +++ b/src/agent/npc/data.js @@ -24,8 +24,8 @@ export class NPCData { } static fromObject(obj) { - if (!obj) return null; let npc = new NPCData(); + if (!obj) return npc; if (obj.goals) { npc.goals = []; for (let goal of obj.goals) { From 6727b44afcdaa5ebcd53216742fa7f0b68e942ea Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 4 May 2024 16:17:41 -0500 Subject: [PATCH 32/78] clear pathfinder on idle, improve coward mode --- src/agent/agent.js | 1 + src/agent/library/skills.js | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index 042cc26..fe44a25 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -188,6 +188,7 @@ export class Agent { }); this.bot.on('idle', () => { this.bot.clearControlStates(); + this.bot.pathfinder.stop(); // clear any lingering pathfinder this.bot.modes.unPauseAll(); this.coder.executeResume(); }); diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index d62ebb2..4889357 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -15,7 +15,8 @@ async function autoLight(bot) { let nearest_torch = world.getNearestBlock(bot, 'torch', 6); if (!nearest_torch) { let has_torch = bot.inventory.items().find(item => item.name === 'torch'); - if (has_torch) { + const curr_block = agent.bot.blockAt(pos); + if (has_torch && curr_block.name === 'air') { try { log(bot, `Placing torch at ${bot.entity.position}.`); return await placeBlock(bot, 'torch', bot.entity.position.x, bot.entity.position.y, bot.entity.position.z); @@ -771,7 +772,7 @@ export async function avoidEnemies(bot, distance=16) { * @example * await skills.avoidEnemies(bot, 8); **/ - + bot.modes.pause('self_preservation'); // prevents damage-on-low-health from interrupting the bot let enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); while (enemy) { const follow = new pf.goals.GoalFollow(enemy, distance+1); // move a little further away @@ -781,9 +782,10 @@ export async function avoidEnemies(bot, distance=16) { await new Promise(resolve => setTimeout(resolve, 500)); enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); if (bot.interrupt_code) { - return false; + break; } } + bot.pathfinder.stop(); log(bot, `Moved ${distance} away from enemies.`); return true; } From 74156d118d8f30d15355248aa058ae5554aa4e19 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Sat, 4 May 2024 14:41:02 -0700 Subject: [PATCH 33/78] trigger goal to start --- 
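For context on why the emit is needed, below is a hedged sketch of the idle-driven goal loop this one-liner hooks into, condensed from src/agent/agent.js and src/agent/npc/controller.js in earlier patches; the simplified `bot`/`npc` objects and their bodies are illustrative stand-ins, not repository code:

```
// Hedged sketch of the idle-driven goal loop (condensed stand-ins, not repository code).
import { EventEmitter } from 'events';

const bot = new EventEmitter();
const npc = {
    data: { goals: [], curr_goal: null },
    executeGoal: async () => console.log('executing goal...'),
};

bot.on('idle', async () => {
    // Mirrors the controller's idle handler: do nothing unless a goal exists.
    if (npc.data.goals.length === 0 && !npc.data.curr_goal) return;
    await npc.executeGoal();
});

// Mirrors the !goal command after this patch: set the goal, then emit 'idle' to start it.
// Without the emit, the goal would only begin on the next natural idle event.
npc.data.curr_goal = { name: 'build_house', quantity: 1 };
bot.emit('idle');
```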
src/agent/commands/actions.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 02c1fbf..12ccacb 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -230,6 +230,7 @@ export const actionsList = [ }, perform: async function (agent, name=null, quantity=1) { await agent.npc.setGoal(name, quantity); + agent.bot.emit('idle'); // to trigger the goal return 'Set goal: ' + agent.npc.data.curr_goal.name; } } From 2faba65917ce756d45fe27d5d867524bfc00da3e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 5 May 2024 10:38:01 -0500 Subject: [PATCH 34/78] changed localhost default to 127... --- src/models/local.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/local.js b/src/models/local.js index f56c043..3ed880e 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,7 +1,7 @@ export class Local { constructor(model_name, url) { this.model_name = model_name; - this.url = url || 'http://localhost:11434'; + this.url = url || 'http://127.0.0.1:11434' this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; } From ac73a835eb645560afc8eb29abdce6883398b42e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 5 May 2024 10:38:52 -0500 Subject: [PATCH 35/78] added semicolon --- src/models/local.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/local.js b/src/models/local.js index 3ed880e..dd3af34 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,7 +1,7 @@ export class Local { constructor(model_name, url) { this.model_name = model_name; - this.url = url || 'http://127.0.0.1:11434' + this.url = url || 'http://127.0.0.1:11434'; this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; } From 453de3535f9c857ace9e8dba9a56bbabb96c9e6b Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 6 May 2024 00:18:04 -0500 Subject: [PATCH 36/78] added replicate api --- package.json | 1 + src/agent/prompter.js | 7 ++++ src/models/replicate.js | 81 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 89 insertions(+) create mode 100644 src/models/replicate.js diff --git a/package.json b/package.json index ae200aa..247c2dc 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ "mineflayer-pvp": "^1.3.2", "openai": "^4.4.0", "patch-package": "^8.0.0", + "replicate": "^0.29.4", "vec3": "^0.1.10", "yargs": "^17.7.2" }, diff --git a/src/agent/prompter.js b/src/agent/prompter.js index b895727..5e6a412 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -8,6 +8,7 @@ import { getCommand } from './commands/index.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; import { Claude } from '../models/claude.js'; +import { ReplicateAPI } from '../models/replicate.js'; import { Local } from '../models/local.js'; @@ -28,6 +29,8 @@ export class Prompter { chat.api = 'openai'; else if (chat.model.includes('claude')) chat.api = 'anthropic'; + else if (chat.model.includes('meta/') || chat.model.includes('mistralai/') || chat.model.includes('replicate/')) + chat.api = 'replicate'; else chat.api = 'ollama'; } @@ -40,6 +43,8 @@ export class Prompter { this.chat_model = new GPT(chat.model, chat.url); else if (chat.api == 'anthropic') this.chat_model = new Claude(chat.model, chat.url); + else if (chat.api == 'replicate') + this.chat_model = new ReplicateAPI(chat.model, chat.url); else if (chat.api == 'ollama') this.chat_model = new 
Local(chat.model, chat.url); else @@ -57,6 +62,8 @@ export class Prompter { this.embedding_model = new Gemini(embedding.model, embedding.url); else if (embedding.api == 'openai') this.embedding_model = new GPT(embedding.model, embedding.url); + else if (embedding.api == 'replicate') + this.embedding_model = new ReplicateAPI(embedding.model, embedding.url); else if (embedding.api == 'ollama') this.embedding_model = new Local(embedding.model, embedding.url); else { diff --git a/src/models/replicate.js b/src/models/replicate.js new file mode 100644 index 0000000..8ff22b4 --- /dev/null +++ b/src/models/replicate.js @@ -0,0 +1,81 @@ +import Replicate from 'replicate'; + +// llama, mistral +export class ReplicateAPI { + constructor(model_name, url) { + this.model_name = model_name; + this.url = url; + + if (!process.env.REPLICATE_API_KEY) { + throw new Error('Replicate API key missing! Make sure you set your REPLICATE_API_KEY environment variable.'); + } + + this.replicate = new Replicate({ + auth: process.env.REPLICATE_API_KEY, + }); + } + + async sendRequest(turns, systemMessage) { + if (this.url) { + + } + + let prev_role = null; + let messages = []; + let filler = { role: 'user', content: '_' }; + + for (let msg of turns) { + if (msg.role === 'system') { + msg.role = 'user'; + msg.content = 'SYSTEM: ' + msg.content; + } + if (msg.role === prev_role && msg.role === 'assistant') { + // insert empty user message to separate assistant messages + messages.push(filler); + messages.push(msg); + } else if (msg.role === prev_role) { + // combine new message with previous message instead of adding a new one + messages[messages.length - 1].content += '\n' + msg.content; + } else { + messages.push(msg); + } + prev_role = msg.role; + } + + const prompt = '\n\n' + messages.map(msg => `${msg.role}: ${msg.content}`).join('\n'); + const input = { + prompt: prompt, + top_p: 0.95, + prompt_template: "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"+systemMessage+"<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + presence_penalty: 0, + frequency_penalty: 0 + }; + + + let res = null; + try { + console.log('Awaiting Replicate API response...'); + console.log('Input:', input); + let result = ''; + for await (const event of this.replicate.stream(this.model_name, { input })) { + result += event; + } + console.log('Received.'); + res = result; + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + } + + return res; + } + "You are a playful Minecraft bot named andy that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll sto…ll automatically choose a goal.\nquantity: (number) The quantity of the goal to set. Default is 1.\n*\n\nExamples of how to respond:\nExample 1:\nUser input: miner_32: Hey! What are you up to?\nYour output:\nNothing much miner_32, what do you need?\n\nExample 2:\nUser input: grombo_Xx: What do you see?\nYour output:\nLet me see... 
!nearbyBlocks\nSystem output: NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone\nYour output:\nI see some oak logs, dirt, and cobblestone.\n\n\nConversation Begin:\n\nuser: SYSTEM: SAY HELLO." + + async embed(text) { + const output = await this.replicate.run( + this.model_name || "mark3labs/embeddings-gte-base:d619cff29338b9a37c3d06605042e1ff0594a8c3eff0175fd6967f5643fc4d47", + { input: {text} } + ); + return output; + } +} \ No newline at end of file From 0ff4c91cf7aafbe43e0ca9c54784d479262b0d4c Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 6 May 2024 11:16:35 -0500 Subject: [PATCH 37/78] changed default settings host --- settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.json b/settings.json index 9d0490a..4ee3b10 100644 --- a/settings.json +++ b/settings.json @@ -1,6 +1,6 @@ { "minecraft_version": "1.20.4", - "host": "localhost", + "host": "127.0.0.1", "port": 55916, "auth": "offline", "allow_insecure_coding": false From 7ac919a62f36b29fa1ddc10392a44e6d8ccb7189 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 7 May 2024 15:06:05 -0500 Subject: [PATCH 38/78] fixed prompter mutating history, improved coding --- andy.json | 2 +- src/agent/coder.js | 3 ++- src/agent/history.js | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/andy.json b/andy.json index 70d7355..d4c5fc9 100644 --- a/andy.json +++ b/andy.json @@ -5,7 +5,7 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nBegin coding:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. 
Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", diff --git a/src/agent/coder.js b/src/agent/coder.js index 0a97144..7ba66e3 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -91,6 +91,7 @@ export class Coder { async generateCodeLoop(agent_history) { let messages = agent_history.getHistory(); + messages.push({role: 'system', content: 'Code generation started. 
Write code in codeblock in your response:'}); let code_return = null; let failures = 0; @@ -99,7 +100,7 @@ export class Coder { if (this.agent.bot.interrupt_code) return interrupt_return; console.log(messages) - let res = await this.agent.prompter.promptCoding(messages); + let res = await this.agent.prompter.promptCoding(JSON.parse(JSON.stringify(messages))); if (this.agent.bot.interrupt_code) return interrupt_return; let contains_code = res.indexOf('```') !== -1; diff --git a/src/agent/history.js b/src/agent/history.js index 659113b..b276823 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -22,7 +22,7 @@ export class History { async storeMemories(turns) { console.log("Storing memories..."); - this.memory = await this.agent.prompter.promptMemSaving(this.memory, turns); + this.memory = await this.agent.prompter.promptMemSaving(this.getHistory(), turns); console.log("Memory updated to: ", this.memory); } From e4187787900f49d65cd2801e8daf10f65cd9361a Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 7 May 2024 15:08:22 -0500 Subject: [PATCH 39/78] improved replicate, fixed gemini, shared toSinglePrompt --- src/models/gemini.js | 21 +++---- src/models/helper.js | 14 +++++ src/models/replicate.js | 122 +++++++++++++++++----------------------- 3 files changed, 75 insertions(+), 82 deletions(-) create mode 100644 src/models/helper.js diff --git a/src/models/gemini.js b/src/models/gemini.js index c27d34e..504e3f6 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,5 +1,5 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; - +import { toSinglePrompt } from './helper.js'; export class Gemini { constructor(model_name, url) { @@ -13,6 +13,7 @@ export class Gemini { } async sendRequest(turns, systemMessage) { + let model; if (this.url) { model = this.genAI.getGenerativeModel( {model: this.model_name || "gemini-pro"}, @@ -24,23 +25,19 @@ export class Gemini { ); } - const messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - let prompt = ""; - let role = ""; - messages.forEach((message) => { - role = message.role; - if (role === 'assistant') role = 'model'; - prompt += `${role}: ${message.content}\n`; - }); - if (role !== "model") // if the last message was from the user/system, add a prompt for the model. 
otherwise, pretend we are extending the model's own message - prompt += "model: "; + const stop_seq = '***'; + const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); console.log(prompt) const result = await model.generateContent(prompt); const response = await result.response; - return response.text(); + const text = response.text(); + if (!text.includes(stop_seq)) return text; + const idx = text.indexOf(stop_seq); + return text.slice(0, idx); } async embed(text) { + let model; if (this.url) { model = this.genAI.getGenerativeModel( {model: this.model_name || "embedding-001"}, diff --git a/src/models/helper.js b/src/models/helper.js new file mode 100644 index 0000000..7b45fe1 --- /dev/null +++ b/src/models/helper.js @@ -0,0 +1,14 @@ +export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { + let messages = turns; + if (system) messages.unshift({role: 'system', content: system}); + let prompt = ""; + let role = ""; + messages.forEach((message) => { + role = message.role; + if (role === 'assistant') role = model_nickname; + prompt += `${role}: ${message.content}${stop_seq}`; + }); + if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message + prompt += model_nickname + ": "; + return prompt; +} diff --git a/src/models/replicate.js b/src/models/replicate.js index 8ff22b4..d9f8382 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -1,81 +1,63 @@ import Replicate from 'replicate'; +import { toSinglePrompt } from './helper.js'; // llama, mistral export class ReplicateAPI { - constructor(model_name, url) { - this.model_name = model_name; - this.url = url; + constructor(model_name, url) { + this.model_name = model_name; + this.url = url; - if (!process.env.REPLICATE_API_KEY) { - throw new Error('Replicate API key missing! Make sure you set your REPLICATE_API_KEY environment variable.'); - } + if (this.url) { + console.warn('Replicate API does not support custom URLs. Ignoring provided URL.'); + } - this.replicate = new Replicate({ - auth: process.env.REPLICATE_API_KEY, - }); - } + if (!process.env.REPLICATE_API_KEY) { + throw new Error('Replicate API key missing! 
Make sure you set your REPLICATE_API_KEY environment variable.'); + } - async sendRequest(turns, systemMessage) { - if (this.url) { - - } + this.replicate = new Replicate({ + auth: process.env.REPLICATE_API_KEY, + }); + } - let prev_role = null; - let messages = []; - let filler = { role: 'user', content: '_' }; + async sendRequest(turns, systemMessage) { + const stop_seq = '***'; + let prompt_template; + const prompt = toSinglePrompt(turns, systemMessage, stop_seq); + if (this.model_name.includes('llama')) { // llama + prompt_template = "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" + } + else { // mistral + prompt_template = "[INST] {prompt} [/INST] " + } - for (let msg of turns) { - if (msg.role === 'system') { - msg.role = 'user'; - msg.content = 'SYSTEM: ' + msg.content; - } - if (msg.role === prev_role && msg.role === 'assistant') { - // insert empty user message to separate assistant messages - messages.push(filler); - messages.push(msg); - } else if (msg.role === prev_role) { - // combine new message with previous message instead of adding a new one - messages[messages.length - 1].content += '\n' + msg.content; - } else { - messages.push(msg); - } - prev_role = msg.role; - } + const input = { prompt, prompt_template }; + let res = null; + try { + console.log('Awaiting Replicate API response...'); + let result = ''; + for await (const event of this.replicate.stream(this.model_name, { input })) { + result += event; + if (result === '') break; + if (result.includes(stop_seq)) { + result = result.slice(0, result.indexOf(stop_seq)); + break; + } + } + res = result; + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + } + console.log('Received.'); + return res; + } - const prompt = '\n\n' + messages.map(msg => `${msg.role}: ${msg.content}`).join('\n'); - const input = { - prompt: prompt, - top_p: 0.95, - prompt_template: "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"+systemMessage+"<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", - presence_penalty: 0, - frequency_penalty: 0 - }; - - - let res = null; - try { - console.log('Awaiting Replicate API response...'); - console.log('Input:', input); - let result = ''; - for await (const event of this.replicate.stream(this.model_name, { input })) { - result += event; - } - console.log('Received.'); - res = result; - } catch (err) { - console.log(err); - res = 'My brain disconnected, try again.'; - } - - return res; - } - "You are a playful Minecraft bot named andy that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll sto…ll automatically choose a goal.\nquantity: (number) The quantity of the goal to set. Default is 1.\n*\n\nExamples of how to respond:\nExample 1:\nUser input: miner_32: Hey! What are you up to?\nYour output:\nNothing much miner_32, what do you need?\n\nExample 2:\nUser input: grombo_Xx: What do you see?\nYour output:\nLet me see... 
!nearbyBlocks\nSystem output: NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone\nYour output:\nI see some oak logs, dirt, and cobblestone.\n\n\nConversation Begin:\n\nuser: SYSTEM: SAY HELLO." - - async embed(text) { - const output = await this.replicate.run( - this.model_name || "mark3labs/embeddings-gte-base:d619cff29338b9a37c3d06605042e1ff0594a8c3eff0175fd6967f5643fc4d47", - { input: {text} } - ); - return output; - } + async embed(text) { + const output = await this.replicate.run( + this.model_name || "mark3labs/embeddings-gte-base:d619cff29338b9a37c3d06605042e1ff0594a8c3eff0175fd6967f5643fc4d47", + { input: {text} } + ); + return output.vectors; + } } \ No newline at end of file From 0bd92f7521bba9cab72041d971d0fe305d902bcd Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 10 May 2024 13:41:29 -0500 Subject: [PATCH 40/78] refactored and added defaul replicate --- src/models/gemini.js | 3 +-- src/models/helper.js | 14 -------------- src/models/replicate.js | 7 ++++--- src/utils/text.js | 15 +++++++++++++++ 4 files changed, 20 insertions(+), 19 deletions(-) delete mode 100644 src/models/helper.js diff --git a/src/models/gemini.js b/src/models/gemini.js index 504e3f6..61f4f1a 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,5 +1,5 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; -import { toSinglePrompt } from './helper.js'; +import { toSinglePrompt } from '../utils/text.js'; export class Gemini { constructor(model_name, url) { @@ -27,7 +27,6 @@ export class Gemini { const stop_seq = '***'; const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); - console.log(prompt) const result = await model.generateContent(prompt); const response = await result.response; const text = response.text(); diff --git a/src/models/helper.js b/src/models/helper.js deleted file mode 100644 index 7b45fe1..0000000 --- a/src/models/helper.js +++ /dev/null @@ -1,14 +0,0 @@ -export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { - let messages = turns; - if (system) messages.unshift({role: 'system', content: system}); - let prompt = ""; - let role = ""; - messages.forEach((message) => { - role = message.role; - if (role === 'assistant') role = model_nickname; - prompt += `${role}: ${message.content}${stop_seq}`; - }); - if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. 
otherwise, pretend we are extending the model's own message - prompt += model_nickname + ": "; - return prompt; -} diff --git a/src/models/replicate.js b/src/models/replicate.js index d9f8382..4301448 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -1,5 +1,5 @@ import Replicate from 'replicate'; -import { toSinglePrompt } from './helper.js'; +import { toSinglePrompt } from '../utils/text.js'; // llama, mistral export class ReplicateAPI { @@ -24,7 +24,8 @@ export class ReplicateAPI { const stop_seq = '***'; let prompt_template; const prompt = toSinglePrompt(turns, systemMessage, stop_seq); - if (this.model_name.includes('llama')) { // llama + let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; + if (model_name.includes('llama')) { // llama prompt_template = "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" } else { // mistral @@ -36,7 +37,7 @@ export class ReplicateAPI { try { console.log('Awaiting Replicate API response...'); let result = ''; - for await (const event of this.replicate.stream(this.model_name, { input })) { + for await (const event of this.replicate.stream(model_name, { input })) { result += event; if (result === '') break; if (result.includes(stop_seq)) { diff --git a/src/utils/text.js b/src/utils/text.js index d06221a..c075d50 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -11,4 +11,19 @@ export function stringifyTurns(turns) { } } return res.trim(); +} + +export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { + let messages = turns; + if (system) messages.unshift({role: 'system', content: system}); + let prompt = ""; + let role = ""; + messages.forEach((message) => { + role = message.role; + if (role === 'assistant') role = model_nickname; + prompt += `${role}: ${message.content}${stop_seq}`; + }); + if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. 
otherwise, pretend we are extending the model's own message + prompt += model_nickname + ": "; + return prompt; } \ No newline at end of file From 017aaa2779898f8b9027719f1cdef00cc4e19331 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 10 May 2024 13:41:58 -0500 Subject: [PATCH 41/78] clean up, added replicate --- README.md | 50 ++++++++++++++++++++++++++++---------------------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 7d6cf78..3566f6a 100644 --- a/README.md +++ b/README.md @@ -8,16 +8,21 @@ This project allows an AI model to write/execute code on your computer that may ## Requirements -- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), or [Ollama Installed](https://ollama.com/download) +- [OpenAI API Subscription](https://openai.com/blog/openai-api), [Gemini API Subscription](https://aistudio.google.com/app/apikey), [Anthropic API Subscription](https://docs.anthropic.com/claude/docs/getting-access-to-claude), [Replicate API Subscription](https://replicate.com/) or [Ollama Installed](https://ollama.com/download) - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) - [Node.js](https://nodejs.org/) (at least v14) ## Installation -Add one of these environment variables: - - `OPENAI_API_KEY` (and optionally `OPENAI_ORG_ID`) - - `GEMINI_API_KEY` - - `ANTHROPIC_API_KEY` (and optionally `OPENAI_API_KEY` for embeddings. not necessary, but without embeddings performance will suffer) +Add the environment variable for the model you want to use: + +| API | Env Variable | Example Model name | Docs | +|------|------|------|------| +| OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) | (optionally add `OPENAI_ORG_ID`) +| Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | +| Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | +| Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | ⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐ @@ -30,7 +35,7 @@ Run `npm install` Install the minecraft version specified in `settings.json`, currently supports up to 1.20.4 -## Running Locally +### Running Locally Start a minecraft world and open it to LAN on localhost port `55916` @@ -40,13 +45,27 @@ You can configure the agent's name, model, and prompts in their profile like `an You can configure project details in `settings.json`. -## Bot Profiles +### Online Servers +To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. Here is an example settings for this: +``` +{ + "minecraft_version": "1.20.4", + "host": "111.222.333.444", + "port": 55920, + "auth": "microsoft", + "allow_insecure_coding": false +} +``` +‼️Make sure your bot's name in the profile.json matches the account name! Otherwise the bot will spam talk to itself. 
-Bot profiles are json files (such as `andy.json`) that define a bot's behavior in three ways: +### Bot Profiles + +Bot profiles are json files (such as `andy.json`) that define: 1. Bot backend LLMs to use for chat and embeddings. 2. Prompts used to influence the bot's behavior. -3. Examples retrieved and provided to the bot to help it better perform tasks. +3. Examples help the bot perform tasks. + ### Model Specifications @@ -84,19 +103,6 @@ Thus, all the below specifications are equivalent to the above example: "embedding": "openai" ``` -## Online Servers -To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. Here is an example settings for this: -``` -{ - "minecraft_version": "1.20.4", - "host": "111.222.333.444", - "port": 55920, - "auth": "microsoft", - "allow_insecure_coding": false -} -``` -‼️Make sure your bot's name in the profile.json matches the account name! Otherwise the bot will spam talk to itself. - ## Patches Some of the node modules that we depend on have bugs in them. To add a patch, change your local node module file and run `npx patch-package [package-name]` From 3ac742395327b01d19028794fb5fbc3c174d91d0 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 11 May 2024 12:31:43 -0500 Subject: [PATCH 42/78] sneak in patch for creative mode pathfinding --- patches/mineflayer-pathfinder+2.4.5.patch | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 patches/mineflayer-pathfinder+2.4.5.patch diff --git a/patches/mineflayer-pathfinder+2.4.5.patch b/patches/mineflayer-pathfinder+2.4.5.patch new file mode 100644 index 0000000..8c6765e --- /dev/null +++ b/patches/mineflayer-pathfinder+2.4.5.patch @@ -0,0 +1,17 @@ +diff --git a/node_modules/mineflayer-pathfinder/lib/movements.js b/node_modules/mineflayer-pathfinder/lib/movements.js +index a7e3505..77e428f 100644 +--- a/node_modules/mineflayer-pathfinder/lib/movements.js ++++ b/node_modules/mineflayer-pathfinder/lib/movements.js +@@ -143,7 +143,11 @@ class Movements { + for (const id of this.scafoldingBlocks) { + for (const j in items) { + const item = items[j] +- if (item.type === id) count += item.count ++ if (item.type === id) { ++ count += item.count ++ if (this.bot.game.gameMode === 'creative') ++ count = 1000 ++ } + } + } + return count From dc520a9ea10bbe95a91b98188bcc467ec707b278 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 12 May 2024 22:46:33 -0500 Subject: [PATCH 43/78] fix unloaded blocks in mode check --- src/agent/modes.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index ba170fa..aed2fc4 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -26,13 +26,15 @@ const modes = [ const bot = agent.bot; const block = bot.blockAt(bot.entity.position); const blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); - if (blockAbove && (blockAbove.name === 'water' || blockAbove.name === 'flowing_water')) { + if (!block) block = {name: 'air'}; // hacky fix when blocks are not loaded + if (!blockAbove) blockAbove = {name: 'air'}; + if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { // does not call execute so does not interrupt other actions if (!bot.pathfinder.goal) { bot.setControlState('jump', true); } } - else if (blockAbove && this.fall_blocks.some(name => blockAbove.name.includes(name))) { + else if (this.fall_blocks.some(name => 
blockAbove.name.includes(name))) { execute(this, agent, async () => { await skills.moveAway(bot, 2); }); From c579366e7a5b1f663c66d9eef3ff78dd0816f265 Mon Sep 17 00:00:00 2001 From: Kolby Nottingham Date: Mon, 13 May 2024 11:47:35 -0700 Subject: [PATCH 44/78] change npc defaults --- src/agent/npc/controller.js | 5 ++++- src/agent/npc/data.js | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index 84770cd..d65107c 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -80,12 +80,15 @@ export class NPCContoller { } async setGoal(name=null, quantity=1) { + this.data.curr_goal = null; this.last_goals = {}; if (name) { this.data.curr_goal = {name: name, quantity: quantity}; return; } - + + if (!this.data.do_set_goal) return; + let past_goals = {...this.last_goals}; for (let goal in this.data.goals) { if (past_goals[goal.name] === undefined) past_goals[goal.name] = true; diff --git a/src/agent/npc/data.js b/src/agent/npc/data.js index 6d12f0a..b5de0eb 100644 --- a/src/agent/npc/data.js +++ b/src/agent/npc/data.js @@ -4,8 +4,8 @@ export class NPCData { this.curr_goal = null; this.built = {}; this.home = null; - this.do_routine = true; - this.do_set_goal = true; + this.do_routine = false; + this.do_set_goal = false; } toObject() { From 5c6de46882b9f20808313cc1f3f736c6421bb5e2 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 21:05:09 -0500 Subject: [PATCH 45/78] added pathfinder patch --- patches/mineflayer-pathfinder+2.4.5.patch | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 patches/mineflayer-pathfinder+2.4.5.patch diff --git a/patches/mineflayer-pathfinder+2.4.5.patch b/patches/mineflayer-pathfinder+2.4.5.patch new file mode 100644 index 0000000..2b268d5 --- /dev/null +++ b/patches/mineflayer-pathfinder+2.4.5.patch @@ -0,0 +1,17 @@ +diff --git a/node_modules/mineflayer-pathfinder/lib/movements.js b/node_modules/mineflayer-pathfinder/lib/movements.js +index a7e3505..77e428f 100644 +--- a/node_modules/mineflayer-pathfinder/lib/movements.js ++++ b/node_modules/mineflayer-pathfinder/lib/movements.js +@@ -143,7 +143,11 @@ class Movements { + for (const id of this.scafoldingBlocks) { + for (const j in items) { + const item = items[j] +- if (item.type === id) count += item.count ++ if (item.type === id) { ++ count += item.count ++ if (this.bot.game.gameMode === 'creative') ++ count = 1000 ++ } + } + } + return count \ No newline at end of file From b4a60cb11a83772eeaaffa54c8f913f24b3dfe03 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 21:05:21 -0500 Subject: [PATCH 46/78] fix unloaded blocks crash --- src/agent/modes.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index ba170fa..aed2fc4 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -26,13 +26,15 @@ const modes = [ const bot = agent.bot; const block = bot.blockAt(bot.entity.position); const blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); - if (blockAbove && (blockAbove.name === 'water' || blockAbove.name === 'flowing_water')) { + if (!block) block = {name: 'air'}; // hacky fix when blocks are not loaded + if (!blockAbove) blockAbove = {name: 'air'}; + if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { // does not call execute so does not interrupt other actions if (!bot.pathfinder.goal) { bot.setControlState('jump', true); } } - else if (blockAbove && 
this.fall_blocks.some(name => blockAbove.name.includes(name))) { + else if (this.fall_blocks.some(name => blockAbove.name.includes(name))) { execute(this, agent, async () => { await skills.moveAway(bot, 2); }); From 537b1da2bc8da143c8eeaa07d01ef6fab02b0844 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 21:05:39 -0500 Subject: [PATCH 47/78] add creative msg to stats/inventory --- src/agent/commands/queries.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index 99f5fa9..0669d36 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -17,10 +17,11 @@ export const queryList = [ let pos = bot.entity.position; // display position to 2 decimal places res += `\n- Position: x: ${pos.x.toFixed(2)}, y: ${pos.y.toFixed(2)}, z: ${pos.z.toFixed(2)}`; + res += `\n- Gamemode: ${bot.game.gameMode}`; res += `\n- Health: ${Math.round(bot.health)} / 20`; res += `\n- Hunger: ${Math.round(bot.food)} / 20`; res += `\n- Biome: ${world.getBiomeName(bot)}`; - let weather = "clear"; + let weather = "Clear"; if (bot.rainState > 0) weather = "Rain"; if (bot.thunderState > 0) @@ -60,6 +61,9 @@ export const queryList = [ if (res == 'INVENTORY') { res += ': none'; } + if (agent.bot.game.gameMode === 'creative') { + res += '\n(You have infinite items in creative mode)'; + } return pad(res); } }, From cb253f03b69531ee29cef6de4449dd06fa5ebde1 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 21:24:41 -0500 Subject: [PATCH 48/78] don't check tools for digging in creative --- src/agent/library/skills.js | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 4889357..56f31f1 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -463,11 +463,13 @@ export async function breakBlockAt(bot, x, y, z) { bot.pathfinder.setMovements(movements); await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); } - await bot.tool.equipForBlock(block); - const itemId = bot.heldItem ? bot.heldItem.type : null - if (!block.canHarvest(itemId)) { - log(bot, `Don't have right tools to break ${block.name}.`); - return false; + if (bot.gameMode !== 'creative') { + await bot.tool.equipForBlock(block); + const itemId = bot.heldItem ? bot.heldItem.type : null + if (!block.canHarvest(itemId)) { + log(bot, `Don't have right tools to break ${block.name}.`); + return false; + } } await bot.dig(block, true); log(bot, `Broke ${block.name} at x:${x.toFixed(1)}, y:${y.toFixed(1)}, z:${z.toFixed(1)}.`); From a9b64950ca683caff588719cbd63e1144e0a60a7 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 21:34:14 -0500 Subject: [PATCH 49/78] init prompt as system --- src/utils/text.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/utils/text.js b/src/utils/text.js index c075d50..b0a273a 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -14,11 +14,9 @@ export function stringifyTurns(turns) { } export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { - let messages = turns; - if (system) messages.unshift({role: 'system', content: system}); - let prompt = ""; - let role = ""; - messages.forEach((message) => { + let prompt = system ? 
`${system}${stop_seq}` : ''; + let role = ''; + turns.forEach((message) => { role = message.role; if (role === 'assistant') role = model_nickname; prompt += `${role}: ${message.content}${stop_seq}`; From 988c541c8742da84b5dd1fe5a37a8b703b619fa8 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 14 May 2024 22:57:14 -0500 Subject: [PATCH 50/78] kolby suggestion --- src/agent/commands/queries.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index 0669d36..aa60b0b 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -58,10 +58,10 @@ export const queryList = [ if (inventory[item] && inventory[item] > 0) res += `\n- ${item}: ${inventory[item]}`; } - if (res == 'INVENTORY') { + if (res === 'INVENTORY') { res += ': none'; } - if (agent.bot.game.gameMode === 'creative') { + else if (agent.bot.game.gameMode === 'creative') { res += '\n(You have infinite items in creative mode)'; } return pad(res); From e0177badc5d5cf69db4772ad5fe20b265bf8efd9 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 15 May 2024 10:19:24 -0500 Subject: [PATCH 51/78] improved replicate params --- src/models/replicate.js | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/src/models/replicate.js b/src/models/replicate.js index 4301448..ea5a4a9 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -22,17 +22,10 @@ export class ReplicateAPI { async sendRequest(turns, systemMessage) { const stop_seq = '***'; - let prompt_template; - const prompt = toSinglePrompt(turns, systemMessage, stop_seq); + const prompt = toSinglePrompt(turns, null, stop_seq); let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; - if (model_name.includes('llama')) { // llama - prompt_template = "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n" - } - else { // mistral - prompt_template = "[INST] {prompt} [/INST] " - } - const input = { prompt, prompt_template }; + const input = { prompt, system_prompt: systemMessage }; let res = null; try { console.log('Awaiting Replicate API response...'); From b45f49aefdca2ef21b13750572a2d688c8f6660f Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 18 May 2024 12:06:50 -0500 Subject: [PATCH 52/78] check creative --- src/agent/library/skills.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 56f31f1..cd9f5b3 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -463,7 +463,7 @@ export async function breakBlockAt(bot, x, y, z) { bot.pathfinder.setMovements(movements); await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); } - if (bot.gameMode !== 'creative') { + if (bot.game.gameMode !== 'creative') { await bot.tool.equipForBlock(block); const itemId = bot.heldItem ? 
bot.heldItem.type : null if (!block.canHarvest(itemId)) { From 003e35b43af2f2a4b6867a832648af06147277c3 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 20 May 2024 00:23:19 -0500 Subject: [PATCH 53/78] buncha stuff --- main.js | 7 +- profiles/claude.json | 156 +++++++++++++++++++++++++++++++++++++++++++ profiles/gemini.json | 156 +++++++++++++++++++++++++++++++++++++++++++ profiles/gpt.json | 156 +++++++++++++++++++++++++++++++++++++++++++ profiles/llama.json | 156 +++++++++++++++++++++++++++++++++++++++++++ settings.json | 2 +- src/agent/agent.js | 2 +- src/agent/coder.js | 2 +- src/agent/modes.js | 10 +-- 9 files changed, 637 insertions(+), 10 deletions(-) create mode 100644 profiles/claude.json create mode 100644 profiles/gemini.json create mode 100644 profiles/gpt.json create mode 100644 profiles/llama.json diff --git a/main.js b/main.js index 76b223c..fca4d42 100644 --- a/main.js +++ b/main.js @@ -1,7 +1,10 @@ import { AgentProcess } from './src/process/agent-process.js'; -let profile = './andy.json'; +let profiles = ['./profiles/gpt.json', './profiles/claude.json', './profiles/llama.json', './profiles/gemini.json']; + +profiles = ['./profiles/llama.json']; let load_memory = false; let init_message = 'Say hello world and your name.'; -new AgentProcess().start(profile, load_memory, init_message); \ No newline at end of file +for (let profile of profiles) + new AgentProcess().start(profile, load_memory, init_message); \ No newline at end of file diff --git a/profiles/claude.json b/profiles/claude.json new file mode 100644 index 0000000..d6db2a3 --- /dev/null +++ b/profiles/claude.json @@ -0,0 +1,156 @@ +{ + "name": "claude", + + "model": "claude-3-opus-20240229", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. Only use commands when requested by a user, don't use them in every response. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! 
DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some sand and give it to me please"}, + {"role": "assistant", "content": "Collecting sand... !newAction"}, + {"role": "system", "content": "Code Output:\nYou have reached player hanky.\nCode execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! 
!attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "!newAction`"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Would you like me to try again?"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file diff --git a/profiles/gemini.json b/profiles/gemini.json new file mode 100644 index 0000000..0fe605c --- /dev/null +++ b/profiles/gemini.json @@ -0,0 +1,156 @@ +{ + "name": "gemini", + + "model": "gemini-pro", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. 
This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some sand and give it to me please"}, + {"role": "assistant", "content": "Collecting sand... !newAction"}, + {"role": "system", "content": "Code Output:\nYou have reached player hanky.\nCode execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! 
!attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "!newAction`"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Would you like me to try again?"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file diff --git a/profiles/gpt.json b/profiles/gpt.json new file mode 100644 index 0000000..ffb7363 --- /dev/null +++ b/profiles/gpt.json @@ -0,0 +1,156 @@ +{ + "name": "gpt", + + "model": "gpt-4o", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` If you write a wrapper async function, make sure you call `await` like this: ```async function build(){\n//stuff...\n}\nawait build()```. 
Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some sand and give it to me please"}, + {"role": "assistant", "content": "Collecting sand... !newAction"}, + {"role": "system", "content": "Code Output:\nYou have reached player hanky.\nCode execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! 
!attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "!newAction`"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Would you like me to try again?"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file diff --git a/profiles/llama.json b/profiles/llama.json new file mode 100644 index 0000000..0b7c2bb --- /dev/null +++ b/profiles/llama.json @@ -0,0 +1,156 @@ +{ + "name": "LLama", + + "model": "meta/meta-llama-3-70b-instruct", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. Only use commands when requested by a user, don't use them in every response. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! Use double-quotes for strings, not singles. Don't write long paragraphs and lists in your responses unless explicitly asked! 
Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some sand and give it to me please"}, + {"role": "assistant", "content": "Collecting sand... !newAction"}, + {"role": "system", "content": "Code Output:\nYou have reached player hanky.\nCode execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! 
!attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "!newAction`"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Would you like me to try again?"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, \"oak_log\", 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, \"chicken\", 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, \"zZZn98\");\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, \"oak_log\", 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, \"dirt\", pos.x, pos.y + i, pos.z);\n}\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, \"oak_planks\", x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} \ No newline at end of file diff --git a/settings.json b/settings.json index 4ee3b10..b6dfda7 100644 --- a/settings.json +++ b/settings.json @@ -3,5 +3,5 @@ "host": "127.0.0.1", "port": 55916, "auth": "offline", - "allow_insecure_coding": false + "allow_insecure_coding": true } \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index fe44a25..855ed35 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -42,7 +42,7 @@ export class Agent { "Set the weather to", "Gamerule " ]; - this.bot.on('chat', (username, message) => { + this.bot.on('whisper', (username, message) => { if (username === this.name) return; if (ignore_messages.some((m) => message.startsWith(m))) return; diff --git a/src/agent/coder.js b/src/agent/coder.js index 7ba66e3..b9a6f3c 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -137,7 +137,7 @@ export class Coder { return {success: false, message: null, interrupted: false, timedout: false}; } code_return = await this.execute(async ()=>{ - return await execution_file.main(this.agent.bot); + return await execution_file.main(this.agent.bot, -1); }); if (code_return.interrupted && !code_return.timedout) diff --git a/src/agent/modes.js b/src/agent/modes.js index aed2fc4..f7e117f 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -19,7 +19,7 @@ const modes = [ name: 'self_preservation', description: 'Respond to drowning, burning, and damage at low health. Interrupts other actions.', interrupts: ['all'], - on: true, + on: false, active: false, fall_blocks: ['sand', 'gravel', 'concrete_powder'], // includes matching substrings like 'sandstone' and 'red_sand' update: async function (agent) { @@ -69,7 +69,7 @@ const modes = [ name: 'cowardice', description: 'Run away from enemies. Interrupts other actions.', interrupts: ['all'], - on: true, + on: false, active: false, update: async function (agent) { const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); @@ -85,7 +85,7 @@ const modes = [ name: 'self_defense', description: 'Attack nearby enemies. 
Interrupts other actions.', interrupts: ['all'], - on: true, + on: false, active: false, update: async function (agent) { const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); @@ -101,7 +101,7 @@ const modes = [ name: 'hunting', description: 'Hunt nearby animals when idle.', interrupts: ['defaults'], - on: true, + on: false, active: false, update: async function (agent) { const huntable = world.getNearestEntityWhere(agent.bot, entity => mc.isHuntable(entity), 8); @@ -147,7 +147,7 @@ const modes = [ name: 'torch_placing', description: 'Place torches when idle and there are no torches nearby.', interrupts: ['followPlayer'], - on: true, + on: false, active: false, update: function (agent) { // TODO: check light level instead of nearby torches, block.light is broken From 5454252dde206369b3dcf42f8a496bcc14c29b9f Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 20 May 2024 00:52:08 -0500 Subject: [PATCH 54/78] save memory before restart --- src/agent/agent.js | 11 +++++++++-- src/agent/coder.js | 17 +++-------------- src/agent/commands/actions.js | 2 +- src/process/agent-process.js | 2 +- 4 files changed, 14 insertions(+), 18 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index fe44a25..4581166 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -170,7 +170,7 @@ export class Agent { }); this.bot.on('end', (reason) => { console.warn('Bot disconnected! Killing agent process.', reason) - process.exit(1); + this.cleanKill('Bot disconnected! Killing agent process.'); }); this.bot.on('death', () => { this.coder.cancelResume(); @@ -178,7 +178,7 @@ export class Agent { }); this.bot.on('kicked', (reason) => { console.warn('Bot kicked!', reason); - process.exit(1); + this.cleanKill('Bot kicked! Killing agent process.'); }); this.bot.on('messagestr', async (message, _, jsonMsg) => { if (jsonMsg.translate && jsonMsg.translate.startsWith('death') && message.startsWith(this.name)) { @@ -215,4 +215,11 @@ export class Agent { isIdle() { return !this.coder.executing && !this.coder.generating; } + + cleanKill(msg='Killing agent process...') { + this.history.add('system', msg); + this.bot.chat('Goodbye world.') + this.history.save(); + process.exit(1); + } } diff --git a/src/agent/coder.js b/src/agent/coder.js index 7ba66e3..c26d4d0 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -240,7 +240,7 @@ export class Coder { console.log('waiting for code to finish executing...'); await new Promise(resolve => setTimeout(resolve, 1000)); if (Date.now() - start > 10 * 1000) { - process.exit(1); // force exit program after 10 seconds of failing to stop + this.agent.cleanKill('Code execution refused stop after 10 seconds. Killing process.'); } } } @@ -255,19 +255,8 @@ export class Coder { return setTimeout(async () => { console.warn(`Code execution timed out after ${TIMEOUT_MINS} minutes. Attempting force stop.`); this.timedout = true; - this.agent.bot.output += `\nAction performed for ${TIMEOUT_MINS} minutes and then timed out and stopped. You may want to continue or do something else.`; - this.stop(); // last attempt to stop - await new Promise(resolve => setTimeout(resolve, 5 * 1000)); // wait 5 seconds - if (this.executing) { - console.error(`Failed to stop. Killing process. Goodbye.`); - this.agent.bot.output += `\nForce stop failed! Process was killed and will be restarted. 
Goodbye world.`; - this.agent.bot.chat('Goodbye world.'); - let output = this.formatOutput(this.agent.bot); - this.agent.history.add('system', output); - this.agent.history.save(); - process.exit(1); // force exit program - } - console.log('Code execution stopped successfully.'); + this.agent.history.add('system', `Code execution timed out after ${TIMEOUT_MINS} minutes. Attempting force stop.`); + await this.stop(); // last attempt to stop }, TIMEOUT_MINS*60*1000); } } \ No newline at end of file diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 12ccacb..53cfd6b 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -45,7 +45,7 @@ export const actionsList = [ description: 'Restart the agent process.', perform: async function (agent) { await agent.history.save(); - process.exit(1); + agent.cleanKill(); } }, { diff --git a/src/process/agent-process.js b/src/process/agent-process.js index 8d8383d..21b6c2c 100644 --- a/src/process/agent-process.js +++ b/src/process/agent-process.js @@ -25,7 +25,7 @@ export class AgentProcess { process.exit(1); } console.log('Restarting agent...'); - this.start(profile, true, 'Agent process restarted. Notify the user and decide what to do.'); + this.start(profile, true, 'Agent process restarted.'); last_restart = Date.now(); } }); From 8b4ea79b9a57e08a48e79dcd1a781cac335f7fae Mon Sep 17 00:00:00 2001 From: Sam Kemp Date: Mon, 27 May 2024 12:36:29 +0100 Subject: [PATCH 55/78] Switched to config.json instead of environment variable --- .gitignore | 3 ++- config.example.json | 7 +++++++ src/models/claude.js | 7 ++++--- src/models/gemini.js | 7 ++++--- src/models/gpt.js | 12 ++++++------ src/models/replicate.js | 7 ++++--- 6 files changed, 27 insertions(+), 16 deletions(-) create mode 100644 config.example.json diff --git a/.gitignore b/.gitignore index 61f81fd..6e6fd2d 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ node_modules/ package-lock.json scratch.js bots/**/action-code/** -bots/**/ \ No newline at end of file +bots/**/ +config.json \ No newline at end of file diff --git a/config.example.json b/config.example.json new file mode 100644 index 0000000..8e4037c --- /dev/null +++ b/config.example.json @@ -0,0 +1,7 @@ +{ + "OPENAI_API_KEY": "", + "OPENAI_ORG_ID": "", + "GEMINI_API_KEY": "", + "ANTHROPIC_API_KEY": "", + "REPLICATE_API_KEY": "" +} \ No newline at end of file diff --git a/src/models/claude.js b/src/models/claude.js index e189a38..754cd7a 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -1,4 +1,5 @@ import Anthropic from '@anthropic-ai/sdk'; +import configJson from "../../config.json" assert { type: "json" }; export class Claude { @@ -8,10 +9,10 @@ export class Claude { let config = {}; if (url) config.baseURL = url; - if (process.env.ANTHROPIC_API_KEY) - config.apiKey = process.env["ANTHROPIC_API_KEY"]; + if (configJson.ANTHROPIC_API_KEY) + config.apiKey = configJson.ANTHROPIC_API_KEY; else - throw new Error('Anthropic API key missing! Make sure you set your ANTHROPIC_API_KEY environment variable.'); + throw new Error('Anthropic API key missing! 
Make sure you set your ANTHROPIC_API_KEY in your config.json.'); this.anthropic = new Anthropic(config); } diff --git a/src/models/gemini.js b/src/models/gemini.js index 61f4f1a..b0d315b 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,15 +1,16 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; import { toSinglePrompt } from '../utils/text.js'; +import configJson from "../../config.json" assert { type: "json" }; export class Gemini { constructor(model_name, url) { this.model_name = model_name; this.url = url; - if (!process.env.GEMINI_API_KEY) { - throw new Error('Gemini API key missing! Make sure you set your GEMINI_API_KEY environment variable.'); + if (!configJson.GEMINI_API_KEY) { + throw new Error('Gemini API key missing! Make sure you set your GEMINI_API_KEY in your config.json.'); } - this.genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY); + this.genAI = new GoogleGenerativeAI(configJson.GEMINI_API_KEY); } async sendRequest(turns, systemMessage) { diff --git a/src/models/gpt.js b/src/models/gpt.js index 0889c31..9f674b9 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,5 +1,5 @@ import OpenAIApi from 'openai'; - +import configJson from "../../config.json" assert { type: "json" }; export class GPT { constructor(model_name, url) { @@ -8,12 +8,12 @@ export class GPT { let config = {}; if (url) config.baseURL = url; - if (process.env.OPENAI_ORG_ID) - config.organization = process.env.OPENAI_ORG_ID - if (process.env.OPENAI_API_KEY) - config.apiKey = process.env.OPENAI_API_KEY + if (configJson.OPENAI_ORG_ID) + config.organization = configJson.OPENAI_ORG_ID; + if (configJson.OPENAI_API_KEY) + config.apiKey = configJson.OPENAI_API_KEY; else - throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY environment variable.'); + throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY in your config.json.'); this.openai = new OpenAIApi(config); } diff --git a/src/models/replicate.js b/src/models/replicate.js index ea5a4a9..1059f0f 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -1,5 +1,6 @@ import Replicate from 'replicate'; import { toSinglePrompt } from '../utils/text.js'; +import configJson from "../../config.json" assert { type: "json" }; // llama, mistral export class ReplicateAPI { @@ -11,12 +12,12 @@ export class ReplicateAPI { console.warn('Replicate API does not support custom URLs. Ignoring provided URL.'); } - if (!process.env.REPLICATE_API_KEY) { - throw new Error('Replicate API key missing! Make sure you set your REPLICATE_API_KEY environment variable.'); + if (!configJson.REPLICATE_API_KEY) { + throw new Error('Replicate API key missing! 
Make sure you set your REPLICATE_API_KEY in your config.json.'); } this.replicate = new Replicate({ - auth: process.env.REPLICATE_API_KEY, + auth: configJson.REPLICATE_API_KEY, }); } From b17f4806a30b95799b31e245cccca04ed41b2dcc Mon Sep 17 00:00:00 2001 From: Sam Kemp Date: Mon, 27 May 2024 12:47:12 +0100 Subject: [PATCH 56/78] Updated README.md --- README.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 3566f6a..6dd9c53 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,8 @@ This project allows an AI model to write/execute code on your computer that may ## Installation -Add the environment variable for the model you want to use: - -| API | Env Variable | Example Model name | Docs | +Rename `config.example.json` to `config.json` and fill in the desired API keys +| API | Config Variable | Example Model name | Docs | |------|------|------|------| | OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) | (optionally add `OPENAI_ORG_ID`) | Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | @@ -24,8 +23,6 @@ Add the environment variable for the model you want to use: | Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | | Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | -⭐[How do I add the API key as an environment variable?](https://phoenixnap.com/kb/windows-set-environment-variable)⭐ - If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: `ollama pull llama3 && ollama pull nomic-embed-text` From 0e251a758840141d276bb0956e8d7852970d0cff Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 21:33:29 -0500 Subject: [PATCH 57/78] moved main params to settings --- main.js | 9 ++++----- settings.json | 11 ++++++++++- src/agent/agent.js | 4 +++- src/agent/coder.js | 4 ++-- 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/main.js b/main.js index fca4d42..8bdf550 100644 --- a/main.js +++ b/main.js @@ -1,10 +1,9 @@ import { AgentProcess } from './src/process/agent-process.js'; +import settings from './src/settings.js'; -let profiles = ['./profiles/gpt.json', './profiles/claude.json', './profiles/llama.json', './profiles/gemini.json']; - -profiles = ['./profiles/llama.json']; -let load_memory = false; -let init_message = 'Say hello world and your name.'; +let profiles = settings.profiles; +let load_memory = settings.load_memory; +let init_message = settings.init_message; for (let profile of profiles) new AgentProcess().start(profile, load_memory, init_message); \ No newline at end of file diff --git a/settings.json b/settings.json index b6dfda7..cb5ce27 100644 --- a/settings.json +++ b/settings.json @@ -3,5 +3,14 @@ "host": "127.0.0.1", "port": 55916, "auth": "offline", - "allow_insecure_coding": true + "allow_insecure_coding": true, + "code_timeout_mins": 10, + + "profiles": [ + "./profiles/gpt.json", + "./profiles/llama.json" + ], + "load_memory": false, + "init_message": "Say hello world and your name" + } \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 9b4e986..478e825 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -6,6 +6,7 @@ import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage } from './commands/index.js'; import { NPCContoller } 
from './npc/controller.js'; import { MemoryBank } from './memory_bank.js'; +import settings from '../settings.js'; export class Agent { @@ -42,7 +43,8 @@ export class Agent { "Set the weather to", "Gamerule " ]; - this.bot.on('whisper', (username, message) => { + const eventname = settings.profiles.length > 1 ? 'whisper' : 'chat'; + this.bot.on(eventname, (username, message) => { if (username === this.name) return; if (ignore_messages.some((m) => message.startsWith(m))) return; diff --git a/src/agent/coder.js b/src/agent/coder.js index e5a0447..47a7ae8 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -137,8 +137,8 @@ export class Coder { return {success: false, message: null, interrupted: false, timedout: false}; } code_return = await this.execute(async ()=>{ - return await execution_file.main(this.agent.bot, -1); - }); + return await execution_file.main(this.agent.bot); + }, settings.code_timeout_mins); if (code_return.interrupted && !code_return.timedout) return {success: false, message: null, interrupted: true, timedout: false}; From 6915cc15ac19c277d087dc694fd9c98985ef9be8 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 21:35:47 -0500 Subject: [PATCH 58/78] reset modes --- src/agent/modes.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index f7e117f..aed2fc4 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -19,7 +19,7 @@ const modes = [ name: 'self_preservation', description: 'Respond to drowning, burning, and damage at low health. Interrupts other actions.', interrupts: ['all'], - on: false, + on: true, active: false, fall_blocks: ['sand', 'gravel', 'concrete_powder'], // includes matching substrings like 'sandstone' and 'red_sand' update: async function (agent) { @@ -69,7 +69,7 @@ const modes = [ name: 'cowardice', description: 'Run away from enemies. Interrupts other actions.', interrupts: ['all'], - on: false, + on: true, active: false, update: async function (agent) { const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); @@ -85,7 +85,7 @@ const modes = [ name: 'self_defense', description: 'Attack nearby enemies. 
Interrupts other actions.', interrupts: ['all'], - on: false, + on: true, active: false, update: async function (agent) { const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); @@ -101,7 +101,7 @@ const modes = [ name: 'hunting', description: 'Hunt nearby animals when idle.', interrupts: ['defaults'], - on: false, + on: true, active: false, update: async function (agent) { const huntable = world.getNearestEntityWhere(agent.bot, entity => mc.isHuntable(entity), 8); @@ -147,7 +147,7 @@ const modes = [ name: 'torch_placing', description: 'Place torches when idle and there are no torches nearby.', interrupts: ['followPlayer'], - on: false, + on: true, active: false, update: function (agent) { // TODO: check light level instead of nearby torches, block.light is broken From dd89b25277232d45fc2fa770fefa00fa0bb55361 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 21:39:02 -0500 Subject: [PATCH 59/78] moved old profiles --- andy_npc.json => profiles/andy_npc.json | 0 pollux.json => profiles/pollux.json | 0 radley.json => profiles/radley.json | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename andy_npc.json => profiles/andy_npc.json (100%) rename pollux.json => profiles/pollux.json (100%) rename radley.json => profiles/radley.json (100%) diff --git a/andy_npc.json b/profiles/andy_npc.json similarity index 100% rename from andy_npc.json rename to profiles/andy_npc.json diff --git a/pollux.json b/profiles/pollux.json similarity index 100% rename from pollux.json rename to profiles/pollux.json diff --git a/radley.json b/profiles/radley.json similarity index 100% rename from radley.json rename to profiles/radley.json From 449e3da22165da94e6292a3dd61d7dd4f5f74f37 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 21:49:45 -0500 Subject: [PATCH 60/78] settings.json to json object --- main.js | 2 +- settings.js | 17 +++++++++++++++++ settings.json | 16 ---------------- src/agent/agent.js | 2 +- src/agent/commands/actions.js | 2 +- src/settings.js | 2 -- src/utils/mcdata.js | 2 +- 7 files changed, 21 insertions(+), 22 deletions(-) create mode 100644 settings.js delete mode 100644 settings.json delete mode 100644 src/settings.js diff --git a/main.js b/main.js index 8bdf550..3292f7e 100644 --- a/main.js +++ b/main.js @@ -1,5 +1,5 @@ import { AgentProcess } from './src/process/agent-process.js'; -import settings from './src/settings.js'; +import settings from './settings.js'; let profiles = settings.profiles; let load_memory = settings.load_memory; diff --git a/settings.js b/settings.js new file mode 100644 index 0000000..9f57d62 --- /dev/null +++ b/settings.js @@ -0,0 +1,17 @@ +export default +{ + "minecraft_version": "1.20.4", // supports up to 1.20.4 + "host": "127.0.0.1", // or "localhost", "your.ip.address.here" + "port": 55916, + "auth": "offline", // or "microsoft" + + "profiles": [ + "./andy.json" + // add more profiles here, check ./profiles/ for more + // more than 1 profile will require you to /msg each bot indivually + ], + "load_memory": false, // load memory from previous session + "init_message": "Say hello world and your name", // sends to all on spawn + "allow_insecure_coding": false, // disable at own risk + "code_timeout_mins": 10, // -1 for no timeout +} \ No newline at end of file diff --git a/settings.json b/settings.json deleted file mode 100644 index cb5ce27..0000000 --- a/settings.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "minecraft_version": "1.20.4", - "host": "127.0.0.1", - "port": 55916, - 
"auth": "offline", - "allow_insecure_coding": true, - "code_timeout_mins": 10, - - "profiles": [ - "./profiles/gpt.json", - "./profiles/llama.json" - ], - "load_memory": false, - "init_message": "Say hello world and your name" - -} \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 478e825..deed4f5 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -6,7 +6,7 @@ import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage } from './commands/index.js'; import { NPCContoller } from './npc/controller.js'; import { MemoryBank } from './memory_bank.js'; -import settings from '../settings.js'; +import settings from '../../settings.js'; export class Agent { diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 53cfd6b..b246dea 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -1,5 +1,5 @@ import * as skills from '../library/skills.js'; -import settings from '../../settings.js'; +import settings from '../../../settings.js'; function wrapExecution(func, timeout=-1, resume_name=null) { return async function (agent, ...args) { diff --git a/src/settings.js b/src/settings.js deleted file mode 100644 index 8da1723..0000000 --- a/src/settings.js +++ /dev/null @@ -1,2 +0,0 @@ -import { readFileSync } from 'fs'; -export default JSON.parse(readFileSync('./settings.json', 'utf8')); \ No newline at end of file diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index 384a211..ad83963 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -1,5 +1,5 @@ import minecraftData from 'minecraft-data'; -import settings from '../settings.js'; +import settings from '../../settings.js'; import { createBot } from 'mineflayer'; import { pathfinder } from 'mineflayer-pathfinder'; import { plugin as pvp } from 'mineflayer-pvp'; From 1fa2d0d2677cc795f742a23c4a5799ad67eaf149 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 21:54:47 -0500 Subject: [PATCH 61/78] sneak fix --- settings.js | 2 +- src/agent/modes.js | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/settings.js b/settings.js index 9f57d62..354eb13 100644 --- a/settings.js +++ b/settings.js @@ -6,7 +6,7 @@ export default "auth": "offline", // or "microsoft" "profiles": [ - "./andy.json" + "./andy.json", // add more profiles here, check ./profiles/ for more // more than 1 profile will require you to /msg each bot indivually ], diff --git a/src/agent/modes.js b/src/agent/modes.js index aed2fc4..9a32c0e 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -24,8 +24,8 @@ const modes = [ fall_blocks: ['sand', 'gravel', 'concrete_powder'], // includes matching substrings like 'sandstone' and 'red_sand' update: async function (agent) { const bot = agent.bot; - const block = bot.blockAt(bot.entity.position); - const blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); + let block = bot.blockAt(bot.entity.position); + let blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); if (!block) block = {name: 'air'}; // hacky fix when blocks are not loaded if (!blockAbove) blockAbove = {name: 'air'}; if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { From 505d11b07dfeac6000455a94ed1dc3e3aa58469d Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 22:04:30 -0500 Subject: [PATCH 62/78] updated readme --- README.md | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff 
--git a/README.md b/README.md index 3566f6a..fc74ed0 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Crafting minds for Minecraft with Language Models and Mineflayer! #### ‼️Warning‼️ -This project allows an AI model to write/execute code on your computer that may be insecure, dangerous, and vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.json`. Enable only on local or private servers, **never** on public servers. Ye be warned. +This project allows an AI model to write/execute code on your computer that may be insecure, dangerous, and vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. Enable only on local or private servers, **never** on public servers. Ye be warned. ## Requirements @@ -31,9 +31,9 @@ If you use Ollama, to install the models used by default (generation and embeddi Then, clone/download this repository -Run `npm install` +Run `npm install` from the installed directory -Install the minecraft version specified in `settings.json`, currently supports up to 1.20.4 +Install the minecraft version specified in `settings.js`, currently supports up to 1.20.4 ### Running Locally @@ -43,18 +43,16 @@ Run `node main.js` You can configure the agent's name, model, and prompts in their profile like `andy.json`. -You can configure project details in `settings.json`. +You can configure project details in `settings.js`. [See file for more details](settings.js) ### Online Servers -To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. Here is an example settings for this: +To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. Here are example settings for this: ``` -{ - "minecraft_version": "1.20.4", - "host": "111.222.333.444", - "port": 55920, - "auth": "microsoft", - "allow_insecure_coding": false -} +"host": "111.222.333.444", +"port": 55920, +"auth": "microsoft", + +// rest is same... ``` ‼️Make sure your bot's name in the profile.json matches the account name! Otherwise the bot will spam talk to itself. 
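For reference, a full `settings.js` for an online server might look like the sketch below. It merges the snippet above with the defaults from the `settings.js` added earlier in this series; the host, port, and profile path are placeholder values, so substitute your own server details and keep the profile name matched to the Microsoft account.

```
// example settings.js for an online server (placeholder host/port)
export default
{
    "minecraft_version": "1.20.4", // supports up to 1.20.4
    "host": "111.222.333.444",     // your server's address
    "port": 55920,                 // your server's port
    "auth": "microsoft",           // online servers require a Microsoft/Minecraft account

    "profiles": [
        "./andy.json"              // the profile's name must match the account name
    ],
    "load_memory": false,
    "init_message": "Say hello world and your name",
    "allow_insecure_coding": false, // never enable on public servers
    "code_timeout_mins": 10
}
```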
From 738c305723a7cdd0821d081aec02a4b6d131fc10 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 29 May 2024 22:23:22 -0500 Subject: [PATCH 63/78] ignore my saved code folder --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 61f81fd..9d80c04 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ .vscode/ node_modules/ package-lock.json +code_records/ scratch.js bots/**/action-code/** bots/**/ \ No newline at end of file From fe621ebcf8d1b62b4e871adb2e95409456fb3dc9 Mon Sep 17 00:00:00 2001 From: Sam Kemp Date: Thu, 30 May 2024 17:30:34 +0100 Subject: [PATCH 64/78] Renamed config.json to keys.json and added check for env var if keys.json isn't found --- .gitignore | 2 +- README.md | 2 +- config.example.json => keys.example.json | 0 src/models/claude.js | 6 ++++-- src/models/gemini.js | 12 +++++++----- src/models/gpt.js | 12 +++++++++--- src/models/replicate.js | 19 +++++++++++-------- 7 files changed, 33 insertions(+), 20 deletions(-) rename config.example.json => keys.example.json (100%) diff --git a/.gitignore b/.gitignore index 6e6fd2d..a139094 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,4 @@ package-lock.json scratch.js bots/**/action-code/** bots/**/ -config.json \ No newline at end of file +keys.json \ No newline at end of file diff --git a/README.md b/README.md index 6dd9c53..a7f1735 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ This project allows an AI model to write/execute code on your computer that may ## Installation -Rename `config.example.json` to `config.json` and fill in the desired API keys +Rename `keys.example.json` to `keys.json` and fill in the desired API keys | API | Config Variable | Example Model name | Docs | |------|------|------|------| | OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) | (optionally add `OPENAI_ORG_ID`) diff --git a/config.example.json b/keys.example.json similarity index 100% rename from config.example.json rename to keys.example.json diff --git a/src/models/claude.js b/src/models/claude.js index 754cd7a..19fe053 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -1,5 +1,5 @@ import Anthropic from '@anthropic-ai/sdk'; -import configJson from "../../config.json" assert { type: "json" }; +import configJson from "../../keys.json" assert { type: "json" }; export class Claude { @@ -11,8 +11,10 @@ export class Claude { config.baseURL = url; if (configJson.ANTHROPIC_API_KEY) config.apiKey = configJson.ANTHROPIC_API_KEY; + else if (process.env.ANTHROPIC_API_KEY) + config.apiKey = process.env.ANTHROPIC_API_KEY; else - throw new Error('Anthropic API key missing! Make sure you set your ANTHROPIC_API_KEY in your config.json.'); + throw new Error('Anthropic API key missing! Make sure you set your ANTHROPIC_API_KEY in your keys.json.'); this.anthropic = new Anthropic(config); } diff --git a/src/models/gemini.js b/src/models/gemini.js index b0d315b..8273b96 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,16 +1,18 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; import { toSinglePrompt } from '../utils/text.js'; -import configJson from "../../config.json" assert { type: "json" }; +import configJson from "../../keys.json" assert { type: "json" }; export class Gemini { constructor(model_name, url) { this.model_name = model_name; this.url = url; - if (!configJson.GEMINI_API_KEY) { - throw new Error('Gemini API key missing! 
Make sure you set your GEMINI_API_KEY in your config.json.'); - } - this.genAI = new GoogleGenerativeAI(configJson.GEMINI_API_KEY); + if (configJson.GEMINI_API_KEY) + this.genAI = new GoogleGenerativeAI(configJson.GEMINI_API_KEY); + else if (process.env.GEMINI_API_KEY) + this.genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY); + else + throw new Error('Gemini API key missing! Make sure you set your GEMINI_API_KEY in your keys.json.'); } async sendRequest(turns, systemMessage) { diff --git a/src/models/gpt.js b/src/models/gpt.js index 9f674b9..0f88ec5 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,5 +1,5 @@ import OpenAIApi from 'openai'; -import configJson from "../../config.json" assert { type: "json" }; +import configJson from "../../keys.json" assert { type: "json" }; export class GPT { constructor(model_name, url) { @@ -8,12 +8,18 @@ export class GPT { let config = {}; if (url) config.baseURL = url; + if (configJson.OPENAI_ORG_ID) - config.organization = configJson.OPENAI_ORG_ID; + config.apiKey = configJson.OPENAI_ORG_ID; + else if (process.env.OPENAI_ORG_ID) + config.apiKey = process.env.OPENAI_ORG_ID; + if (configJson.OPENAI_API_KEY) config.apiKey = configJson.OPENAI_API_KEY; + else if (process.env.OPENAI_API_KEY) + config.apiKey = process.env.OPENAI_API_KEY; else - throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY in your config.json.'); + throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY in your keys.json.'); this.openai = new OpenAIApi(config); } diff --git a/src/models/replicate.js b/src/models/replicate.js index 1059f0f..2ec30ba 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -1,6 +1,6 @@ import Replicate from 'replicate'; import { toSinglePrompt } from '../utils/text.js'; -import configJson from "../../config.json" assert { type: "json" }; +import configJson from "../../keys.json" assert { type: "json" }; // llama, mistral export class ReplicateAPI { @@ -12,13 +12,16 @@ export class ReplicateAPI { console.warn('Replicate API does not support custom URLs. Ignoring provided URL.'); } - if (!configJson.REPLICATE_API_KEY) { - throw new Error('Replicate API key missing! Make sure you set your REPLICATE_API_KEY in your config.json.'); - } - - this.replicate = new Replicate({ - auth: configJson.REPLICATE_API_KEY, - }); + if (configJson.REPLICATE_API_KEY) + this.replicate = new Replicate({ + auth: configJson.REPLICATE_API_KEY, + }); + else if (process.env.REPLICATE_API_KEY) + this.replicate = new Replicate({ + auth: process.env.REPLICATE_API_KEY, + }); + else + throw new Error('Replicate API key missing! 
Make sure you set your REPLICATE_API_KEY in your keys.json.'); } async sendRequest(turns, systemMessage) { From 24a63703329ff611100c2742c13408514bd5f37e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 30 May 2024 18:00:48 -0500 Subject: [PATCH 65/78] refactored into key reader --- README.md | 2 +- src/models/claude.js | 11 +++-------- src/models/gemini.js | 9 ++------- src/models/gpt.js | 15 ++++----------- src/models/replicate.js | 15 ++++----------- src/utils/keys.js | 24 ++++++++++++++++++++++++ 6 files changed, 38 insertions(+), 38 deletions(-) create mode 100644 src/utils/keys.js diff --git a/README.md b/README.md index 4ac5be3..79a8616 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ This project allows an AI model to write/execute code on your computer that may ## Installation -Rename `keys.example.json` to `keys.json` and fill in the desired API keys +Rename `keys.example.json` to `keys.json` and fill in your API keys, and you can set the desired model in `andy.json` or other profiles. | API | Config Variable | Example Model name | Docs | |------|------|------|------| | OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) | (optionally add `OPENAI_ORG_ID`) diff --git a/src/models/claude.js b/src/models/claude.js index 19fe053..46691d5 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -1,6 +1,5 @@ import Anthropic from '@anthropic-ai/sdk'; -import configJson from "../../keys.json" assert { type: "json" }; - +import { getKey } from '../utils/keys.js'; export class Claude { constructor(model_name, url) { @@ -9,12 +8,8 @@ export class Claude { let config = {}; if (url) config.baseURL = url; - if (configJson.ANTHROPIC_API_KEY) - config.apiKey = configJson.ANTHROPIC_API_KEY; - else if (process.env.ANTHROPIC_API_KEY) - config.apiKey = process.env.ANTHROPIC_API_KEY; - else - throw new Error('Anthropic API key missing! Make sure you set your ANTHROPIC_API_KEY in your keys.json.'); + + config.apiKey = getKey('ANTHROPIC_API_KEY'); this.anthropic = new Anthropic(config); } diff --git a/src/models/gemini.js b/src/models/gemini.js index 8273b96..7d652d5 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,18 +1,13 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; import { toSinglePrompt } from '../utils/text.js'; -import configJson from "../../keys.json" assert { type: "json" }; +import { getKey } from '../utils/keys.js'; export class Gemini { constructor(model_name, url) { this.model_name = model_name; this.url = url; - if (configJson.GEMINI_API_KEY) - this.genAI = new GoogleGenerativeAI(configJson.GEMINI_API_KEY); - else if (process.env.GEMINI_API_KEY) - this.genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY); - else - throw new Error('Gemini API key missing! 
Make sure you set your GEMINI_API_KEY in your keys.json.'); + this.genAI = new GoogleGenerativeAI(getKey('GEMINI_API_KEY')); } async sendRequest(turns, systemMessage) { diff --git a/src/models/gpt.js b/src/models/gpt.js index 0f88ec5..061596b 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,5 +1,5 @@ import OpenAIApi from 'openai'; -import configJson from "../../keys.json" assert { type: "json" }; +import { getKey, hasKey } from '../utils/keys.js'; export class GPT { constructor(model_name, url) { @@ -9,17 +9,10 @@ export class GPT { if (url) config.baseURL = url; - if (configJson.OPENAI_ORG_ID) - config.apiKey = configJson.OPENAI_ORG_ID; - else if (process.env.OPENAI_ORG_ID) - config.apiKey = process.env.OPENAI_ORG_ID; + if (hasKey('OPENAI_ORG_ID')) + config.organization = getKey('OPENAI_ORG_ID'); - if (configJson.OPENAI_API_KEY) - config.apiKey = configJson.OPENAI_API_KEY; - else if (process.env.OPENAI_API_KEY) - config.apiKey = process.env.OPENAI_API_KEY; - else - throw new Error('OpenAI API key missing! Make sure you set your OPENAI_API_KEY in your keys.json.'); + config.apiKey = getKey('OPENAI_API_KEY'); this.openai = new OpenAIApi(config); } diff --git a/src/models/replicate.js b/src/models/replicate.js index 2ec30ba..e0c7d6c 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -1,6 +1,6 @@ import Replicate from 'replicate'; import { toSinglePrompt } from '../utils/text.js'; -import configJson from "../../keys.json" assert { type: "json" }; +import { getKey } from '../utils/keys.js'; // llama, mistral export class ReplicateAPI { @@ -12,16 +12,9 @@ export class ReplicateAPI { console.warn('Replicate API does not support custom URLs. Ignoring provided URL.'); } - if (configJson.REPLICATE_API_KEY) - this.replicate = new Replicate({ - auth: configJson.REPLICATE_API_KEY, - }); - else if (process.env.REPLICATE_API_KEY) - this.replicate = new Replicate({ - auth: process.env.REPLICATE_API_KEY, - }); - else - throw new Error('Replicate API key missing! Make sure you set your REPLICATE_API_KEY in your keys.json.'); + this.replicate = new Replicate({ + auth: getKey('REPLICATE_API_KEY'), + }); } async sendRequest(turns, systemMessage) { diff --git a/src/utils/keys.js b/src/utils/keys.js new file mode 100644 index 0000000..745ae6c --- /dev/null +++ b/src/utils/keys.js @@ -0,0 +1,24 @@ +import { readFileSync } from 'fs'; + +let keys = {}; +try { + const data = readFileSync('./keys.json', 'utf8'); + keys = JSON.parse(data); +} catch (err) { + console.warn('keys.json not found. 
Defaulting to environment variables.'); // still works with local models
+}
+
+export function getKey(name) {
+    let key = keys[name];
+    if (!key) {
+        key = process.env[name];
+    }
+    if (!key) {
+        throw new Error(`API key "${name}" not found in keys.json or environment variables!`);
+    }
+    return key;
+}
+
+export function hasKey(name) {
+    return keys[name] || process.env[name];
+}

From 666b45a194e7765c348bcdf34d7a37384f05fff3 Mon Sep 17 00:00:00 2001
From: MaxRobinsonTheGreat
Date: Sat, 1 Jun 2024 15:03:45 -0500
Subject: [PATCH 66/78] use strict formatting for ollama

---
 src/agent/agent.js   |  2 +-
 src/models/claude.js | 32 ++------------------------------
 src/models/local.js  |  8 +++++---
 src/utils/text.js    | 35 +++++++++++++++++++++++++++++++++++
 4 files changed, 43 insertions(+), 34 deletions(-)

diff --git a/src/agent/agent.js b/src/agent/agent.js
index deed4f5..ed11fc3 100644
--- a/src/agent/agent.js
+++ b/src/agent/agent.js
@@ -62,7 +62,7 @@ export class Agent {
         };
 
         if (init_message) {
-            this.handleMessage('system', init_message);
+            this.handleMessage('MINECRAFT', init_message);
         } else {
             this.bot.chat('Hello world! I am ' + this.name);
             this.bot.emit('finished_executing');
diff --git a/src/models/claude.js b/src/models/claude.js
index 46691d5..0304714 100644
--- a/src/models/claude.js
+++ b/src/models/claude.js
@@ -1,4 +1,5 @@
 import Anthropic from '@anthropic-ai/sdk';
+import { strictFormat } from '../utils/text.js';
 import { getKey } from '../utils/keys.js';
 
 export class Claude {
@@ -15,36 +16,7 @@ export class Claude {
     }
 
     async sendRequest(turns, systemMessage) {
-        let prev_role = null;
-        let messages = [];
-        let filler = {role: 'user', content: '_'};
-        for (let msg of turns) {
-            if (msg.role === 'system') {
-                msg.role = 'user';
-                msg.content = 'SYSTEM: ' + msg.content;
-            }
-            if (msg.role === prev_role && msg.role === 'assistant') {
-                // insert empty user message to separate assistant messages
-                messages.push(filler);
-                messages.push(msg);
-            }
-            else if (msg.role === prev_role) {
-                // combine new message with previous message instead of adding a new one
-                messages[messages.length-1].content += '\n' + msg.content;
-            }
-            else {
-                messages.push(msg);
-            }
-            prev_role = msg.role;
-
-        }
-        if (messages.length > 0 && messages[0].role !== 'user') {
-            messages.unshift(filler); // anthropic requires user message to start
-        }
-        if (messages.length === 0) {
-            messages.push(filler);
-        }
-
+        const messages = strictFormat(turns);
         let res = null;
         try {
             console.log('Awaiting anthropic api response...')
diff --git a/src/models/local.js b/src/models/local.js
index dd3af34..21b70df 100644
--- a/src/models/local.js
+++ b/src/models/local.js
@@ -1,3 +1,5 @@
+import { strictFormat } from '../utils/text.js';
+
 export class Local {
     constructor(model_name, url) {
         this.model_name = model_name;
@@ -8,8 +10,8 @@ export class Local {
 
     async sendRequest(turns, systemMessage) {
         let model = this.model_name || 'llama3';
-        let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
-
+        let messages = strictFormat(turns);
+        messages.unshift({role: 'system', content: systemMessage});
         let res = null;
         try {
             console.log(`Awaiting local response... 
(model: ${model})`) @@ -56,4 +58,4 @@ export class Local { } return data; } -} +} \ No newline at end of file diff --git a/src/utils/text.js b/src/utils/text.js index b0a273a..bf7b509 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -24,4 +24,39 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message prompt += model_nickname + ": "; return prompt; +} + +// ensures stricter turn order for anthropic/llama models +// combines repeated messages from the same role, separates repeat assistant messages with filler user messages +export function strictFormat(turns) { + let prev_role = null; + let messages = []; + let filler = {role: 'user', content: '_'}; + for (let msg of turns) { + if (msg.role === 'system') { + msg.role = 'user'; + msg.content = 'SYSTEM: ' + msg.content; + } + if (msg.role === prev_role && msg.role === 'assistant') { + // insert empty user message to separate assistant messages + messages.push(filler); + messages.push(msg); + } + else if (msg.role === prev_role) { + // combine new message with previous message instead of adding a new one + messages[messages.length-1].content += '\n' + msg.content; + } + else { + messages.push(msg); + } + prev_role = msg.role; + + } + if (messages.length > 0 && messages[0].role !== 'user') { + messages.unshift(filler); // anthropic requires user message to start + } + if (messages.length === 0) { + messages.push(filler); + } + return messages; } \ No newline at end of file From ea4922e8f0f9d2f36be0ea96eef743e3ec0f9c01 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 1 Jun 2024 16:05:59 -0500 Subject: [PATCH 67/78] fixed coder, gemini --- src/agent/coder.js | 2 +- src/models/gemini.js | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/agent/coder.js b/src/agent/coder.js index 47a7ae8..d408b6c 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -1,5 +1,5 @@ import { writeFile, readFile, mkdirSync } from 'fs'; - +import settings from '../../settings.js'; export class Coder { constructor(agent) { diff --git a/src/models/gemini.js b/src/models/gemini.js index 7d652d5..ee4dfa4 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -25,9 +25,11 @@ export class Gemini { const stop_seq = '***'; const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); + console.log('Awaiting Google API response...'); const result = await model.generateContent(prompt); const response = await result.response; const text = response.text(); + console.log('Received.'); if (!text.includes(stop_seq)) return text; const idx = text.indexOf(stop_seq); return text.slice(0, idx); From c8b6504221e2108bf737e1e74ce07cbe9974f22d Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 1 Jun 2024 16:23:14 -0500 Subject: [PATCH 68/78] ollama no embedding by default --- src/agent/prompter.js | 8 ++++++-- src/models/local.js | 1 - 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 5e6a412..5eb5761 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -51,8 +51,12 @@ export class Prompter { throw new Error('Unknown API:', api); let embedding = this.prompts.embedding; - if (embedding === undefined) - embedding = {api: chat.api}; + if (embedding === undefined) { + if (chat.api !== 'ollama') + embedding = {api: chat.api}; + else + embedding = {api: 
'none'}; + } else if (typeof embedding === 'string' || embedding instanceof String) embedding = {api: embedding}; diff --git a/src/models/local.js b/src/models/local.js index 21b70df..18d06e0 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -15,7 +15,6 @@ export class Local { let res = null; try { console.log(`Awaiting local response... (model: ${model})`) - console.log('Messages:', messages); res = await this.send(this.chat_endpoint, {model: model, messages: messages, stream: false}); if (res) res = res['message']['content']; From 83bb3a16fd4a5728974b15a790d54e760a7054a7 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 1 Jun 2024 22:47:32 -0500 Subject: [PATCH 69/78] changed init message to system --- src/agent/agent.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index ed11fc3..deed4f5 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -62,7 +62,7 @@ export class Agent { }; if (init_message) { - this.handleMessage('MINECRAFT', init_message); + this.handleMessage('system', init_message); } else { this.bot.chat('Hello world! I am ' + this.name); this.bot.emit('finished_executing'); From 440ffdd931771f33cbd682493f56a717296cbbe7 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:21:11 -0500 Subject: [PATCH 70/78] ignore user commands, remove message logs --- src/agent/agent.js | 5 ++--- src/models/claude.js | 2 +- src/models/gpt.js | 3 ++- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index deed4f5..8a0c044 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -79,9 +79,6 @@ export class Agent { } async handleMessage(source, message) { - if (!!source && !!message) - await this.history.add(source, message); - const user_command_name = containsCommand(message); if (user_command_name) { if (!commandExists(user_command_name)) { @@ -101,6 +98,8 @@ export class Agent { return; } + await this.history.add(source, message); + for (let i=0; i<5; i++) { let history = this.history.getHistory(); let res = await this.prompter.promptConvo(history); diff --git a/src/models/claude.js b/src/models/claude.js index 0304714..c97ecb2 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -20,7 +20,7 @@ export class Claude { let res = null; try { console.log('Awaiting anthropic api response...') - console.log('Messages:', messages); + // console.log('Messages:', messages); const resp = await this.anthropic.messages.create({ model: this.model_name || "claude-3-sonnet-20240229", system: systemMessage, diff --git a/src/models/gpt.js b/src/models/gpt.js index 061596b..bf5ca8d 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,5 +1,6 @@ import OpenAIApi from 'openai'; import { getKey, hasKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; export class GPT { constructor(model_name, url) { @@ -24,7 +25,7 @@ export class GPT { let res = null; try { console.log('Awaiting openai api response...') - console.log('Messages:', messages); + // console.log('Messages:', messages); let completion = await this.openai.chat.completions.create({ model: this.model_name || "gpt-3.5-turbo", messages: messages, From af8252cd95e6e8e9d9f5f76627f44ad99c5fce20 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:23:01 -0500 Subject: [PATCH 71/78] added cheat mode, teleport, insta place, better torch placer --- src/agent/library/skills.js | 73 ++++++++++++++++++++++++++++++------- 
src/agent/modes.js | 33 ++++++++++------- 2 files changed, 79 insertions(+), 27 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index cd9f5b3..ba8c27e 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -10,19 +10,25 @@ export function log(bot, message, chat=false) { bot.chat(message); } +export function shouldPlaceTorch(bot) { + if (!bot.modes.isOn('torch_placing') || bot.interrupt_code) return false; + const pos = world.getPosition(bot); + // TODO: check light level instead of nearby torches, block.light is broken + let nearest_torch = world.getNearestBlock(bot, 'torch', 6); + if (!nearest_torch) { + const block = bot.blockAt(pos); + let has_torch = bot.inventory.items().find(item => item.name === 'torch'); + return has_torch && block.name === 'air'; + } + return false; +} + async function autoLight(bot) { - if (bot.modes.isOn('torch_placing') && !bot.interrupt_code) { - let nearest_torch = world.getNearestBlock(bot, 'torch', 6); - if (!nearest_torch) { - let has_torch = bot.inventory.items().find(item => item.name === 'torch'); - const curr_block = agent.bot.blockAt(pos); - if (has_torch && curr_block.name === 'air') { - try { - log(bot, `Placing torch at ${bot.entity.position}.`); - return await placeBlock(bot, 'torch', bot.entity.position.x, bot.entity.position.y, bot.entity.position.z); - } catch (err) {return true;} - } - } + if (shouldPlaceTorch(bot)) { + try { + const pos = world.getPosition(bot); + return await placeBlock(bot, 'torch', pos.x, pos.y, pos.z, true); + } catch (err) {return false;} } return false; } @@ -455,6 +461,13 @@ export async function breakBlockAt(bot, x, y, z) { if (x == null || y == null || z == null) throw new Error('Invalid position to break block at.'); let block = bot.blockAt(Vec3(x, y, z)); if (block.name !== 'air' && block.name !== 'water' && block.name !== 'lava') { + if (bot.modes.isOn('cheat')) { + let msg = '/setblock ' + Math.floor(x) + ' ' + Math.floor(y) + ' ' + Math.floor(z) + ' air'; + bot.chat(msg); + log(bot, `Used /setblock to break block at ${x}, ${y}, ${z}.`); + return true; + } + if (bot.entity.position.distanceTo(block.position) > 4.5) { let pos = block.position; let movements = new pf.Movements(bot); @@ -482,7 +495,7 @@ export async function breakBlockAt(bot, x, y, z) { } -export async function placeBlock(bot, blockType, x, y, z) { +export async function placeBlock(bot, blockType, x, y, z, no_cheat=false) { /** * Place the given block type at the given position. It will build off from any adjacent blocks. Will fail if there is a block in the way or nothing to build off of. * @param {MinecraftBot} bot, reference to the minecraft bot. 
@@ -495,7 +508,18 @@ export async function placeBlock(bot, blockType, x, y, z) { * let position = world.getPosition(bot); * await skills.placeBlock(bot, "oak_log", position.x + 1, position.y - 1, position.x); **/ - console.log('placing block...') + if (!mc.getBlockId(blockType)) { + log(bot, `Invalid block type: ${blockType}.`); + return false; + } + + if (bot.modes.isOn('cheat') && !no_cheat) { + let msg = '/setblock ' + Math.floor(x) + ' ' + Math.floor(y) + ' ' + Math.floor(z) + ' ' + blockType; + bot.chat(msg); + log(bot, `Used /setblock to place ${blockType} at ${x}, ${y}, ${z}.`); + return true; + } + let block = bot.inventory.items().find(item => item.name === blockType); if (!block) { log(bot, `Don't have any ${blockType} to place.`); @@ -705,6 +729,13 @@ export async function goToPlayer(bot, username, distance=3) { * @example * await skills.goToPlayer(bot, "player"); **/ + + if (bot.modes.isOn('cheat')) { + bot.chat('/tp @s ' + username); + log(bot, `Teleported to ${username}.`); + return true; + } + bot.modes.pause('self_defense'); bot.modes.pause('cowardice'); let player = bot.players[username].entity @@ -759,6 +790,20 @@ export async function moveAway(bot, distance) { let goal = new pf.goals.GoalNear(pos.x, pos.y, pos.z, distance); let inverted_goal = new pf.goals.GoalInvert(goal); bot.pathfinder.setMovements(new pf.Movements(bot)); + + if (bot.modes.isOn('cheat')) { + const path = await bot.pathfinder.getPathTo(move, inverted_goal, 10000); + let last_move = path.path[path.path.length-1]; + console.log(last_move); + if (last_move) { + let x = Math.floor(last_move.x); + let y = Math.floor(last_move.y); + let z = Math.floor(last_move.z); + bot.chat('/tp @s ' + x + ' ' + y + ' ' + z); + return true; + } + } + await bot.pathfinder.goto(inverted_goal); let new_pos = bot.entity.position; log(bot, `Moved away from nearest entity to ${new_pos}.`); diff --git a/src/agent/modes.js b/src/agent/modes.js index 9a32c0e..4d5b83c 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -149,21 +149,16 @@ const modes = [ interrupts: ['followPlayer'], on: true, active: false, + cooldown: 5, + last_place: Date.now(), update: function (agent) { - // TODO: check light level instead of nearby torches, block.light is broken - const near_torch = world.getNearestBlock(agent.bot, 'torch', 6); - if (!near_torch) { - let torches = agent.bot.inventory.items().filter(item => item.name === 'torch'); - if (torches.length > 0) { - const torch = torches[0]; + if (skills.shouldPlaceTorch(agent.bot)) { + if (Date.now() - this.last_place < this.cooldown * 1000) return; + execute(this, agent, async () => { const pos = agent.bot.entity.position; - const curr_block = agent.bot.blockAt(pos); - if (curr_block.name === 'air') { - execute(this, agent, async () => { - await skills.placeBlock(agent.bot, torch.name, pos.x, pos.y, pos.z); - }); - } - } + await skills.placeBlock(agent.bot, 'torch', pos.x, pos.y, pos.z, true); + }); + this.last_place = Date.now(); } } }, @@ -204,6 +199,14 @@ const modes = [ } } }, + { + name: 'cheat', + description: 'Use cheats to instantly place blocks and teleport.', + interrupts: [], + on: false, + active: false, + update: function (agent) { /* do nothing */ } + } ]; async function execute(mode, agent, func, timeout=-1) { @@ -291,4 +294,8 @@ class ModeController { export function initModes(agent) { // the mode controller is added to the bot object so it is accessible from anywhere the bot is used agent.bot.modes = new ModeController(agent); + let modes = agent.prompter.getInitModes(); + 
if (modes) { + agent.bot.modes.loadJson(modes); + } } From 25c4f68be7789203556fb236e398825f7e1dba75 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:23:29 -0500 Subject: [PATCH 72/78] add modes to profile --- andy.json | 11 +++++++++++ src/agent/npc/controller.js | 2 +- src/agent/prompter.js | 28 ++++++++++++++++------------ 3 files changed, 28 insertions(+), 13 deletions(-) diff --git a/andy.json b/andy.json index d4c5fc9..8d2654e 100644 --- a/andy.json +++ b/andy.json @@ -8,6 +8,17 @@ "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will recieve it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. 
Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "modes": { + "self_preservation": true, + "cowardice": true, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "idle_staring": true, + "cheat": false + }, "conversation_examples": [ [ diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js index d65107c..18658b6 100644 --- a/src/agent/npc/controller.js +++ b/src/agent/npc/controller.js @@ -11,7 +11,7 @@ import * as mc from '../../utils/mcdata.js'; export class NPCContoller { constructor(agent) { this.agent = agent; - this.data = NPCData.fromObject(agent.prompter.prompts.npc); + this.data = NPCData.fromObject(agent.prompter.profile.npc); this.temp_goals = []; this.item_goal = new ItemGoal(agent, this.data); this.build_goal = new BuildGoal(agent); diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 5eb5761..233c4a0 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -15,12 +15,12 @@ import { Local } from '../models/local.js'; export class Prompter { constructor(agent, fp) { this.agent = agent; - this.prompts = JSON.parse(readFileSync(fp, 'utf8')); + this.profile = JSON.parse(readFileSync(fp, 'utf8')); this.convo_examples = null; this.coding_examples = null; - let name = this.prompts.name; - let chat = this.prompts.model; + let name = this.profile.name; + let chat = this.profile.model; if (typeof chat === 'string' || chat instanceof String) { chat = {model: chat}; if (chat.model.includes('gemini')) @@ -50,7 +50,7 @@ export class Prompter { else throw new Error('Unknown API:', api); - let embedding = this.prompts.embedding; + let embedding = this.profile.embedding; if (embedding === undefined) { if (chat.api !== 'ollama') embedding = {api: chat.api}; @@ -76,7 +76,7 @@ export class Prompter { } mkdirSync(`./bots/${name}`, { recursive: true }); - writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.prompts, null, 4), (err) => { + writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => { if (err) { throw err; } @@ -85,15 +85,19 @@ export class Prompter { } getName() { - return this.prompts.name; + return this.profile.name; + } + + getInitModes() { + return this.profile.modes; } async initExamples() { console.log('Loading examples...') this.convo_examples = new Examples(this.embedding_model); - await this.convo_examples.load(this.prompts.conversation_examples); + await this.convo_examples.load(this.profile.conversation_examples); this.coding_examples = new Examples(this.embedding_model); - await this.coding_examples.load(this.prompts.coding_examples); + await this.coding_examples.load(this.profile.coding_examples); console.log('Examples loaded.'); } @@ -149,25 +153,25 @@ export class Prompter { } async promptConvo(messages) { - let prompt = this.prompts.conversing; + let prompt = this.profile.conversing; prompt = await this.replaceStrings(prompt, messages, this.convo_examples); return await this.chat_model.sendRequest(messages, prompt); } async promptCoding(messages) { - let prompt = this.prompts.coding; + let prompt = this.profile.coding; prompt = await this.replaceStrings(prompt, messages, this.coding_examples); return await this.chat_model.sendRequest(messages, prompt); } async promptMemSaving(prev_mem, to_summarize) { - let prompt = this.prompts.saving_memory; + let prompt 
= this.profile.saving_memory; prompt = await this.replaceStrings(prompt, null, null, prev_mem, to_summarize); return await this.chat_model.sendRequest([], prompt); } async promptGoalSetting(messages, last_goals) { - let system_message = this.prompts.goal_setting; + let system_message = this.profile.goal_setting; system_message = await this.replaceStrings(system_message, messages); let user_message = 'Use the below info to determine what goal to target next\n\n'; From ca4ef4114a06492cecda02852202f3babdd6ad04 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:28:44 -0500 Subject: [PATCH 73/78] added no_cheat to docs --- src/agent/library/skills.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index ba8c27e..2266a2e 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -503,6 +503,7 @@ export async function placeBlock(bot, blockType, x, y, z, no_cheat=false) { * @param {number} x, the x coordinate of the block to place. * @param {number} y, the y coordinate of the block to place. * @param {number} z, the z coordinate of the block to place. + * @param {boolean} no_cheat, overrides cheat mode to place the block normally. Defaults to false. * @returns {Promise} true if the block was placed, false otherwise. * @example * let position = world.getPosition(bot); From baa51ee2c825f5025f744403b46180161d1a9038 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:33:00 -0500 Subject: [PATCH 74/78] added teleport to gotoposition --- src/agent/library/skills.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 2266a2e..c409d6e 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -713,6 +713,11 @@ export async function goToPosition(bot, x, y, z, min_distance=2) { log(bot, `Missing coordinates, given x:${x} y:${y} z:${z}`); return false; } + if (bot.modes.isOn('cheat')) { + bot.chat('/tp @s ' + x + ' ' + y + ' ' + z); + log(bot, `Teleported to ${x}, ${y}, ${z}.`); + return true; + } bot.pathfinder.setMovements(new pf.Movements(bot)); await bot.pathfinder.goto(new pf.goals.GoalNear(x, y, z, min_distance)); log(bot, `You have reached at ${x}, ${y}, ${z}.`); From 3342a6deb9b76a0b990a2be754caeeb9f4fd31a1 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 18:40:01 -0500 Subject: [PATCH 75/78] removed strict format --- src/models/gpt.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/models/gpt.js b/src/models/gpt.js index bf5ca8d..7cfbcb7 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -1,6 +1,5 @@ import OpenAIApi from 'openai'; import { getKey, hasKey } from '../utils/keys.js'; -import { strictFormat } from '../utils/text.js'; export class GPT { constructor(model_name, url) { From 3f541ae9303fb8631e61d190adf94e8b60090aa5 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Jun 2024 19:09:22 -0500 Subject: [PATCH 76/78] moved shouldplacetorch to world --- src/agent/library/skills.js | 15 +-------------- src/agent/library/world.js | 13 +++++++++++++ src/agent/modes.js | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index c409d6e..b848304 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -10,21 +10,8 @@ export function log(bot, message, chat=false) { bot.chat(message); } -export function shouldPlaceTorch(bot) { - if 
(!bot.modes.isOn('torch_placing') || bot.interrupt_code) return false; - const pos = world.getPosition(bot); - // TODO: check light level instead of nearby torches, block.light is broken - let nearest_torch = world.getNearestBlock(bot, 'torch', 6); - if (!nearest_torch) { - const block = bot.blockAt(pos); - let has_torch = bot.inventory.items().find(item => item.name === 'torch'); - return has_torch && block.name === 'air'; - } - return false; -} - async function autoLight(bot) { - if (shouldPlaceTorch(bot)) { + if (world.shouldPlaceTorch(bot)) { try { const pos = world.getPosition(bot); return await placeBlock(bot, 'torch', pos.x, pos.y, pos.z, true); diff --git a/src/agent/library/world.js b/src/agent/library/world.js index 9d6be62..0e32014 100644 --- a/src/agent/library/world.js +++ b/src/agent/library/world.js @@ -256,6 +256,19 @@ export async function isClearPath(bot, target) { return path.status === 'success'; } +export function shouldPlaceTorch(bot) { + if (!bot.modes.isOn('torch_placing') || bot.interrupt_code) return false; + const pos = getPosition(bot); + // TODO: check light level instead of nearby torches, block.light is broken + let nearest_torch = getNearestBlock(bot, 'torch', 6); + if (!nearest_torch) { + const block = bot.blockAt(pos); + let has_torch = bot.inventory.items().find(item => item.name === 'torch'); + return has_torch && block.name === 'air'; + } + return false; +} + export function getBiomeName(bot) { /** * Get the name of the biome the bot is in. diff --git a/src/agent/modes.js b/src/agent/modes.js index 4d5b83c..2d201d4 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -152,7 +152,7 @@ const modes = [ cooldown: 5, last_place: Date.now(), update: function (agent) { - if (skills.shouldPlaceTorch(agent.bot)) { + if (world.shouldPlaceTorch(agent.bot)) { if (Date.now() - this.last_place < this.cooldown * 1000) return; execute(this, agent, async () => { const pos = agent.bot.entity.position; From 83f98b10d4ed3a3a41aa5c8ef7fece47f50eda95 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Jun 2024 17:03:13 -0500 Subject: [PATCH 77/78] updated profiles, fixed newaction --- profiles/claude.json | 11 +++++++++++ profiles/gemini.json | 13 ++++++++++++- profiles/gpt.json | 13 ++++++++++++- profiles/llama.json | 11 +++++++++++ src/agent/agent.js | 5 ++--- 5 files changed, 48 insertions(+), 5 deletions(-) diff --git a/profiles/claude.json b/profiles/claude.json index d6db2a3..4c3bbee 100644 --- a/profiles/claude.json +++ b/profiles/claude.json @@ -9,6 +9,17 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + "modes": { + "self_preservation": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": true, + "torch_placing": false, + "idle_staring": true, + "cheat": true + }, + "conversation_examples": [ [ {"role": "user", "content": "miner_32: Hey! 
What are you up to?"}, diff --git a/profiles/gemini.json b/profiles/gemini.json index 0fe605c..aab8908 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,7 +1,7 @@ { "name": "gemini", - "model": "gemini-pro", + "model": "gemini-1.5-pro", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", @@ -9,6 +9,17 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + "modes": { + "self_preservation": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": true, + "torch_placing": false, + "idle_staring": true, + "cheat": true + }, + "conversation_examples": [ [ {"role": "user", "content": "miner_32: Hey! What are you up to?"}, diff --git a/profiles/gpt.json b/profiles/gpt.json index ffb7363..210fc7b 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -1,7 +1,7 @@ { "name": "gpt", - "model": "gpt-4o", + "model": "gpt-4", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", @@ -9,6 +9,17 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. 
Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + "modes": { + "self_preservation": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": true, + "torch_placing": false, + "idle_staring": true, + "cheat": true + }, + "conversation_examples": [ [ {"role": "user", "content": "miner_32: Hey! What are you up to?"}, diff --git a/profiles/llama.json b/profiles/llama.json index 0b7c2bb..746b967 100644 --- a/profiles/llama.json +++ b/profiles/llama.json @@ -9,6 +9,17 @@ "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + "modes": { + "self_preservation": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": true, + "torch_placing": false, + "idle_staring": true, + "cheat": true + }, + "conversation_examples": [ [ {"role": "user", "content": "miner_32: Hey! What are you up to?"}, diff --git a/src/agent/agent.js b/src/agent/agent.js index 8a0c044..9c14c10 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -86,13 +86,12 @@ export class Agent { return; } this.bot.chat(`*${source} used ${user_command_name.substring(1)}*`); - let execute_res = await executeCommand(this, message); if (user_command_name === '!newAction') { // all user initiated commands are ignored by the bot except for this one // add the preceding message to the history to give context for newAction - let truncated_msg = message.substring(0, message.indexOf(user_command_name)).trim(); - this.history.add(source, truncated_msg); + this.history.add(source, message); } + let execute_res = await executeCommand(this, message); if (execute_res) this.cleanChat(execute_res); return; From 87d1c6c00c1705a0de44302bd23befc0c1ab0132 Mon Sep 17 00:00:00 2001 From: Gigabyte0x1337 <16334203+Gigabyte0x1337@users.noreply.github.com> Date: Sun, 16 Jun 2024 16:33:54 +0200 Subject: [PATCH 78/78] Update package.json --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 247c2dc..b2912b1 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "yargs": "^17.7.2" }, "scripts": { - "postinstall": "patch-package" + "postinstall": "patch-package", + "start": "node main.js" } }
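
With the new `start` script in place, the project can be launched through npm's standard entry point. A minimal usage sketch (illustration only, assuming the repository is already cloned and configured):

```
# installs dependencies; the postinstall hook applies local patches via patch-package
npm install

# runs the "start" script, equivalent to `node main.js`
npm start
```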