diff --git a/.gitignore b/.gitignore
index a2aee7e..6dc4554 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,10 @@ code_records/
 scratch.js
 bots/**/action-code/**
 bots/**/
-keys.json
\ No newline at end of file
+keys.json
+services/viaproxy/jars/**
+services/viaproxy/logs/**
+services/viaproxy/plugins/**
+services/viaproxy/ViaLoader/**
+services/viaproxy/saves.json
+services/viaproxy/viaproxy.yml
diff --git a/README.md b/README.md
index a815028..56a1360 100644
--- a/README.md
+++ b/README.md
@@ -12,20 +12,21 @@ This project allows an AI model to write/execute code on your computer that may
 
 - [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) (up to v1.21.1)
 - [Node.js](https://nodejs.org/) (at least v14)
-- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) |[Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download)
+- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download) | [Qwen API Key (Intl.)](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key) / [cn](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?)
 
 ## Installation
 
 Rename `keys.example.json` to `keys.json` and fill in your API keys, and you can set the desired model in `andy.json` or other profiles.
 | API | Config Variable | Example Model name | Docs |
 |------|------|------|------|
-| OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) | (optionally add `OPENAI_ORG_ID`)
+| OpenAI | `OPENAI_API_KEY` | `gpt-3.5-turbo` | [docs](https://platform.openai.com/docs/models) |
 | Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) |
 | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) |
 | Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) |
 | Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) |
 | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) |
 | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) |
+| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) |
 
 If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command:
 `ollama pull llama3 && ollama pull nomic-embed-text`
@@ -46,9 +47,29 @@ You can configure the agent's name, model, and prompts in their profile like `an
 
 You can configure project details in `settings.js`. [See file for more details](settings.js)
+### Run in docker to reduce some of the risks
+
+If you intend to enable `allow_insecure_coding`, it is a good idea to run the whole app in a docker container to reduce the risk of running unknown code.
+
+```bash
+docker run -i -t --rm -v $(pwd):/app -w /app -p 3000-3003:3000-3003 node:latest node main.js
+```
+or simply
+```bash
+docker-compose up
+```
+
+When running in docker, if you want the bot to join your local minecraft server, you have to use the special host address `host.docker.internal` to reach your localhost from inside the docker container. Put this into your [settings.js](settings.js):
+
+```javascript
+"host": "host.docker.internal", // instead of "localhost", to join your local minecraft from inside the docker container
+```
+
+To connect to an unsupported minecraft version, you can try using [viaproxy](services/viaproxy/README.md).
+
 ### Online Servers
 To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect with it. Here are example settings for this:
-```
+```javascript
 "host": "111.222.333.444",
 "port": 55920,
 "auth": "microsoft",
@@ -69,13 +90,15 @@ Bot profiles are json files (such as `andy.json`) that define:
 
 By default, the program will use the profiles specified in `settings.js`. You can specify one or more agent profiles using the `--profiles` argument:
 
-`node main.js --profiles ./profiles/andy.json ./profiles/jill.json`
+```bash
+node main.js --profiles ./profiles/andy.json ./profiles/jill.json
+```
 
 ### Model Specifications
 
 LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, for both the chat model and the embedding model, the bot profile can specify the below attributes:
-```
+```json
 "model": {
   "api": "openai",
   "url": "https://api.openai.com/v1/",
@@ -94,15 +117,15 @@ If the embedding field is not specified, then it will use the default embedding
 
 Thus, all the below specifications are equivalent to the above example:
 
-```
+```json
 "model": "gpt-3.5-turbo"
 ```
-```
+```json
 "model": {
   "api": "openai"
 }
 ```
-```
+```json
 "model": "gpt-3.5-turbo",
 "embedding": "openai"
 ```
diff --git a/bots/template.js b/bots/template.js
index 8d82f75..b7f270c 100644
--- a/bots/template.js
+++ b/bots/template.js
@@ -1,10 +1,6 @@
-import * as skills from '../../../src/agent/library/skills.js';
-import * as world from '../../../src/agent/library/world.js';
-import Vec3 from 'vec3';
+(async (bot) => {
 
-const log = skills.log;
+/* CODE HERE */
+log(bot, 'Code finished.');
 
-export async function main(bot) {
-    /* CODE HERE */
-    log(bot, 'Code finished.');
-}
+})
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..d9822f1
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,20 @@
+version: '3.8'
+
+services:
+  app:
+    image: node:latest
+    working_dir: /app
+    volumes:
+      - .:/app
+    command: node main.js
+    ports:
+      - "3000-3003:3000-3003" # see the view from the camera mounted on your bot head: http://localhost:3000/
+
+  viaproxy: # use this service to connect to unsupported minecraft server versions; more info: ./services/viaproxy/README.md
+    image: ghcr.io/viaversion/viaproxy:latest
+    volumes:
+      - ./services/viaproxy:/app/run
+    ports:
+      - "25568:25568"
+    profiles:
+      - viaproxy
diff --git a/keys.example.json b/keys.example.json
index c27fa5b..b9f314b 100644
--- a/keys.example.json
+++ b/keys.example.json
@@ -5,5 +5,6 @@
     "ANTHROPIC_API_KEY": "",
     "REPLICATE_API_KEY": "",
     "GROQCLOUD_API_KEY": "",
-    "HUGGINGFACE_API_KEY": ""
+    "HUGGINGFACE_API_KEY": "",
+    "QWEN_API_KEY": ""
 }
diff --git a/package.json b/package.json
index 8ba7e1a..1ab48a1 100644
--- a/package.json
+++ b/package.json
@@ -18,6 +18,7 @@
     "prismarine-item": "^1.15.0",
     "prismarine-viewer": "^1.28.0",
     "replicate": "^0.29.4",
+    "ses": "^1.9.1",
     "vec3": "^0.1.10",
     "yargs": "^17.7.2"
   },
diff --git a/profiles/qwen.json b/profiles/qwen.json
new file mode 100644
index 0000000..1a9fa45
--- /dev/null
+++ b/profiles/qwen.json
@@ -0,0 +1,213 @@
+{
+    "name": "qwen",
+
+    "model": {
+        "api": "qwen",
+        "url": "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation",
+        "model": "qwen-max"
+    },
+
+    "embedding": {
+        "api": "qwen",
+        "url": "https://dashscope.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding",
+        "model": "text-embedding-v2"
+    },
+
+    "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:",
+
+    "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to only write a complete js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive its output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:",
+
+    "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. 
Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", + + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "idle_staring": true, + "cheat": false + }, + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some cobblestone"}, + {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks('stone', 10)"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! 
What next?"}
+        ],
+
+        [
+            {"role": "system", "content": "Code output: Action failed."},
+            {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction('Try again to complete the task.')"}
+        ],
+
+        [
+            {"role": "user", "content": "trixy88: craft some sticks"},
+            {"role": "assistant", "content": "!craftRecipe('stick', 4)"},
+            {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."},
+            {"role": "assistant", "content": "I've crafted 16 sticks!"}
+        ],
+
+        [
+            {"role": "user", "content": "poppy: build a cobblestone house"},
+            {"role": "assistant", "content": "Sure, I'll build a small cobblestone house where I am. !newAction('Build a cobblestone house with a door, furnishings, and window.')"}
+        ],
+
+        [
+            {"role": "user", "content": "zib: build a dirt tower"},
+            {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction('Build a dirt tower 5 tall.')"}
+        ],
+
+        [
+            {"role": "user", "content": "umpire_man: follow me without hunting"},
+            {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"},
+            {"role": "system", "content": "'hunting' mode disabled."},
+            {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"}
+        ],
+
+        [
+            {"role": "user", "content": "brug: Remember that your base is here."},
+            {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"},
+            {"role": "user", "content": "brug: Go to your base."},
+            {"role": "assistant", "content": "On my way! !goToPlace('base')"}
+        ],
+
+        [
+            {"role": "system", "content": "dorfus: set a goal to build whatever you want endlessly"},
+            {"role": "assistant", "content": "Sounds fun! !goal('Build something creative, then move over, and build again. Use separate newActions. Repeat forever.')"}
+        ],
+
+        [
+            {"role": "user", "content": "user43: Set a goal to survive on your own forever."},
+            {"role": "assistant", "content": "Sure, let me set a goal. !goal('Survive the game forever. Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.')"}
+        ],
+
+        [
+            {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."},
+            {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal('1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.')"}
+        ],
+        [
+            {"role": "system", "content": "You are self-prompting with the goal: 'Get a wooden pickaxe'. Respond:"},
+            {"role": "assistant", "content": "What's around me? !nearbyBlocks"},
+            {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"},
+            {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock('oak_log', 10)"},
+            {"role": "system", "content": "Successfully collected 9 wood."},
+            {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem('oak_plank', 9)"},
+            {"role": "system", "content": "Successfully crafted 9 oak_planks."},
+            {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem('stick', 2)"},
+            {"role": "system", "content": "Successfully crafted 2 sticks."},
+            {"role": "assistant", "content": "I have crafted 2 sticks. 
Now I can craft a wooden pickaxe. !craftItem('wooden_pickaxe', 1)"}, + {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, + {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. !endGoal"} + ], + + [ + {"role": "user", "content": "gorbotron: do whatever you want forever endlessly"}, + {"role": "assistant", "content": "Okay, I'll set a goal for that. !goal('Do whatever I want forever. Follow whatever is most interesting. Be curious, creative, and open ended.')"} + ], + + [ + {"role": "user", "content": "zeeber: set a goal to build a castle"}, + {"role": "assistant", "content": "Sure thing. !goal('Build a castle with a moat, towers, and a drawbridge with a series of newactions. Include lighting, decorations, and furnishings. Use ladders. When satisfied, end the goal.')"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. 
What next?"}
+        ],
+        [
+            {"role": "user", "content": "234jeb: build a little tower"},
+            {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n    await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\n```"},
+            {"role": "system", "content": "Successfully placed 5 dirt."},
+            {"role": "assistant", "content": "I built a little tower!"}
+        ],
+        [
+            {"role": "user", "content": "brug: build a dirt house"},
+            {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n    for (let y = startY; y < startY + height; y++) {\n        for (let z = startZ; z < startZ + depth; z++) {\n            if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n                await skills.placeBlock(bot, 'oak_planks', x, y, z);\n            }\n        }\n    }\n}\n```"}
+        ]
+    ]
+
+}
\ No newline at end of file
diff --git a/services/viaproxy/README.md b/services/viaproxy/README.md
new file mode 100644
index 0000000..d1ed687
--- /dev/null
+++ b/services/viaproxy/README.md
@@ -0,0 +1,25 @@
+Use this service to connect your bot to an unsupported minecraft server version.
+
+Run:
+
+```bash
+docker-compose --profile viaproxy up
+```
+
+After the first start it will create the config file `services/viaproxy/viaproxy.yml`.
+
+Edit this file and set `target-address` to your desired target server,
+
+then point `host` and `port` in your `settings.js` to the viaproxy endpoint:
+
+```javascript
+    "host": "host.docker.internal",
+    "port": 25568,
+```
+
+This works easily with "offline" servers.
+
+Connecting to "online" servers via viaproxy involves more effort: see `auth-method` in `services/viaproxy/viaproxy.yml` (TODO describe)
+
+
+
diff --git a/settings.js b/settings.js
index 1b33d94..50f11d7 100644
--- a/settings.js
+++ b/settings.js
@@ -11,6 +11,7 @@ export default
         // "./profiles/claude.json",
         // "./profiles/gemini.json",
         // "./profiles/llama.json",
+        // "./profiles/qwen.json",
 
         // using more than 1 profile requires you to /msg each bot indivually
     ],
diff --git a/src/agent/coder.js b/src/agent/coder.js
index fad23ea..d5bbf38 100644
--- a/src/agent/coder.js
+++ b/src/agent/coder.js
@@ -1,6 +1,9 @@
 import { writeFile, readFile, mkdirSync } from 'fs';
-import { checkSafe } from '../utils/safety.js';
 import settings from '../../settings.js';
+import { makeCompartment } from './library/lockdown.js';
+import * as skills from './library/skills.js';
+import * as world from './library/world.js';
+import { Vec3 } from 'vec3';
 import {ESLint} from "eslint";
 
 export class Coder {
@@ -47,6 +50,6 @@ export class Coder {
         return result ;
     }
-    // write custom code to file and import it
+    // write custom code to file and prepare for evaluation
     async stageCode(code) {
         code = this.sanitizeCode(code);
         let src = '';
@@ -71,13 +74,25 @@
         //     });
         // } commented for now, useful to keep files for debugging
         this.file_counter++;
-        
-        let write_result = await this.writeFilePromise('.' + this.fp + filename, src)
+
+        let write_result = await this.writeFilePromise('.' + this.fp + filename, src);
+        // This is where we determine the environment the agent's code should be exposed to.
+        // It will only have access to these things (in addition to basic javascript objects like Array, Object, etc.).
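+        // A SES Compartment exposes only the standard JavaScript intrinsics by default, so host globals such as process, require, or fetch are out of reach of the generated code unless they are explicitly endowed here.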
+        // Note that the code may be able to modify the exposed objects.
+        const compartment = makeCompartment({
+            skills,
+            log: skills.log,
+            world,
+            Vec3,
+        });
+        const mainFn = compartment.evaluate(src);
+
         if (write_result) {
             console.error('Error writing code execution file: ' + result);
             return null;
         }
-        return {filename,src};
+
+        return [{ main: mainFn }, src];
     }
 
     sanitizeCode(code) {
@@ -154,28 +169,23 @@
         }
         code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```'));
 
-        if (!checkSafe(code)) {
-            console.warn(`Detected insecure generated code, not executing. Insecure code: \n\`${code}\``);
-            const message = 'Error: Code insecurity detected. Do not import, read/write files, execute dynamic code, or access the internet. Please try again:';
-            messages.push({ role: 'system', content: message });
-            continue;
-        }
-
-        let {filename,src} = await this.stageCode(code);
-        const analysisResult = await this.checkCode(src);
-        if (analysisResult) {
-            const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3);
-            messages.push({ role: 'system', content: message });
-            continue;
-        }
-        const execution_file = await import('../..' +this.fp+filename);
-        if (!execution_file) {
+        let codeStagingResult,src;
+        try {
+            [codeStagingResult,src] = await this.stageCode(code);
+            const analysisResult = await this.checkCode(src);
+            if (analysisResult) {
+                const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3);
+                messages.push({ role: 'system', content: message });
+                continue;
+            }
+        } catch (err) {
+            console.error('Error staging code:', err);
             agent_history.add('system', 'Failed to stage code, something is wrong.');
             return {success: false, message: null, interrupted: false, timedout: false};
         }
         code_return = await this.execute(async ()=>{
-            return await execution_file.main(this.agent.bot);
+            return await codeStagingResult.main(this.agent.bot);
         }, settings.code_timeout_mins);
         if (code_return.interrupted && !code_return.timedout)
             return {success: false, message: null, interrupted: true, timedout: false};
@@ -250,6 +260,7 @@
             this.executing = false;
             clearTimeout(TIMEOUT);
             this.cancelResume();
+            console.error("Code execution triggered catch: " + err);
             await this.stop();
             err = err.toString();
diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js
index a80ad29..7938d5e 100644
--- a/src/agent/commands/actions.js
+++ b/src/agent/commands/actions.js
@@ -265,7 +265,7 @@ export const actionsList = [
     },
     {
         name: '!clearFurnace',
-        description: 'Tak all items out of the nearest furnace.',
+        description: 'Take all items out of the nearest furnace.',
         params: { },
         perform: wrapExecution(async (agent) => {
             await skills.clearNearestFurnace(agent.bot);
diff --git a/src/agent/library/lockdown.js b/src/agent/library/lockdown.js
new file mode 100644
index 0000000..2d8f79d
--- /dev/null
+++ b/src/agent/library/lockdown.js
@@ -0,0 +1,26 @@
+import 'ses';
+
+// This sets up the secure environment
+// We disable some of the taming to allow for more flexibility
+
+// For configuration, see https://github.com/endojs/endo/blob/master/packages/ses/docs/lockdown.md
+lockdown({
+  // basic devex and quality of life improvements
+  localeTaming: 'unsafe',
+  consoleTaming: 'unsafe',
+  errorTaming: 'unsafe',
+  stackFiltering: 'verbose',
+  // allow eval outside of created compartments
+  // (mineflayer dep "protodef" 
uses eval) + evalTaming: 'unsafeEval', +}); + +export const makeCompartment = (endowments = {}) => { + return new Compartment({ + // provide untamed Math, Date, etc + Math, + Date, + // standard endowments + ...endowments + }); +} \ No newline at end of file diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 317f4b6..48649f9 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -5,13 +5,14 @@ import {getSkillDocs} from './library/index.js'; import {stringifyTurns} from '../utils/text.js'; import {cosineSimilarity} from '../utils/math.js'; -import {Gemini} from '../models/gemini.js'; -import {GPT} from '../models/gpt.js'; -import {Claude} from '../models/claude.js'; -import {ReplicateAPI} from '../models/replicate.js'; -import {Local} from '../models/local.js'; -import {GroqCloudAPI} from '../models/groq.js'; -import {HuggingFace} from '../models/huggingface.js'; +import { Gemini } from '../models/gemini.js'; +import { GPT } from '../models/gpt.js'; +import { Claude } from '../models/claude.js'; +import { ReplicateAPI } from '../models/replicate.js'; +import { Local } from '../models/local.js'; +import { GroqCloudAPI } from '../models/groq.js'; +import { HuggingFace } from '../models/huggingface.js'; +import { Qwen } from "../models/qwen.js"; export class Prompter { constructor(agent, fp) { @@ -44,6 +45,8 @@ export class Prompter { chat.api = 'replicate'; else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/")) chat.api = 'groq'; + else if (chat.model.includes('qwen')) + chat.api = 'qwen'; else chat.api = 'ollama'; } @@ -65,6 +68,8 @@ export class Prompter { } else if (chat.api === 'huggingface') this.chat_model = new HuggingFace(chat.model, chat.url); + else if (chat.api === 'qwen') + this.chat_model = new Qwen(chat.model, chat.url); else throw new Error('Unknown API:', api); @@ -88,6 +93,8 @@ export class Prompter { this.embedding_model = new ReplicateAPI(embedding.model, embedding.url); else if (embedding.api === 'ollama') this.embedding_model = new Local(embedding.model, embedding.url); + else if (embedding.api === 'qwen') + this.embedding_model = new Qwen(embedding.model, embedding.url); else { this.embedding_model = null; console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. Using word overlap.'); diff --git a/src/models/qwen.js b/src/models/qwen.js new file mode 100644 index 0000000..d3d7bec --- /dev/null +++ b/src/models/qwen.js @@ -0,0 +1,104 @@ +// This code uses Dashscope and HTTP to ensure the latest support for the Qwen model. 
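+// Requests go to the DashScope REST endpoints via fetch; the QWEN_API_KEY from keys.json is sent as a Bearer token.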
+// Qwen is also compatible with the OpenAI API format; + +import { getKey } from '../utils/keys.js'; + +export class Qwen { + constructor(modelName, url) { + this.modelName = modelName; + this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'; + this.apiKey = getKey('QWEN_API_KEY'); + } + + async sendRequest(turns, systemMessage, stopSeq = '***', retryCount = 0) { + if (retryCount > 5) { + console.error('Maximum retry attempts reached.'); + return 'Error: Too many retry attempts.'; + } + + const data = { + model: this.modelName || 'qwen-plus', + input: { messages: [{ role: 'system', content: systemMessage }, ...turns] }, + parameters: { result_format: 'message', stop: stopSeq }, + }; + + // Add default user message if all messages are 'system' role + if (turns.every((msg) => msg.role === 'system')) { + data.input.messages.push({ role: 'user', content: 'hello' }); + } + + if (!data.model || !data.input || !data.input.messages || !data.parameters) { + console.error('Invalid request data format:', data); + throw new Error('Invalid request data format.'); + } + + try { + const response = await this._makeHttpRequest(this.url, data); + const choice = response?.output?.choices?.[0]; + + if (choice?.finish_reason === 'length' && turns.length > 0) { + return this.sendRequest(turns.slice(1), systemMessage, stopSeq, retryCount + 1); + } + + return choice?.message?.content || 'No content received.'; + } catch (err) { + console.error('Error occurred:', err); + return 'An error occurred, please try again.'; + } + } + + async embed(text) { + if (!text || typeof text !== 'string') { + console.error('Invalid embedding input: text must be a non-empty string.'); + return 'Invalid embedding input: text must be a non-empty string.'; + } + + const data = { + model: 'text-embedding-v2', + input: { texts: [text] }, + parameters: { text_type: 'query' }, + }; + + if (!data.model || !data.input || !data.input.texts || !data.parameters) { + console.error('Invalid embedding request data format:', data); + throw new Error('Invalid embedding request data format.'); + } + + try { + const response = await this._makeHttpRequest(this.url, data); + const embedding = response?.output?.embeddings?.[0]?.embedding; + return embedding || 'No embedding result received.'; + } catch (err) { + console.error('Error occurred:', err); + return 'An error occurred, please try again.'; + } + } + + async _makeHttpRequest(url, data) { + const headers = { + 'Authorization': `Bearer ${this.apiKey}`, + 'Content-Type': 'application/json', + }; + + const response = await fetch(url, { + method: 'POST', + headers, + body: JSON.stringify(data), + }); + + if (!response.ok) { + const errorText = await response.text(); + console.error(`Request failed, status code ${response.status}: ${response.statusText}`); + console.error('Error response content:', errorText); + throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`); + } + + const responseText = await response.text(); + try { + return JSON.parse(responseText); + } catch (err) { + console.error('Failed to parse response JSON:', err); + throw new Error('Invalid response JSON format.'); + } + } +} diff --git a/src/utils/safety.js b/src/utils/safety.js deleted file mode 100644 index 864263e..0000000 --- a/src/utils/safety.js +++ /dev/null @@ -1,38 +0,0 @@ -export function checkSafe(code) { - const dangerousPatterns = [ - // Dynamic imports - /\bimport\s*\(/, - // Access to process and global - /\bprocess\b/, - /\bglobal\b/, - 
// Module manipulation - /\bmodule\b/, - /\bexports\b/, - // Require usage - /\brequire\s*\(/, - // Function constructors - /\bFunction\s*\(/, - /\beval\s*\(/, - // Access to __dirname and __filename - /\b__dirname\b/, - /\b__filename\b/, - - // fetch - /\bfetch\s*\(/, - // XMLHttpRequest - /\bXMLHttpRequest\b/, - // Websockets - /\bWebSocket\b/, - ]; - - for (const pattern of dangerousPatterns) { - if (pattern.test(code)) { - return false; - } - } - return true; -} - -// generated by o1 -// Basic check for malicious code like dynamic imports, code exec, disk access, internet access, etc. -// Will not catch all, and can be bypassed by obfuscation. \ No newline at end of file diff --git a/src/utils/translator.js b/src/utils/translator.js index 879cfe4..bc9cc77 100644 --- a/src/utils/translator.js +++ b/src/utils/translator.js @@ -1,18 +1,14 @@ import translate from 'google-translate-api-x'; import settings from '../../settings.js'; -const preferred_lang = settings.language; +const preferred_lang = String(settings.language).toLowerCase(); export async function handleTranslation(message) { + if (preferred_lang === 'en' || preferred_lang === 'english') + return message; try { - if (preferred_lang.toLowerCase() === 'en' || preferred_lang.toLowerCase() === 'english') { - return message; - } else { - const lang = String(preferred_lang); - - const translation = await translate(message, { to: lang }); - return translation.text || message; - } + const translation = await translate(message, { to: preferred_lang }); + return translation.text || message; } catch (error) { console.error('Error translating message:', error); return message; @@ -20,6 +16,8 @@ export async function handleTranslation(message) { } export async function handleEnglishTranslation(message) { + if (preferred_lang === 'en' || preferred_lang === 'english') + return message; try { const translation = await translate(message, { to: 'english' }); return translation.text || message;