From 90df61d2be369192523e2522a804cb25f1363b7b Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Fri, 1 Nov 2024 01:08:30 +0800 Subject: [PATCH 01/65] Sort docs by relevance to !newAction("task") --- src/agent/library/index.js | 19 ++++++------ src/agent/prompter.js | 60 +++++++++++++++++++++++++++----------- 2 files changed, 53 insertions(+), 26 deletions(-) diff --git a/src/agent/library/index.js b/src/agent/library/index.js index 677dc11..ae864b0 100644 --- a/src/agent/library/index.js +++ b/src/agent/library/index.js @@ -3,20 +3,21 @@ import * as world from './world.js'; export function docHelper(functions, module_name) { - let docstring = ''; + let docArray = []; for (let skillFunc of functions) { let str = skillFunc.toString(); - if (str.includes('/**')){ - docstring += module_name+'.'+skillFunc.name; - docstring += str.substring(str.indexOf('/**')+3, str.indexOf('**/')) + '\n'; + if (str.includes('/**')) { + let docEntry = `${module_name}.${skillFunc.name}\n`; + docEntry += str.substring(str.indexOf('/**') + 3, str.indexOf('**/')).trim(); + docArray.push(docEntry); } } - return docstring; + return docArray; } export function getSkillDocs() { - let docstring = "\n*SKILL DOCS\nThese skills are javascript functions that can be called when writing actions and skills.\n"; - docstring += docHelper(Object.values(skills), 'skills'); - docstring += docHelper(Object.values(world), 'world'); - return docstring + '*\n'; + let docArray = []; + docArray = docArray.concat(docHelper(Object.values(skills), 'skills')); + docArray = docArray.concat(docHelper(Object.values(world), 'world')); + return docArray; } diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 114064a..3ba51dd 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -1,17 +1,17 @@ -import { readFileSync, mkdirSync, writeFileSync} from 'fs'; -import { Examples } from '../utils/examples.js'; -import { getCommandDocs } from './commands/index.js'; -import { getSkillDocs } from './library/index.js'; -import { stringifyTurns } from '../utils/text.js'; -import { getCommand } from './commands/index.js'; +import {mkdirSync, readFileSync, writeFileSync} from 'fs'; +import {Examples} from '../utils/examples.js'; +import {getCommand, getCommandDocs} from './commands/index.js'; +import {getSkillDocs} from './library/index.js'; +import {stringifyTurns} from '../utils/text.js'; +import {cosineSimilarity} from '../utils/math.js'; -import { Gemini } from '../models/gemini.js'; -import { GPT } from '../models/gpt.js'; -import { Claude } from '../models/claude.js'; -import { ReplicateAPI } from '../models/replicate.js'; -import { Local } from '../models/local.js'; -import { GroqCloudAPI } from '../models/groq.js'; -import { HuggingFace } from '../models/huggingface.js'; +import {Gemini} from '../models/gemini.js'; +import {GPT} from '../models/gpt.js'; +import {Claude} from '../models/claude.js'; +import {ReplicateAPI} from '../models/replicate.js'; +import {Local} from '../models/local.js'; +import {GroqCloudAPI} from '../models/groq.js'; +import {HuggingFace} from '../models/huggingface.js'; export class Prompter { constructor(agent, fp) { @@ -19,7 +19,8 @@ export class Prompter { this.profile = JSON.parse(readFileSync(fp, 'utf8')); this.convo_examples = null; this.coding_examples = null; - + this.skill_docs_embeddings = {}; + let name = this.profile.name; let chat = this.profile.model; this.cooldown = this.profile.cooldown ? 
this.profile.cooldown : 0; @@ -111,16 +112,41 @@ export class Prompter { async initExamples() { // Using Promise.all to implement concurrent processing - // Create Examples instances this.convo_examples = new Examples(this.embedding_model); this.coding_examples = new Examples(this.embedding_model); - // Use Promise.all to load examples concurrently + let skill_docs = getSkillDocs(); await Promise.all([ this.convo_examples.load(this.profile.conversation_examples), this.coding_examples.load(this.profile.coding_examples), + ...skill_docs.map(async (doc) => { + let func_name_desc = doc.split('\n').slice(0, 2).join(''); + this.skill_docs_embeddings[doc] = await this.embedding_model.embed([func_name_desc]); + }), ]); } + async getRelevantSkillDocs(messages, select_num) { + let latest_message_content = messages.slice().reverse().find(msg => msg.role !== 'system')?.content || ''; + let latest_message_embedding = await this.embedding_model.embed([latest_message_content]); + + let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) + })) + .sort((a, b) => b.similarity_score - a.similarity_score); + + // select_num = -1 means select all + let selected_docs = skill_doc_similarities.slice(0, select_num === -1 ? skill_doc_similarities.length : select_num); + let message = '\nThe following recommended functions are listed in descending order of task relevance.\nSkillDocs:\n'; + message += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); + return message; + } + + + + + async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { prompt = prompt.replaceAll('$NAME', this.agent.name); @@ -135,7 +161,7 @@ export class Prompter { if (prompt.includes('$COMMAND_DOCS')) prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); if (prompt.includes('$CODE_DOCS')) - prompt = prompt.replaceAll('$CODE_DOCS', getSkillDocs()); + prompt = prompt.replaceAll('$CODE_DOCS', this.getRelevantSkillDocs(messages, -1)); if (prompt.includes('$EXAMPLES') && examples !== null) prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); if (prompt.includes('$MEMORY')) From f264b23ccc0cbe63c5d307717efdaf9ae6479f7e Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Fri, 1 Nov 2024 13:39:13 +0800 Subject: [PATCH 02/65] Add select_num exception range judgment --- src/agent/prompter.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 3ba51dd..d44829e 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -136,17 +136,19 @@ export class Prompter { })) .sort((a, b) => b.similarity_score - a.similarity_score); - // select_num = -1 means select all - let selected_docs = skill_doc_similarities.slice(0, select_num === -1 ? 
skill_doc_similarities.length : select_num); + let length = skill_doc_similarities.length; + if (typeof select_num !== 'number' || isNaN(select_num) || select_num <= 0) { + select_num = length; + } else { + select_num = Math.min(Math.floor(select_num), length); + } + let selected_docs = skill_doc_similarities.slice(0, select_num); let message = '\nThe following recommended functions are listed in descending order of task relevance.\nSkillDocs:\n'; message += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); + console.log(message); return message; } - - - - async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { prompt = prompt.replaceAll('$NAME', this.agent.name); From 17fa2b6083dfc2bd218706895d38431b3536297d Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Fri, 1 Nov 2024 13:47:08 +0800 Subject: [PATCH 03/65] Add select_num exception range judgment --- src/agent/prompter.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index d44829e..530de3d 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -137,7 +137,7 @@ export class Prompter { .sort((a, b) => b.similarity_score - a.similarity_score); let length = skill_doc_similarities.length; - if (typeof select_num !== 'number' || isNaN(select_num) || select_num <= 0) { + if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { select_num = length; } else { select_num = Math.min(Math.floor(select_num), length); @@ -145,7 +145,6 @@ export class Prompter { let selected_docs = skill_doc_similarities.slice(0, select_num); let message = '\nThe following recommended functions are listed in descending order of task relevance.\nSkillDocs:\n'; message += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); - console.log(message); return message; } @@ -163,7 +162,7 @@ export class Prompter { if (prompt.includes('$COMMAND_DOCS')) prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); if (prompt.includes('$CODE_DOCS')) - prompt = prompt.replaceAll('$CODE_DOCS', this.getRelevantSkillDocs(messages, -1)); + prompt = prompt.replaceAll('$CODE_DOCS', this.getRelevantSkillDocs(messages, 0)); if (prompt.includes('$EXAMPLES') && examples !== null) prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); if (prompt.includes('$MEMORY')) From ecaf5e87e14b8ba5430b49fdfff8b933a347e059 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sat, 2 Nov 2024 20:26:56 +0800 Subject: [PATCH 04/65] Code capability enhancement & bot crash fix --- package.json | 5 +++++ settings.js | 2 +- src/agent/coder.js | 42 +++++++++++++++++++++++++++++++++++++----- src/agent/prompter.js | 17 +++++++++-------- 4 files changed, 52 insertions(+), 14 deletions(-) diff --git a/package.json b/package.json index cd5b272..8ba7e1a 100644 --- a/package.json +++ b/package.json @@ -24,5 +24,10 @@ "scripts": { "postinstall": "patch-package", "start": "node main.js" + }, + "devDependencies": { + "@eslint/js": "^9.13.0", + "eslint": "^9.13.0", + "globals": "^15.11.0" } } diff --git a/settings.js b/settings.js index 1de12c0..1b33d94 100644 --- a/settings.js +++ b/settings.js @@ -21,7 +21,7 @@ export default "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk - "code_timeout_mins": 10, // minutes code is allowed to run. 
-1 for no timeout + "code_timeout_mins": 1, // minutes code is allowed to run. -1 for no timeout,set 1.Set 1 min to timely code adjustments "max_messages": 15, // max number of messages to keep in context "max_commands": -1, // max number of commands to use in a response. -1 for no limit diff --git a/src/agent/coder.js b/src/agent/coder.js index d312387..fad23ea 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -1,6 +1,7 @@ import { writeFile, readFile, mkdirSync } from 'fs'; import { checkSafe } from '../utils/safety.js'; import settings from '../../settings.js'; +import {ESLint} from "eslint"; export class Coder { constructor(agent) { @@ -20,7 +21,31 @@ export class Coder { mkdirSync('.' + this.fp, { recursive: true }); } + + async checkCode(code) { + const eslint = new ESLint(); + const results = await eslint.lintText(code); + const codeLines = code.split('\n'); + let result = '#### CODE ERROR INFO ###\n'; + const exceptions = results.map(r => r.messages).flat(); + if (exceptions.length > 0) { + exceptions.forEach((exc, index) => { + if (exc.line && exc.column ) { + const errorLine = codeLines[exc.line - 1]?.trim() || 'Unable to retrieve error line content'; + result += `#ERROR ${index + 1}\n`; + result += `Message: ${exc.message}\n`; + result += `Location: Line ${exc.line}, Column ${exc.column}\n`; + result += `Related Code Line: ${errorLine}\n\n`; + } + }); + result += 'The code contains exceptions and cannot continue execution.\n'; + } else { + return null;//no error + } + + return result ; + } // write custom code to file and import it async stageCode(code) { code = this.sanitizeCode(code); @@ -48,12 +73,11 @@ export class Coder { this.file_counter++; let write_result = await this.writeFilePromise('.' + this.fp + filename, src) - if (write_result) { console.error('Error writing code execution file: ' + result); return null; } - return await import('../..' + this.fp + filename); + return {filename,src}; } sanitizeCode(code) { @@ -137,7 +161,14 @@ export class Coder { continue; } - const execution_file = await this.stageCode(code); + let {filename,src} = await this.stageCode(code); + const analysisResult = await this.checkCode(src); + if (analysisResult) { + const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3); + messages.push({ role: 'system', content: message }); + continue; + } + const execution_file = await import('../..' +this.fp+filename); if (!execution_file) { agent_history.add('system', 'Failed to stage code, something is wrong.'); return {success: false, message: null, interrupted: false, timedout: false}; @@ -219,10 +250,11 @@ export class Coder { this.executing = false; clearTimeout(TIMEOUT); this.cancelResume(); - console.error("Code execution triggered catch: " + err); await this.stop(); - let message = this.formatOutput(this.agent.bot) + '!!Code threw exception!! Error: ' + err; + err = err.toString(); + let relevant_skill_docs = await this.agent.prompter.getRelevantSkillDocs(err,5); + let message = this.formatOutput(this.agent.bot) + '!!Code threw exception!! 
Error: ' + err+'\n'+relevant_skill_docs; let interrupted = this.agent.bot.interrupt_code; this.clear(); if (!interrupted && !this.generating) this.agent.bot.emit('idle'); diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 530de3d..317f4b6 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -125,9 +125,8 @@ export class Prompter { ]); } - async getRelevantSkillDocs(messages, select_num) { - let latest_message_content = messages.slice().reverse().find(msg => msg.role !== 'system')?.content || ''; - let latest_message_embedding = await this.embedding_model.embed([latest_message_content]); + async getRelevantSkillDocs(message, select_num) { + let latest_message_embedding = await this.embedding_model.embed(message); let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) .map(doc_key => ({ @@ -143,9 +142,9 @@ export class Prompter { select_num = Math.min(Math.floor(select_num), length); } let selected_docs = skill_doc_similarities.slice(0, select_num); - let message = '\nThe following recommended functions are listed in descending order of task relevance.\nSkillDocs:\n'; - message += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); - return message; + let relevant_skill_docs = '####RELEVENT DOCS INFO###\nThe following functions are listed in descending order of relevance.\nSkillDocs:\n'; + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); + return relevant_skill_docs; } async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { @@ -161,8 +160,10 @@ export class Prompter { } if (prompt.includes('$COMMAND_DOCS')) prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); - if (prompt.includes('$CODE_DOCS')) - prompt = prompt.replaceAll('$CODE_DOCS', this.getRelevantSkillDocs(messages, 0)); + if (prompt.includes('$CODE_DOCS')){ + let latest_message_content = messages.slice().reverse().find(msg => msg.role !== 'system')?.content || ''; + prompt = prompt.replaceAll('$CODE_DOCS', await this.getRelevantSkillDocs(latest_message_content, 5)); + } if (prompt.includes('$EXAMPLES') && examples !== null) prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); if (prompt.includes('$MEMORY')) From 5e84d69aeeab09711c2d309bbd275bf7e27b4e27 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 3 Nov 2024 03:52:00 +0800 Subject: [PATCH 05/65] Merger conflict resolution --- bots/codeChackTemplate.js | 10 ++++++++++ eslint.config.js | 25 +++++++++++++++++++++++++ src/agent/coder.js | 18 ++++++++++++------ 3 files changed, 47 insertions(+), 6 deletions(-) create mode 100644 bots/codeChackTemplate.js create mode 100644 eslint.config.js diff --git a/bots/codeChackTemplate.js b/bots/codeChackTemplate.js new file mode 100644 index 0000000..77b5d97 --- /dev/null +++ b/bots/codeChackTemplate.js @@ -0,0 +1,10 @@ +import * as skills from '../../../src/agent/library/skills.js'; +import * as world from '../../../src/agent/library/world.js'; +import Vec3 from 'vec3'; + +const log = skills.log; + +export async function main(bot) { + /* CODE HERE */ + log(bot, 'Code finished.'); +} \ No newline at end of file diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..1bdf2b3 --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,25 @@ +// eslint.config.js +import globals from "globals"; +import pluginJs from "@eslint/js"; + +/** @type {import('eslint').Linter.Config[]} */ +export default [ + // 首先引入推荐配置 + pluginJs.configs.recommended, + + // 然后覆盖或自定义特定规则 + { + languageOptions: 
{ + globals: globals.browser, + ecmaVersion: 2021, + sourceType: "module", + }, + rules: { + "no-undef": "error", // 禁止使用未声明的变量或函数。 + "semi": ["error", "always"], // 强制在语句末尾使用分号。 + "curly": "warn", // 强制使用花括号包裹代码块。 + "no-unused-vars": "off", // 禁用未使用变量的警告。 + "no-unreachable": "off", // 禁用无法到达代码的警告。 + }, + }, +]; diff --git a/src/agent/coder.js b/src/agent/coder.js index d5bbf38..729bb03 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -14,6 +14,7 @@ export class Coder { this.executing = false; this.generating = false; this.code_template = ''; + this.code_chack_template = ''; this.timedout = false; this.cur_action_name = ''; @@ -21,7 +22,10 @@ export class Coder { if (err) throw err; this.code_template = data; }); - + readFile('./bots/codeChackTemplate.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_chack_template = data; + }); mkdirSync('.' + this.fp, { recursive: true }); } @@ -64,6 +68,7 @@ export class Coder { for (let line of code.split('\n')) { src += ` ${line}\n`; } + let src_check_copy = this.code_chack_template.replace('/* CODE HERE */', src); src = this.code_template.replace('/* CODE HERE */', src); let filename = this.file_counter + '.js'; @@ -92,8 +97,7 @@ export class Coder { console.error('Error writing code execution file: ' + result); return null; } - - return [ main: mainFn ,src]; + return { func:{main: mainFn}, src_check_copy: src_check_copy }; } sanitizeCode(code) { @@ -170,10 +174,12 @@ export class Coder { } code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); - let codeStagingResult,src; + let codeStagingResult,src_check_copy; try { - [codeStagingResult,src] = await this.stageCode(code); - const analysisResult = await this.checkCode(src); + const result = await this.stageCode(code); + codeStagingResult = result.func; + src_check_copy = result.src_check_copy; + const analysisResult = await this.checkCode(src_check_copy); if (analysisResult) { const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3); messages.push({ role: 'system', content: message }); From e1dfad90803a08a51062063c6ad0c1788d7ad94e Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 3 Nov 2024 04:01:11 +0800 Subject: [PATCH 06/65] Change note to English --- eslint.config.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index 1bdf2b3..e1506fd 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -4,10 +4,10 @@ import pluginJs from "@eslint/js"; /** @type {import('eslint').Linter.Config[]} */ export default [ - // 首先引入推荐配置 + // First, import the recommended configuration pluginJs.configs.recommended, - // 然后覆盖或自定义特定规则 + // Then override or customize specific rules { languageOptions: { globals: globals.browser, @@ -15,11 +15,11 @@ export default [ sourceType: "module", }, rules: { - "no-undef": "error", // 禁止使用未声明的变量或函数。 - "semi": ["error", "always"], // 强制在语句末尾使用分号。 - "curly": "warn", // 强制使用花括号包裹代码块。 - "no-unused-vars": "off", // 禁用未使用变量的警告。 - "no-unreachable": "off", // 禁用无法到达代码的警告。 + "no-undef": "error", // Disallow the use of undeclared variables or functions. + "semi": ["error", "always"], // Require the use of semicolons at the end of statements. + "curly": "warn", // Enforce the use of curly braces around blocks of code. + "no-unused-vars": "off", // Disable warnings for unused variables. + "no-unreachable": "off", // Disable warnings for unreachable code. 
}, }, ]; From 82b37e02aab56559f148253a0cdd0592c943c8d6 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Tue, 5 Nov 2024 01:08:56 +0800 Subject: [PATCH 07/65] Resolving merge conflicts with Task Manager --- src/agent/action_manager.js | 4 +++- src/agent/coder.js | 46 ++++++++++++++++++++++++------------- src/agent/prompter.js | 9 +++++--- 3 files changed, 39 insertions(+), 20 deletions(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 833f3c0..5133a6b 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -111,7 +111,9 @@ export class ActionManager { console.error("Code execution triggered catch: " + err); await this.stop(); - let message = this._getBotOutputSummary() + '!!Code threw exception!! Error: ' + err; + err = err.toString(); + let relevant_skill_docs = await this.agent.prompter.getRelevantSkillDocs(err,5); + let message = this._getBotOutputSummary() + '!!Code threw exception!! Error: ' + err+'\n'+relevant_skill_docs; let interrupted = this.agent.bot.interrupt_code; this.agent.clearBotLogs(); if (!interrupted && !this.agent.coder.generating) { diff --git a/src/agent/coder.js b/src/agent/coder.js index 36742d7..a4c18f6 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -27,10 +27,28 @@ export class Coder { } async checkCode(code) { + let result = '#### CODE ERROR INFO ###\n'; + // Extract everything in the code between the beginning of 'skills./world.' and the '(' + const skillRegex = /(?:skills|world)\.(.*?)\(/g; + const skills = []; + let match; + while ((match = skillRegex.exec(code)) !== null) { + skills.push(match[1]); + } + const allDocs = await this.agent.prompter.getRelevantSkillDocs(); + //Check if the function exists + const missingSkills = skills.filter(skill => !allDocs.includes(skill)); + if (missingSkills.length > 0) { + result += 'These functions do not exist. Please modify the correct function name and try again.\n'; + result += '### FUNCTIONS NOT FOUND ###\n'; + result += missingSkills.join('\n'); + console.log(result) + return result; + } + const eslint = new ESLint(); const results = await eslint.lintText(code); const codeLines = code.split('\n'); - let result = '#### CODE ERROR INFO ###\n'; const exceptions = results.map(r => r.messages).flat(); if (exceptions.length > 0) { @@ -40,10 +58,10 @@ export class Coder { result += `#ERROR ${index + 1}\n`; result += `Message: ${exc.message}\n`; result += `Location: Line ${exc.line}, Column ${exc.column}\n`; - result += `Related Code Line: ${errorLine}\n\n`; + result += `Related Code Line: ${errorLine}\n`; } }); - result += 'The code contains exceptions and cannot continue execution.\n'; + result += 'The code contains exceptions and cannot continue execution.'; } else { return null;//no error } @@ -172,14 +190,14 @@ export class Coder { code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); const result = await this.stageCode(code); const executionModuleExports = result.func; + let src_check_copy = result.src_check_copy; + const analysisResult = await this.checkCode(src_check_copy); + if (analysisResult) { + const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3); + messages.push({ role: 'system', content: message }); + continue; + } if (!executionModuleExports) { - let src_check_copy = result.src_check_copy; - const analysisResult = await this.checkCode(src_check_copy); - if (analysisResult) { - const message = 'Error: Code syntax error. 
Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3); - messages.push({ role: 'system', content: message }); - continue; - } agent_history.add('system', 'Failed to stage code, something is wrong.'); return {success: false, message: null, interrupted: false, timedout: false}; } @@ -189,10 +207,10 @@ export class Coder { }, { timeout: settings.code_timeout_mins }); if (code_return.interrupted && !code_return.timedout) return { success: false, message: null, interrupted: true, timedout: false }; - console.log("Code generation result:", code_return.success, code_return.message); + console.log("Code generation result:", code_return.success, code_return.message.toString()); if (code_return.success) { - const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message; + const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message.toString(); return { success: true, message: summary, interrupted: false, timedout: false }; } @@ -207,8 +225,4 @@ export class Coder { } return { success: false, message: null, interrupted: false, timedout: true }; } -//err = err.toString(); -// let relevant_skill_docs = await this.agent.prompter.getRelevantSkillDocs(err,5); -// let message = this.formatOutput(this.agent.bot) + '!!Code threw exception!! Error: ' + err+'\n'+relevant_skill_docs; -// } \ No newline at end of file diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 48649f9..0ef02f8 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -133,7 +133,9 @@ export class Prompter { } async getRelevantSkillDocs(message, select_num) { - let latest_message_embedding = await this.embedding_model.embed(message); + let latest_message_embedding = ''; + if(message) //message is not empty, get the relevant skill docs, else return all skill docs + latest_message_embedding = await this.embedding_model.embed(message); let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) .map(doc_key => ({ @@ -149,8 +151,9 @@ export class Prompter { select_num = Math.min(Math.floor(select_num), length); } let selected_docs = skill_doc_similarities.slice(0, select_num); - let relevant_skill_docs = '####RELEVENT DOCS INFO###\nThe following functions are listed in descending order of relevance.\nSkillDocs:\n'; - relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n'); + let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; + relevant_skill_docs += 'SkillDocs:\n' + relevant_skill_docs += '###'+ selected_docs.map(doc => `${doc.doc_key}`).join('\n'); return relevant_skill_docs; } From f6e309a6bcd640e5056a61991fd36740b6f8f574 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Wed, 6 Nov 2024 12:25:11 +0800 Subject: [PATCH 08/65] Fix spelling mistakes --- bots/{codeChackTemplate.js => codeCheckTemplate.js} | 0 src/agent/coder.js | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename bots/{codeChackTemplate.js => codeCheckTemplate.js} (100%) diff --git a/bots/codeChackTemplate.js b/bots/codeCheckTemplate.js similarity index 100% rename from bots/codeChackTemplate.js rename to bots/codeCheckTemplate.js diff --git a/src/agent/coder.js b/src/agent/coder.js index a4c18f6..d418829 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -19,7 +19,7 @@ export class Coder { if (err) throw err; this.code_template = 
data; }); - readFile('./bots/codeChackTemplate.js', 'utf8', (err, data) => { + readFile('./bots/codeCheckTemplate.js', 'utf8', (err, data) => { if (err) throw err; this.code_chack_template = data; }); From e15c5164756c1fe4d6b399af89dc0bf428843ec3 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Fri, 8 Nov 2024 18:43:54 +0800 Subject: [PATCH 09/65] Resolving conflicts created by adding new annotations --- src/agent/prompter.js | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 2ac98b3..06390e1 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -129,7 +129,7 @@ export class Prompter { this.convo_examples = new Examples(this.embedding_model); this.coding_examples = new Examples(this.embedding_model); - const [convoResult, codingResult] = await Promise.allSettled([ + const results = await Promise.allSettled([ this.convo_examples.load(this.profile.conversation_examples), this.coding_examples.load(this.profile.coding_examples), ...getSkillDocs().map(async (doc) => { @@ -138,7 +138,9 @@ export class Prompter { }) ]); - // Handle potential failures + // Handle potential failures for conversation and coding examples + const [convoResult, codingResult, ...skillDocResults] = results; + if (convoResult.status === 'rejected') { console.error('Failed to load conversation examples:', convoResult.reason); throw convoResult.reason; @@ -147,6 +149,12 @@ export class Prompter { console.error('Failed to load coding examples:', codingResult.reason); throw codingResult.reason; } + skillDocResults.forEach((result, index) => { + if (result.status === 'rejected') { + console.error(`Failed to load skill doc ${index + 1}:`, result.reason); + } + }); + } catch (error) { console.error('Failed to initialize examples:', error); throw error; From c8302c27ac2c61a3b765a89617be38642f87d694 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sat, 9 Nov 2024 01:29:24 +0800 Subject: [PATCH 10/65] Improve the relevance of docs to !newAction("task") --- src/agent/action_manager.js | 2 +- src/agent/prompter.js | 19 +++++++++++++++---- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 5133a6b..c016e70 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -133,7 +133,7 @@ export class ActionManager { First outputs:\n${output.substring(0, MAX_OUT / 2)}\n...skipping many lines.\nFinal outputs:\n ${output.substring(output.length - MAX_OUT / 2)}`; } else { - output = 'Code output:\n' + output; + output = 'Code output:\n' + output.toString(); } return output; } diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 06390e1..5952ac6 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -134,7 +134,7 @@ export class Prompter { this.coding_examples.load(this.profile.coding_examples), ...getSkillDocs().map(async (doc) => { let func_name_desc = doc.split('\n').slice(0, 2).join(''); - this.skill_docs_embeddings[doc] = await this.embedding_model.embed([func_name_desc]); + this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); }) ]); @@ -199,9 +199,20 @@ export class Prompter { } if (prompt.includes('$COMMAND_DOCS')) prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); - if (prompt.includes('$CODE_DOCS')){ - let latest_message_content = messages.slice().reverse().find(msg => msg.role !== 'system')?.content || ''; - prompt = prompt.replaceAll('$CODE_DOCS', await 
this.getRelevantSkillDocs(latest_message_content, 5)); + if (prompt.includes('$CODE_DOCS')) { + // Find the most recent non-system message containing '!newAction(' + let code_task_content = messages.slice().reverse().find(msg => + msg.role !== 'system' && msg.content.includes('!newAction(') + )?.content || ''; + + // Extract content between '!newAction(' and ')' + const match = code_task_content.match(/!newAction\((.*?)\)/); + code_task_content = match ? match[1] : ''; + + prompt = prompt.replaceAll( + '$CODE_DOCS', + await this.getRelevantSkillDocs(code_task_content, 5) + ); } if (prompt.includes('$EXAMPLES') && examples !== null) prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); From a3684516147bb1a52aee951652d6839c42c9d2fc Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sat, 9 Nov 2024 01:29:57 +0800 Subject: [PATCH 11/65] Fix Qwen api concurrency limit issue --- src/models/qwen.js | 72 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 51 insertions(+), 21 deletions(-) diff --git a/src/models/qwen.js b/src/models/qwen.js index d3d7bec..d546298 100644 --- a/src/models/qwen.js +++ b/src/models/qwen.js @@ -49,12 +49,12 @@ export class Qwen { async embed(text) { if (!text || typeof text !== 'string') { - console.error('Invalid embedding input: text must be a non-empty string.'); + console.error('Invalid embedding input: text must be a non-empty string:', text); return 'Invalid embedding input: text must be a non-empty string.'; } const data = { - model: 'text-embedding-v2', + model: this.modelName, input: { texts: [text] }, parameters: { text_type: 'query' }, }; @@ -67,38 +67,68 @@ export class Qwen { try { const response = await this._makeHttpRequest(this.url, data); const embedding = response?.output?.embeddings?.[0]?.embedding; + return embedding || 'No embedding result received.'; } catch (err) { - console.error('Error occurred:', err); + console.log('Embed data:', data); + console.error('Embed error occurred:', err); return 'An error occurred, please try again.'; } } - async _makeHttpRequest(url, data) { + async _makeHttpRequest(url, data, maxRetries = 10) { const headers = { 'Authorization': `Bearer ${this.apiKey}`, 'Content-Type': 'application/json', }; - const response = await fetch(url, { - method: 'POST', - headers, - body: JSON.stringify(data), - }); + let retryCount = 0; - if (!response.ok) { - const errorText = await response.text(); - console.error(`Request failed, status code ${response.status}: ${response.statusText}`); - console.error('Error response content:', errorText); - throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`); - } + while (retryCount < maxRetries) { + try { + const response = await fetch(url, { + method: 'POST', + headers, + body: JSON.stringify(data), + }); - const responseText = await response.text(); - try { - return JSON.parse(responseText); - } catch (err) { - console.error('Failed to parse response JSON:', err); - throw new Error('Invalid response JSON format.'); + if (response.ok) { + const responseText = await response.text(); + try { + //Task completed successfully + return JSON.parse(responseText); + } catch (err) { + console.error('Failed to parse response JSON:', err); + throw new Error('Invalid response JSON format.'); + } + } else { + const errorText = await response.text(); + + if (response.status === 429 || response.statusText.includes('Too Many Requests')) { + // Handle rate limiting + retryCount++; + if (retryCount >= maxRetries) { + console.error('Exceeded 
maximum retry attempts, unable to get request result.'); + throw new Error(`Request failed after ${maxRetries} retries due to rate limiting.`); + } + //Reached Qwen concurrency limit, waiting in queue + const waitTime = Math.random() * 1000; // Random wait between 0 to 1 seconds + await new Promise(resolve => setTimeout(resolve, waitTime)); + continue; // Retry the request + } else { + console.error(`Request failed, status code ${response.status}: ${response.statusText}`); + console.error('Error response content:', errorText); + throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`); + } + } + } catch (err) { + // Handle network errors or other exceptions + console.error('Error occurred during HTTP request:', err); + throw err; // Re-throw the error to be handled by the caller + } } + // Exceeded maximum retries + console.error('Exceeded maximum retry attempts, unable to get request result.'); + throw new Error(`Request failed after ${maxRetries} retries.`); } } From 2322f7857e36590a9409d4fe9d89d034c732de9d Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 10 Nov 2024 17:46:18 +0800 Subject: [PATCH 12/65] code_timeout_mins is set to 3 --- settings.js | 10 +++++----- src/agent/prompter.js | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/settings.js b/settings.js index 50f11d7..aa036c1 100644 --- a/settings.js +++ b/settings.js @@ -6,23 +6,23 @@ export default "auth": "offline", // or "microsoft" "profiles": [ - "./andy.json", + //"./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", // "./profiles/gemini.json", // "./profiles/llama.json", - // "./profiles/qwen.json", + "./profiles/qwen.json", // using more than 1 profile requires you to /msg each bot indivually ], - "load_memory": false, // load memory from previous session + "load_memory": true, // load memory from previous session "init_message": "Say hello world and your name", // sends to all on spawn "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... - "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk - "code_timeout_mins": 1, // minutes code is allowed to run. -1 for no timeout,set 1.Set 1 min to timely code adjustments + "allow_insecure_coding": true, // allows newAction command and model can write/run code on your computer. enable at own risk + "code_timeout_mins": 3, // minutes code is allowed to run. -1 for no timeout,set 1.Set 1 min to timely code adjustments "max_messages": 15, // max number of messages to keep in context "max_commands": -1, // max number of commands to use in a response. 
-1 for no limit diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 5952ac6..bf81a10 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -182,7 +182,7 @@ export class Prompter { let selected_docs = skill_doc_similarities.slice(0, select_num); let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; relevant_skill_docs += 'SkillDocs:\n' - relevant_skill_docs += '###'+ selected_docs.map(doc => `${doc.doc_key}`).join('\n'); + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); return relevant_skill_docs; } From dd176afc71560b00f9062c6a6b0a7e3ab0c57ef7 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 10 Nov 2024 17:50:30 +0800 Subject: [PATCH 13/65] set default profiles to andy.json --- settings.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/settings.js b/settings.js index aa036c1..b23feb1 100644 --- a/settings.js +++ b/settings.js @@ -6,12 +6,12 @@ export default "auth": "offline", // or "microsoft" "profiles": [ - //"./andy.json", + "./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", // "./profiles/gemini.json", // "./profiles/llama.json", - "./profiles/qwen.json", + // "./profiles/qwen.json", // using more than 1 profile requires you to /msg each bot indivually ], From 69c0bd158467d23ec791ced4ff861ec93bb8e354 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 10 Nov 2024 17:52:19 +0800 Subject: [PATCH 14/65] Default settings except code_timeout_mins --- settings.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/settings.js b/settings.js index b23feb1..46d529f 100644 --- a/settings.js +++ b/settings.js @@ -15,13 +15,13 @@ export default // using more than 1 profile requires you to /msg each bot indivually ], - "load_memory": true, // load memory from previous session + "load_memory": false, // load memory from previous session "init_message": "Say hello world and your name", // sends to all on spawn "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... - "allow_insecure_coding": true, // allows newAction command and model can write/run code on your computer. enable at own risk + "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk "code_timeout_mins": 3, // minutes code is allowed to run. -1 for no timeout,set 1.Set 1 min to timely code adjustments "max_messages": 15, // max number of messages to keep in context From cba7f7b431d61219083c6d970d331eb19322bdba Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 10 Nov 2024 17:58:23 +0800 Subject: [PATCH 15/65] Default settings except code_timeout_mins --- settings.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.js b/settings.js index 46d529f..b090838 100644 --- a/settings.js +++ b/settings.js @@ -22,7 +22,7 @@ export default "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk - "code_timeout_mins": 3, // minutes code is allowed to run. -1 for no timeout,set 1.Set 1 min to timely code adjustments + "code_timeout_mins": 3, // minutes code is allowed to run. 
-1 for no timeout,set 3.Set 3 min to timely code adjustments "max_messages": 15, // max number of messages to keep in context "max_commands": -1, // max number of commands to use in a response. -1 for no limit From 1835d5e541fc70cf76bddae5539f1bec8ab75f39 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sat, 14 Dec 2024 00:07:01 +0800 Subject: [PATCH 16/65] Resolve merge conflicts with latest code --- profiles/qwen.json | 4 ++-- src/agent/prompter.js | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/profiles/qwen.json b/profiles/qwen.json index b7ba37a..a531714 100644 --- a/profiles/qwen.json +++ b/profiles/qwen.json @@ -4,12 +4,12 @@ "model": { "api": "qwen", "url": "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation", - "model": "qwen-max" + "model": "qwen-plus" }, "embedding": { "api": "qwen", "url": "https://dashscope.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding", - "model": "text-embedding-v2" + "model": "text-embedding-v3" } } \ No newline at end of file diff --git a/src/agent/prompter.js b/src/agent/prompter.js index d7b21cf..f514ca8 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -15,6 +15,7 @@ import { GroqCloudAPI } from '../models/groq.js'; import { HuggingFace } from '../models/huggingface.js'; import { Qwen } from "../models/qwen.js"; import { Grok } from "../models/grok.js"; +import {cosineSimilarity} from "../utils/math.js"; export class Prompter { constructor(agent, fp) { @@ -163,7 +164,30 @@ export class Prompter { throw error; } } + async getRelevantSkillDocs(message, select_num) { + let latest_message_embedding = ''; + if(message) //message is not empty, get the relevant skill docs, else return all skill docs + latest_message_embedding = await this.embedding_model.embed(message); + let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) + })) + .sort((a, b) => b.similarity_score - a.similarity_score); + + let length = skill_doc_similarities.length; + if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { + select_num = length; + } else { + select_num = Math.min(Math.floor(select_num), length); + } + let selected_docs = skill_doc_similarities.slice(0, select_num); + let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; + relevant_skill_docs += 'SkillDocs:\n' + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); + return relevant_skill_docs; + } async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { prompt = prompt.replaceAll('$NAME', this.agent.name); From c5b6cd59d82ac4aca245ab53700be16e9802f783 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Sun, 15 Dec 2024 00:05:12 +0800 Subject: [PATCH 17/65] Preliminary code separation --- src/agent/action_manager.js | 2 +- src/agent/coder.js | 4 ++-- src/agent/prompter.js | 40 +++++++------------------------------ 3 files changed, 10 insertions(+), 36 deletions(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 8da8ad0..97475dd 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -113,7 +113,7 @@ export class ActionManager { console.error(err.stack); await this.stop(); err = err.toString(); - let relevant_skill_docs = await this.agent.prompter.getRelevantSkillDocs(err,5); + 
let relevant_skill_docs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(err,5); let message = this._getBotOutputSummary() + '!!Code threw exception!!\n' + diff --git a/src/agent/coder.js b/src/agent/coder.js index d418829..afc037b 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -35,7 +35,7 @@ export class Coder { while ((match = skillRegex.exec(code)) !== null) { skills.push(match[1]); } - const allDocs = await this.agent.prompter.getRelevantSkillDocs(); + const allDocs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(); //Check if the function exists const missingSkills = skills.filter(skill => !allDocs.includes(skill)); if (missingSkills.length > 0) { @@ -193,7 +193,7 @@ export class Coder { let src_check_copy = result.src_check_copy; const analysisResult = await this.checkCode(src_check_copy); if (analysisResult) { - const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.getRelevantSkillDocs(analysisResult,3); + const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.skill_libary.getRelevantSkillDocs(analysisResult,3); messages.push({ role: 'system', content: message }); continue; } diff --git a/src/agent/prompter.js b/src/agent/prompter.js index f514ca8..d1af5cf 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -1,7 +1,7 @@ import { readFileSync, mkdirSync, writeFileSync} from 'fs'; import { Examples } from '../utils/examples.js'; import { getCommandDocs } from './commands/index.js'; -import { getSkillDocs } from './library/index.js'; +// import { getSkillDocs } from './library/index.js'; import { stringifyTurns } from '../utils/text.js'; import { getCommand } from './commands/index.js'; @@ -15,7 +15,8 @@ import { GroqCloudAPI } from '../models/groq.js'; import { HuggingFace } from '../models/huggingface.js'; import { Qwen } from "../models/qwen.js"; import { Grok } from "../models/grok.js"; -import {cosineSimilarity} from "../utils/math.js"; +// import {cosineSimilarity} from "../utils/math.js"; +import {SkillLibrary} from "./library/skill_library.js"; export class Prompter { constructor(agent, fp) { @@ -30,7 +31,7 @@ export class Prompter { this.convo_examples = null; this.coding_examples = null; - this.skill_docs_embeddings = {}; + let name = this.profile.name; let chat = this.profile.model; @@ -125,7 +126,7 @@ export class Prompter { console.log('Continuing anyway, using word overlap instead.'); this.embedding_model = null; } - + this.skill_libary = new SkillLibrary(agent, this.embedding_model); mkdirSync(`./bots/${name}`, { recursive: true }); writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => { if (err) { @@ -152,10 +153,7 @@ export class Prompter { await Promise.all([ this.convo_examples.load(this.profile.conversation_examples), this.coding_examples.load(this.profile.coding_examples), - ...getSkillDocs().map(async (doc) => { - let func_name_desc = doc.split('\n').slice(0, 2).join(''); - this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); - }) + this.skill_libary.initSkillLibrary() ]); console.log('Examples initialized.'); @@ -164,30 +162,6 @@ export class Prompter { throw error; } } - async getRelevantSkillDocs(message, select_num) { - let latest_message_embedding = ''; - if(message) //message is not empty, get the relevant skill docs, else return all skill docs - latest_message_embedding = await this.embedding_model.embed(message); - 
- let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) - .map(doc_key => ({ - doc_key, - similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) - })) - .sort((a, b) => b.similarity_score - a.similarity_score); - - let length = skill_doc_similarities.length; - if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { - select_num = length; - } else { - select_num = Math.min(Math.floor(select_num), length); - } - let selected_docs = skill_doc_similarities.slice(0, select_num); - let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; - relevant_skill_docs += 'SkillDocs:\n' - relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); - return relevant_skill_docs; - } async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { prompt = prompt.replaceAll('$NAME', this.agent.name); @@ -216,7 +190,7 @@ export class Prompter { prompt = prompt.replaceAll( '$CODE_DOCS', - await this.getRelevantSkillDocs(code_task_content, 5) + await this.skill_libary.getRelevantSkillDocs(code_task_content, 5) ); } if (prompt.includes('$EXAMPLES') && examples !== null) From 4c8c61b2f3104d7de1a85297a17c90c67fecb3b7 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Mon, 16 Dec 2024 19:23:24 +0800 Subject: [PATCH 18/65] Modify the url of qwen.json to default to the international version '-intl' --- profiles/qwen.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles/qwen.json b/profiles/qwen.json index 490668a..96f4757 100644 --- a/profiles/qwen.json +++ b/profiles/qwen.json @@ -5,13 +5,13 @@ "model": { "api": "qwen", - "url": "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation", + "url": "https://dashscope-intl.aliyuncs.com/api/v1/services/aigc/text-generation/generation", "model": "qwen-max" }, "embedding": { "api": "qwen", - "url": "https://dashscope.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding", + "url": "https://dashscope-intl.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding", "model": "text-embedding-v3" } } \ No newline at end of file From b1dad6bedaa20a1ecc2485cd9afdb3387785bb03 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Mon, 16 Dec 2024 19:28:31 +0800 Subject: [PATCH 19/65] Code Separation: Related Skill Selection --- src/agent/library/skill_library.js | 45 ++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 src/agent/library/skill_library.js diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js new file mode 100644 index 0000000..356c614 --- /dev/null +++ b/src/agent/library/skill_library.js @@ -0,0 +1,45 @@ +import { cosineSimilarity } from '../../utils/math.js'; +import { getSkillDocs } from './index.js'; + +export class SkillLibrary { + constructor(agent,embedding_model) { + this.agent = agent; + this.embedding_model = embedding_model; + this.skill_docs_embeddings = {}; + } + async initSkillLibrary() { + await Promise.all([ + ...getSkillDocs().map(async (doc) => { + let func_name_desc = doc.split('\n').slice(0, 2).join(''); + this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); + }) + ]); + } + + async getRelevantSkillDocs(message, select_num) { + let latest_message_embedding = ''; + if(message) //message is not empty, get the relevant skill docs, else return all skill docs + latest_message_embedding = await 
this.embedding_model.embed(message); + + let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) + })) + .sort((a, b) => b.similarity_score - a.similarity_score); + + let length = skill_doc_similarities.length; + if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { + select_num = length; + } else { + select_num = Math.min(Math.floor(select_num), length); + } + let selected_docs = skill_doc_similarities.slice(0, select_num); + let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; + relevant_skill_docs += 'SkillDocs:\n' + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); + return relevant_skill_docs; + } + + +} From 72397c4c33c80421ef5f3a4199d44ee89328b257 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Tue, 17 Dec 2024 00:33:18 +0800 Subject: [PATCH 20/65] Add setting for number of "relevant_docs_count" --- settings.js | 1 + src/agent/prompter.js | 15 +++++---------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/settings.js b/settings.js index 27b8606..a0c96d8 100644 --- a/settings.js +++ b/settings.js @@ -30,6 +30,7 @@ export default "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk "code_timeout_mins": 3, // minutes code is allowed to run. -1 for no timeout,set 3.Set 3 min to timely code adjustments + "relevant_docs_count": 5, // number of relevant docs to show when generating code "max_messages": 15, // max number of messages to keep in context "max_commands": -1, // max number of commands to use in a response. -1 for no limit diff --git a/src/agent/prompter.js b/src/agent/prompter.js index d1af5cf..8673b09 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -4,7 +4,7 @@ import { getCommandDocs } from './commands/index.js'; // import { getSkillDocs } from './library/index.js'; import { stringifyTurns } from '../utils/text.js'; import { getCommand } from './commands/index.js'; - +import settings from '../../settings.js'; import { Gemini } from '../models/gemini.js'; import { GPT } from '../models/gpt.js'; import { Claude } from '../models/claude.js'; @@ -178,19 +178,14 @@ export class Prompter { } if (prompt.includes('$COMMAND_DOCS')) prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs(this.agent.blocked_actions)); - if (prompt.includes('$CODE_DOCS')){ - // Find the most recent non-system message containing '!newAction(' - let code_task_content = messages.slice().reverse().find(msg => + if (prompt.includes('$CODE_DOCS')) { + const code_task_content = messages.slice().reverse().find(msg => msg.role !== 'system' && msg.content.includes('!newAction(') - )?.content || ''; - - // Extract content between '!newAction(' and ')' - const match = code_task_content.match(/!newAction\((.*?)\)/); - code_task_content = match ? 
match[1] : ''; + )?.content?.match(/!newAction\((.*?)\)/)?.[1] || ''; prompt = prompt.replaceAll( '$CODE_DOCS', - await this.skill_libary.getRelevantSkillDocs(code_task_content, 5) + await this.skill_libary.getRelevantSkillDocs(code_task_content, settings.relevant_docs_count) ); } if (prompt.includes('$EXAMPLES') && examples !== null) From a7000ea970333d239c12546ac93a053301c96c87 Mon Sep 17 00:00:00 2001 From: Qu Yi Date: Fri, 3 Jan 2025 17:50:14 +0800 Subject: [PATCH 21/65] Merge code templates into codeTemplate.json --- bots/codeCheckTemplate.js | 10 ---------- bots/codeTemplate.json | 4 ++++ bots/template.js | 6 ------ settings.js | 2 +- src/agent/coder.js | 24 +++++++++++++++--------- 5 files changed, 20 insertions(+), 26 deletions(-) delete mode 100644 bots/codeCheckTemplate.js create mode 100644 bots/codeTemplate.json delete mode 100644 bots/template.js diff --git a/bots/codeCheckTemplate.js b/bots/codeCheckTemplate.js deleted file mode 100644 index 77b5d97..0000000 --- a/bots/codeCheckTemplate.js +++ /dev/null @@ -1,10 +0,0 @@ -import * as skills from '../../../src/agent/library/skills.js'; -import * as world from '../../../src/agent/library/world.js'; -import Vec3 from 'vec3'; - -const log = skills.log; - -export async function main(bot) { - /* CODE HERE */ - log(bot, 'Code finished.'); -} \ No newline at end of file diff --git a/bots/codeTemplate.json b/bots/codeTemplate.json new file mode 100644 index 0000000..3c861aa --- /dev/null +++ b/bots/codeTemplate.json @@ -0,0 +1,4 @@ +{ + "execTemplate": "(async (bot) => {\n\n /* CODE HERE */\n log(bot, 'Code finished.');\n\n});", + "checkTemplate": "import * as skills from '../../../src/agent/library/skills.js';\nimport * as world from '../../../src/agent/library/world.js';\nimport Vec3 from 'vec3';\n\nconst log = skills.log;\n\nexport async function main(bot) {\n /* CODE HERE */\n log(bot, 'Code finished.');\n}" +} diff --git a/bots/template.js b/bots/template.js deleted file mode 100644 index b7f270c..0000000 --- a/bots/template.js +++ /dev/null @@ -1,6 +0,0 @@ -(async (bot) => { - -/* CODE HERE */ -log(bot, 'Code finished.'); - -}) \ No newline at end of file diff --git a/settings.js b/settings.js index a0c96d8..ca2b8fb 100644 --- a/settings.js +++ b/settings.js @@ -30,7 +30,7 @@ export default "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk "code_timeout_mins": 3, // minutes code is allowed to run. -1 for no timeout,set 3.Set 3 min to timely code adjustments - "relevant_docs_count": 5, // number of relevant docs to show when generating code + "relevant_docs_count": 5, // Parameter: -1 = all, 0 = no references, 5 = five references. If exceeding the maximum, all reference documents are returned. "max_messages": 15, // max number of messages to keep in context "max_commands": -1, // max number of commands to use in a response. 
-1 for no limit diff --git a/src/agent/coder.js b/src/agent/coder.js index afc037b..faafc1f 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -13,15 +13,21 @@ export class Coder { this.fp = '/bots/'+agent.name+'/action-code/'; this.generating = false; this.code_template = ''; - this.code_chack_template = ''; + this.code_check_template = ''; - readFile('./bots/template.js', 'utf8', (err, data) => { - if (err) throw err; - this.code_template = data; - }); - readFile('./bots/codeCheckTemplate.js', 'utf8', (err, data) => { - if (err) throw err; - this.code_chack_template = data; + readFile('./bots/codeTemplate.json', 'utf8', (err, data) => { + if (err) { + console.error('Error reading codeTemplate.json:', err); + throw err; + } + try { + const templates = JSON.parse(data); + this.code_template = templates.execTemplate; + this.code_check_template = templates.checkTemplate; + } catch (parseErr) { + console.error('Error parsing codeTemplate.json:', parseErr); + throw parseErr; + } }); mkdirSync('.' + this.fp, { recursive: true }); } @@ -83,7 +89,7 @@ export class Coder { for (let line of code.split('\n')) { src += ` ${line}\n`; } - let src_check_copy = this.code_chack_template.replace('/* CODE HERE */', src); + let src_check_copy = this.code_check_template.replace('/* CODE HERE */', src); src = this.code_template.replace('/* CODE HERE */', src); let filename = this.file_counter + '.js'; From a458a667590d298dbc49233401642ce6e8636eca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sat, 4 Jan 2025 12:48:21 +0800 Subject: [PATCH 22/65] Resolve merge conflicts in deepseek --- src/agent/prompter.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index b176d7a..1eeae10 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -1,7 +1,6 @@ import { readFileSync, mkdirSync, writeFileSync} from 'fs'; import { Examples } from '../utils/examples.js'; import { getCommandDocs } from './commands/index.js'; -// import { getSkillDocs } from './library/index.js'; import { stringifyTurns } from '../utils/text.js'; import { getCommand } from './commands/index.js'; import settings from '../../settings.js'; @@ -15,7 +14,6 @@ import { GroqCloudAPI } from '../models/groq.js'; import { HuggingFace } from '../models/huggingface.js'; import { Qwen } from "../models/qwen.js"; import { Grok } from "../models/grok.js"; -// import {cosineSimilarity} from "../utils/math.js"; import {SkillLibrary} from "./library/skill_library.js"; import { DeepSeek } from '../models/deepseek.js'; From a14d4af65fafc4bfffc76a5e88bd1f18ae6f08fb Mon Sep 17 00:00:00 2001 From: Ayush Maniar Date: Sun, 5 Jan 2025 19:27:44 -0800 Subject: [PATCH 23/65] Add getCraftingPlan tool for making agents faster and resource efficient --- profiles/_default.json | 12 +++ src/agent/commands/actions.js | 2 +- src/agent/commands/queries.js | 39 ++++++++- src/agent/library/skills.js | 2 +- src/agent/npc/item_goal.js | 2 +- src/utils/mcdata.js | 157 +++++++++++++++++++++++++++++++++- 6 files changed, 209 insertions(+), 5 deletions(-) diff --git a/profiles/_default.json b/profiles/_default.json index 2a67725..cfcae6d 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -200,6 +200,18 @@ [ {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, {"role": "assistant", "content": "Alright, lets start with the basic structure. 
!newAction(\"Build an 8x8 base for the house.\")"} + ], + [ + {"role": "user", "content": "trex56: Lets make a wooden pickaxe together"}, + {"role": "assistant", "content": "Sure, let me check how to make a wooden pickaxe !getCraftingPlan(\"wooden_pickaxe\",1)"}, + {"role": "system", "content": "You are missing the following items:\n- 2 oak_log\n\nOnce you have these items, here's your crafting plan:\n\nCraft 1 oak_log -> 4 oak_planks\nCraft 1 oak_log -> 4 oak_planks\nCraft 2 oak_planks -> 4 stick\nCraft 3 oak_planks + 2 stick -> 1 wooden_pickaxe"}, + {"role": "assistant", "content": "I need 2 oak logs to make a wooden pickaxe. Do you have any?"} + ], + [ + {"role": "user", "content": "terrance: (FROM OTHER BOT) I want you to craft 5 chains"}, + {"role": "assistant", "content": "Sure, first let me check how to build a chain, !getCraftingPlan(\"chain\",5)"}, + {"role": "system", "content": "You are missing the following items:\n- 7 iron_ingot\n\nOnce you have these items, here's your crafting plan:\n\nCraft 2 iron_ingot -> 18 iron_nugget\nCraft 10 iron_nugget + 5 iron_ingot -> 5 chain"}, + {"role": "assistant", "content": "Seems like I need 7 iron_ingots for 5 chains. Do you have any?"} ] ], diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 6ee481b..9056d29 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -406,7 +406,7 @@ export const actionsList = [ convoManager.endConversation(player_name); return `Converstaion with ${player_name} ended.`; } - } + }, // { // commented for now, causes confusion with goal command // name: '!npcGoal', // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index b4dc74a..59a8416 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -176,5 +176,42 @@ export const queryList = [ perform: async function (agent) { return "Saved place names: " + agent.memory_bank.getKeys(); } - } + }, + { + name: '!getCraftingPlan', + description: "Provides a comprehensive crafting plan for a specified item. This includes a breakdown of required ingredients, the exact quantities needed, and an analysis of missing ingredients or extra items needed based on the bot's current inventory.", + params: { + targetItem: { + type: 'string', + description: 'The item that we are trying to craft' + }, + quantity: { + type: 'int', + description: 'The quantity of the item that we are trying to craft', + optional: true, + domain: [1, Infinity, '[)'], // Quantity must be at least 1, + default: 1 + } + }, + perform: function (agent, targetItem, quantity = 1) { + let bot = agent.bot; + + // Fetch the bot's inventory + const curr_inventory = world.getInventoryCounts(bot); + const target_item = targetItem; + let existingCount = curr_inventory[target_item] || 0; + var prefixMessage = ''; + if (existingCount > 0) { + curr_inventory[target_item] -= existingCount; + prefixMessage = `You already have ${existingCount} ${target_item} in your inventory. 
If you need to craft more,\n`; + } + + // Generate crafting plan + var craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); + craftingPlan = prefixMessage + craftingPlan; + console.log('\n\n\n\n\n\n\n\n\n\n\n'); + console.log(craftingPlan); + return pad(craftingPlan); + } + }, ]; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 23f30ad..7293c0d 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -79,7 +79,7 @@ export async function craftRecipe(bot, itemName, num=1) { } } if (!recipes || recipes.length === 0) { - log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); + log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0][0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); if (placedTable) { await collectBlock(bot, 'crafting_table', 1); } diff --git a/src/agent/npc/item_goal.js b/src/agent/npc/item_goal.js index 40589ba..9055f54 100644 --- a/src/agent/npc/item_goal.js +++ b/src/agent/npc/item_goal.js @@ -204,7 +204,7 @@ class ItemWrapper { } createChildren() { - let recipes = mc.getItemCraftingRecipes(this.name); + let recipes = mc.getItemCraftingRecipes(this.name).map(([recipe, craftedCount]) => recipe); if (recipes) { for (let recipe of recipes) { let includes_blacklisted = false; diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index 58cfbdb..2a3a27c 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -190,7 +190,10 @@ export function getItemCraftingRecipes(itemName) { recipe[ingredientName] = 0; recipe[ingredientName]++; } - recipes.push(recipe); + recipes.push([ + recipe, + {craftedCount : r.result.count} + ]); } return recipes; @@ -327,4 +330,156 @@ export function calculateLimitingResource(availableItems, requiredItems, discret } if(discrete) num = Math.floor(num); return {num, limitingResource} +} + +let loopingItems = new Set(); + +export function initializeLoopingItems() { + + loopingItems = new Set(['coal', + 'wheat', + 'diamond', + 'emerald', + 'raw_iron', + 'raw_gold', + 'redstone', + 'blue_wool', + 'packed_mud', + 'raw_copper', + 'iron_ingot', + 'dried_kelp', + 'gold_ingot', + 'slime_ball', + 'black_wool', + 'quartz_slab', + 'copper_ingot', + 'lapis_lazuli', + 'honey_bottle', + 'rib_armor_trim_smithing_template', + 'eye_armor_trim_smithing_template', + 'vex_armor_trim_smithing_template', + 'dune_armor_trim_smithing_template', + 'host_armor_trim_smithing_template', + 'tide_armor_trim_smithing_template', + 'wild_armor_trim_smithing_template', + 'ward_armor_trim_smithing_template', + 'coast_armor_trim_smithing_template', + 'spire_armor_trim_smithing_template', + 'snout_armor_trim_smithing_template', + 'shaper_armor_trim_smithing_template', + 'netherite_upgrade_smithing_template', + 'raiser_armor_trim_smithing_template', + 'sentry_armor_trim_smithing_template', + 'silence_armor_trim_smithing_template', + 'wayfinder_armor_trim_smithing_template']); +} + + +/** + * Gets a detailed plan for crafting an item considering current inventory + */ +export function getDetailedCraftingPlan(targetItem, count = 1, current_inventory = {}) { + initializeLoopingItems(); + if (!targetItem || count <= 0 || !getItemId(targetItem)) { + return "Invalid input. 
Please provide a valid item name and positive count."; + } + + if (isBaseItem(targetItem)) { + const available = current_inventory[targetItem] || 0; + if (available >= count) return "You have all required items already in your inventory!"; + return `${targetItem} is a base item, you need to find ${count - available} more in the world`; + } + + const inventory = { ...current_inventory }; + const leftovers = {}; + const plan = craftItem(targetItem, count, inventory, leftovers); + return formatPlan(plan); +} + +function isBaseItem(item) { + return loopingItems.has(item) || getItemCraftingRecipes(item) === null; +} + +function craftItem(item, count, inventory, leftovers, crafted = { required: {}, steps: [], leftovers: {} }) { + // Check available inventory and leftovers first + const availableInv = inventory[item] || 0; + const availableLeft = leftovers[item] || 0; + const totalAvailable = availableInv + availableLeft; + + if (totalAvailable >= count) { + // Use leftovers first, then inventory + const useFromLeft = Math.min(availableLeft, count); + leftovers[item] = availableLeft - useFromLeft; + + const remainingNeeded = count - useFromLeft; + if (remainingNeeded > 0) { + inventory[item] = availableInv - remainingNeeded; + } + return crafted; + } + + // Use whatever is available + const stillNeeded = count - totalAvailable; + if (availableLeft > 0) leftovers[item] = 0; + if (availableInv > 0) inventory[item] = 0; + + if (isBaseItem(item)) { + crafted.required[item] = (crafted.required[item] || 0) + stillNeeded; + return crafted; + } + + const recipe = getItemCraftingRecipes(item)?.[0]; + if (!recipe) { + crafted.required[item] = stillNeeded; + return crafted; + } + + const [ingredients, result] = recipe; + const craftedPerRecipe = result.craftedCount; + const batchCount = Math.ceil(stillNeeded / craftedPerRecipe); + const totalProduced = batchCount * craftedPerRecipe; + + // Add excess to leftovers + if (totalProduced > stillNeeded) { + leftovers[item] = (leftovers[item] || 0) + (totalProduced - stillNeeded); + } + + // Process each ingredient + for (const [ingredientName, ingredientCount] of Object.entries(ingredients)) { + const totalIngredientNeeded = ingredientCount * batchCount; + craftItem(ingredientName, totalIngredientNeeded, inventory, leftovers, crafted); + } + + // Add crafting step + const stepIngredients = Object.entries(ingredients) + .map(([name, amount]) => `${amount * batchCount} ${name}`) + .join(' + '); + crafted.steps.push(`Craft ${stepIngredients} -> ${totalProduced} ${item}`); + + return crafted; +} + +function formatPlan({ required, steps, leftovers }) { + const lines = []; + + if (Object.keys(required).length > 0) { + lines.push('You are missing the following items:'); + Object.entries(required).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + lines.push('\nOnce you have these items, here\'s your crafting plan:'); + } else { + lines.push('You have all items required to craft this item!'); + lines.push('Here\'s your crafting plan:'); + } + + lines.push(''); + lines.push(...steps); + + if (Object.keys(leftovers).length > 0) { + lines.push('\nYou will have leftover:'); + Object.entries(leftovers).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + } + + return lines.join('\n'); } \ No newline at end of file From bca490a33ad31e3292a92d78468606146d4d9e95 Mon Sep 17 00:00:00 2001 From: itsme6582 Date: Sun, 12 Jan 2025 10:24:21 -0500 Subject: [PATCH 24/65] Update README.md Discord invite in section ## Install and Run was invalid. 
Replaced with the [Discord Support] link from the section # Mindcraft --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d165045..e102394 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ Do not connect this bot to public servers with coding enabled. This project allo 6. Run `node main.js` from the installed directory -If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/jVxQWVTM). We are currently not very responsive to github issues. +If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/mp73p35dzC). We are currently not very responsive to github issues. ## Customization From 485d4a6d8c2af292d88c78079760e2370d29391d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 12 Jan 2025 23:52:39 +0800 Subject: [PATCH 25/65] Rollback two code template.js --- bots/codeCheckTemplate.js | 10 ++++++++++ bots/codeTemplate.json | 4 ---- src/agent/coder.js | 20 +++++++------------- 3 files changed, 17 insertions(+), 17 deletions(-) create mode 100644 bots/codeCheckTemplate.js delete mode 100644 bots/codeTemplate.json diff --git a/bots/codeCheckTemplate.js b/bots/codeCheckTemplate.js new file mode 100644 index 0000000..77b5d97 --- /dev/null +++ b/bots/codeCheckTemplate.js @@ -0,0 +1,10 @@ +import * as skills from '../../../src/agent/library/skills.js'; +import * as world from '../../../src/agent/library/world.js'; +import Vec3 from 'vec3'; + +const log = skills.log; + +export async function main(bot) { + /* CODE HERE */ + log(bot, 'Code finished.'); +} \ No newline at end of file diff --git a/bots/codeTemplate.json b/bots/codeTemplate.json deleted file mode 100644 index 3c861aa..0000000 --- a/bots/codeTemplate.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "execTemplate": "(async (bot) => {\n\n /* CODE HERE */\n log(bot, 'Code finished.');\n\n});", - "checkTemplate": "import * as skills from '../../../src/agent/library/skills.js';\nimport * as world from '../../../src/agent/library/world.js';\nimport Vec3 from 'vec3';\n\nconst log = skills.log;\n\nexport async function main(bot) {\n /* CODE HERE */\n log(bot, 'Code finished.');\n}" -} diff --git a/src/agent/coder.js b/src/agent/coder.js index faafc1f..159e201 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -15,19 +15,13 @@ export class Coder { this.code_template = ''; this.code_check_template = ''; - readFile('./bots/codeTemplate.json', 'utf8', (err, data) => { - if (err) { - console.error('Error reading codeTemplate.json:', err); - throw err; - } - try { - const templates = JSON.parse(data); - this.code_template = templates.execTemplate; - this.code_check_template = templates.checkTemplate; - } catch (parseErr) { - console.error('Error parsing codeTemplate.json:', parseErr); - throw parseErr; - } + readFile('./bots/template.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_template = data; + }); + readFile('./bots/codeCheckTemplate.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_check_template = data; }); mkdirSync('.' 
+ this.fp, { recursive: true }); } From 8590366900756e0cbbb2e147ee6d1c46e9be8579 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 12 Jan 2025 23:52:59 +0800 Subject: [PATCH 26/65] Rollback two code template.js --- bots/template.js | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 bots/template.js diff --git a/bots/template.js b/bots/template.js new file mode 100644 index 0000000..b7f270c --- /dev/null +++ b/bots/template.js @@ -0,0 +1,6 @@ +(async (bot) => { + +/* CODE HERE */ +log(bot, 'Code finished.'); + +}) \ No newline at end of file From 9be83fec643c23b5f563a4f025a3400345cbc93e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 12 Jan 2025 23:56:10 +0800 Subject: [PATCH 27/65] Rename two code template --- bots/{template.js => execTemplate.js} | 0 bots/{codeCheckTemplate.js => lintTemplate.js} | 0 src/agent/coder.js | 4 ++-- 3 files changed, 2 insertions(+), 2 deletions(-) rename bots/{template.js => execTemplate.js} (100%) rename bots/{codeCheckTemplate.js => lintTemplate.js} (100%) diff --git a/bots/template.js b/bots/execTemplate.js similarity index 100% rename from bots/template.js rename to bots/execTemplate.js diff --git a/bots/codeCheckTemplate.js b/bots/lintTemplate.js similarity index 100% rename from bots/codeCheckTemplate.js rename to bots/lintTemplate.js diff --git a/src/agent/coder.js b/src/agent/coder.js index 159e201..19c41e8 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -15,11 +15,11 @@ export class Coder { this.code_template = ''; this.code_check_template = ''; - readFile('./bots/template.js', 'utf8', (err, data) => { + readFile('./bots/execTemplate.js', 'utf8', (err, data) => { if (err) throw err; this.code_template = data; }); - readFile('./bots/codeCheckTemplate.js', 'utf8', (err, data) => { + readFile('./bots/lintTemplate.js', 'utf8', (err, data) => { if (err) throw err; this.code_check_template = data; }); From 4782da120703d3921f784c149a5f4302cf84ba38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Mon, 13 Jan 2025 00:02:03 +0800 Subject: [PATCH 28/65] Rename func 'check' to 'lint' --- src/agent/coder.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/agent/coder.js b/src/agent/coder.js index 19c41e8..9767d91 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -13,7 +13,7 @@ export class Coder { this.fp = '/bots/'+agent.name+'/action-code/'; this.generating = false; this.code_template = ''; - this.code_check_template = ''; + this.code_lint_template = ''; readFile('./bots/execTemplate.js', 'utf8', (err, data) => { if (err) throw err; @@ -21,12 +21,12 @@ export class Coder { }); readFile('./bots/lintTemplate.js', 'utf8', (err, data) => { if (err) throw err; - this.code_check_template = data; + this.code_lint_template = data; }); mkdirSync('.' + this.fp, { recursive: true }); } - async checkCode(code) { + async lintCode(code) { let result = '#### CODE ERROR INFO ###\n'; // Extract everything in the code between the beginning of 'skills./world.' and the '(' const skillRegex = /(?:skills|world)\.(.*?)\(/g; @@ -36,7 +36,7 @@ export class Coder { skills.push(match[1]); } const allDocs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(); - //Check if the function exists + //lint if the function exists const missingSkills = skills.filter(skill => !allDocs.includes(skill)); if (missingSkills.length > 0) { result += 'These functions do not exist. 
Please modify the correct function name and try again.\n'; @@ -83,7 +83,7 @@ export class Coder { for (let line of code.split('\n')) { src += ` ${line}\n`; } - let src_check_copy = this.code_check_template.replace('/* CODE HERE */', src); + let src_lint_copy = this.code_lint_template.replace('/* CODE HERE */', src); src = this.code_template.replace('/* CODE HERE */', src); let filename = this.file_counter + '.js'; @@ -112,7 +112,7 @@ export class Coder { console.error('Error writing code execution file: ' + result); return null; } - return { func:{main: mainFn}, src_check_copy: src_check_copy }; + return { func:{main: mainFn}, src_lint_copy: src_lint_copy }; } sanitizeCode(code) { @@ -190,8 +190,8 @@ export class Coder { code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); const result = await this.stageCode(code); const executionModuleExports = result.func; - let src_check_copy = result.src_check_copy; - const analysisResult = await this.checkCode(src_check_copy); + let src_lint_copy = result.src_lint_copy; + const analysisResult = await this.lintCode(src_lint_copy); if (analysisResult) { const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.skill_libary.getRelevantSkillDocs(analysisResult,3); messages.push({ role: 'system', content: message }); From 1a86c3a485f1bb1935d9d106c16a41e7baab3646 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 19 Jan 2025 20:50:25 +0800 Subject: [PATCH 29/65] Fix Qwen.js to be compatible with OpenAI and add random backoff for rate limiting --- profiles/qwen.json | 4 +- src/agent/library/skill_library.js | 10 +- src/models/qwen.js | 177 ++++++++++------------------- 3 files changed, 70 insertions(+), 121 deletions(-) diff --git a/profiles/qwen.json b/profiles/qwen.json index 96f4757..f6a3f46 100644 --- a/profiles/qwen.json +++ b/profiles/qwen.json @@ -5,13 +5,13 @@ "model": { "api": "qwen", - "url": "https://dashscope-intl.aliyuncs.com/api/v1/services/aigc/text-generation/generation", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", "model": "qwen-max" }, "embedding": { "api": "qwen", - "url": "https://dashscope-intl.aliyuncs.com/api/v1/services/embeddings/text-embedding/text-embedding", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", "model": "text-embedding-v3" } } \ No newline at end of file diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js index 356c614..a019112 100644 --- a/src/agent/library/skill_library.js +++ b/src/agent/library/skill_library.js @@ -8,12 +8,14 @@ export class SkillLibrary { this.skill_docs_embeddings = {}; } async initSkillLibrary() { - await Promise.all([ - ...getSkillDocs().map(async (doc) => { + const skillDocs = getSkillDocs(); + const embeddingPromises = skillDocs.map((doc) => { + return (async () => { let func_name_desc = doc.split('\n').slice(0, 2).join(''); this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); - }) - ]); + })(); + }); + await Promise.all(embeddingPromises); } async getRelevantSkillDocs(message, select_num) { diff --git a/src/models/qwen.js b/src/models/qwen.js index d546298..20d5b49 100644 --- a/src/models/qwen.js +++ b/src/models/qwen.js @@ -1,134 +1,81 @@ -// This code uses Dashscope and HTTP to ensure the latest support for the Qwen model. 
-// Qwen is also compatible with the OpenAI API format; - -import { getKey } from '../utils/keys.js'; +import OpenAIApi from 'openai'; +import { getKey, hasKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; export class Qwen { - constructor(modelName, url) { - this.modelName = modelName; - this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'; - this.apiKey = getKey('QWEN_API_KEY'); + constructor(model_name, url) { + this.model_name = model_name; + + let config = {}; + + config.baseURL = url || 'https://dashscope.aliyuncs.com/compatible-mode/v1'; + config.apiKey = getKey('QWEN_API_KEY'); + + this.openai = new OpenAIApi(config); } - async sendRequest(turns, systemMessage, stopSeq = '***', retryCount = 0) { - if (retryCount > 5) { - console.error('Maximum retry attempts reached.'); - return 'Error: Too many retry attempts.'; - } + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - const data = { - model: this.modelName || 'qwen-plus', - input: { messages: [{ role: 'system', content: systemMessage }, ...turns] }, - parameters: { result_format: 'message', stop: stopSeq }, + messages = strictFormat(messages); + + const pack = { + model: this.model_name || "qwen-plus", + messages, + stop: stop_seq, }; - // Add default user message if all messages are 'system' role - if (turns.every((msg) => msg.role === 'system')) { - data.input.messages.push({ role: 'user', content: 'hello' }); - } - - if (!data.model || !data.input || !data.input.messages || !data.parameters) { - console.error('Invalid request data format:', data); - throw new Error('Invalid request data format.'); - } - + let res = null; try { - const response = await this._makeHttpRequest(this.url, data); - const choice = response?.output?.choices?.[0]; - - if (choice?.finish_reason === 'length' && turns.length > 0) { - return this.sendRequest(turns.slice(1), systemMessage, stopSeq, retryCount + 1); - } - - return choice?.message?.content || 'No content received.'; - } catch (err) { - console.error('Error occurred:', err); - return 'An error occurred, please try again.'; + console.log('Awaiting Qwen api response...') + // console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; } + // Why random backoff? + // With a 30 requests/second limit on Alibaba Qwen's embedding service, + // random backoff helps maximize bandwidth utilization. 
async embed(text) { - if (!text || typeof text !== 'string') { - console.error('Invalid embedding input: text must be a non-empty string:', text); - return 'Invalid embedding input: text must be a non-empty string.'; - } - - const data = { - model: this.modelName, - input: { texts: [text] }, - parameters: { text_type: 'query' }, - }; - - if (!data.model || !data.input || !data.input.texts || !data.parameters) { - console.error('Invalid embedding request data format:', data); - throw new Error('Invalid embedding request data format.'); - } - - try { - const response = await this._makeHttpRequest(this.url, data); - const embedding = response?.output?.embeddings?.[0]?.embedding; - - return embedding || 'No embedding result received.'; - } catch (err) { - console.log('Embed data:', data); - console.error('Embed error occurred:', err); - return 'An error occurred, please try again.'; - } - } - - async _makeHttpRequest(url, data, maxRetries = 10) { - const headers = { - 'Authorization': `Bearer ${this.apiKey}`, - 'Content-Type': 'application/json', - }; - - let retryCount = 0; - - while (retryCount < maxRetries) { + const maxRetries = 5; // Maximum number of retries + for (let retries = 0; retries < maxRetries; retries++) { try { - const response = await fetch(url, { - method: 'POST', - headers, - body: JSON.stringify(data), + const { data } = await this.openai.embeddings.create({ + model: this.model_name || "text-embedding-v3", + input: text, + encoding_format: "float", }); - - if (response.ok) { - const responseText = await response.text(); - try { - //Task completed successfully - return JSON.parse(responseText); - } catch (err) { - console.error('Failed to parse response JSON:', err); - throw new Error('Invalid response JSON format.'); - } - } else { - const errorText = await response.text(); - - if (response.status === 429 || response.statusText.includes('Too Many Requests')) { - // Handle rate limiting - retryCount++; - if (retryCount >= maxRetries) { - console.error('Exceeded maximum retry attempts, unable to get request result.'); - throw new Error(`Request failed after ${maxRetries} retries due to rate limiting.`); - } - //Reached Qwen concurrency limit, waiting in queue - const waitTime = Math.random() * 1000; // Random wait between 0 to 1 seconds - await new Promise(resolve => setTimeout(resolve, waitTime)); - continue; // Retry the request - } else { - console.error(`Request failed, status code ${response.status}: ${response.statusText}`); - console.error('Error response content:', errorText); - throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`); - } - } + return data[0].embedding; } catch (err) { - // Handle network errors or other exceptions - console.error('Error occurred during HTTP request:', err); - throw err; // Re-throw the error to be handled by the caller + if (err.status === 429) { + // If a rate limit error occurs, calculate the exponential backoff with a random delay (1-5 seconds) + const delay = Math.pow(2, retries) * 1000 + Math.floor(Math.random() * 2000); + // console.log(`Rate limit hit, retrying in ${delay} ms...`); + await new Promise(resolve => setTimeout(resolve, delay)); // Wait for the delay before retrying + } else { + throw err; + } } } - // Exceeded maximum retries - console.error('Exceeded maximum retry attempts, unable to get request result.'); - throw new Error(`Request failed after ${maxRetries} retries.`); + // If maximum retries are reached and the request still fails, throw an error + throw new Error('Max 
retries reached, request failed.'); } + } + + + From 1d54af2fb7d4c8dc6e5a234387157700419291a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 19 Jan 2025 21:10:12 +0800 Subject: [PATCH 30/65] add the lost `|| new_resume` --- src/agent/action_manager.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 97475dd..3833ccf 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -46,7 +46,7 @@ export class ActionManager { assert(actionLabel != null, 'actionLabel is required for new resume'); this.resume_name = actionLabel; } - if (this.resume_func != null && this.agent.isIdle() && (!this.agent.self_prompter.on || new_resume)) { + if (this.resume_func != null && (this.agent.isIdle() || new_resume) && (!this.agent.self_prompter.on || new_resume)) { this.currentActionLabel = this.resume_name; let res = await this._executeAction(this.resume_name, this.resume_func, timeout); this.currentActionLabel = ''; From f0396df8293ab905eb894ca6aa171aa5f855996c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CQu?= Date: Sun, 19 Jan 2025 22:05:01 +0800 Subject: [PATCH 31/65] Remove the relevant_skill_doc prompts from coder.js and action_manager.js --- src/agent/action_manager.js | 3 +-- src/agent/coder.js | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 3833ccf..ad08827 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -113,12 +113,11 @@ export class ActionManager { console.error(err.stack); await this.stop(); err = err.toString(); - let relevant_skill_docs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(err,5); let message = this._getBotOutputSummary() + '!!Code threw exception!!\n' + 'Error: ' + err + '\n' + - 'Stack trace:\n' + err.stack+'\n'+relevant_skill_docs; + 'Stack trace:\n' + err.stack+'\n'; let interrupted = this.agent.bot.interrupt_code; this.agent.clearBotLogs(); diff --git a/src/agent/coder.js b/src/agent/coder.js index 9767d91..25a0a8a 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -193,7 +193,7 @@ export class Coder { let src_lint_copy = result.src_lint_copy; const analysisResult = await this.lintCode(src_lint_copy); if (analysisResult) { - const message = 'Error: Code syntax error. Please try again:'+'\n'+analysisResult+'\n'+await this.agent.prompter.skill_libary.getRelevantSkillDocs(analysisResult,3); + const message = 'Error: Code syntax error. 
Please try again:'+'\n'+analysisResult+'\n'; messages.push({ role: 'system', content: message }); continue; } From 3eeb88e60ef528adb0326083dead572f2ed22733 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Tue, 21 Jan 2025 21:47:05 +0000 Subject: [PATCH 32/65] Update mind_server.js --- src/server/mind_server.js | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/server/mind_server.js b/src/server/mind_server.js index 5d99290..83e060f 100644 --- a/src/server/mind_server.js +++ b/src/server/mind_server.js @@ -114,6 +114,18 @@ export function createMindServer(port = 8080) { process.exit(0); }); + socket.on('send-message', (agentName, message) => { + if (!inGameAgents[agentName]) { + console.warn(`Agent ${agentName} not logged in, cannot send message via MindServer.`); + return + } + try { + console.log(`Sending message to agent ${agentName}: ${message}`); + inGameAgents[agentName].emit('send-message', agentName, message) + } catch (error) { + console.error('Error: ', error); + } + }); }); server.listen(port, 'localhost', () => { @@ -146,4 +158,4 @@ function stopAllAgents() { // Optional: export these if you need access to them from other files export const getIO = () => io; export const getServer = () => server; -export const getConnectedAgents = () => connectedAgents; \ No newline at end of file +export const getConnectedAgents = () => connectedAgents; From 1171bad1e4c9b30e8f8aa032c76d40ab006132bf Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Tue, 21 Jan 2025 21:47:36 +0000 Subject: [PATCH 33/65] Update agent_proxy.js --- src/agent/agent_proxy.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/agent/agent_proxy.js b/src/agent/agent_proxy.js index feeba37..65cdff6 100644 --- a/src/agent/agent_proxy.js +++ b/src/agent/agent_proxy.js @@ -42,6 +42,14 @@ class AgentServerProxy { console.log(`Restarting agent: ${agentName}`); this.agent.cleanKill(); }); + + this.socket.on('send-message', (agentName, message) => { + try { + this.agent.respondFunc("NO USERNAME", message); + } catch (error) { + console.error('Error: ', JSON.stringify(error, Object.getOwnPropertyNames(error))); + } + }); } login() { From e92ff0f933bb8941ec174de9675448cb18a4f139 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Tue, 21 Jan 2025 21:48:05 +0000 Subject: [PATCH 34/65] Update agent.js --- src/agent/agent.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/agent/agent.js b/src/agent/agent.js index 79e1d29..66d8922 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -139,6 +139,8 @@ export class Agent { console.error('Error handling message:', error); } } + + this.respondFunc = respondFunc this.bot.on('whisper', respondFunc); if (settings.profiles.length === 1) From 007a0cdf6c34f7b52048ba2d0bfd0f8cd0ad2fa8 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Tue, 21 Jan 2025 21:49:27 +0000 Subject: [PATCH 35/65] Update index.html --- src/server/public/index.html | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/server/public/index.html b/src/server/public/index.html index 1f7951f..c66a986 100644 --- a/src/server/public/index.html +++ b/src/server/public/index.html @@ -80,6 +80,7 @@ ${agent.in_game ? 
` + ` : ` `} @@ -110,6 +111,10 @@ function shutdown() { socket.emit('shutdown'); } + + function sendMessage(agentName, message) { + socket.emit('send-message', agentName, message) + } - \ No newline at end of file + From 3bbed21526b862f3cdcdafcbc6b994d48cd2f8e3 Mon Sep 17 00:00:00 2001 From: Ayush Maniar Date: Thu, 23 Jan 2025 12:33:21 -0800 Subject: [PATCH 36/65] Added python script for task evaluation which stores experiment run times and results. --- evaluation_script.py | 151 +++++++++++++++++++++++++++++++++++++++++++ src/agent/agent.js | 2 + src/agent/history.js | 4 +- 3 files changed, 155 insertions(+), 2 deletions(-) create mode 100644 evaluation_script.py diff --git a/evaluation_script.py b/evaluation_script.py new file mode 100644 index 0000000..3f378e3 --- /dev/null +++ b/evaluation_script.py @@ -0,0 +1,151 @@ +import argparse +import json +import subprocess +import time +from datetime import datetime +import re + +def read_settings(file_path): + """Read and parse the settings.js file to get agent profiles.""" + with open(file_path, 'r', encoding='utf-8') as file: + content = file.read() + + # Remove `export default` and trailing commas + content = re.sub(r'export\s+default', '', content) + content = re.sub(r',\s*(?=[}\]])', '', content) + + # Remove JavaScript comments + content = re.sub(r'//.*', '', content) + + # Remove trailing commas (e.g., before } or ]) + content = re.sub(r',\s*(?=[}\]])', '', content) + + # Strip leading and trailing whitespace + content = content.strip() + + json_data = json.loads(content) + + profiles = json_data['profiles'] + + ## profiles is a list of strings like "./andy.json" and "./bob.json" + + agent_names = [profile.split('/')[-1].split('.')[0] for profile in profiles] + return agent_names + +# Example usage +if __name__ == "__main__": + config = read_settings("settings.js") + print(config) + + +def check_task_completion(agents): + """Check memory.json files of all agents to determine task success/failure.""" + for agent in agents: + memory_path = f"bots/{agent}/memory.json" + try: + with open(memory_path, 'r') as f: + memory = json.load(f) + + # Check the last system message in turns + for turn in reversed(memory['turns']): + if turn['role'] == 'system' and 'code' in turn['content']: + # Extract completion code + if 'code : 2' in turn['content']: + return True # Task successful + elif 'code : 4' in turn['content']: + return False # Task failed + + except (FileNotFoundError, json.JSONDecodeError) as e: + print(f"Error reading memory for agent {agent}: {e}") + continue + + return False # Default to failure if no conclusive result found + +def update_results_file(task_id, success_count, total_count, time_taken, experiment_results): + """Update the results file with current success ratio and time taken.""" + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + filename = f"results_{task_id}_{timestamp}.txt" + + success_ratio = success_count / total_count + + with open(filename, 'w') as f: + f.write(f"Task ID: {task_id}\n") + f.write(f"Experiments completed: {total_count}\n") + f.write(f"Successful experiments: {success_count}\n") + f.write(f"Success ratio: {success_ratio:.2f}\n") + f.write(f"Time taken for last experiment: {time_taken:.2f} seconds\n") + + # Write individual experiment results + for i, result in enumerate(experiment_results, 1): + f.write(f"Experiment {i}: {'Success' if result['success'] else 'Failure'}, Time taken: {result['time_taken']:.2f} seconds\n") + + # Write aggregated metrics + total_time = sum(result['time_taken'] for 
result in experiment_results) + f.write(f"\nAggregated metrics:\n") + f.write(f"Total experiments: {total_count}\n") + f.write(f"Total successful experiments: {success_count}\n") + f.write(f"Overall success ratio: {success_ratio:.2f}\n") + f.write(f"Total time taken: {total_time:.2f} seconds\n") + f.write(f"Average time per experiment: {total_time / total_count:.2f} seconds\n") + f.write(f"Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n") + +def run_experiment(task_path, task_id, num_exp): + """Run the specified number of experiments and track results.""" + # Read agent profiles from settings.js + agents = read_settings(file_path="settings.js") + print(f"Detected agents: {agents}") + + success_count = 0 + experiment_results = [] + + for exp_num in range(num_exp): + print(f"\nRunning experiment {exp_num + 1}/{num_exp}") + + start_time = time.time() + + # Run the node command + cmd = f"node main.js --task_path {task_path} --task_id {task_id}" + try: + subprocess.run(cmd, shell=True, check=True) + except subprocess.CalledProcessError as e: + print(f"Error running experiment: {e}") + continue + + # Check if task was successful + success = check_task_completion(agents) + if success: + success_count += 1 + print(f"Experiment {exp_num + 1} successful") + else: + print(f"Experiment {exp_num + 1} failed") + + end_time = time.time() + time_taken = end_time - start_time + + # Store individual experiment result + experiment_results.append({ + 'success': success, + 'time_taken': time_taken + }) + + # Update results file after each experiment + update_results_file(task_id, success_count, exp_num + 1, time_taken, experiment_results) + + # Small delay between experiments + time.sleep(1) + + final_ratio = success_count / num_exp + print(f"\nExperiments completed. 
Final success ratio: {final_ratio:.2f}") + +def main(): + parser = argparse.ArgumentParser(description='Run Minecraft AI agent experiments') + parser.add_argument('task_path', help='Path to the task file') + parser.add_argument('task_id', help='ID of the task to run') + parser.add_argument('num_exp', type=int, help='Number of experiments to run') + + args = parser.parse_args() + + run_experiment(args.task_path, args.task_id, args.num_exp) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 4691079..4678c6f 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -447,6 +447,8 @@ export class Agent { if (this.task.data) { let res = this.task.isDone(); if (res) { + await this.history.add('system', `${res.message} ended with code : ${res.code}`); + await this.history.save(); console.log('Task finished:', res.message); this.killAll(); } diff --git a/src/agent/history.js b/src/agent/history.js index b6edf80..a578377 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -42,7 +42,7 @@ export class History { console.log("Memory updated to: ", this.memory); } - appendFullHistory(to_store) { + async appendFullHistory(to_store) { if (this.full_history_fp === undefined) { const string_timestamp = new Date().toLocaleString().replace(/[/:]/g, '-').replace(/ /g, '').replace(/,/g, '_'); this.full_history_fp = `./bots/${this.name}/histories/${string_timestamp}.json`; @@ -75,7 +75,7 @@ export class History { chunk.push(this.turns.shift()); // remove until turns starts with system/user message await this.summarizeMemories(chunk); - this.appendFullHistory(chunk); + await this.appendFullHistory(chunk); } } From fbde286931661f36c21861e2977a8f83beee8645 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 24 Jan 2025 11:31:26 -0600 Subject: [PATCH 37/65] better auto-prompt --- src/agent/self_prompter.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js index 2c2f63c..439b6c6 100644 --- a/src/agent/self_prompter.js +++ b/src/agent/self_prompter.js @@ -38,7 +38,7 @@ export class SelfPrompter { let no_command_count = 0; const MAX_NO_COMMAND = 3; while (!this.interrupt) { - const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command !withThisSyntax. Respond:`; + const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command with this syntax: !commandName. Respond:`; let used_command = await this.agent.handleMessage('system', msg, -1); if (!used_command) { From 42f805cd160994af1734876f29e38601e707ffbc Mon Sep 17 00:00:00 2001 From: Pixel Date: Sat, 25 Jan 2025 10:53:22 -0600 Subject: [PATCH 38/65] Update README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index d165045..369cca2 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ # Mindcraft 🧠⛏️ -Crafting minds for Minecraft with LLMs and Mineflayer! +Crafting minds for Minecraft with LLMs and [Mineflayer!](https://prismarinejs.github.io/mineflayer/#/) [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) | [Discord Support](https://discord.gg/mp73p35dzC) | [Blog Post](https://kolbynottingham.com/mindcraft/) | [Contributor TODO](https://github.com/users/kolbytn/projects/1) -#### ‼️Warning‼️ - +> [!WARNING] Do not connect this bot to public servers with coding enabled. 
This project allows an LLM to write/execute code on your computer. While the code is sandboxed, it is still vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. We strongly recommend running with additional layers of security such as docker containers. Ye be warned. ## Requirements @@ -63,7 +62,8 @@ To connect to online servers your bot will need an official Microsoft/Minecraft // rest is same... ``` -‼️ The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. +> [!WARNING] +> The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. From 1e6ee45f0135a5194cefc11be39533ff3cb99b5a Mon Sep 17 00:00:00 2001 From: Pixel Date: Sat, 25 Jan 2025 10:55:39 -0600 Subject: [PATCH 39/65] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 369cca2..bf72fb6 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ To connect to online servers your bot will need an official Microsoft/Minecraft // rest is same... ``` -> [!WARNING] +> [!CAUTION] > The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. From 063d42176447c0274fcaa9b1fe98091be3e2f137 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 25 Jan 2025 12:26:22 -0600 Subject: [PATCH 40/65] added second model for coding --- src/agent/prompter.js | 132 +++++++++++++++++++++++------------------- 1 file changed, 74 insertions(+), 58 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 310ca3e..5b09719 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -34,7 +34,6 @@ export class Prompter { this.coding_examples = null; let name = this.profile.name; - let chat = this.profile.model; this.cooldown = this.profile.cooldown ? 
this.profile.cooldown : 0; this.last_prompt_time = 0; this.awaiting_coding = false; @@ -43,68 +42,22 @@ export class Prompter { let max_tokens = null; if (this.profile.max_tokens) max_tokens = this.profile.max_tokens; - if (typeof chat === 'string' || chat instanceof String) { - chat = {model: chat}; - if (chat.model.includes('gemini')) - chat.api = 'google'; - else if (chat.model.includes('gpt') || chat.model.includes('o1')) - chat.api = 'openai'; - else if (chat.model.includes('claude')) - chat.api = 'anthropic'; - else if (chat.model.includes('huggingface/')) - chat.api = "huggingface"; - else if (chat.model.includes('meta/') || chat.model.includes('replicate/')) - chat.api = 'replicate'; - else if (chat.model.includes('mistralai/') || chat.model.includes("mistral/")) - chat.api = 'mistral'; - else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/")) - chat.api = 'groq'; - else if (chat.model.includes('novita/')) - chat.api = 'novita'; - else if (chat.model.includes('qwen')) - chat.api = 'qwen'; - else if (chat.model.includes('grok')) - chat.api = 'xai'; - else if (chat.model.includes('deepseek')) - chat.api = 'deepseek'; - else - chat.api = 'ollama'; - } - console.log('Using chat settings:', chat); + let chat_model_profile = this._selectAPI(this.profile.model); + this.chat_model = this._createModel(chat_model_profile); - if (chat.api === 'google') - this.chat_model = new Gemini(chat.model, chat.url); - else if (chat.api === 'openai') - this.chat_model = new GPT(chat.model, chat.url); - else if (chat.api === 'anthropic') - this.chat_model = new Claude(chat.model, chat.url); - else if (chat.api === 'replicate') - this.chat_model = new ReplicateAPI(chat.model, chat.url); - else if (chat.api === 'ollama') - this.chat_model = new Local(chat.model, chat.url); - else if (chat.api === 'mistral') - this.chat_model = new Mistral(chat.model, chat.url); - else if (chat.api === 'groq') { - this.chat_model = new GroqCloudAPI(chat.model.replace('groq/', '').replace('groqcloud/', ''), chat.url, max_tokens ? 
max_tokens : 8192); + if (this.profile.code_model) { + let code_model_profile = this._selectAPI(this.profile.code_model); + this.code_model = this._createModel(code_model_profile); + } + else { + this.code_model = this.chat_model; } - else if (chat.api === 'huggingface') - this.chat_model = new HuggingFace(chat.model, chat.url); - else if (chat.api === 'novita') - this.chat_model = new Novita(chat.model.replace('novita/', ''), chat.url); - else if (chat.api === 'qwen') - this.chat_model = new Qwen(chat.model, chat.url); - else if (chat.api === 'xai') - this.chat_model = new Grok(chat.model, chat.url); - else if (chat.api === 'deepseek') - this.chat_model = new DeepSeek(chat.model, chat.url); - else - throw new Error('Unknown API:', api); let embedding = this.profile.embedding; if (embedding === undefined) { - if (chat.api !== 'ollama') - embedding = {api: chat.api}; + if (chat_model_profile.api !== 'ollama') + embedding = {api: chat_model_profile.api}; else embedding = {api: 'none'}; } @@ -146,6 +99,69 @@ export class Prompter { }); } + _selectAPI(profile) { + if (typeof profile === 'string' || profile instanceof String) { + profile = {model: profile}; + if (profile.model.includes('gemini')) + profile.api = 'google'; + else if (profile.model.includes('gpt') || profile.model.includes('o1')) + profile.api = 'openai'; + else if (profile.model.includes('claude')) + profile.api = 'anthropic'; + else if (profile.model.includes('huggingface/')) + profile.api = "huggingface"; + else if (profile.model.includes('meta/') || profile.model.includes('replicate/')) + profile.api = 'replicate'; + else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) + model_profile.api = 'mistral'; + else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) + profile.api = 'groq'; + else if (profile.model.includes('novita/')) + profile.api = 'novita'; + else if (profile.model.includes('qwen')) + profile.api = 'qwen'; + else if (profile.model.includes('grok')) + profile.api = 'xai'; + else if (profile.model.includes('deepseek')) + profile.api = 'deepseek'; + else + profile.api = 'ollama'; + } + return profile; + } + + _createModel(profile) { + let model = null; + if (profile.api === 'google') + model = new Gemini(profile.model, profile.url); + else if (profile.api === 'openai') + model = new GPT(profile.model, profile.url); + else if (profile.api === 'anthropic') + model = new Claude(profile.model, profile.url); + else if (profile.api === 'replicate') + model = new ReplicateAPI(profile.model, profile.url); + else if (profile.api === 'ollama') + model = new Local(profile.model, profile.url); + else if (profile.api === 'mistral') + model = new Mistral(profile.model, profile.url); + else if (profile.api === 'groq') { + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, max_tokens ? 
max_tokens : 8192); + } + else if (profile.api === 'huggingface') + model = new HuggingFace(profile.model, profile.url); + else if (profile.api === 'novita') + model = new Novita(profile.model.replace('novita/', ''), profile.url); + else if (profile.api === 'qwen') + model = new Qwen(profile.model, profile.url); + else if (profile.api === 'xai') + model = new Grok(profile.model, profile.url); + else if (profile.api === 'deepseek') + model = new DeepSeek(profile.model, profile.url); + else + throw new Error('Unknown API:', api); + return model; + } + getName() { return this.profile.name; } @@ -273,7 +289,7 @@ export class Prompter { await this.checkCooldown(); let prompt = this.profile.coding; prompt = await this.replaceStrings(prompt, messages, this.coding_examples); - let resp = await this.chat_model.sendRequest(messages, prompt); + let resp = await this.code_model.sendRequest(messages, prompt); this.awaiting_coding = false; return resp; } From e1a9ed811b6690a60d6f14908e0ba1609c397e99 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 25 Jan 2025 12:26:49 -0600 Subject: [PATCH 41/65] small fix to block placing/farming --- src/agent/commands/index.js | 2 +- src/agent/library/skills.js | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js index 008c1d0..f40c5c2 100644 --- a/src/agent/commands/index.js +++ b/src/agent/commands/index.js @@ -160,7 +160,7 @@ export function parseCommandMessage(message) { suppressNoDomainWarning = true; //Don't spam console. Only give the warning once. } } else if(param.type === 'BlockName') { //Check that there is a block with this name - if(getBlockId(arg) == null) return `Invalid block type: ${arg}.` + if(getBlockId(arg) == null && arg !== 'air') return `Invalid block type: ${arg}.` } else if(param.type === 'ItemName') { //Check that there is an item with this name if(getItemId(arg) == null) return `Invalid item type: ${arg}.` } diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 726ef18..78f1ad3 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1275,8 +1275,14 @@ export async function tillAndSow(bot, x, y, z, seedType=null) { let block = bot.blockAt(new Vec3(x, y, z)); if (bot.modes.isOn('cheat')) { - placeBlock(bot, x, y, z, 'farmland'); - placeBlock(bot, x, y+1, z, seedType); + let to_remove = ['_seed', '_seeds']; + for (let remove of to_remove) { + if (seedType.endsWith(remove)) { + seedType = seedType.replace(remove, ''); + } + } + placeBlock(bot, 'farmland', x, y, z); + placeBlock(bot, seedType, x, y+1, z); return true; } From 9b387649a17b8a75c5f06e121053b1308180dd79 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Feb 2025 18:35:58 -0600 Subject: [PATCH 42/65] enable o3, improve novita --- src/agent/prompter.js | 2 +- src/models/gpt.js | 2 +- src/models/novita.js | 17 +++++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 5b09719..b88001f 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -104,7 +104,7 @@ export class Prompter { profile = {model: profile}; if (profile.model.includes('gemini')) profile.api = 'google'; - else if (profile.model.includes('gpt') || profile.model.includes('o1')) + else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) profile.api = 'openai'; else if (profile.model.includes('claude')) profile.api = 'anthropic'; diff --git 
a/src/models/gpt.js b/src/models/gpt.js index dfd5e22..49be3a6 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -33,7 +33,7 @@ export class GPT { let res = null; try { - console.log('Awaiting openai api response...') + console.log('Awaiting openai api response from model', this.model_name) // console.log('Messages:', messages); let completion = await this.openai.chat.completions.create(pack); if (completion.choices[0].finish_reason == 'length') diff --git a/src/models/novita.js b/src/models/novita.js index d84aee7..33c05cc 100644 --- a/src/models/novita.js +++ b/src/models/novita.js @@ -1,5 +1,6 @@ import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; // llama, mistral export class Novita { @@ -17,6 +18,10 @@ export class Novita { async sendRequest(turns, systemMessage, stop_seq='***') { let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + + messages = strictFormat(messages); + const pack = { model: this.model_name || "meta-llama/llama-3.1-70b-instruct", messages, @@ -41,6 +46,18 @@ export class Novita { res = 'My brain disconnected, try again.'; } } + if (res.includes('')) { + let start = res.indexOf(''); + let end = res.indexOf('') + 8; + if (start != -1) { + if (end != -1) { + res = res.substring(0, start) + res.substring(end); + } else { + res = res.substring(0, start+7); + } + } + res = res.trim(); + } return res; } From 23c54279ded2aaca6a33c2c83df788cc805470fe Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 3 Feb 2025 18:42:47 -0600 Subject: [PATCH 43/65] add code_model to readme --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d165045..1eaa1bd 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,7 @@ LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, } ``` -The model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. +The model or code_model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. 
@@ -137,6 +137,7 @@ Thus, all the below specifications are equivalent to the above example: ```json "model": "gpt-3.5-turbo", "embedding": "openai" +"code_model": "gpt-3.5-turbo" ``` ## Patches From 60187e23171a58c8c991ddc5d48af24f2f415691 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 13:02:57 -0600 Subject: [PATCH 44/65] added model parameters obj to profile --- README.md | 2 +- profiles/gpt.json | 8 +++- src/agent/agent.js | 10 ++--- src/models/claude.js | 10 +++-- src/models/deepseek.js | 4 +- src/models/gemini.js | 29 ++++++++++++-- src/models/gpt.js | 5 ++- src/models/grok.js | 7 +++- src/models/groq.js | 14 ++++--- src/models/huggingface.js | 6 ++- src/models/local.js | 10 ++++- src/models/mistral.js | 8 ++-- src/models/novita.js | 5 ++- src/{agent => models}/prompter.js | 63 ++++++++++++++++--------------- src/models/qwen.js | 11 ++++-- src/models/replicate.js | 9 ++++- src/process/init_agent.js | 7 +--- 17 files changed, 134 insertions(+), 74 deletions(-) rename src/{agent => models}/prompter.js (88%) diff --git a/README.md b/README.md index 1eaa1bd..d18fe65 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | | Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | -| Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | | Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | diff --git a/profiles/gpt.json b/profiles/gpt.json index 32d99c1..a5effe1 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -1,5 +1,11 @@ { "name": "gpt", - "model": "gpt-4o" + "model": { + "model": "gpt-4o-mini", + "params": { + "temperature": 1, + "not_real": true + } + } } \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 4691079..8e211ef 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -1,6 +1,6 @@ import { History } from './history.js'; import { Coder } from './coder.js'; -import { Prompter } from './prompter.js'; +import { Prompter } from '../models/prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage, isAction, blacklistCommands } from './commands/index.js'; @@ -100,11 +100,9 @@ export class Agent { }); } catch (error) { // Ensure we're not losing error details - console.error('Agent start failed with error:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Agent start failed with error') + console.error(error) + throw error; // Re-throw with preserved details } } diff --git a/src/models/claude.js b/src/models/claude.js index c97ecb2..f8d2a90 100644 --- a/src/models/claude.js +++ 
b/src/models/claude.js @@ -3,8 +3,9 @@ import { strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Claude { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; if (url) @@ -20,13 +21,16 @@ export class Claude { let res = null; try { console.log('Awaiting anthropic api response...') - // console.log('Messages:', messages); + if (!this.params.max_tokens) { + this.params.max_tokens = 4096; + } const resp = await this.anthropic.messages.create({ model: this.model_name || "claude-3-sonnet-20240229", system: systemMessage, - max_tokens: 2048, messages: messages, + ...(this.params || {}) }); + console.log('Received.') res = resp.content[0].text; } diff --git a/src/models/deepseek.js b/src/models/deepseek.js index 395aa8c..da98ba2 100644 --- a/src/models/deepseek.js +++ b/src/models/deepseek.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class DeepSeek { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; @@ -23,6 +24,7 @@ export class DeepSeek { model: this.model_name || "deepseek-chat", messages, stop: stop_seq, + ...(this.params || {}) }; let res = null; diff --git a/src/models/gemini.js b/src/models/gemini.js index 1536d66..de71a66 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -3,8 +3,9 @@ import { toSinglePrompt } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Gemini { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url; this.safetySettings = [ { @@ -34,15 +35,20 @@ export class Gemini { async sendRequest(turns, systemMessage) { let model; + const modelConfig = { + model: this.model_name || "gemini-1.5-flash", + ...(this.params || {}) + }; + if (this.url) { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { baseUrl: this.url }, { safetySettings: this.safetySettings } ); } else { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { safetySettings: this.safetySettings } ); } @@ -50,12 +56,27 @@ export class Gemini { const stop_seq = '***'; const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); console.log('Awaiting Google API response...'); - const result = await model.generateContent(prompt); + const result = await model.generateContent({ + contents: [ + { + role: 'user', + parts: [ + { + text: "Explain how AI works", + } + ], + } + ], + generateConfig: { + ...(this.params || {}) + } + }); const response = await result.response; const text = response.text(); console.log('Received.'); if (!text.includes(stop_seq)) return text; const idx = text.indexOf(stop_seq); + return text.slice(0, idx); } diff --git a/src/models/gpt.js b/src/models/gpt.js index 49be3a6..1a88bf4 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class GPT { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; if (url) @@ -25,6 +26,7 @@ export class GPT { model: this.model_name || "gpt-3.5-turbo", messages, stop: stop_seq, + ...(this.params 
|| {}) }; if (this.model_name.includes('o1')) { pack.messages = strictFormat(messages); @@ -32,6 +34,7 @@ export class GPT { } let res = null; + try { console.log('Awaiting openai api response from model', this.model_name) // console.log('Messages:', messages); diff --git a/src/models/grok.js b/src/models/grok.js index 19a3b38..a8c6672 100644 --- a/src/models/grok.js +++ b/src/models/grok.js @@ -3,8 +3,10 @@ import { getKey } from '../utils/keys.js'; // xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs export class Grok { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.url = url; + this.params = params; let config = {}; if (url) @@ -23,7 +25,8 @@ export class Grok { const pack = { model: this.model_name || "grok-beta", messages, - stop: [stop_seq] + stop: [stop_seq], + ...(this.params || {}) }; let res = null; diff --git a/src/models/groq.js b/src/models/groq.js index e17f13d..6911534 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -4,12 +4,13 @@ import { getKey } from '../utils/keys.js'; // Umbrella class for Mixtral, LLama, Gemma... export class GroqCloudAPI { - constructor(model_name, url, max_tokens=16384) { + constructor(model_name, url, params) { this.model_name = model_name; this.url = url; - this.max_tokens = max_tokens; + this.params = params; // ReplicateAPI theft :3 if (this.url) { + console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); } this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); @@ -20,14 +21,15 @@ export class GroqCloudAPI { let res = null; try { console.log("Awaiting Groq response..."); + if (!this.params.max_tokens) { + this.params.max_tokens = 16384; + } let completion = await this.groq.chat.completions.create({ "messages": messages, "model": this.model_name || "mixtral-8x7b-32768", - "temperature": 0.2, - "max_tokens": this.max_tokens, // maximum token limit, differs from model to model - "top_p": 1, "stream": true, - "stop": stop_seq // "***" + "stop": stop_seq, + ...(this.params || {}) }); let temp_res = ""; diff --git a/src/models/huggingface.js b/src/models/huggingface.js index 56f9d55..dd5c89d 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -3,9 +3,10 @@ import {getKey} from '../utils/keys.js'; import {HfInference} from "@huggingface/inference"; export class HuggingFace { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name.replace('huggingface/',''); this.url = url; + this.params = params; if (this.url) { console.warn("Hugging Face doesn't support custom urls!"); @@ -25,7 +26,8 @@ export class HuggingFace { console.log('Awaiting Hugging Face API response...'); for await (const chunk of this.huggingface.chatCompletionStream({ model: model_name, - messages: [{ role: "user", content: input }] + messages: [{ role: "user", content: input }], + ...(this.params || {}) })) { res += (chunk.choices[0]?.delta?.content || ""); } diff --git a/src/models/local.js b/src/models/local.js index 18d06e0..23d7e0e 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,8 +1,9 @@ import { strictFormat } from '../utils/text.js'; export class Local { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url || 'http://127.0.0.1:11434'; this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; @@ -15,7 +16,12 @@ export class Local { let 
res = null; try { console.log(`Awaiting local response... (model: ${model})`) - res = await this.send(this.chat_endpoint, {model: model, messages: messages, stream: false}); + res = await this.send(this.chat_endpoint, { + model: model, + messages: messages, + stream: false, + ...(this.params || {}) + }); if (res) res = res['message']['content']; } diff --git a/src/models/mistral.js b/src/models/mistral.js index 3b41f78..b33d1de 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -5,10 +5,13 @@ import { strictFormat } from '../utils/text.js'; export class Mistral { #client; - constructor(model_name, url) { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; if (typeof url === "string") { console.warn("Mistral does not support custom URL's, ignoring!"); + } if (!getKey("MISTRAL_API_KEY")) { @@ -22,8 +25,6 @@ export class Mistral { ); - this.model_name = model_name; - // Prevents the following code from running when model not specified if (typeof this.model_name === "undefined") return; @@ -49,6 +50,7 @@ export class Mistral { const response = await this.#client.chat.complete({ model, messages, + ...(this.params || {}) }); result = response.choices[0].message.content; diff --git a/src/models/novita.js b/src/models/novita.js index 33c05cc..8f2dd08 100644 --- a/src/models/novita.js +++ b/src/models/novita.js @@ -4,9 +4,11 @@ import { strictFormat } from '../utils/text.js'; // llama, mistral export class Novita { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name.replace('novita/', ''); this.url = url || 'https://api.novita.ai/v3/openai'; + this.params = params; + let config = { baseURL: this.url @@ -26,6 +28,7 @@ export class Novita { model: this.model_name || "meta-llama/llama-3.1-70b-instruct", messages, stop: [stop_seq], + ...(this.params || {}) }; let res = null; diff --git a/src/agent/prompter.js b/src/models/prompter.js similarity index 88% rename from src/agent/prompter.js rename to src/models/prompter.js index b88001f..91fcf3f 100644 --- a/src/agent/prompter.js +++ b/src/models/prompter.js @@ -1,23 +1,23 @@ import { readFileSync, mkdirSync, writeFileSync} from 'fs'; import { Examples } from '../utils/examples.js'; -import { getCommandDocs } from './commands/index.js'; -import { getSkillDocs } from './library/index.js'; +import { getCommandDocs } from '../agent/commands/index.js'; +import { getSkillDocs } from '../agent/library/index.js'; import { stringifyTurns } from '../utils/text.js'; -import { getCommand } from './commands/index.js'; +import { getCommand } from '../agent/commands/index.js'; import settings from '../../settings.js'; -import { Gemini } from '../models/gemini.js'; -import { GPT } from '../models/gpt.js'; -import { Claude } from '../models/claude.js'; -import { Mistral } from '../models/mistral.js'; -import { ReplicateAPI } from '../models/replicate.js'; -import { Local } from '../models/local.js'; -import { Novita } from '../models/novita.js'; -import { GroqCloudAPI } from '../models/groq.js'; -import { HuggingFace } from '../models/huggingface.js'; -import { Qwen } from "../models/qwen.js"; -import { Grok } from "../models/grok.js"; -import { DeepSeek } from '../models/deepseek.js'; +import { Gemini } from './gemini.js'; +import { GPT } from './gpt.js'; +import { Claude } from './claude.js'; +import { Mistral } from './mistral.js'; +import { ReplicateAPI } from './replicate.js'; +import { Local } from './local.js'; +import { Novita } from './novita.js'; 
+import { GroqCloudAPI } from './groq.js'; +import { HuggingFace } from './huggingface.js'; +import { Qwen } from "./qwen.js"; +import { Grok } from "./grok.js"; +import { DeepSeek } from './deepseek.js'; export class Prompter { constructor(agent, fp) { @@ -102,6 +102,8 @@ export class Prompter { _selectAPI(profile) { if (typeof profile === 'string' || profile instanceof String) { profile = {model: profile}; + } + if (!profile.api) { if (profile.model.includes('gemini')) profile.api = 'google'; else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) @@ -110,7 +112,7 @@ export class Prompter { profile.api = 'anthropic'; else if (profile.model.includes('huggingface/')) profile.api = "huggingface"; - else if (profile.model.includes('meta/') || profile.model.includes('replicate/')) + else if (profile.model.includes('replicate/')) profile.api = 'replicate'; else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) model_profile.api = 'mistral'; @@ -133,32 +135,31 @@ export class Prompter { _createModel(profile) { let model = null; if (profile.api === 'google') - model = new Gemini(profile.model, profile.url); + model = new Gemini(profile.model, profile.url, profile.params); else if (profile.api === 'openai') - model = new GPT(profile.model, profile.url); + model = new GPT(profile.model, profile.url, profile.params); else if (profile.api === 'anthropic') - model = new Claude(profile.model, profile.url); + model = new Claude(profile.model, profile.url, profile.params); else if (profile.api === 'replicate') - model = new ReplicateAPI(profile.model, profile.url); + model = new ReplicateAPI(profile.model, profile.url, profile.params); else if (profile.api === 'ollama') - model = new Local(profile.model, profile.url); + model = new Local(profile.model, profile.url, profile.params); else if (profile.api === 'mistral') - model = new Mistral(profile.model, profile.url); - else if (profile.api === 'groq') { - model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, max_tokens ? 
max_tokens : 8192); - } + model = new Mistral(profile.model, profile.url, profile.params); + else if (profile.api === 'groq') + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); else if (profile.api === 'huggingface') - model = new HuggingFace(profile.model, profile.url); + model = new HuggingFace(profile.model, profile.url, profile.params); else if (profile.api === 'novita') - model = new Novita(profile.model.replace('novita/', ''), profile.url); + model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); else if (profile.api === 'qwen') - model = new Qwen(profile.model, profile.url); + model = new Qwen(profile.model, profile.url, profile.params); else if (profile.api === 'xai') - model = new Grok(profile.model, profile.url); + model = new Grok(profile.model, profile.url, profile.params); else if (profile.api === 'deepseek') - model = new DeepSeek(profile.model, profile.url); + model = new DeepSeek(profile.model, profile.url, profile.params); else - throw new Error('Unknown API:', api); + throw new Error('Unknown API:', profile.api); return model; } diff --git a/src/models/qwen.js b/src/models/qwen.js index d3d7bec..5f3eafa 100644 --- a/src/models/qwen.js +++ b/src/models/qwen.js @@ -4,8 +4,9 @@ import { getKey } from '../utils/keys.js'; export class Qwen { - constructor(modelName, url) { - this.modelName = modelName; + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'; this.apiKey = getKey('QWEN_API_KEY'); } @@ -19,7 +20,11 @@ export class Qwen { const data = { model: this.modelName || 'qwen-plus', input: { messages: [{ role: 'system', content: systemMessage }, ...turns] }, - parameters: { result_format: 'message', stop: stopSeq }, + parameters: { + result_format: 'message', + stop: stopSeq, + ...(this.params || {}) + }, }; // Add default user message if all messages are 'system' role diff --git a/src/models/replicate.js b/src/models/replicate.js index e0c7d6c..c8c3ba3 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -4,9 +4,10 @@ import { getKey } from '../utils/keys.js'; // llama, mistral export class ReplicateAPI { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; this.url = url; + this.params = params; if (this.url) { console.warn('Replicate API does not support custom URLs. 
Ignoring provided URL.'); @@ -22,7 +23,11 @@ export class ReplicateAPI { const prompt = toSinglePrompt(turns, null, stop_seq); let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; - const input = { prompt, system_prompt: systemMessage }; + const input = { + prompt, + system_prompt: systemMessage, + ...(this.params || {}) + }; let res = null; try { console.log('Awaiting Replicate API response...'); diff --git a/src/process/init_agent.js b/src/process/init_agent.js index 88c99b9..15b08e0 100644 --- a/src/process/init_agent.js +++ b/src/process/init_agent.js @@ -57,11 +57,8 @@ const argv = yargs(args) const agent = new Agent(); await agent.start(argv.profile, argv.load_memory, argv.init_message, argv.count_id, argv.task_path, argv.task_id); } catch (error) { - console.error('Failed to start agent process:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Failed to start agent process:'); + console.error(error); process.exit(1); } })(); From be780cba27bb77bf44f5dbdd875778d1007fa7d3 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 13:28:32 -0600 Subject: [PATCH 45/65] remove obsolete collectblock patche --- patches/mineflayer-collectblock+1.4.1.patch | 35 --------------------- 1 file changed, 35 deletions(-) delete mode 100644 patches/mineflayer-collectblock+1.4.1.patch diff --git a/patches/mineflayer-collectblock+1.4.1.patch b/patches/mineflayer-collectblock+1.4.1.patch deleted file mode 100644 index 1df504b..0000000 --- a/patches/mineflayer-collectblock+1.4.1.patch +++ /dev/null @@ -1,35 +0,0 @@ -diff --git a/node_modules/mineflayer-collectblock/lib/CollectBlock.js b/node_modules/mineflayer-collectblock/lib/CollectBlock.js -index 2c11e8c..bb49c11 100644 ---- a/node_modules/mineflayer-collectblock/lib/CollectBlock.js -+++ b/node_modules/mineflayer-collectblock/lib/CollectBlock.js -@@ -77,10 +77,11 @@ function mineBlock(bot, block, options) { - } - yield bot.tool.equipForBlock(block, equipToolOptions); - // @ts-expect-error -- if (!block.canHarvest(bot.heldItem)) { -+ if (bot.heldItem !== null && !block.canHarvest(bot.heldItem.type)) { - options.targets.removeTarget(block); - return; - } -+ - const tempEvents = new TemporarySubscriber_1.TemporarySubscriber(bot); - tempEvents.subscribeTo('itemDrop', (entity) => { - if (entity.position.distanceTo(block.position.offset(0.5, 0.5, 0.5)) <= 0.5) { -@@ -92,7 +93,7 @@ function mineBlock(bot, block, options) { - // Waiting for items to drop - yield new Promise(resolve => { - let remainingTicks = 10; -- tempEvents.subscribeTo('physicTick', () => { -+ tempEvents.subscribeTo('physicsTick', () => { - remainingTicks--; - if (remainingTicks <= 0) { - tempEvents.cleanup(); -@@ -195,6 +196,8 @@ class CollectBlock { - throw (0, Util_1.error)('UnresolvedDependency', 'The mineflayer-collectblock plugin relies on the mineflayer-tool plugin to run!'); - } - if (this.movements != null) { -+ this.movements.dontMineUnderFallingBlock = false; -+ this.movements.dontCreateFlow = false; - this.bot.pathfinder.setMovements(this.movements); - } - if (!optionsFull.append) From 0c3ba9a3830673e9ebe3c91ed262ab2421189dfb Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 14:41:57 -0600 Subject: [PATCH 46/65] updated gemini, cleaned gpt profile --- profiles/gpt.json | 5 ++--- src/models/gemini.js | 32 +++++++++++++++----------------- src/utils/text.js | 6 ++++-- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git 
a/profiles/gpt.json b/profiles/gpt.json index a5effe1..ad7097e 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -2,10 +2,9 @@ "name": "gpt", "model": { - "model": "gpt-4o-mini", + "model": "gpt-4o", "params": { - "temperature": 1, - "not_real": true + "temperature": 0.5 } } } \ No newline at end of file diff --git a/src/models/gemini.js b/src/models/gemini.js index de71a66..bcc1895 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,5 +1,5 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; -import { toSinglePrompt } from '../utils/text.js'; +import { toSinglePrompt, strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Gemini { @@ -37,7 +37,7 @@ export class Gemini { let model; const modelConfig = { model: this.model_name || "gemini-1.5-flash", - ...(this.params || {}) + // systemInstruction does not work bc google is trash }; if (this.url) { @@ -53,29 +53,27 @@ export class Gemini { ); } - const stop_seq = '***'; - const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); console.log('Awaiting Google API response...'); + + turns.unshift({ role: 'system', content: systemMessage }); + turns = strictFormat(turns); + let contents = []; + for (let turn of turns) { + contents.push({ + role: turn.role === 'assistant' ? 'model' : 'user', + parts: [{ text: turn.content }] + }); + } + const result = await model.generateContent({ - contents: [ - { - role: 'user', - parts: [ - { - text: "Explain how AI works", - } - ], - } - ], - generateConfig: { + contents, + generationConfig: { ...(this.params || {}) } }); const response = await result.response; const text = response.text(); console.log('Received.'); - if (!text.includes(stop_seq)) return text; - const idx = text.indexOf(stop_seq); return text.slice(0, idx); } diff --git a/src/utils/text.js b/src/utils/text.js index 1e93667..f500199 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -26,8 +26,10 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam return prompt; } -// ensures stricter turn order for anthropic/llama models -// combines repeated messages from the same role, separates repeat assistant messages with filler user messages +// ensures stricter turn order and roles: +// - system messages are treated as user messages and prefixed with SYSTEM: +// - combines repeated messages from users +// - separates repeat assistant messages with filler user messages export function strictFormat(turns) { let prev_role = null; let messages = []; From 402e09f03795036b8b49fbdde10c90f479f64b35 Mon Sep 17 00:00:00 2001 From: Max Robinson Date: Tue, 4 Feb 2025 15:00:55 -0600 Subject: [PATCH 47/65] better readme --- README.md | 35 ++++++++++++++++------------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index d18fe65..e735412 100644 --- a/README.md +++ b/README.md @@ -105,40 +105,37 @@ node main.js --profiles ./profiles/andy.json ./profiles/jill.json ### Model Specifications -LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, for both the chat model and the embedding model, the bot profile can specify the below attributes: +LLM models can be specified as simply as `"model": "gpt-4o"`. However, you can specify different models for chat, coding, and embeddings. +You can pass a string or an object for these fields. A model object must specify an `api`, and optionally a `model`, `url`, and additional `params`. 
```json "model": { "api": "openai", + "model": "gpt-4o", "url": "https://api.openai.com/v1/", - "model": "gpt-3.5-turbo" + "params": { + "max_tokens": 1000, + "temperature": 1 + } +}, +"code_model": { + "api": "openai", + "model": "gpt-4", + "url": "https://api.openai.com/v1/" }, "embedding": { "api": "openai", "url": "https://api.openai.com/v1/", "model": "text-embedding-ada-002" } + ``` -The model or code_model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. +`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. -If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. +All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. -Thus, all the below specifications are equivalent to the above example: - -```json -"model": "gpt-3.5-turbo" -``` -```json -"model": { - "api": "openai" -} -``` -```json -"model": "gpt-3.5-turbo", -"embedding": "openai" -"code_model": "gpt-3.5-turbo" -``` +The `params` field is optional and can be used to specify additional parameters for the model. It accepts any key-value pairs supported by the api. Is not supported for embedding models. ## Patches From 9c675c51bcc042c70e4b6129857287ec593cccf7 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 4 Feb 2025 16:26:00 -0600 Subject: [PATCH 48/65] fix syntax error, var->let, remove examples --- profiles/_default.json | 12 ------------ src/agent/commands/queries.js | 6 +++--- src/agent/tasks.js | 4 ++-- viewer.html | 12 ++++++------ 4 files changed, 11 insertions(+), 23 deletions(-) diff --git a/profiles/_default.json b/profiles/_default.json index edd56a4..ac244fc 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -200,18 +200,6 @@ [ {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, {"role": "assistant", "content": "Alright, lets start with the basic structure. !newAction(\"Build an 8x8 base for the house.\")"} - ], - [ - {"role": "user", "content": "trex56: Lets make a wooden pickaxe together"}, - {"role": "assistant", "content": "Sure, let me check how to make a wooden pickaxe !getCraftingPlan(\"wooden_pickaxe\",1)"}, - {"role": "system", "content": "You are missing the following items:\n- 2 oak_log\n\nOnce you have these items, here's your crafting plan:\n\nCraft 1 oak_log -> 4 oak_planks\nCraft 1 oak_log -> 4 oak_planks\nCraft 2 oak_planks -> 4 stick\nCraft 3 oak_planks + 2 stick -> 1 wooden_pickaxe"}, - {"role": "assistant", "content": "I need 2 oak logs to make a wooden pickaxe. 
Do you have any?"} - ], - [ - {"role": "user", "content": "terrance: (FROM OTHER BOT) I want you to craft 5 chains"}, - {"role": "assistant", "content": "Sure, first let me check how to build a chain, !getCraftingPlan(\"chain\",5)"}, - {"role": "system", "content": "You are missing the following items:\n- 7 iron_ingot\n\nOnce you have these items, here's your crafting plan:\n\nCraft 2 iron_ingot -> 18 iron_nugget\nCraft 10 iron_nugget + 5 iron_ingot -> 5 chain"}, - {"role": "assistant", "content": "Seems like I need 7 iron_ingots for 5 chains. Do you have any?"} ] ], diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index 60049d0..c9fe333 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -201,18 +201,18 @@ export const queryList = [ const curr_inventory = world.getInventoryCounts(bot); const target_item = targetItem; let existingCount = curr_inventory[target_item] || 0; - var prefixMessage = ''; + let prefixMessage = ''; if (existingCount > 0) { curr_inventory[target_item] -= existingCount; prefixMessage = `You already have ${existingCount} ${target_item} in your inventory. If you need to craft more,\n`; } // Generate crafting plan - var craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); + let craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); craftingPlan = prefixMessage + craftingPlan; - console.log('\n\n\n\n\n\n\n\n\n\n\n'); console.log(craftingPlan); return pad(craftingPlan); + }, }, { name: '!help', diff --git a/src/agent/tasks.js b/src/agent/tasks.js index 6d968a9..1b9d56e 100644 --- a/src/agent/tasks.js +++ b/src/agent/tasks.js @@ -109,11 +109,11 @@ export class Task { await new Promise((resolve) => setTimeout(resolve, 500)); if (this.data.agent_count > 1) { - var initial_inventory = this.data.initial_inventory[this.agent.count_id.toString()]; + let initial_inventory = this.data.initial_inventory[this.agent.count_id.toString()]; console.log("Initial inventory:", initial_inventory); } else if (this.data) { console.log("Initial inventory:", this.data.initial_inventory); - var initial_inventory = this.data.initial_inventory; + let initial_inventory = this.data.initial_inventory; } if ("initial_inventory" in this.data) { diff --git a/viewer.html b/viewer.html index 737b046..bdf2047 100644 --- a/viewer.html +++ b/viewer.html @@ -26,9 +26,9 @@
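Taken together, patch 44's per-provider changes all follow one pattern: each wrapper accepts an optional `params` object from the profile and spreads it into the request payload. The sketch below restates that pattern in isolation; it is an assumption-laden illustration, not code from the repository — `ExampleModel`, its default model name, and its URL are made up, and the real wrappers hand the payload to their own SDKs rather than returning it.

```javascript
// Illustrative only: the params pass-through shape used across gpt.js, claude.js,
// local.js, etc. in patch 44. ExampleModel and its defaults are hypothetical.
export class ExampleModel {
    constructor(model_name, url, params) {
        this.model_name = model_name;
        this.url = url || 'https://example.invalid/v1';
        this.params = params; // undefined when the profile has no "params" block
    }

    async sendRequest(turns, systemMessage, stop_seq = '***') {
        const messages = [{ role: 'system', content: systemMessage }, ...turns];
        const pack = {
            model: this.model_name || 'example-default-model',
            messages,
            stop: [stop_seq],
            // Spread last, so profile-level params win on any key collision.
            ...(this.params || {}),
        };
        // A real wrapper would pass `pack` to its SDK here; the sketch just returns it.
        return pack;
    }
}
```

Spreading the profile's `params` after the fixed fields is what lets a profile like `profiles/gpt.json` set `temperature` without each wrapper naming it explicitly; providers that need defaults (for example Claude's 4096 `max_tokens` fallback) fill them into `params` before the spread.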