Merge pull request #433 from kolbytn/relavent-code-docs

Relevant code docs
Max Robinson 2025-02-05 15:39:48 -06:00 committed by GitHub
commit d2394ab956
17 changed files with 255 additions and 124 deletions

bots/lintTemplate.js Normal file
View file

@@ -0,0 +1,10 @@
import * as skills from '../../../src/agent/library/skills.js';
import * as world from '../../../src/agent/library/world.js';
import Vec3 from 'vec3';
const log = skills.log;
export async function main(bot) {
/* CODE HERE */
log(bot, 'Code finished.');
}
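For context, coder.js (later in this diff) replaces the /* CODE HERE */ placeholder with the model-generated snippet before linting it. A filled-in copy of this template might look like the sketch below; the snippet body is purely illustrative and assumes world.getPosition and skills.goToPosition are part of the skill library.

import * as skills from '../../../src/agent/library/skills.js';
import * as world from '../../../src/agent/library/world.js';
import Vec3 from 'vec3';
const log = skills.log;

export async function main(bot) {
    // Illustrative generated snippet: walk five blocks along the x axis.
    let position = world.getPosition(bot);
    await skills.goToPosition(bot, position.x + 5, position.y, position.z);
    log(bot, 'Code finished.');
}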

eslint.config.js Normal file
View file

@@ -0,0 +1,25 @@
// eslint.config.js
import globals from "globals";
import pluginJs from "@eslint/js";
/** @type {import('eslint').Linter.Config[]} */
export default [
// First, import the recommended configuration
pluginJs.configs.recommended,
// Then override or customize specific rules
{
languageOptions: {
globals: globals.browser,
ecmaVersion: 2021,
sourceType: "module",
},
rules: {
"no-undef": "error", // Disallow the use of undeclared variables or functions.
"semi": ["error", "always"], // Require the use of semicolons at the end of statements.
"curly": "warn", // Enforce the use of curly braces around blocks of code.
"no-unused-vars": "off", // Disable warnings for unused variables.
"no-unreachable": "off", // Disable warnings for unreachable code.
},
},
];
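The Coder's new lint step (later in this diff) relies on this file implicitly: with ESLint 9 from the devDependencies below, new ESLint() resolves the flat eslint.config.js from the working directory. A minimal sketch of that flow, mirroring the calls used in lintCode; the sample source string is hypothetical.

import { ESLint } from 'eslint';

const generatedSrc = 'let x = unknownFn();'; // hypothetical generated snippet with a no-undef violation
const eslint = new ESLint();                 // picks up the flat config above from the project root
const results = await eslint.lintText(generatedSrc);
const problems = results.map(r => r.messages).flat();
console.log(`${problems.length} lint problem(s) found`);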

View file

@@ -29,5 +29,10 @@
"scripts": {
"postinstall": "patch-package",
"start": "node main.js"
},
"devDependencies": {
"@eslint/js": "^9.13.0",
"eslint": "^9.13.0",
"globals": "^15.11.0"
}
}

View file

@@ -1,7 +1,7 @@
{
"name": "Freeguy",
"model": "groq/llama-3.1-70b-versatile",
"model": "groq/llama-3.3-70b-versatile",
"max_tokens": 8000
}

View file

@@ -1,7 +1,7 @@
{
"name": "LLama",
"model": "groq/llama-3.1-70b-versatile",
"model": "groq/llama-3.3-70b-versatile",
"max_tokens": 4000,

View file

@@ -5,9 +5,13 @@
"model": {
"api": "qwen",
"url": "https://dashscope-intl.aliyuncs.com/api/v1/services/aigc/text-generation/generation",
"url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1",
"model": "qwen-max"
},
"embedding": "openai"
"embedding": {
"api": "qwen",
"url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1",
"model": "text-embedding-v3"
}
}

View file

@@ -33,6 +33,7 @@ export default
"allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk
"code_timeout_mins": -1, // minutes code is allowed to run. -1 for no timeout
"relevant_docs_count": 5, // Parameter: -1 = all, 0 = no references, 5 = five references. If exceeding the maximum, all reference documents are returned.
"max_messages": 15, // max number of messages to keep in context
"num_examples": 2, // number of examples to give to the model

View file

@@ -112,12 +112,13 @@ export class ActionManager {
// Log the full stack trace
console.error(err.stack);
await this.stop();
err = err.toString();
let message = this._getBotOutputSummary() +
'!!Code threw exception!!\n' +
let message = this._getBotOutputSummary() +
'!!Code threw exception!!\n' +
'Error: ' + err + '\n' +
'Stack trace:\n' + err.stack;
'Stack trace:\n' + err.stack+'\n';
let interrupted = this.agent.bot.interrupt_code;
this.agent.clearBotLogs();
if (!interrupted && !this.agent.coder.generating) {
@@ -137,7 +138,7 @@ export class ActionManager {
First outputs:\n${output.substring(0, MAX_OUT / 2)}\n...skipping many lines.\nFinal outputs:\n ${output.substring(output.length - MAX_OUT / 2)}`;
}
else {
output = 'Code output:\n' + output;
output = 'Code output:\n' + output.toString();
}
return output;
}

View file

@@ -4,6 +4,7 @@ import { makeCompartment } from './library/lockdown.js';
import * as skills from './library/skills.js';
import * as world from './library/world.js';
import { Vec3 } from 'vec3';
import { ESLint } from "eslint";
export class Coder {
constructor(agent) {
@@ -12,15 +13,62 @@ export class Coder {
this.fp = '/bots/'+agent.name+'/action-code/';
this.generating = false;
this.code_template = '';
this.code_lint_template = '';
readFile('./bots/template.js', 'utf8', (err, data) => {
readFile('./bots/execTemplate.js', 'utf8', (err, data) => {
if (err) throw err;
this.code_template = data;
});
readFile('./bots/lintTemplate.js', 'utf8', (err, data) => {
if (err) throw err;
this.code_lint_template = data;
});
mkdirSync('.' + this.fp, { recursive: true });
}
async lintCode(code) {
let result = '#### CODE ERROR INFO ###\n';
// Extract each function name invoked as skills.<name>( or world.<name>( from the code
const skillRegex = /(?:skills|world)\.(.*?)\(/g;
const skills = [];
let match;
while ((match = skillRegex.exec(code)) !== null) {
skills.push(match[1]);
}
const allDocs = await this.agent.prompter.skill_libary.getRelevantSkillDocs();
// check that every referenced function actually exists in the skill docs
const missingSkills = skills.filter(skill => !allDocs.includes(skill));
if (missingSkills.length > 0) {
result += 'These functions do not exist. Please use only functions that appear in the skill docs and try again.\n';
result += '### FUNCTIONS NOT FOUND ###\n';
result += missingSkills.join('\n');
console.log(result);
return result;
}
const eslint = new ESLint();
const results = await eslint.lintText(code);
const codeLines = code.split('\n');
const exceptions = results.map(r => r.messages).flat();
if (exceptions.length > 0) {
exceptions.forEach((exc, index) => {
if (exc.line && exc.column) {
const errorLine = codeLines[exc.line - 1]?.trim() || 'Unable to retrieve error line content';
result += `#ERROR ${index + 1}\n`;
result += `Message: ${exc.message}\n`;
result += `Location: Line ${exc.line}, Column ${exc.column}\n`;
result += `Related Code Line: ${errorLine}\n`;
}
});
result += 'The code contains exceptions and cannot continue execution.';
} else {
return null; // no errors found
}
return result;
}
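For illustration, a single no-undef violation in the staged code would come back from lintCode in roughly this shape (the message text and line numbers are hypothetical):

#### CODE ERROR INFO ###
#ERROR 1
Message: 'positon' is not defined.
Location: Line 8, Column 36
Related Code Line: await skills.goToPosition(bot, positon.x, positon.y, positon.z);
The code contains exceptions and cannot continue execution.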
// write custom code to file and import it
// write custom code to file and prepare for evaluation
async stageCode(code) {
code = this.sanitizeCode(code);
@@ -35,6 +83,7 @@ export class Coder {
for (let line of code.split('\n')) {
src += ` ${line}\n`;
}
let src_lint_copy = this.code_lint_template.replace('/* CODE HERE */', src);
src = this.code_template.replace('/* CODE HERE */', src);
let filename = this.file_counter + '.js';
@@ -46,7 +95,7 @@ export class Coder {
// });
// } commented for now, useful to keep files for debugging
this.file_counter++;
let write_result = await this.writeFilePromise('.' + this.fp + filename, src);
// This is where we determine the environment the agent's code should be exposed to.
// It will only have access to these things, (in addition to basic javascript objects like Array, Object, etc.)
@@ -63,8 +112,7 @@ export class Coder {
console.error('Error writing code execution file: ' + result);
return null;
}
return { main: mainFn };
return { func:{main: mainFn}, src_lint_copy: src_lint_copy };
}
sanitizeCode(code) {
@@ -140,8 +188,15 @@ export class Coder {
continue;
}
code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```'));
const executionModuleExports = await this.stageCode(code);
const result = await this.stageCode(code);
const executionModuleExports = result.func;
let src_lint_copy = result.src_lint_copy;
const analysisResult = await this.lintCode(src_lint_copy);
if (analysisResult) {
const message = 'Error: Code syntax error. Please try again:\n' + analysisResult + '\n';
messages.push({ role: 'system', content: message });
continue;
}
if (!executionModuleExports) {
agent_history.add('system', 'Failed to stage code, something is wrong.');
return {success: false, message: null, interrupted: false, timedout: false};
@@ -152,10 +207,10 @@ export class Coder {
}, { timeout: settings.code_timeout_mins });
if (code_return.interrupted && !code_return.timedout)
return { success: false, message: null, interrupted: true, timedout: false };
console.log("Code generation result:", code_return.success, code_return.message);
console.log("Code generation result:", code_return.success, code_return.message.toString());
if (code_return.success) {
const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message;
const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message.toString();
return { success: true, message: summary, interrupted: false, timedout: false };
}
@@ -170,5 +225,4 @@ export class Coder {
}
return { success: false, message: null, interrupted: false, timedout: true };
}
}

View file

@@ -3,20 +3,21 @@ import * as world from './world.js';
export function docHelper(functions, module_name) {
let docstring = '';
let docArray = [];
for (let skillFunc of functions) {
let str = skillFunc.toString();
if (str.includes('/**')){
docstring += module_name+'.'+skillFunc.name;
docstring += str.substring(str.indexOf('/**')+3, str.indexOf('**/')) + '\n';
if (str.includes('/**')) {
let docEntry = `${module_name}.${skillFunc.name}\n`;
docEntry += str.substring(str.indexOf('/**') + 3, str.indexOf('**/')).trim();
docArray.push(docEntry);
}
}
return docstring;
return docArray;
}
export function getSkillDocs() {
let docstring = "\n*SKILL DOCS\nThese skills are javascript functions that can be called when writing actions and skills.\n";
docstring += docHelper(Object.values(skills), 'skills');
docstring += docHelper(Object.values(world), 'world');
return docstring + '*\n';
let docArray = [];
docArray = docArray.concat(docHelper(Object.values(skills), 'skills'));
docArray = docArray.concat(docHelper(Object.values(world), 'world'));
return docArray;
}
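Each element of the returned array pairs a qualified function name with its docstring body, for example (content illustrative):

const docs = getSkillDocs();
// docs[i] might look like:
// "skills.tillAndSow
//  * Till the soil and plant the given seed at the given position.
//  * @param {MinecraftBot} bot, reference to the minecraft bot.
//  ..."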

View file

@@ -0,0 +1,47 @@
import { cosineSimilarity } from '../../utils/math.js';
import { getSkillDocs } from './index.js';
export class SkillLibrary {
constructor(agent, embedding_model) {
this.agent = agent;
this.embedding_model = embedding_model;
this.skill_docs_embeddings = {};
}
async initSkillLibrary() {
const skillDocs = getSkillDocs();
const embeddingPromises = skillDocs.map((doc) => {
return (async () => {
let func_name_desc = doc.split('\n').slice(0, 2).join('');
this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc);
})();
});
await Promise.all(embeddingPromises);
}
async getRelevantSkillDocs(message, select_num) {
let latest_message_embedding = '';
if (message) // if a message is given, embed it so docs can be ranked by relevance; otherwise all docs are returned unranked
latest_message_embedding = await this.embedding_model.embed(message);
let skill_doc_similarities = Object.keys(this.skill_docs_embeddings)
.map(doc_key => ({
doc_key,
similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key])
}))
.sort((a, b) => b.similarity_score - a.similarity_score);
let length = skill_doc_similarities.length;
if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) {
select_num = length;
} else {
select_num = Math.min(Math.floor(select_num), length);
}
let selected_docs = skill_doc_similarities.slice(0, select_num);
let relevant_skill_docs = '#### RELEVANT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n';
relevant_skill_docs += 'SkillDocs:\n';
relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### ');
return relevant_skill_docs;
}
}
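cosineSimilarity is imported from ../../utils/math.js, which is not part of this diff; a conventional implementation over two equal-length embedding vectors looks like the sketch below (an assumption, not necessarily the repository's exact code).

// Sketch of a standard cosine similarity between two embedding vectors.
export function cosineSimilarity(a, b) {
    let dot = 0, normA = 0, normB = 0;
    for (let i = 0; i < a.length; i++) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

A typical call mirrors the prompter change later in this diff: getRelevantSkillDocs('collect 10 oak logs', settings.relevant_docs_count) embeds the (hypothetical) request, ranks every stored doc embedding against it, and returns a '#### RELEVANT DOCS INFO ###' block with the highest-ranked docs first.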

View file

@@ -1267,7 +1267,7 @@ export async function tillAndSow(bot, x, y, z, seedType=null) {
* @returns {Promise<boolean>} true if the ground was tilled, false otherwise.
* @example
* let position = world.getPosition(bot);
* await skills.till(bot, position.x, position.y - 1, position.x);
* await skills.tillAndSow(bot, position.x, position.y - 1, position.x, "wheat");
**/
x = Math.round(x);
y = Math.round(y);

View file

@@ -75,7 +75,7 @@ export class Gemini {
const text = response.text();
console.log('Received.');
return text.slice(0, idx);
return text;
}
async embed(text) {

View file

@@ -7,7 +7,7 @@ export class GroqCloudAPI {
constructor(model_name, url, params) {
this.model_name = model_name;
this.url = url;
this.params = params;
this.params = params || {};
// ReplicateAPI theft :3
if (this.url) {

View file

@@ -2,6 +2,7 @@ import { readFileSync, mkdirSync, writeFileSync} from 'fs';
import { Examples } from '../utils/examples.js';
import { getCommandDocs } from '../agent/commands/index.js';
import { getSkillDocs } from '../agent/library/index.js';
import { SkillLibrary } from "../agent/library/skill_library.js";
import { stringifyTurns } from '../utils/text.js';
import { getCommand } from '../agent/commands/index.js';
import settings from '../../settings.js';
@@ -89,7 +90,7 @@ export class Prompter {
console.log('Continuing anyway, using word overlap instead.');
this.embedding_model = null;
}
this.skill_libary = new SkillLibrary(agent, this.embedding_model);
mkdirSync(`./bots/${name}`, { recursive: true });
writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => {
if (err) {
@@ -146,7 +147,7 @@ export class Prompter {
model = new Local(profile.model, profile.url, profile.params);
else if (profile.api === 'mistral')
model = new Mistral(profile.model, profile.url, profile.params);
else if (profile.api === 'groq')
else if (profile.api === 'groq')
model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params);
else if (profile.api === 'huggingface')
model = new HuggingFace(profile.model, profile.url, profile.params);
@@ -179,7 +180,8 @@ export class Prompter {
// Wait for both examples to load before proceeding
await Promise.all([
this.convo_examples.load(this.profile.conversation_examples),
this.coding_examples.load(this.profile.coding_examples)
this.coding_examples.load(this.profile.coding_examples),
this.skill_libary.initSkillLibrary()
]);
console.log('Examples initialized.');
@@ -205,6 +207,17 @@ export class Prompter {
}
if (prompt.includes('$COMMAND_DOCS'))
prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs());
if (prompt.includes('$CODE_DOCS')) {
const code_task_content = messages.slice().reverse().find(msg =>
msg.role !== 'system' && msg.content.includes('!newAction(')
)?.content?.match(/!newAction\((.*?)\)/)?.[1] || '';
prompt = prompt.replaceAll(
'$CODE_DOCS',
await this.skill_libary.getRelevantSkillDocs(code_task_content, settings.relevant_docs_count)
);
}
prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs());
if (prompt.includes('$CODE_DOCS'))
prompt = prompt.replaceAll('$CODE_DOCS', getSkillDocs());
if (prompt.includes('$EXAMPLES') && examples !== null)
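A quick illustration of the lookup above (the chat message is hypothetical): only the text inside the most recent !newAction(...) call is embedded and used to rank the skill docs substituted for $CODE_DOCS.

// Hypothetical example of the extraction performed above:
const content = 'Sure, I can do that! !newAction(Build a small stone shelter near the oak tree)';
const task = content.match(/!newAction\((.*?)\)/)?.[1] || '';
// task === 'Build a small stone shelter near the oak tree'
// task is then embedded and compared against every skill-doc embedding, and the top
// settings.relevant_docs_count docs are inserted in place of $CODE_DOCS.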

View file

@@ -1,109 +1,79 @@
// This code uses Dashscope and HTTP to ensure the latest support for the Qwen model.
// Qwen is also compatible with the OpenAI API format;
import { getKey } from '../utils/keys.js';
import OpenAIApi from 'openai';
import { getKey, hasKey } from '../utils/keys.js';
import { strictFormat } from '../utils/text.js';
export class Qwen {
constructor(model_name, url, params) {
this.model_name = model_name;
this.params = params;
this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation';
this.apiKey = getKey('QWEN_API_KEY');
let config = {};
config.baseURL = url || 'https://dashscope.aliyuncs.com/compatible-mode/v1';
config.apiKey = getKey('QWEN_API_KEY');
this.openai = new OpenAIApi(config);
}
async sendRequest(turns, systemMessage, stopSeq = '***', retryCount = 0) {
if (retryCount > 5) {
console.error('Maximum retry attempts reached.');
return 'Error: Too many retry attempts.';
}
async sendRequest(turns, systemMessage, stop_seq='***') {
let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
const data = {
model: this.modelName || 'qwen-plus',
input: { messages: [{ role: 'system', content: systemMessage }, ...turns] },
parameters: {
result_format: 'message',
stop: stopSeq,
...(this.params || {})
},
messages = strictFormat(messages);
const pack = {
model: this.model_name || "qwen-plus",
messages,
stop: stop_seq,
...(this.params || {})
};
// Add default user message if all messages are 'system' role
if (turns.every((msg) => msg.role === 'system')) {
data.input.messages.push({ role: 'user', content: 'hello' });
}
if (!data.model || !data.input || !data.input.messages || !data.parameters) {
console.error('Invalid request data format:', data);
throw new Error('Invalid request data format.');
}
let res = null;
try {
const response = await this._makeHttpRequest(this.url, data);
const choice = response?.output?.choices?.[0];
if (choice?.finish_reason === 'length' && turns.length > 0) {
return this.sendRequest(turns.slice(1), systemMessage, stopSeq, retryCount + 1);
console.log('Awaiting Qwen api response...');
// console.log('Messages:', messages);
let completion = await this.openai.chat.completions.create(pack);
if (completion.choices[0].finish_reason == 'length')
throw new Error('Context length exceeded');
console.log('Received.');
res = completion.choices[0].message.content;
}
catch (err) {
if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) {
console.log('Context length exceeded, trying again with shorter context.');
return await this.sendRequest(turns.slice(1), systemMessage, stop_seq);
} else {
console.log(err);
res = 'My brain disconnected, try again.';
}
return choice?.message?.content || 'No content received.';
} catch (err) {
console.error('Error occurred:', err);
return 'An error occurred, please try again.';
}
return res;
}
// Why random backoff?
// Alibaba Qwen's embedding service allows roughly 30 requests per second;
// adding random jitter to the exponential backoff spreads retries out and keeps throughput near that limit.
async embed(text) {
if (!text || typeof text !== 'string') {
console.error('Invalid embedding input: text must be a non-empty string.');
return 'Invalid embedding input: text must be a non-empty string.';
}
const data = {
model: 'text-embedding-v2',
input: { texts: [text] },
parameters: { text_type: 'query' },
};
if (!data.model || !data.input || !data.input.texts || !data.parameters) {
console.error('Invalid embedding request data format:', data);
throw new Error('Invalid embedding request data format.');
}
try {
const response = await this._makeHttpRequest(this.url, data);
const embedding = response?.output?.embeddings?.[0]?.embedding;
return embedding || 'No embedding result received.';
} catch (err) {
console.error('Error occurred:', err);
return 'An error occurred, please try again.';
const maxRetries = 5; // Maximum number of retries
for (let retries = 0; retries < maxRetries; retries++) {
try {
const { data } = await this.openai.embeddings.create({
model: this.model_name || "text-embedding-v3",
input: text,
encoding_format: "float",
});
return data[0].embedding;
} catch (err) {
if (err.status === 429) {
// On a rate-limit error, back off exponentially: 2^retries seconds plus a random 0-2 second jitter
const delay = Math.pow(2, retries) * 1000 + Math.floor(Math.random() * 2000);
// console.log(`Rate limit hit, retrying in ${delay} ms...`);
await new Promise(resolve => setTimeout(resolve, delay)); // Wait for the delay before retrying
} else {
throw err;
}
}
}
// If maximum retries are reached and the request still fails, throw an error
throw new Error('Max retries reached, request failed.');
}
async _makeHttpRequest(url, data) {
const headers = {
'Authorization': `Bearer ${this.apiKey}`,
'Content-Type': 'application/json',
};
const response = await fetch(url, {
method: 'POST',
headers,
body: JSON.stringify(data),
});
if (!response.ok) {
const errorText = await response.text();
console.error(`Request failed, status code ${response.status}: ${response.statusText}`);
console.error('Error response content:', errorText);
throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`);
}
const responseText = await response.text();
try {
return JSON.parse(responseText);
} catch (err) {
console.error('Failed to parse response JSON:', err);
throw new Error('Invalid response JSON format.');
}
}
}
}