mirror of https://github.com/kolbytn/mindcraft.git (synced 2025-07-25 09:25:23 +02:00)
prompter refactor
parent 87d34aa023
commit ce8dd89231

2 changed files with 42 additions and 74 deletions

@@ -45,5 +45,5 @@ export default
    "verbose_commands": true, // show full command syntax
    "narrate_behavior": true, // chat simple automatic actions ('Picking up item!')
    "chat_bot_messages": true, // publicly chat messages to other bots
    "log_all_prompts": false, // log all prompts to console
    "log_all_prompts": false, // log all prompts to file
}
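Note: the log_all_prompts flag above is consumed by the new _saveLog helper added at the end of this diff, which writes each prompt/response pair to a file instead of the console. A minimal sketch of enabling it (presumably in the settings module this hunk edits; only the log_all_prompts key and the bots/<agent>/logs location are taken from the diff, everything else is illustrative):

    // hypothetical settings excerpt — enable per-prompt logging for a debugging session
    export default {
        "log_all_prompts": true, // when true, _saveLog appends every prompt/response under bots/<agent>/logs/
    }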

@@ -131,6 +131,8 @@ export class Prompter {
            profile.api = 'google';
        else if (profile.model.includes('openrouter/'))
            profile.api = 'openrouter'; // must do before others bc shares model names
        else if (profile.model.includes('vllm/'))
            profile.api = 'vllm';
        else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3'))
            profile.api = 'openai';
        else if (profile.model.includes('claude'))
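To make the routing above concrete, here is a standalone sketch of the substring checks (guessApi and the sample model strings are invented for illustration; the 'anthropic' value for the claude branch is assumed, since its body is cut off by the hunk):

    // sketch of the prefix/substring routing shown above — not the actual Prompter code
    function guessApi(model) {
        if (model.includes('openrouter/')) return 'openrouter'; // checked first because openrouter shares model names
        if (model.includes('vllm/')) return 'vllm';
        if (model.includes('gpt') || model.includes('o1') || model.includes('o3')) return 'openai';
        if (model.includes('claude')) return 'anthropic'; // assumed; the branch body is not shown in the hunk
        return 'unknown';
    }

    console.log(guessApi('openrouter/anthropic/claude-3.5-sonnet')); // 'openrouter', not 'anthropic'
    console.log(guessApi('vllm/meta-llama/Llama-3.1-8B-Instruct'));  // 'vllm'
    console.log(guessApi('gpt-4o-mini'));                            // 'openai'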

@@ -304,51 +306,7 @@ export class Prompter {
        this.last_prompt_time = Date.now();
    }

    // async promptConvo(messages) {
    //     this.most_recent_msg_time = Date.now();
    //     let current_msg_time = this.most_recent_msg_time;
    //     for (let i = 0; i < 3; i++) { // try 3 times to avoid hallucinations
    //         await this.checkCooldown();
    //         if (current_msg_time !== this.most_recent_msg_time) {
    //             return '';
    //         }
    //         let prompt = this.profile.conversing;
    //         prompt = await this.replaceStrings(prompt, messages, this.convo_examples);
    //         let generation = await this.chat_model.sendRequest(messages, prompt);
    //         // in conversations >2 players LLMs tend to hallucinate and role-play as other bots
    //         // the FROM OTHER BOT tag should never be generated by the LLM
    //         if (generation.includes('(FROM OTHER BOT)')) {
    //             console.warn('LLM hallucinated message as another bot. Trying again...');
    //             continue;
    //         }
    //         if (current_msg_time !== this.most_recent_msg_time) {
    //             console.warn(this.agent.name + ' received new message while generating, discarding old response.');
    //             return '';
    //         }
    //         return generation;
    //     }
    //     return '';
    // }

    async saveToFile(logFile, logEntry) {
        let task_id = this.agent.task.task_id;
        console.log(task_id)
        let logDir;
        if (this.task_id === null) {
            logDir = path.join(__dirname, `../../bots/${this.agent.name}/logs`);
        } else {
            logDir = path.join(__dirname, `../../bots/${this.agent.name}/logs/${task_id}`);
        }

        await fs.mkdir(logDir, { recursive: true });

        logFile = path.join(logDir, logFile);
        await fs.appendFile(logFile, String(logEntry), 'utf-8');
    }

    async promptConvo(messages) {
        // console.log(`[${new Date().toISOString()}] promptConvo called with messages:`, messages);

        this.most_recent_msg_time = Date.now();
        let current_msg_time = this.most_recent_msg_time;

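Both the deleted comment block and the live promptConvo that replaces it rely on the same guard: snapshot most_recent_msg_time before generating and discard the result if a newer message arrived in the meantime. A minimal sketch of that pattern (respond, state, and generate are hypothetical names, not Prompter APIs):

    // stale-response guard, as used by promptConvo
    async function respond(state, generate) {
        state.most_recent_msg_time = Date.now();
        const current_msg_time = state.most_recent_msg_time;
        const generation = await generate(); // slow LLM call
        if (current_msg_time !== state.most_recent_msg_time) {
            // a newer message arrived while generating; drop this response
            console.warn('received new message while generating, discarding old response.');
            return '';
        }
        return generation;
    }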

@@ -371,15 +329,7 @@ export class Prompter {
                    throw new Error('Generated response is not a string');
                }
                console.log("Generated response:", generation);
                const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
                let logEntry;
                if (this.task_id === null) {
                    logEntry = `[${timestamp}] \nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\nResponse:\n${generation}\n\n`;
                } else {
                    logEntry = `[${timestamp}] Task ID: ${task_id}\nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\nResponse:\n${generation}\n\n`;
                }
                const logFile = `conversation_${timestamp}.txt`;
                await this.saveToFile(logFile, logEntry);
                await this._saveLog(prompt, messages, generation, 'conversation');

            } catch (error) {
                console.error('Error during message generation or file writing:', error);

@@ -413,35 +363,19 @@ export class Prompter {
        let prompt = this.profile.coding;
        prompt = await this.replaceStrings(prompt, messages, this.coding_examples);

        let logEntry;
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        if (this.task_id === null) {
            logEntry = `[${timestamp}] \nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\n`;
        } else {
            logEntry = `[${timestamp}] Task ID: ${this.agent.task.task_id}\nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\n`;
        }

        const logFile = `coding_${timestamp}.txt`;
        await this.saveToFile(logFile, logEntry);
        let resp = await this.code_model.sendRequest(messages, prompt);
        this.awaiting_coding = false;
        await this._saveLog(prompt, messages, resp, 'coding');
        return resp;
    }

    async promptMemSaving(to_summarize) {
        await this.checkCooldown();
        let prompt = this.profile.saving_memory;
        let logEntry;
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        if (this.task_id === null) {
            logEntry = `[${timestamp}] \nPrompt:\n${prompt}\n\nTo Summarize:\n${JSON.stringify(messages, null, 2)}\n\n`;
        } else {
            logEntry = `[${timestamp}] Task ID: ${this.agent.task.task_id}\nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(to_summarize, null, 2)}\n\n`;
        }
        const logFile = `memSaving_${timestamp}.txt`;
        await this.saveToFile(logFile, logEntry);
        prompt = await this.replaceStrings(prompt, null, null, to_summarize);
        return await this.chat_model.sendRequest([], prompt);
        let resp = await this.chat_model.sendRequest([], prompt);
        await this._saveLog(prompt, null, resp, 'memSaving');
        return resp;
    }

    async promptShouldRespondToBot(new_message) {

@@ -455,6 +389,7 @@ export class Prompter {
    }

    async promptGoalSetting(messages, last_goals) {
        // deprecated
        let system_message = this.profile.goal_setting;
        system_message = await this.replaceStrings(system_message, messages);

@@ -479,4 +414,37 @@ export class Prompter {
        goal.quantity = parseInt(goal.quantity);
        return goal;
    }

    async _saveLog(prompt, messages, generation, tag) {
        if (!settings.log_all_prompts)
            return;
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        let logEntry;
        let task_id = this.agent.task.task_id;
        if (task_id == null) {
            logEntry = `[${timestamp}] \nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\nResponse:\n${generation}\n\n`;
        } else {
            logEntry = `[${timestamp}] Task ID: ${task_id}\nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\nResponse:\n${generation}\n\n`;
        }
        const logFile = `${tag}_${timestamp}.txt`;
        await this._saveToFile(logFile, logEntry);
    }

    async _saveToFile(logFile, logEntry) {
        let task_id = this.agent.task.task_id;
        console.log(task_id)
        let logDir;
        if (task_id == null) {
            logDir = path.join(__dirname, `../../bots/${this.agent.name}/logs`);
        } else {
            logDir = path.join(__dirname, `../../bots/${this.agent.name}/logs/${task_id}`);
        }

        await fs.mkdir(logDir, { recursive: true });

        logFile = path.join(logDir, logFile);
        await fs.appendFile(logFile, String(logEntry), 'utf-8');
    }

}
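Taken together, _saveLog and _saveToFile give every prompt its own timestamped file, grouped per agent and, when present, per task. A small sketch of the resulting path (agentName and task_id are invented; the real helper resolves logDir relative to __dirname rather than the working directory):

    import path from 'path';

    // naming scheme used by _saveLog/_saveToFile above
    const agentName = 'andy';
    const task_id = 'build_house'; // null when the agent has no task
    const logDir = task_id == null
        ? path.join('bots', agentName, 'logs')
        : path.join('bots', agentName, 'logs', String(task_id));
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    console.log(path.join(logDir, `coding_${timestamp}.txt`));
    // -> bots/andy/logs/build_house/coding_2025-07-25T09-25-23-000Z.txt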