fixed mem summary, save full history to file

MaxRobinsonTheGreat 2024-10-10 22:15:55 -05:00
parent 7b77c395d0
commit cb30485943
3 changed files with 43 additions and 17 deletions

View file

@@ -19,7 +19,8 @@ export default
     "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk
     "code_timeout_mins": 10, // minutes code is allowed to run. -1 for no timeout
+    "max_messages": 20, // max number of messages to keep in context
     "max_commands": -1, // max number of commands to use in a response. -1 for no limit
     "verbose_commands": true, // show full command syntax
     "narrate_behavior": true, // chat simple automatic actions ('Picking up item!')

View file

@@ -1,5 +1,6 @@
-import { writeFileSync, readFileSync } from 'fs';
+import { writeFileSync, readFileSync, mkdirSync } from 'fs';
 import { NPCData } from './npc/data.js';
+import settings from '../../settings.js';
 export class History {
@@ -7,25 +8,49 @@ export class History {
         this.agent = agent;
         this.name = agent.name;
         this.memory_fp = `./bots/${this.name}/memory.json`;
+        this.full_history_fp = undefined;
+        mkdirSync(`./bots/${this.name}/histories`, { recursive: true });
         this.turns = [];
         // These define an agent's long term memory
         // Natural language memory as a summary of recent messages + previous memory
         this.memory = '';
-        // Variables for controlling the agent's memory and knowledge
-        this.max_messages = 20;
+        // Maximum number of messages to keep in context before saving chunk to memory
+        this.max_messages = settings.max_messages;
+        // Number of messages to remove from current history and save into memory
+        this.summary_chunk_size = 5;
+        // chunking reduces expensive calls to promptMemSaving and appendFullHistory
     }
     getHistory() { // expects an Examples object
         return JSON.parse(JSON.stringify(this.turns));
     }
-    async storeMemories(turns) {
+    async summarizeMemories(turns) {
         console.log("Storing memories...");
-        this.memory = await this.agent.prompter.promptMemSaving(this.getHistory(), turns);
+        this.memory = await this.agent.prompter.promptMemSaving(turns);
         console.log("Memory updated to: ", this.memory);
     }
+    appendFullHistory(to_store) {
+        if (this.full_history_fp === undefined) {
+            const string_timestamp = new Date().toLocaleString().replace(/[/:]/g, '-').replace(/ /g, '').replace(/,/g, '_');
+            this.full_history_fp = `./bots/${this.name}/histories/${string_timestamp}.json`;
+            writeFileSync(this.full_history_fp, '[]', 'utf8');
+        }
+        try {
+            const data = readFileSync(this.full_history_fp, 'utf8');
+            let full_history = JSON.parse(data);
+            full_history.push(...to_store);
+            writeFileSync(this.full_history_fp, JSON.stringify(full_history, null, 4), 'utf8');
+        } catch (err) {
+            console.error(`Error reading ${this.name}'s full history file: ${err.message}`);
+        }
+    }
     async add(name, content) {
         let role = 'assistant';
         if (name === 'system') {
@@ -37,12 +62,12 @@ export class History {
         }
         this.turns.push({role, content});
         // Summarize older turns into memory
         if (this.turns.length >= this.max_messages) {
-            let to_summarize = [this.turns.shift()];
+            let chunk = this.turns.splice(0, this.summary_chunk_size);
             while (this.turns[0].role != 'user' && this.turns.length > 1)
-                to_summarize.push(this.turns.shift());
-            await this.storeMemories(to_summarize);
+                chunk.push(this.turns.shift()); // first message must be user message
+            await this.summarizeMemories(chunk);
+            this.appendFullHistory(chunk);
         }
     }
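
The interaction of the two new knobs is easiest to see with concrete numbers. Below is a self-contained sketch (toy turns only; the splice/shift logic is copied from add() above) of what happens when the cap is hit, assuming max_messages = 20 and summary_chunk_size = 5:

    // Once the 20th turn is pushed, the oldest 5 turns are spliced off as one
    // chunk, and extra turns are shifted until the remaining history starts on
    // a user message. The chunk is then summarized into memory and appended to
    // the on-disk history file.
    const max_messages = 20;
    const summary_chunk_size = 5;
    let turns = [];
    for (let i = 0; i < max_messages; i++)
        turns.push({ role: i % 2 === 0 ? 'user' : 'assistant', content: `turn ${i}` });

    let chunk = turns.splice(0, summary_chunk_size);
    while (turns[0].role != 'user' && turns.length > 1)
        chunk.push(turns.shift()); // turn 5 is assistant, so one extra shift

    console.log(chunk.length);  // 6
    console.log(turns[0]);      // { role: 'user', content: 'turn 6' }

One caveat: the history filename in appendFullHistory comes from toLocaleString(), so it is locale-dependent; in an en-US locale the file created by this commit would be named something like 10-10-2024_10-15-55PM.json.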

View file

@@ -113,7 +113,7 @@ export class Prompter {
         ]);
     }
-    async replaceStrings(prompt, messages, examples=null, prev_memory=null, to_summarize=[], last_goals=null) {
+    async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) {
         prompt = prompt.replaceAll('$NAME', this.agent.name);
         if (prompt.includes('$STATS')) {
@@ -131,13 +131,13 @@
         if (prompt.includes('$EXAMPLES') && examples !== null)
             prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages));
         if (prompt.includes('$MEMORY'))
-            prompt = prompt.replaceAll('$MEMORY', prev_memory ? prev_memory : 'None.');
+            prompt = prompt.replaceAll('$MEMORY', this.agent.history.memory);
         if (prompt.includes('$TO_SUMMARIZE'))
             prompt = prompt.replaceAll('$TO_SUMMARIZE', stringifyTurns(to_summarize));
         if (prompt.includes('$CONVO'))
             prompt = prompt.replaceAll('$CONVO', 'Recent conversation:\n' + stringifyTurns(messages));
         if (prompt.includes('$SELF_PROMPT')) {
-            let self_prompt = this.agent.self_prompter.on ? `Use this self-prompt to guide your behavior: "${this.agent.self_prompter.prompt}"\n` : '';
+            let self_prompt = this.agent.self_prompter.on ? `YOUR CURRENT ASSIGNED GOAL: "${this.agent.self_prompter.prompt}"\n` : '';
             prompt = prompt.replaceAll('$SELF_PROMPT', self_prompt);
         }
         if (prompt.includes('$LAST_GOALS')) {
@@ -180,9 +180,9 @@ export class Prompter {
         return await this.chat_model.sendRequest(messages, prompt);
     }
-    async promptMemSaving(prev_mem, to_summarize) {
+    async promptMemSaving(to_summarize) {
         let prompt = this.profile.saving_memory;
-        prompt = await this.replaceStrings(prompt, null, null, prev_mem, to_summarize);
+        prompt = await this.replaceStrings(prompt, null, null, to_summarize);
         return await this.chat_model.sendRequest([], prompt);
     }
@@ -192,7 +192,7 @@
         let user_message = 'Use the below info to determine what goal to target next\n\n';
         user_message += '$LAST_GOALS\n$STATS\n$INVENTORY\n$CONVO'
-        user_message = await this.replaceStrings(user_message, messages, null, null, null, last_goals);
+        user_message = await this.replaceStrings(user_message, messages, null, null, last_goals);
         let user_messages = [{role: 'user', content: user_message}];
         let res = await this.chat_model.sendRequest(user_messages, system_message);
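
Since promptMemSaving no longer threads a prev_mem argument through, $MEMORY is now always resolved from the agent's history object. A self-contained sketch of the reworked substitution (toy data; stringifyTurns here is a stand-in for the real helper the Prompter imports):

    const history = { memory: 'Built a shelter near spawn with andy.' };
    const to_summarize = [
        { role: 'user', content: 'andy: follow me' },
        { role: 'assistant', content: 'On my way!' },
    ];
    // stand-in for the stringifyTurns helper used by the real Prompter
    const stringifyTurns = (turns) => turns.map(t => `${t.role}: ${t.content}`).join('\n');

    let prompt = 'Old memory: $MEMORY\nNew conversation to fold in:\n$TO_SUMMARIZE';
    prompt = prompt.replaceAll('$MEMORY', history.memory);
    prompt = prompt.replaceAll('$TO_SUMMARIZE', stringifyTurns(to_summarize));
    console.log(prompt);

One behavioral difference worth noting: the old code substituted 'None.' when no previous memory existed, while the new line substitutes history.memory directly, so an empty memory now becomes an empty string in the prompt.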