Added logging code to prompter.js
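Wraps the chat-model call in promptConvo in a try/catch, validates that the response is a string, and appends each prompt, conversation, and response to a per-task log file under logs/<task_id>/. The previous implementation is kept above the new one as a commented-out reference.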

Charvi Bannur 2025-03-07 14:33:57 -08:00
parent c68ea07159
commit 3525a7130f


@@ -21,6 +21,12 @@ import { Grok } from "./grok.js";
import { DeepSeek } from './deepseek.js';
import { OpenRouter } from './openrouter.js';
import { VLLM } from './vllm.js';
import { promises as fs } from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
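// ES modules do not provide __filename/__dirname, so derive them from import.meta.url to resolve the log directory below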
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
export class Prompter {
constructor(agent, fp) {
@@ -300,29 +306,89 @@ export class Prompter {
this.last_prompt_time = Date.now();
}
// async promptConvo(messages) {
// this.most_recent_msg_time = Date.now();
// let current_msg_time = this.most_recent_msg_time;
// for (let i = 0; i < 3; i++) { // try 3 times to avoid hallucinations
// await this.checkCooldown();
// if (current_msg_time !== this.most_recent_msg_time) {
// return '';
// }
// let prompt = this.profile.conversing;
// prompt = await this.replaceStrings(prompt, messages, this.convo_examples);
// let generation = await this.chat_model.sendRequest(messages, prompt);
// // in conversations >2 players LLMs tend to hallucinate and role-play as other bots
// // the FROM OTHER BOT tag should never be generated by the LLM
// if (generation.includes('(FROM OTHER BOT)')) {
// console.warn('LLM hallucinated message as another bot. Trying again...');
// continue;
// }
// if (current_msg_time !== this.most_recent_msg_time) {
// console.warn(this.agent.name + ' received new message while generating, discarding old response.');
// return '';
// }
// return generation;
// }
// return '';
// }
async promptConvo(messages) {
// console.log(`[${new Date().toISOString()}] promptConvo called with messages:`, messages);
this.most_recent_msg_time = Date.now();
let current_msg_time = this.most_recent_msg_time;
for (let i = 0; i < 3; i++) { // try 3 times to avoid hallucinations
await this.checkCooldown();
if (current_msg_time !== this.most_recent_msg_time) {
return '';
}
let prompt = this.profile.conversing;
prompt = await this.replaceStrings(prompt, messages, this.convo_examples);
let generation;
try {
generation = await this.chat_model.sendRequest(messages, prompt);
let task_id = this.agent.task.task_id;
console.log(task_id);
if (typeof generation !== 'string') {
console.error('Error: Generated response is not a string', generation);
throw new Error('Generated response is not a string');
}
console.log("Generated response:", generation);
// Create directory if it doesn't exist
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const logDir = path.join(__dirname, 'logs', String(task_id));
await fs.mkdir(logDir, { recursive: true });
// Write prompt & conversation to a task-specific file
const logFile = path.join(logDir, `conversation_${timestamp}.txt`);
const logEntry = `[${new Date().toISOString()}] Task ID: ${task_id}\nPrompt:\n${prompt}\n\nConversation:\n${JSON.stringify(messages, null, 2)}\n\nResponse:\n${generation}\n\n`;
await fs.appendFile(logFile, String(logEntry), 'utf-8');
} catch (error) {
console.error('Error during message generation or file writing:', error);
continue;
}
// Check for hallucination or invalid output
if (generation?.includes('(FROM OTHER BOT)')) {
console.warn('LLM hallucinated message as another bot. Trying again...');
continue;
}
if (current_msg_time !== this.most_recent_msg_time) {
console.warn(`${this.agent.name} received new message while generating, discarding old response.`);
return '';
}
return generation;
}
return '';
}
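
For reference, a minimal sketch of how the per-task logs written above could be read back. The logs/<task_id>/conversation_<timestamp>.txt layout comes from the code in this commit; the readTaskLogs helper, its baseDir parameter, and its placement are illustrative assumptions, not part of the change.

import { promises as fs } from 'fs';
import path from 'path';

// Hypothetical helper (not part of this commit): gather every conversation log
// written for one task, assuming the logs/<task_id>/conversation_<timestamp>.txt
// layout produced by promptConvo above.
async function readTaskLogs(baseDir, taskId) {
    const logDir = path.join(baseDir, 'logs', String(taskId));
    const files = await fs.readdir(logDir);
    const entries = [];
    for (const file of files.sort()) { // ISO timestamps sort lexicographically
        if (!file.startsWith('conversation_')) continue;
        const text = await fs.readFile(path.join(logDir, file), 'utf-8');
        entries.push({ file, text });
    }
    return entries;
}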