diff --git a/src/agent/tasks/tasks.js b/src/agent/tasks/tasks.js index 1c00d95..a7948f5 100644 --- a/src/agent/tasks/tasks.js +++ b/src/agent/tasks/tasks.js @@ -426,7 +426,6 @@ export class Task { } async initBotTask() { - await this.setAgentGoal(); await this.agent.bot.chat(`/clear ${this.name}`); console.log(`Cleared ${this.name}'s inventory.`); @@ -511,7 +510,7 @@ export class Task { this.agent.killAll(); } } - + await new Promise((resolve) => setTimeout(resolve, 500)); if (this.data.conversation && this.agent.count_id === 0) { let other_name = this.available_agents.filter(n => n !== this.name)[0]; let waitCount = 0; @@ -526,8 +525,7 @@ export class Task { } await executeCommand(this.agent, `!startConversation("${other_name}", "${this.data.conversation}")`); } - - + await this.setAgentGoal(); } async teleportBots() { diff --git a/src/models/vllm.js b/src/models/vllm.js index 52e3e5b..e9116ef 100644 --- a/src/models/vllm.js +++ b/src/models/vllm.js @@ -38,6 +38,7 @@ export class VLLM { try { console.log('Awaiting openai api response...') // console.log('Messages:', messages); + // TODO: set max_tokens, temperature, top_p, etc. in pack let completion = await this.vllm.chat.completions.create(pack); if (completion.choices[0].finish_reason == 'length') throw new Error('Context length exceeded');