diff --git a/src/agent/agent.js b/src/agent/agent.js
index deed4f5..ed11fc3 100644
--- a/src/agent/agent.js
+++ b/src/agent/agent.js
@@ -62,7 +62,7 @@ export class Agent {
         };
 
         if (init_message) {
-            this.handleMessage('system', init_message);
+            this.handleMessage('MINECRAFT', init_message);
         } else {
             this.bot.chat('Hello world! I am ' + this.name);
             this.bot.emit('finished_executing');
diff --git a/src/models/claude.js b/src/models/claude.js
index 46691d5..0304714 100644
--- a/src/models/claude.js
+++ b/src/models/claude.js
@@ -1,4 +1,5 @@
 import Anthropic from '@anthropic-ai/sdk';
+import { strictFormat } from '../utils/text.js';
 import { getKey } from '../utils/keys.js';
 
 export class Claude {
@@ -15,36 +16,7 @@ export class Claude {
     }
 
     async sendRequest(turns, systemMessage) {
-        let prev_role = null;
-        let messages = [];
-        let filler = {role: 'user', content: '_'};
-        for (let msg of turns) {
-            if (msg.role === 'system') {
-                msg.role = 'user';
-                msg.content = 'SYSTEM: ' + msg.content;
-            }
-            if (msg.role === prev_role && msg.role === 'assistant') {
-                // insert empty user message to separate assistant messages
-                messages.push(filler);
-                messages.push(msg);
-            }
-            else if (msg.role === prev_role) {
-                // combine new message with previous message instead of adding a new one
-                messages[messages.length-1].content += '\n' + msg.content;
-            }
-            else {
-                messages.push(msg);
-            }
-            prev_role = msg.role;
-
-        }
-        if (messages.length > 0 && messages[0].role !== 'user') {
-            messages.unshift(filler); // anthropic requires user message to start
-        }
-        if (messages.length === 0) {
-            messages.push(filler);
-        }
-
+        const messages = strictFormat(turns);
         let res = null;
         try {
             console.log('Awaiting anthropic api response...')
diff --git a/src/models/local.js b/src/models/local.js
index dd3af34..21b70df 100644
--- a/src/models/local.js
+++ b/src/models/local.js
@@ -1,3 +1,5 @@
+import { strictFormat } from '../utils/text.js';
+
 export class Local {
     constructor(model_name, url) {
         this.model_name = model_name;
@@ -8,8 +10,8 @@
 
     async sendRequest(turns, systemMessage) {
         let model = this.model_name || 'llama3';
-        let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
-
+        let messages = strictFormat(turns);
+        messages.unshift({role: 'system', content: systemMessage});
         let res = null;
         try {
             console.log(`Awaiting local response... (model: ${model})`)
@@ -56,4 +58,4 @@ export class Local {
         }
         return data;
     }
-}
+}
\ No newline at end of file
diff --git a/src/utils/text.js b/src/utils/text.js
index b0a273a..bf7b509 100644
--- a/src/utils/text.js
+++ b/src/utils/text.js
@@ -24,4 +24,39 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam
     if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message
         prompt += model_nickname + ": ";
     return prompt;
-}
+}
+
+// ensures stricter turn order for anthropic/llama models
+// combines repeated messages from the same role, separates repeat assistant messages with filler user messages
+export function strictFormat(turns) {
+    let prev_role = null;
+    let messages = [];
+    let filler = {role: 'user', content: '_'};
+    for (let msg of turns) {
+        if (msg.role === 'system') {
+            msg.role = 'user';
+            msg.content = 'SYSTEM: ' + msg.content;
+        }
+        if (msg.role === prev_role && msg.role === 'assistant') {
+            // insert empty user message to separate assistant messages
+            messages.push(filler);
+            messages.push(msg);
+        }
+        else if (msg.role === prev_role) {
+            // combine new message with previous message instead of adding a new one
+            messages[messages.length-1].content += '\n' + msg.content;
+        }
+        else {
+            messages.push(msg);
+        }
+        prev_role = msg.role;
+
+    }
+    if (messages.length > 0 && messages[0].role !== 'user') {
+        messages.unshift(filler); // anthropic requires user message to start
+    }
+    if (messages.length === 0) {
+        messages.push(filler);
+    }
+    return messages;
+}
\ No newline at end of file
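
For reference, a minimal sketch of what the extracted strictFormat helper does to an out-of-order conversation. The standalone script, its import path, and the sample turns are illustrative only and are not part of the diff:

// Illustrative usage of strictFormat(); assumes a Node ESM script run from the repo root.
import { strictFormat } from './src/utils/text.js';

const turns = [
    { role: 'system', content: 'You are a helpful bot.' },   // rewritten as a 'user' turn prefixed with 'SYSTEM: '
    { role: 'assistant', content: 'Hello!' },
    { role: 'assistant', content: 'Anyone there?' },          // a filler user turn ('_') is inserted before this one
    { role: 'user', content: 'Hi.' },
    { role: 'user', content: 'Collect some wood.' },          // merged into the previous user turn with a newline
];

console.log(strictFormat(turns));
// [
//   { role: 'user', content: 'SYSTEM: You are a helpful bot.' },
//   { role: 'assistant', content: 'Hello!' },
//   { role: 'user', content: '_' },
//   { role: 'assistant', content: 'Anyone there?' },
//   { role: 'user', content: 'Hi.\nCollect some wood.' }
// ]

Note that strictFormat mutates the incoming turn objects (system roles are rewritten in place) and reuses the same filler object for every inserted separator; Claude passes the result to the API directly, while Local unshifts the system prompt back on afterward, as shown in the diff above.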