use strict formatting for ollama

This commit is contained in:
MaxRobinsonTheGreat 2024-06-01 15:03:45 -05:00
parent 78db1255e8
commit 666b45a194
4 changed files with 43 additions and 34 deletions

View file

@ -62,7 +62,7 @@ export class Agent {
};
if (init_message) {
this.handleMessage('system', init_message);
this.handleMessage('MINECRAFT', init_message);
} else {
this.bot.chat('Hello world! I am ' + this.name);
this.bot.emit('finished_executing');

View file

@ -1,4 +1,5 @@
import Anthropic from '@anthropic-ai/sdk';
import { strictFormat } from '../utils/text.js';
import { getKey } from '../utils/keys.js';
export class Claude {
@ -15,36 +16,7 @@ export class Claude {
}
async sendRequest(turns, systemMessage) {
let prev_role = null;
let messages = [];
let filler = {role: 'user', content: '_'};
for (let msg of turns) {
if (msg.role === 'system') {
msg.role = 'user';
msg.content = 'SYSTEM: ' + msg.content;
}
if (msg.role === prev_role && msg.role === 'assistant') {
// insert empty user message to separate assistant messages
messages.push(filler);
messages.push(msg);
}
else if (msg.role === prev_role) {
// combine new message with previous message instead of adding a new one
messages[messages.length-1].content += '\n' + msg.content;
}
else {
messages.push(msg);
}
prev_role = msg.role;
}
if (messages.length > 0 && messages[0].role !== 'user') {
messages.unshift(filler); // anthropic requires user message to start
}
if (messages.length === 0) {
messages.push(filler);
}
const messages = strictFormat(turns);
let res = null;
try {
console.log('Awaiting anthropic api response...')

View file

@ -1,3 +1,5 @@
import { strictFormat } from '../utils/text.js';
export class Local {
constructor(model_name, url) {
this.model_name = model_name;
@ -8,8 +10,8 @@ export class Local {
async sendRequest(turns, systemMessage) {
let model = this.model_name || 'llama3';
let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
let messages = strictFormat(turns);
messages.unshift({role: 'system', content: systemMessage});
let res = null;
try {
console.log(`Awaiting local response... (model: ${model})`)
@ -56,4 +58,4 @@ export class Local {
}
return data;
}
}
}

View file

@ -24,4 +24,39 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam
if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message
prompt += model_nickname + ": ";
return prompt;
}
// ensures stricter turn order for anthropic/llama models
// combines repeated messages from the same role, separates repeat assistant messages with filler user messages
/**
 * Normalizes a chat transcript into the strict user/assistant alternation that
 * Anthropic and Llama-style chat APIs require.
 *
 * Rules applied, in order:
 *  - 'system' turns are demoted to 'user' turns with a 'SYSTEM: ' prefix;
 *  - consecutive turns from the same role are merged into one message
 *    (contents joined with '\n'), EXCEPT consecutive assistant turns, which
 *    are kept separate with a filler user message ('_') inserted between them;
 *  - the result always starts with a user message (a filler is prepended if
 *    needed — Anthropic requires the first message to be from the user);
 *  - an empty transcript yields a single filler user message.
 *
 * @param {{role: string, content: string}[]} turns - conversation history;
 *   NOT mutated — each turn is copied before any rewriting.
 * @returns {{role: string, content: string}[]} new array of new message objects.
 */
export function strictFormat(turns) {
    let prev_role = null;
    let messages = [];
    // factory instead of a shared object: each filler in the output is a
    // distinct object, so later in-place edits can't corrupt siblings
    const filler = () => ({role: 'user', content: '_'});
    for (const turn of turns) {
        // shallow copy so the caller's history objects are never mutated
        let msg = {role: turn.role, content: turn.content};
        if (msg.role === 'system') {
            msg.role = 'user';
            msg.content = 'SYSTEM: ' + msg.content;
        }
        if (msg.role === prev_role && msg.role === 'assistant') {
            // insert empty user message to separate assistant messages
            messages.push(filler());
            messages.push(msg);
        }
        else if (msg.role === prev_role) {
            // combine new message with previous message instead of adding a new one
            messages[messages.length-1].content += '\n' + msg.content;
        }
        else {
            messages.push(msg);
        }
        prev_role = msg.role;
    }
    if (messages.length > 0 && messages[0].role !== 'user') {
        messages.unshift(filler()); // anthropic requires user message to start
    }
    if (messages.length === 0) {
        messages.push(filler());
    }
    return messages;
}