Mirror of https://github.com/kolbytn/mindcraft.git (synced 2025-03-28 14:56:24 +01:00)
Set back the default value, make the instruct variant of Mistral the default, and fix the model being set to none
parent 54a45c3dfe
commit 5951fd02e5
3 changed files with 10 additions and 11 deletions
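
The "fix the model being set to none" part refers to an accidental assignment in the old empty-model check, where = was used instead of == (see the third file below). As a standalone JavaScript illustration of that bug, not code from this repository:

    // Assignment inside a condition evaluates to the assigned value.
    let model_name = 'mistral';
    if (model_name = "") {        // assigns "" (falsy), so the branch never runs
        throw new Error('Model is not specified!');
    }
    console.log(model_name);      // "" -- the model name has been wiped out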
@@ -23,7 +23,7 @@ Add one of these environment variables:

 Or you can also use Ollama.
 To install the models used by default (generation and embedding), execute the following script:

-`ollama pull mistral && ollama pull nomic-embed-text`
+`ollama pull mistral:instruct && ollama pull nomic-embed-text`

 Then, clone/download this repository

@@ -1,7 +1,7 @@
 {
     "name": "radley",

-    "model": "ollama[mistral]",
+    "model": "ollama[mistral:instruct]",

     "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:",
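
The "ollama[mistral:instruct]" value is parsed by the getContentInBrackets helper referenced in the next file. Its implementation is not part of this diff; a minimal sketch of what such a helper might look like, for illustration only:

    // Hypothetical sketch -- returns the text between the first '[' and the
    // last ']', or '' when no bracketed model name is present.
    function getContentInBrackets(str) {
        const start = str.indexOf('[');
        const end = str.lastIndexOf(']');
        if (start === -1 || end === -1 || end <= start) return '';
        return str.substring(start + 1, end);
    }

    // getContentInBrackets('ollama[mistral:instruct]') -> 'mistral:instruct'
    // getContentInBrackets('ollama') -> '' (which triggers the error shown below)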

@@ -17,8 +17,13 @@ function getContentInBrackets(str) {

 export class Ollama {
     constructor(model_name) {
+        this.model_name = getContentInBrackets(model_name);
         let ollamaConfig = null;

+        if (this.model_name == "") {
+            throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]');
+        }
+
         axios.get(ollamaSettings["url"]).then(response => {

             if (response.status === 200) {
@@ -27,12 +32,6 @@ export class Ollama {
                 apiKey: 'ollama', // required but unused
             };

-            this.model_name = getContentInBrackets(model_name);
-
-            if (this.model_name = "") {
-                throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]');
-            }
-
             this.openai = new OpenAIApi(ollamaConfig);
         }
         else {
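
Taken together, the two hunks above move the model-name parsing and the empty-model check from inside the asynchronous axios callback to the top of the constructor, and replace the accidental assignment (=) with a comparison (==), so a missing model now fails immediately. A rough sketch of the resulting constructor, reconstructed from the diff context (the config details and the else branch are elided or assumed):

    // Sketch only -- reconstructed from the hunks above; getContentInBrackets,
    // ollamaSettings, axios and OpenAIApi come from elsewhere in the file.
    export class Ollama {
        constructor(model_name) {
            // Parse "ollama[mistral:instruct]" -> "mistral:instruct" up front.
            this.model_name = getContentInBrackets(model_name);
            let ollamaConfig = null;

            if (this.model_name == "") {
                throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]');
            }

            axios.get(ollamaSettings["url"]).then(response => {
                if (response.status === 200) {
                    ollamaConfig = {
                        // baseURL assumed; only the apiKey line is shown in the diff
                        apiKey: 'ollama', // required but unused
                    };
                    this.openai = new OpenAIApi(ollamaConfig);
                }
                // else: fallback handling elided (not visible in this diff)
            });
        }
    }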
@@ -46,15 +45,15 @@ export class Ollama {

     async sendRequest(turns, systemMessage, stop_seq='***') {

-        let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
-
-        console.log(this.model_name)
+        let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);

         let res = null;
         try {
-            console.log('Awaiting openai api response...')
+            console.log(`Awaiting ollama response... (model: ${this.model_name})`)
+            console.log('Messages:', messages);
             let completion = await this.openai.chat.completions.create({

                 model: this.model_name,
                 messages: messages,
                 stop: stop_seq,
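
For context, a hedged sketch of how the updated sendRequest might be called; the calling code is not part of this commit, and the turn shape is assumed from the OpenAI-compatible chat format used above:

    // Hypothetical usage, not from this diff.
    const bot = new Ollama('ollama[mistral:instruct]');

    const turns = [
        { role: 'user', content: 'hi! can you come to me?' },
    ];

    // systemMessage becomes the first chat message; stop_seq defaults to '***'.
    // Assumed here to resolve with the model's reply text.
    bot.sendRequest(turns, 'You are a playful Minecraft bot named radley.')
        .then(reply => console.log(reply));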