This commit is contained in:
Copper 2024-06-18 19:10:56 -07:00 committed by GitHub
parent 1de106be19
commit a6359afa8a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -2,32 +2,68 @@ const Groq = require('groq-sdk');
import { getKey } from '../utils/keys.js'; import { getKey } from '../utils/keys.js';
export class Mixtral {
    /**
     * Chat-model wrapper around the Groq SDK for Mixtral models.
     * @param {string} model_name - Groq model id; falls back to
     *     "mixtral-8x7b-32768" when falsy.
     * @param {string} url - Endpoint URL. Stored for interface parity with the
     *     other model wrappers; the Groq SDK does not consume it here.
     */
    constructor(model_name, url) {
        this.model_name = model_name;
        this.url = url;
        // Fix: the groq-sdk client takes an options object. Passing the raw
        // key string left `apiKey` unset, so the client silently fell back to
        // the GROQ_API_KEY environment variable instead of the configured key.
        this.groq = new Groq({ apiKey: getKey('GROQ_API_KEY') });
    }

    /**
     * Send a chat-completion request and stream the reply into a string.
     * @param {Array<{role: string, content: string}>} turns - Prior
     *     conversation turns appended after the system message.
     * @param {string} systemMessage - System prompt for this request.
     * @param {string} [stop_seq="***"] - Stop sequence passed to the API.
     * @returns {Promise<string>} The assembled response text, or a canned
     *     fallback message on failure (errors are logged, never rethrown).
     */
    async sendRequest(turns, systemMessage, stop_seq="***") {
        const messages = [{"role": "system", "content": systemMessage}].concat(turns);
        let res = null;
        try {
            console.log("Awaiting Groq response...");
            const completion = await this.groq.chat.completions.create({
                "messages": messages,
                "model": this.model_name || "mixtral-8x7b-32768",
                "temperature": 0.85,
                "max_tokens": 8192,
                "top_p": 1,
                "stream": true,
                "stop": stop_seq
            });
            // Streaming mode: accumulate delta chunks into one string.
            let temp_res = "";
            for await (const chunk of completion) {
                temp_res += chunk.choices[0]?.delta?.content || '';
            }
            res = temp_res;
        }
        catch(err) {
            // Deliberate best-effort: surface a friendly message to the
            // caller rather than throwing.
            console.log(err);
            res = "My brain just kinda stopped working. Try again.";
        }
        return res;
    }

    /**
     * Embeddings are not offered by the Groq API, so this is a no-op stub.
     * @param {string} text - Ignored.
     * @returns {Promise<undefined>} Always resolves to undefined.
     */
    async embed(text) {
        console.log("big oof, embeds on groq dont is not thing");
    }
}
const groq = new Groq();
async function definitelynotmain() { async function definitelynotmain() {
const chatCompletion = await groq.chat.completions.create({ const chatCompletion = await groq.chat.completions.create({
"messages": [ "messages": "",
{
"role": "system",
"content": "i like grapes"
},
{
"role": "user",
"content": ""
}
],
"model": "mixtral-8x7b-32768", "model": "mixtral-8x7b-32768",
"temperature": 0.85, "temperature": 0.85,
"max_tokens": 8192, "max_tokens": 8192,
"top_p": 1, "top_p": 1,
"stream": true, "stream": true,
"stop": null "stop": "***"
}); });
for await (const chunk of chatCompletion) { for await (const chunk of chatCompletion) {