fix grok prefix and request signature, update default models across providers

This commit is contained in:
MaxRobinsonTheGreat 2025-08-23 14:33:26 -05:00
parent 4bf0deaa1b
commit 6c8382874d
8 changed files with 11 additions and 12 deletions

View file

@ -1,7 +1,7 @@
{
"name": "claude",
"model": "claude-3-5-sonnet-latest",
"model": "claude-4-sonnet-latest",
"embedding": "openai"
}

View file

@ -2,7 +2,7 @@
"name": "claude_thinker",
"model": {
"model": "claude-3-7-sonnet-latest",
"model": "claude-4-sonnet-latest",
"params": {
"thinking": {
"type": "enabled",

View file

@ -1,7 +1,7 @@
{
"name": "gemini",
"model": "gemini-2.0-flash",
"model": "gemini-2.5-flash",
"cooldown": 5000
}

View file

@ -1,7 +1,7 @@
{
"name": "Grok",
"model": "grok-beta",
"model": "grok-3-mini-latest",
"embedding": "openai"
}

View file

@ -37,7 +37,7 @@ export class Gemini {
async sendRequest(turns, systemMessage) {
let model;
const modelConfig = {
model: this.model_name || "gemini-1.5-flash",
model: this.model_name || "gemini-2.5-flash",
// systemInstruction does not work with the Google API here
};
if (this.url) {

View file

@ -3,7 +3,7 @@ import { getKey } from '../utils/keys.js';
// xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs
export class Grok {
static prefix = 'grok';
static prefix = 'xai';
constructor(model_name, url, params) {
this.model_name = model_name;
this.url = url;
@ -20,13 +20,12 @@ export class Grok {
this.openai = new OpenAIApi(config);
}
async sendRequest(turns, systemMessage, stop_seq='***') {
async sendRequest(turns, systemMessage) {
let messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
const pack = {
model: this.model_name || "grok-beta",
model: this.model_name || "grok-3-mini-latest",
messages,
stop: [stop_seq],
...(this.params || {})
};
@ -43,7 +42,7 @@ export class Grok {
catch (err) {
if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) {
console.log('Context length exceeded, trying again with shorter context.');
return await this.sendRequest(turns.slice(1), systemMessage, stop_seq);
return await this.sendRequest(turns.slice(1), systemMessage);
} else if (err.message.includes('The model expects a single `text` element per message.')) {
console.log(err);
res = 'Vision is only supported by certain models.';

View file

@ -50,7 +50,7 @@ export class GroqCloudAPI {
let completion = await this.groq.chat.completions.create({
"messages": messages,
"model": this.model_name || "llama-3.3-70b-versatile",
"model": this.model_name || "qwen/qwen3-32b",
"stream": false,
"stop": stop_seq,
...(this.params || {})

View file

@ -26,7 +26,7 @@ export class Novita {
messages = strictFormat(messages);
const pack = {
model: this.model_name || "meta-llama/llama-3.1-70b-instruct",
model: this.model_name || "meta-llama/llama-4-scout-17b-16e-instruct",
messages,
stop: [stop_seq],
...(this.params || {})