From 056e1bce53f468d2c51ed3d7782566247a07e278 Mon Sep 17 00:00:00 2001
From: Isadora White
Date: Thu, 20 Mar 2025 15:27:16 -0500
Subject: [PATCH] backwards compatibility for llama

---
 src/models/vllm.js | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/models/vllm.js b/src/models/vllm.js
index fdc14af..7070d15 100644
--- a/src/models/vllm.js
+++ b/src/models/vllm.js
@@ -23,7 +23,10 @@ export class VLLM {
     async sendRequest(turns, systemMessage, stop_seq = '***') {
         let messages = [{ 'role': 'system', 'content': systemMessage }].concat(turns);
-        messages = strictFormat(messages);
+        if (this.model_name.includes("deepseek") || this.model_name.includes("qwen")) {
+            messages = strictFormat(messages);
+        }
+
         const pack = {
             model: this.model_name || "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
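
For context, here is a minimal standalone sketch of how the guarded call behaves after this patch. The strictFormat body and the buildMessages helper below are simplified stand-ins for illustration, not the repository's actual implementations; the model names in the usage example are likewise assumptions:

    // Hypothetical stand-in for the repo's strictFormat helper: it
    // normalizes roles/content so strict chat templates accept them.
    function strictFormat(messages) {
        return messages.map(m => ({ role: m.role, content: String(m.content ?? '') }));
    }

    class VLLM {
        constructor(model_name) {
            this.model_name = model_name;
        }

        // Illustrative helper mirroring the patched logic in sendRequest.
        buildMessages(turns, systemMessage) {
            let messages = [{ role: 'system', content: systemMessage }].concat(turns);
            // Only deepseek/qwen model names trigger the strict
            // normalization; llama models keep their raw messages,
            // which is the backwards compatibility this patch adds.
            if (this.model_name.includes('deepseek') || this.model_name.includes('qwen')) {
                messages = strictFormat(messages);
            }
            return messages;
        }
    }

    // Usage: a llama model skips strictFormat, a qwen distill gets it.
    const llama = new VLLM('meta-llama/Llama-3.1-8B-Instruct');
    const qwen = new VLLM('deepseek-ai/DeepSeek-R1-Distill-Qwen-32B');
    console.log(llama.buildMessages([{ role: 'user', content: 'hi' }], 'sys'));
    console.log(qwen.buildMessages([{ role: 'user', content: 'hi' }], 'sys'));

Note that the gate keys on substring matching of the model name, so any model whose name contains "deepseek" or "qwen" gets the strict formatting while everything else passes through unchanged.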