diff --git a/README.md b/README.md
index 7e4c047..af1c670 100644
--- a/README.md
+++ b/README.md
@@ -115,7 +115,7 @@ LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However,
 "embedding": {
     "api": "openai",
     "url": "https://api.openai.com/v1/",
-    "model": "text-embedding-ada-002"
+    "model": "text-embedding-3-small"
 }
 ```
 
diff --git a/src/models/gpt.js b/src/models/gpt.js
index 67511d2..53430a1 100644
--- a/src/models/gpt.js
+++ b/src/models/gpt.js
@@ -55,7 +55,7 @@ export class GPT {
 
     async embed(text) {
         const embedding = await this.openai.embeddings.create({
-            model: this.model_name || "text-embedding-ada-002",
+            model: this.model_name || "text-embedding-3-small",
             input: text,
             encoding_format: "float",
         });
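
Note on the `src/models/gpt.js` change: it only swaps the fallback used when a profile does not configure an embedding model; profiles that pin a model explicitly are unaffected. A minimal standalone sketch of the resulting call path, assuming the official `openai` Node SDK (the wrapper function and usage below are illustrative, not part of this patch):

```js
import OpenAI from "openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

// Mirrors the updated embed(): fall back to "text-embedding-3-small"
// whenever no embedding model is configured for the profile.
async function embed(text, model_name) {
    const response = await openai.embeddings.create({
        model: model_name || "text-embedding-3-small",
        input: text,
        encoding_format: "float",
    });
    // The SDK returns one embedding object per input string.
    return response.data[0].embedding;
}

const vector = await embed("hello world");
console.log(vector.length); // 1536 dimensions for text-embedding-3-small
```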