              Vector Database
+            <div className="flex flex-col w-60">
+              <label className="text-white text-sm font-semibold block mb-4">
+                Model Preference
+              </label>
+              <select
+                disabled={true}
+                className="bg-zinc-900 text-white text-sm rounded-lg block w-full p-2.5"
+              >
+                <option disabled={true} selected={true}>
+                  text-embedding-ada-002
+                </option>
+              </select>
+            </div>
          )}
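
Note: the hunk above is the onboarding embedding-preference UI. OpenAI's embedding engine currently exposes a single model, so the "Model Preference" selector is rendered disabled with text-embedding-ada-002 pre-selected.
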
diff --git a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
index e3582309021..73e68c05cbb 100644
--- a/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/OnboardingModal/Steps/LLMSelection/index.jsx
@@ -46,15 +46,7 @@ function LLMSelection({ nextStep, prevStep, currentStep }) {
alert(`Failed to save LLM settings: ${error}`, "error");
return;
}
-
- switch (data.LLMProvider) {
- case "anthropic":
- case "lmstudio":
- case "localai":
- return nextStep("embedding_preferences");
- default:
- return nextStep("vector_database");
- }
+ nextStep("embedding_preferences");
};
if (loading)
diff --git a/frontend/src/pages/OnboardingFlow/OnboardingModal/index.jsx b/frontend/src/pages/OnboardingFlow/OnboardingModal/index.jsx
index c4dceed874e..d00b7e95664 100644
--- a/frontend/src/pages/OnboardingFlow/OnboardingModal/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/OnboardingModal/index.jsx
@@ -65,8 +65,7 @@ const STEPS = {
},
embedding_preferences: {
title: "Embedding Preference",
- description:
- "Due to your LLM selection you need to set up a provider for embedding files and text.",
+ description: "Choose a provider for embedding files and text.",
component: EmbeddingSelection,
},
};
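
Note: taken together, the two frontend hunks make the embedding step unconditional. Previously only anthropic, lmstudio, and localai — the providers without a built-in embedder — were routed through it, while openai and azure skipped straight to the vector-database step. A minimal sketch of the resulting wizard order, assuming EmbeddingSelection still advances to the vector-database step (its handler is not shown in this diff):

// Illustrative only - the real STEPS map carries titles, descriptions,
// and component references; "next" stands in for each step's handler.
const STEP_ORDER = {
  llm_preference: "embedding_preferences", // was conditional, now always
  embedding_preferences: "vector_database",
  vector_database: "final_step", // placeholder for whatever follows
};
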
diff --git a/server/utils/AiProviders/azureOpenAi/index.js b/server/utils/AiProviders/azureOpenAi/index.js
index a424902b166..82e28204bca 100644
--- a/server/utils/AiProviders/azureOpenAi/index.js
+++ b/server/utils/AiProviders/azureOpenAi/index.js
@@ -1,9 +1,8 @@
const { AzureOpenAiEmbedder } = require("../../EmbeddingEngines/azureOpenAi");
const { chatPrompt } = require("../../chats");
-class AzureOpenAiLLM extends AzureOpenAiEmbedder {
- constructor() {
- super();
+class AzureOpenAiLLM {
+ constructor(embedder = null) {
const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
if (!process.env.AZURE_OPENAI_ENDPOINT)
throw new Error("No Azure API endpoint was set.");
@@ -20,6 +19,12 @@ class AzureOpenAiLLM extends AzureOpenAiEmbedder {
system: this.promptWindowLimit() * 0.15,
user: this.promptWindowLimit() * 0.7,
};
+
+ if (!embedder)
+ console.warn(
+ "No embedding provider defined for AzureOpenAiLLM - falling back to AzureOpenAiEmbedder for embedding!"
+ );
+ this.embedder = !embedder ? new AzureOpenAiEmbedder() : embedder;
}
streamingEnabled() {
@@ -114,6 +119,14 @@ Context:
return data.choices[0].message.content;
}
+ // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
+ async embedTextInput(textInput) {
+ return await this.embedder.embedTextInput(textInput);
+ }
+ async embedChunks(textChunks = []) {
+ return await this.embedder.embedChunks(textChunks);
+ }
+
async compressMessages(promptArgs = {}, rawHistory = []) {
const { messageArrayCompressor } = require("../../helpers/chat");
const messageArray = this.constructPrompt(promptArgs);
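
Note: the AzureOpenAiLLM change above (and the identical OpenAiLLM change below) swaps inheritance for composition. Rather than being an embedder, the LLM class now receives one and delegates to it, warning and falling back to its provider's own embedder when none is supplied. A minimal sketch of the pattern — ExampleLLM and StubEmbedder are illustrative names, not part of the patch:

// A stand-in embedder exposing the two-method interface the patch
// standardizes on: embedTextInput for one string, embedChunks for many.
class StubEmbedder {
  async embedTextInput(textInput) {
    return [0.0, 0.0, 0.0]; // placeholder vector
  }
  async embedChunks(textChunks = []) {
    return textChunks.map(() => [0.0, 0.0, 0.0]);
  }
}

class ExampleLLM {
  constructor(embedder = null) {
    if (!embedder)
      console.warn("No embedder provided - falling back to the default.");
    // Composition: any object implementing the interface above will do.
    this.embedder = embedder ?? new StubEmbedder();
  }
  // Delegation keeps a uniform embedding surface on every LLM class.
  async embedTextInput(textInput) {
    return await this.embedder.embedTextInput(textInput);
  }
  async embedChunks(textChunks = []) {
    return await this.embedder.embedChunks(textChunks);
  }
}

Because every LLM class now exposes embedTextInput/embedChunks through delegation, downstream vector-database code can call them without knowing which LLM/embedder pair is configured.
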
diff --git a/server/utils/AiProviders/openAi/index.js b/server/utils/AiProviders/openAi/index.js
index 0c5b7116dd2..46464271968 100644
--- a/server/utils/AiProviders/openAi/index.js
+++ b/server/utils/AiProviders/openAi/index.js
@@ -1,9 +1,8 @@
const { OpenAiEmbedder } = require("../../EmbeddingEngines/openAi");
const { chatPrompt } = require("../../chats");
-class OpenAiLLM extends OpenAiEmbedder {
- constructor() {
- super();
+class OpenAiLLM {
+ constructor(embedder = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.OPEN_AI_KEY) throw new Error("No OpenAI API key was set.");
@@ -17,6 +16,12 @@ class OpenAiLLM extends OpenAiEmbedder {
system: this.promptWindowLimit() * 0.15,
user: this.promptWindowLimit() * 0.7,
};
+
+ if (!embedder)
+ console.warn(
+ "No embedding provider defined for OpenAiLLM - falling back to OpenAiEmbedder for embedding!"
+ );
+ this.embedder = !embedder ? new OpenAiEmbedder() : embedder;
}
streamingEnabled() {
@@ -203,6 +208,14 @@ Context:
return streamRequest;
}
+ // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
+ async embedTextInput(textInput) {
+ return await this.embedder.embedTextInput(textInput);
+ }
+ async embedChunks(textChunks = []) {
+ return await this.embedder.embedChunks(textChunks);
+ }
+
async compressMessages(promptArgs = {}, rawHistory = []) {
const { messageArrayCompressor } = require("../../helpers/chat");
const messageArray = this.constructPrompt(promptArgs);
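
Note: with the constructor accepting any embedder, providers can be mixed freely. A hypothetical usage sketch — module paths are assumed relative to server/utils/helpers, and LocalAiEmbedder's own environment requirements are not shown in this diff:

const { OpenAiLLM } = require("../AiProviders/openAi");
const { LocalAiEmbedder } = require("../EmbeddingEngines/localAi"); // path assumed

(async () => {
  // Chat completions go to OpenAI (requires OPEN_AI_KEY in the env);
  // embeddings go to a locally hosted server instead.
  const llm = new OpenAiLLM(new LocalAiEmbedder());
  const vectors = await llm.embedChunks(["first chunk", "second chunk"]);
  console.log(vectors.length); // one vector per chunk
})();
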
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index c7c61822141..74804b90423 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -23,25 +23,22 @@ function getVectorDbClass() {
function getLLMProvider() {
const vectorSelection = process.env.LLM_PROVIDER || "openai";
- let embedder = null;
+ const embedder = getEmbeddingEngineSelection();
switch (vectorSelection) {
case "openai":
const { OpenAiLLM } = require("../AiProviders/openAi");
- return new OpenAiLLM();
+ return new OpenAiLLM(embedder);
case "azure":
const { AzureOpenAiLLM } = require("../AiProviders/azureOpenAi");
- return new AzureOpenAiLLM();
+ return new AzureOpenAiLLM(embedder);
case "anthropic":
const { AnthropicLLM } = require("../AiProviders/anthropic");
- embedder = getEmbeddingEngineSelection();
return new AnthropicLLM(embedder);
case "lmstudio":
const { LMStudioLLM } = require("../AiProviders/lmStudio");
- embedder = getEmbeddingEngineSelection();
return new LMStudioLLM(embedder);
case "localai":
const { LocalAiLLM } = require("../AiProviders/localAi");
- embedder = getEmbeddingEngineSelection();
return new LocalAiLLM(embedder);
default:
throw new Error("ENV: No LLM_PROVIDER value found in environment!");
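
Note: the factory above is what ties the refactor together — getEmbeddingEngineSelection (defined earlier in the same helpers file, outside this diff) is now consulted for every provider, not just the embedder-less ones. A sketch of the shape that function is assumed to have; when EMBEDDING_ENGINE is unset it returns null, which is what triggers the fallback warnings in the LLM constructors:

// Assumed shape, not the verbatim implementation.
function getEmbeddingEngineSelection() {
  switch (process.env.EMBEDDING_ENGINE) {
    case "openai": {
      const { OpenAiEmbedder } = require("../EmbeddingEngines/openAi");
      return new OpenAiEmbedder();
    }
    case "azure": {
      const { AzureOpenAiEmbedder } = require("../EmbeddingEngines/azureOpenAi");
      return new AzureOpenAiEmbedder();
    }
    default:
      return null; // LLM classes handle null themselves (warn + provider fallback)
  }
}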