diff --git a/frontend/src/components/EmbeddingSelection/MistralAiOptions/index.jsx b/frontend/src/components/EmbeddingSelection/MistralAiOptions/index.jsx
new file mode 100644
index 00000000000..6012b319228
--- /dev/null
+++ b/frontend/src/components/EmbeddingSelection/MistralAiOptions/index.jsx
@@ -0,0 +1,40 @@
+// NOTE(review): this component's JSX was stripped from the patch; the
+// markup below is reconstructed to match sibling embedder option panels.
+export default function MistralAiOptions({ settings }) {
+  return (
+    <div className="w-full flex flex-col gap-y-4">
+      <div className="w-full flex items-center gap-[36px] mt-1.5">
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            API Key
+          </label>
+          <input
+            type="password"
+            name="MistralApiKey"
+            className="bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            placeholder="Mistral AI API Key"
+            defaultValue={settings?.MistralApiKey ? "*".repeat(20) : ""}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+          />
+        </div>
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Model Preference
+          </label>
+          <input
+            type="text"
+            name="EmbeddingModelPref"
+            className="bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            placeholder="mistral-embed"
+            defaultValue={settings?.EmbeddingModelPref}
+            required={true}
+            autoComplete="off"
+            spellCheck={false}
+          />
+        </div>
+      </div>
+    </div>
+  );
+}
diff --git a/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx b/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
index 4f3dd8ef59f..f2f3884b45e 100644
--- a/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
@@ -13,6 +13,7 @@ import CohereLogo from "@/media/llmprovider/cohere.png";
import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import GenericOpenAiLogo from "@/media/llmprovider/generic-openai.png";
+import MistralAiLogo from "@/media/llmprovider/mistral.jpeg";
import PreLoader from "@/components/Preloader";
import ChangeWarningModal from "@/components/ChangeWarning";
@@ -33,6 +34,7 @@ import { useModal } from "@/hooks/useModal";
import ModalWrapper from "@/components/ModalWrapper";
import CTAButton from "@/components/lib/CTAButton";
import { useTranslation } from "react-i18next";
+import MistralAiOptions from "@/components/EmbeddingSelection/MistralAiOptions";
const EMBEDDERS = [
{
@@ -100,6 +102,13 @@ const EMBEDDERS = [
options: (settings) => <LiteLLMOptions settings={settings} />,
description: "Run powerful embedding models from LiteLLM.",
},
+ {
+ name: "Mistral AI",
+ value: "mistral",
+ logo: MistralAiLogo,
+ options: (settings) => <MistralAiOptions settings={settings} />,
+ description: "Run powerful embedding models from Mistral AI.",
+ },
{
name: "Generic OpenAI",
value: "generic-openai",
diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
index 44fbaed661c..ab83a5af240 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -349,6 +349,13 @@ export const EMBEDDING_ENGINE_PRIVACY = {
],
logo: VoyageAiLogo,
},
+ mistral: {
+ name: "Mistral AI",
+ description: [
+ "Data sent to Mistral AI's servers is shared according to the terms of service of https://mistral.ai.",
+ ],
+ logo: MistralLogo,
+ },
litellm: {
name: "LiteLLM",
description: [
diff --git a/server/utils/EmbeddingEngines/mistral/index.js b/server/utils/EmbeddingEngines/mistral/index.js
new file mode 100644
index 00000000000..1d4f7351462
--- /dev/null
+++ b/server/utils/EmbeddingEngines/mistral/index.js
@@ -0,0 +1,43 @@
+class MistralEmbedder {
+ constructor() {
+ if (!process.env.MISTRAL_API_KEY)
+ throw new Error("No Mistral API key was set.");
+
+ const { OpenAI: OpenAIApi } = require("openai");
+ this.openai = new OpenAIApi({
+ baseURL: "https://api.mistral.ai/v1",
+ apiKey: process.env.MISTRAL_API_KEY ?? null,
+ });
+ this.model = process.env.EMBEDDING_MODEL_PREF || "mistral-embed";
+ }
+
+ async embedTextInput(textInput) {
+ try {
+ const response = await this.openai.embeddings.create({
+ model: this.model,
+ input: textInput,
+ });
+ return response?.data[0]?.embedding || [];
+ } catch (error) {
+ console.error("Failed to get embedding from Mistral.", error.message);
+ return [];
+ }
+ }
+
+ async embedChunks(textChunks = []) {
+ try {
+ const response = await this.openai.embeddings.create({
+ model: this.model,
+ input: textChunks,
+ });
+ return response?.data?.map((emb) => emb.embedding) || [];
+ } catch (error) {
+ console.error("Failed to get embeddings from Mistral.", error.message);
+ return new Array(textChunks.length).fill([]);
+ }
+ }
+}
+
+module.exports = {
+ MistralEmbedder,
+};
\ No newline at end of file
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 57ec191e77e..cbf07fbd0ef 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -214,6 +214,9 @@ function getEmbeddingEngineSelection() {
case "litellm":
const { LiteLLMEmbedder } = require("../EmbeddingEngines/liteLLM");
return new LiteLLMEmbedder();
+ case "mistral":
+ const { MistralEmbedder } = require("../EmbeddingEngines/mistral");
+ return new MistralEmbedder();
case "generic-openai":
const {
GenericOpenAiEmbedder,
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index ede372427fc..d547930a520 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -753,6 +753,7 @@ function supportedEmbeddingModel(input = "") {
"voyageai",
"litellm",
"generic-openai",
+ "mistral",
];
return supported.includes(input)
? null