9 changes: 9 additions & 0 deletions docker/.env.example
@@ -35,6 +35,15 @@ CACHE_VECTORS="true"
# EMBEDDING_ENGINE='openai'
# OPEN_AI_KEY=sk-xxxx

# EMBEDDING_ENGINE='azure'
# AZURE_OPENAI_ENDPOINT=
# AZURE_OPENAI_KEY=
# EMBEDDING_MODEL_PREF='my-embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002

# EMBEDDING_ENGINE='localai'
# EMBEDDING_BASE_PATH='https://localhost:8080/v1'
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'

###########################################
######## Vector Database Selection ########
###########################################
17 changes: 14 additions & 3 deletions frontend/src/components/Modals/MangeWorkspace/Documents/index.jsx
@@ -8,7 +8,11 @@ import WorkspaceDirectory from "./WorkspaceDirectory";

const COST_PER_TOKEN = 0.0004;

export default function DocumentSettings({ workspace, fileTypes }) {
export default function DocumentSettings({
workspace,
fileTypes,
systemSettings,
}) {
const [highlightWorkspace, setHighlightWorkspace] = useState(false);
const [availableDocs, setAvailableDocs] = useState([]);
const [loading, setLoading] = useState(true);
@@ -135,8 +139,15 @@ export default function DocumentSettings({ workspace, fileTypes }) {
}
});

const dollarAmount = (totalTokenCount / 1000) * COST_PER_TOKEN;
setEmbeddingsCost(dollarAmount);
// Only estimate embedding cost when the embedding engine is OpenAI (the default when unset).
if (
!systemSettings?.EmbeddingEngine ||
systemSettings.EmbeddingEngine === "openai"
) {
const dollarAmount = (totalTokenCount / 1000) * COST_PER_TOKEN;
setEmbeddingsCost(dollarAmount);
}

setMovedItems([...movedItems, ...newMovedItems]);

let newAvailableDocs = JSON.parse(JSON.stringify(availableDocs));
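For context on the gated estimate above: the math is a flat rate applied per 1,000 tokens, and it only runs for the OpenAI embedder (or when no engine is configured, which falls back to OpenAI). A minimal sketch of the calculation follows; the token count is a made-up illustration, not a value from this PR.

// Sketch of the cost estimate used in DocumentSettings (OpenAI path only).
// COST_PER_TOKEN is the constant defined in this file; despite its name it is applied per 1,000 tokens.
const COST_PER_TOKEN = 0.0004;
const totalTokenCount = 250000; // hypothetical token total for the documents being moved
const dollarAmount = (totalTokenCount / 1000) * COST_PER_TOKEN; // 250 * 0.0004 = 0.10
console.log(`Estimated embedding cost: $${dollarAmount.toFixed(2)}`); // "Estimated embedding cost: $0.10"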
9 changes: 8 additions & 1 deletion frontend/src/components/Modals/MangeWorkspace/index.jsx
@@ -15,11 +15,14 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
const [selectedTab, setSelectedTab] = useState("documents");
const [workspace, setWorkspace] = useState(null);
const [fileTypes, setFileTypes] = useState(null);
const [settings, setSettings] = useState({});

useEffect(() => {
async function checkSupportedFiletypes() {
const acceptedTypes = await System.acceptedDocumentTypes();
const _settings = await System.keys();
setFileTypes(acceptedTypes ?? {});
setSettings(_settings ?? {});
}
checkSupportedFiletypes();
}, []);
@@ -104,7 +107,11 @@
</div>
<Suspense fallback={<div>Loading...</div>}>
<div className={selectedTab === "documents" ? "" : "hidden"}>
<DocumentSettings workspace={workspace} fileTypes={fileTypes} />
<DocumentSettings
workspace={workspace}
fileTypes={fileTypes}
systemSettings={settings}
/>
</div>
<div className={selectedTab === "settings" ? "" : "hidden"}>
<WorkspaceSettings workspace={workspace} fileTypes={fileTypes} />
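For reference, the settings object fetched here via System.keys() is handed to DocumentSettings as systemSettings. The sketch below lists only the fields this PR reads from that payload; the values are placeholders, and the full payload shape is defined server-side, outside this diff.

// Fields of the System.keys() payload referenced by this PR (placeholder values; other fields omitted).
const settings = {
  EmbeddingEngine: "openai", // gates the cost estimate in DocumentSettings
  EmbeddingBasePath: "http://localhost:8080/v1", // LocalAI base URL consumed by the embedding preference screens
  EmbeddingModelPref: "text-embedding-ada-002", // embedding model / deployment name
};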
114 changes: 114 additions & 0 deletions frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
@@ -7,6 +7,7 @@ import System from "../../../models/system";
import showToast from "../../../utils/toast";
import OpenAiLogo from "../../../media/llmprovider/openai.png";
import AzureOpenAiLogo from "../../../media/llmprovider/azure.png";
import LocalAiLogo from "../../../media/llmprovider/localai.png";
import PreLoader from "../../../components/Preloader";
import LLMProviderOption from "../../../components/LLMSelection/LLMProviderOption";

@@ -16,6 +17,8 @@ export default function GeneralEmbeddingPreference() {
const [embeddingChoice, setEmbeddingChoice] = useState("openai");
const [settings, setSettings] = useState(null);
const [loading, setLoading] = useState(true);
const [basePathValue, setBasePathValue] = useState("");
const [basePath, setBasePath] = useState("");

const handleSubmit = async (e) => {
e.preventDefault();
@@ -38,11 +41,17 @@
setHasChanges(true);
};

function updateBasePath() {
setBasePath(basePathValue);
}

useEffect(() => {
async function fetchKeys() {
const _settings = await System.keys();
setSettings(_settings);
setEmbeddingChoice(_settings?.EmbeddingEngine || "openai");
setBasePath(_settings?.EmbeddingBasePath || "");
setBasePathValue(_settings?.EmbeddingBasePath || "");
setLoading(false);
}
fetchKeys();
@@ -136,6 +145,15 @@
image={AzureOpenAiLogo}
onClick={updateChoice}
/>
<LLMProviderOption
name="LocalAI"
value="localai"
link="localai.io"
description="Self hosted LocalAI embedding engine."
checked={embeddingChoice === "localai"}
image={LocalAiLogo}
onClick={updateChoice}
/>
</div>
<div className="mt-10 flex flex-wrap gap-4 max-w-[800px]">
{embeddingChoice === "openai" && (
@@ -215,6 +233,32 @@
</div>
</>
)}

{embeddingChoice === "localai" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={updateBasePath}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<LocalAIModelSelection
settings={settings}
basePath={basePath}
/>
</>
)}
</div>
</>
)}
@@ -225,3 +269,73 @@
</div>
);
}

function LocalAIModelSelection({ settings, basePath = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);

useEffect(() => {
async function findCustomModels() {
if (!basePath || !basePath.includes("/v1")) {
setCustomModels([]);
setLoading(false);
return;
}
setLoading(true);
const { models } = await System.customModels("localai", null, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath]);

if (loading || customModels.length == 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Name
</label>
<select
name="EmbeddingModelPref"
disabled={true}
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
{basePath?.includes("/v1")
? "-- loading available models --"
: "-- waiting for URL --"}
</option>
</select>
</div>
);
}

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Name
</label>
<select
name="EmbeddingModelPref"
required={true}
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{customModels.length > 0 && (
<optgroup label="Your loaded models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={settings?.EmbeddingModelPref === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
</select>
</div>
);
}
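The dropdown above is populated by System.customModels, which is not part of this diff; the response shape is inferred from how LocalAIModelSelection consumes it ({ models: [{ id }] }). A purely illustrative helper under that assumption:

// Illustrative only (not in this PR): mirrors how LocalAIModelSelection fetches LocalAI models.
// Assumes System.customModels("localai", null, basePath) resolves to { models: [{ id: "..." }, ...] }.
import System from "../../../models/system";

async function listLocalAiEmbeddingModels(basePath) {
  if (!basePath?.includes("/v1")) return []; // same "/v1" gating as the component above
  const { models } = await System.customModels("localai", null, basePath);
  return (models || []).map((model) => model.id);
}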
@@ -1,6 +1,7 @@
import React, { memo, useEffect, useState } from "react";
import OpenAiLogo from "../../../../../media/llmprovider/openai.png";
import AzureOpenAiLogo from "../../../../../media/llmprovider/azure.png";
import LocalAiLogo from "../../../../../media/llmprovider/localai.png";
import System from "../../../../../models/system";
import PreLoader from "../../../../../components/Preloader";
import LLMProviderOption from "../../../../../components/LLMSelection/LLMProviderOption";
@@ -9,16 +10,23 @@ function EmbeddingSelection({ nextStep, prevStep, currentStep }) {
const [embeddingChoice, setEmbeddingChoice] = useState("openai");
const [settings, setSettings] = useState(null);
const [loading, setLoading] = useState(true);
const [basePathValue, setBasePathValue] = useState("");
const [basePath, setBasePath] = useState("");

const updateChoice = (selection) => {
setEmbeddingChoice(selection);
};

function updateBasePath() {
setBasePath(basePathValue);
}

useEffect(() => {
async function fetchKeys() {
const _settings = await System.keys();
setSettings(_settings);
setEmbeddingChoice(_settings?.EmbeddingEngine || "openai");
setBasePathValue(_settings?.EmbeddingBasePath || "");
setLoading(false);
}
fetchKeys();
@@ -77,6 +85,15 @@ function EmbeddingSelection({ nextStep, prevStep, currentStep }) {
image={AzureOpenAiLogo}
onClick={updateChoice}
/>
<LLMProviderOption
name="LocalAI"
value="localai"
link="localai.io"
description="Self hosted LocalAI embedding engine."
checked={embeddingChoice === "localai"}
image={LocalAiLogo}
onClick={updateChoice}
/>
</div>
<div className="mt-10 flex flex-wrap gap-4 max-w-[800px]">
{embeddingChoice === "openai" && (
@@ -152,6 +169,32 @@
</div>
</>
)}

{embeddingChoice === "localai" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={updateBasePath}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<LocalAIModelSelection
settings={settings}
basePath={basePath}
/>
</>
)}
</div>
</div>
<div className="flex w-full justify-between items-center p-6 space-x-2 border-t rounded-b border-gray-500/50">
@@ -174,4 +217,74 @@
);
}

function LocalAIModelSelection({ settings, basePath = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);

useEffect(() => {
async function findCustomModels() {
if (!basePath || !basePath.includes("/v1")) {
setCustomModels([]);
setLoading(false);
return;
}
setLoading(true);
const { models } = await System.customModels("localai", null, basePath);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath]);

if (loading || customModels.length == 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Name
</label>
<select
name="EmbeddingModelPref"
disabled={true}
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
{basePath?.includes("/v1")
? "-- loading available models --"
: "-- waiting for URL --"}
</option>
</select>
</div>
);
}

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Name
</label>
<select
name="EmbeddingModelPref"
required={true}
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{customModels.length > 0 && (
<optgroup label="Your loaded models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={settings?.EmbeddingModelPref === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
</select>
</div>
);
}

export default memo(EmbeddingSelection);