@@ -153,7 +160,7 @@ function LMStudioModelSelection({ settings, basePath = null }) {
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
diff --git a/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx
index 37ed233a4a3..e18186f5cf7 100644
--- a/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx
+++ b/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx
@@ -8,12 +8,21 @@ export default function LiteLLMOptions({ settings }) {
const [basePath, setBasePath] = useState(settings?.LiteLLMBasePath);
const [apiKeyValue, setApiKeyValue] = useState(settings?.LiteLLMAPIKey);
const [apiKey, setApiKey] = useState(settings?.LiteLLMAPIKey);
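+  // Local state for the maximum embedding chunk length; falls back to 8192 when the setting is unset.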
+ const [maxChunkLength, setMaxChunkLength] = useState(
+ settings?.EmbeddingModelMaxChunkLength || 8192
+ );
+
+ const handleMaxChunkLengthChange = (e) => {
+ setMaxChunkLength(Number(e.target.value));
+ };
+
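+  // Rough token estimate derived from the character count (assumes ~4.2 characters per token on average).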
+ const estimatedTokens = Math.round(maxChunkLength / 4.2);
return (
-
+
-