From b685ce16aff2e62225631ffc6c23cfd26e3a1f6b Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 16:02:13 -0700 Subject: [PATCH 01/11] add ollama automatic url detection in llm and embedder prefrences --- .../OllamaOptions/index.jsx | 127 ++++++++++++++---- .../LLMSelection/OllamaLLMOptions/index.jsx | 119 +++++++++++++--- frontend/src/utils/constants.js | 5 + 3 files changed, 209 insertions(+), 42 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx index 3213f5d39af..ed9dc814511 100644 --- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx @@ -1,41 +1,107 @@ import React, { useEffect, useState } from "react"; import System from "@/models/system"; +import showToast from "@/utils/toast"; +import PreLoader from "@/components/Preloader"; +import { OLLAMA_COMMON_URLS } from "@/utils/constants"; export default function OllamaEmbeddingOptions({ settings }) { + const [loading, setLoading] = useState(false); const [basePathValue, setBasePathValue] = useState( - settings?.EmbeddingBasePath + settings?.EmbeddingBasePath || "" ); - const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath); + const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); + const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + + useEffect(() => { + if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { + autoDetectBasePath(true); + } + }, [settings?.EmbeddingBasePath, autoDetectAttempted]); + + const autoDetectBasePath = async (firstLoad = false) => { + setLoading(true); + setAutoDetectAttempted(true); + for (const url of OLLAMA_COMMON_URLS) { + try { + const { models } = await System.customModels("ollama", null, url); + if (models && models.length > 0) { + setBasePath(url); + setBasePathValue(url); + setLoading(false); + if (!firstLoad) + showToast("Ollama URL detected successfully!", "success"); + return; + } + } catch (error) { + console.error(`Failed to connect to ${url}:`, error); + } + } + setLoading(false); + showToast( + "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", + "info" + ); + }; + + const handleAutoDetectClick = (e) => { + e.preventDefault(); + autoDetectBasePath(); + }; + + const handleBasePathChange = (e) => { + const value = e.target.value; + setBasePathValue(value); + }; + + const handleBasePathBlur = () => { + setBasePath(basePathValue); + }; return (
-
+
- +
+ + {loading ? ( + + ) : ( + + )} +
setBasePathValue(e.target.value)} - onBlur={() => setBasePath(basePathValue)} + value={basePathValue} required={true} autoComplete="off" spellCheck={false} + onChange={handleBasePathChange} + onBlur={handleBasePathBlur} /> +

+ Enter the URL where Ollama is running. Click "Auto-Detect" if you're + not sure. +

-
@@ -61,8 +130,13 @@ function OllamaLLMModelSelection({ settings, basePath = null }) { return; } setLoading(true); - const { models } = await System.customModels("ollama", null, basePath); - setCustomModels(models || []); + try { + const { models } = await System.customModels("ollama", null, basePath); + setCustomModels(models || []); + } catch (error) { + console.error("Failed to fetch custom models:", error); + setCustomModels([]); + } setLoading(false); } findCustomModels(); @@ -71,33 +145,37 @@ function OllamaLLMModelSelection({ settings, basePath = null }) { if (loading || customModels.length == 0) { return (
-
); } return (
-
); } diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx index b08f29447c4..a86d88487c3 100644 --- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx +++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx @@ -1,38 +1,104 @@ import { useEffect, useState } from "react"; import System from "@/models/system"; +import showToast from "@/utils/toast"; +import PreLoader from "@/components/Preloader"; +import { OLLAMA_COMMON_URLS } from "@/utils/constants"; export default function OllamaLLMOptions({ settings }) { + const [loading, setLoading] = useState(false); const [basePathValue, setBasePathValue] = useState( - settings?.OllamaLLMBasePath + settings?.OllamaLLMBasePath || "" ); - const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath); + const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath || ""); + const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + + useEffect(() => { + if (!settings?.OllamaLLMBasePath && !autoDetectAttempted) { + autoDetectBasePath(true); + } + }, [settings?.OllamaLLMBasePath, autoDetectAttempted]); + + const autoDetectBasePath = async (firstLoad = false) => { + setLoading(true); + setAutoDetectAttempted(true); + for (const url of OLLAMA_COMMON_URLS) { + try { + const { models } = await System.customModels("ollama", null, url); + if (models && models.length > 0) { + setBasePath(url); + setBasePathValue(url); + setLoading(false); + if (!firstLoad) + showToast("Ollama URL detected successfully!", "success"); + return; + } + } catch (error) { + console.error(`Failed to connect to ${url}:`, error); + } + } + setLoading(false); + showToast( + "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", + "info" + ); + }; + + const handleAutoDetectClick = (e) => { + e.preventDefault(); + autoDetectBasePath(); + }; + + const handleBasePathChange = (e) => { + const value = e.target.value; + setBasePathValue(value); + }; + + const handleBasePathBlur = () => { + setBasePath(basePathValue); + }; return (
-
+
- +
+ + {loading ? ( + + ) : ( + + )} +
setBasePathValue(e.target.value)} - onBlur={() => setBasePath(basePathValue)} + onChange={handleBasePathChange} + onBlur={handleBasePathBlur} /> +

+ Enter the URL where Ollama is running. Click "Auto-Detect" if you're + not sure. +

{!settings?.credentialsOnly && ( <>
-
)} @@ -65,8 +134,13 @@ function OllamaLLMModelSelection({ settings, basePath = null }) { return; } setLoading(true); - const { models } = await System.customModels("ollama", null, basePath); - setCustomModels(models || []); + try { + const { models } = await System.customModels("ollama", null, basePath); + setCustomModels(models || []); + } catch (error) { + console.error("Failed to fetch custom models:", error); + setCustomModels([]); + } setLoading(false); } findCustomModels(); @@ -75,8 +149,8 @@ function OllamaLLMModelSelection({ settings, basePath = null }) { if (loading || customModels.length == 0) { return (
-
); } return (
-
); } diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js index 3f637617f42..60767ed25e7 100644 --- a/frontend/src/utils/constants.js +++ b/frontend/src/utils/constants.js @@ -10,6 +10,11 @@ export const SEEN_WATCH_ALERT = "anythingllm_watched_document_alert"; export const USER_BACKGROUND_COLOR = "bg-historical-msg-user"; export const AI_BACKGROUND_COLOR = "bg-historical-msg-system"; +export const OLLAMA_COMMON_URLS = [ + "http://127.0.0.1:11434", + "http://host.docker.internal:11434", + "http://172.17.0.1:11434", +]; export function fullApiUrl() { if (API_BASE !== "/api") return API_BASE; return `${window.location.origin}/api`; From 90fc4cbe3b05a83e16dc8320dedcc988beabca33 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 17:01:13 -0700 Subject: [PATCH 02/11] implement auto detection for lmstudio llm and embedder/improve performance of checking common urls --- .../LMStudioOptions/index.jsx | 143 +++++++++++++++--- .../OllamaOptions/index.jsx | 34 +++-- .../LLMSelection/LMStudioOptions/index.jsx | 141 ++++++++++++++--- .../LLMSelection/OllamaLLMOptions/index.jsx | 34 +++-- frontend/src/utils/constants.js | 8 + 5 files changed, 297 insertions(+), 63 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index 1192ce675fe..253ff6b4a7e 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -1,41 +1,121 @@ import React, { useEffect, useState } from "react"; import System from "@/models/system"; +import showToast from "@/utils/toast"; +import PreLoader from "@/components/Preloader"; +import { LMSTUDIO_COMMON_URLS } from "@/utils/constants"; export default function LMStudioEmbeddingOptions({ settings }) { + const [loading, setLoading] = useState(false); const [basePathValue, setBasePathValue] = useState( - settings?.EmbeddingBasePath + settings?.EmbeddingBasePath || "" ); - const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath); + const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); + const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + + useEffect(() => { + if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { + autoDetectBasePath(true); + } + }, [settings?.EmbeddingBasePath, autoDetectAttempted]); + + const autoDetectBasePath = async (firstLoad = false) => { + setLoading(true); + setAutoDetectAttempted(true); + + const checkUrl = async (url) => { + const timeoutPromise = new Promise((_, reject) => + setTimeout(() => reject(new Error("Timeout")), 2000) + ); + + const fetchPromise = System.customModels("lmstudio", null, url); + + try { + const { models } = await Promise.race([fetchPromise, timeoutPromise]); + return models && models.length > 0 ? url : null; + } catch (error) { + console.error(`Failed to connect to ${url}:`, error); + return null; + } + }; + + for (const url of LMSTUDIO_COMMON_URLS) { + const detectedUrl = await checkUrl(url); + if (detectedUrl) { + setBasePath(detectedUrl); + setBasePathValue(detectedUrl); + setLoading(false); + if (!firstLoad) + showToast("LM Studio URL detected successfully!", "success"); + return; + } + } + + setLoading(false); + showToast( + "Couldn't automatically detect LM Studio. LM Studio may not be running. 
Please enter the URL manually or try again.", + "info" + ); + }; + + const handleAutoDetectClick = (e) => { + e.preventDefault(); + autoDetectBasePath(); + }; + + const handleBasePathChange = (e) => { + const value = e.target.value; + setBasePathValue(value); + }; + + const handleBasePathBlur = () => { + setBasePath(basePathValue); + }; return (
-
+
- +
+ + {loading ? ( + + ) : ( + + )} +
setBasePathValue(e.target.value)} - onBlur={() => setBasePath(basePathValue)} + value={basePathValue} required={true} autoComplete="off" spellCheck={false} + onChange={handleBasePathChange} + onBlur={handleBasePathBlur} /> +

+ Enter the URL where LM Studio is running. Click "Auto-Detect" if + you're not sure. +

-
@@ -55,14 +138,23 @@ function LMStudioModelSelection({ settings, basePath = null }) { useEffect(() => { async function findCustomModels() { - if (!basePath || !basePath.includes("/v1")) { + if (!basePath) { setCustomModels([]); setLoading(false); return; } setLoading(true); - const { models } = await System.customModels("lmstudio", null, basePath); - setCustomModels(models || []); + try { + const { models } = await System.customModels( + "lmstudio", + null, + basePath + ); + setCustomModels(models || []); + } catch (error) { + console.error("Failed to fetch custom models:", error); + setCustomModels([]); + } setLoading(false); } findCustomModels(); @@ -71,8 +163,8 @@ function LMStudioModelSelection({ settings, basePath = null }) { if (loading || customModels.length == 0) { return (
-
); } return (
-
); } diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx index ed9dc814511..5f67b8ea58b 100644 --- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx @@ -21,21 +21,35 @@ export default function OllamaEmbeddingOptions({ settings }) { const autoDetectBasePath = async (firstLoad = false) => { setLoading(true); setAutoDetectAttempted(true); - for (const url of OLLAMA_COMMON_URLS) { + + const checkUrl = async (url) => { + const timeoutPromise = new Promise((_, reject) => + setTimeout(() => reject(new Error("Timeout")), 2000) + ); + + const fetchPromise = System.customModels("ollama", null, url); + try { - const { models } = await System.customModels("ollama", null, url); - if (models && models.length > 0) { - setBasePath(url); - setBasePathValue(url); - setLoading(false); - if (!firstLoad) - showToast("Ollama URL detected successfully!", "success"); - return; - } + const { models } = await Promise.race([fetchPromise, timeoutPromise]); + return models && models.length > 0 ? url : null; } catch (error) { console.error(`Failed to connect to ${url}:`, error); + return null; + } + }; + + for (const url of OLLAMA_COMMON_URLS) { + const detectedUrl = await checkUrl(url); + if (detectedUrl) { + setBasePath(detectedUrl); + setBasePathValue(detectedUrl); + setLoading(false); + if (!firstLoad) + showToast("Ollama URL detected successfully!", "success"); + return; } } + setLoading(false); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", diff --git a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx index 9a1c59bc73f..69c56301036 100644 --- a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx @@ -2,12 +2,76 @@ import { useEffect, useState } from "react"; import { Info } from "@phosphor-icons/react"; import paths from "@/utils/paths"; import System from "@/models/system"; +import showToast from "@/utils/toast"; +import PreLoader from "@/components/Preloader"; +import { LMSTUDIO_COMMON_URLS } from "@/utils/constants"; export default function LMStudioOptions({ settings, showAlert = false }) { + const [loading, setLoading] = useState(false); const [basePathValue, setBasePathValue] = useState( - settings?.LMStudioBasePath + settings?.LMStudioBasePath || "" ); - const [basePath, setBasePath] = useState(settings?.LMStudioBasePath); + const [basePath, setBasePath] = useState(settings?.LMStudioBasePath || ""); + const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + + useEffect(() => { + if (!settings?.LMStudioBasePath && !autoDetectAttempted) { + autoDetectBasePath(true); + } + }, [settings?.LMStudioBasePath, autoDetectAttempted]); + + const autoDetectBasePath = async (firstLoad = false) => { + setLoading(true); + setAutoDetectAttempted(true); + + const checkUrl = async (url) => { + const timeoutPromise = new Promise((_, reject) => + setTimeout(() => reject(new Error("Timeout")), 2000) + ); + + const fetchPromise = System.customModels("lmstudio", null, url); + + try { + const { models } = await Promise.race([fetchPromise, timeoutPromise]); + return models && models.length > 0 ? 
url : null; + } catch (error) { + console.error(`Failed to connect to ${url}:`, error); + return null; + } + }; + + for (const url of LMSTUDIO_COMMON_URLS) { + const detectedUrl = await checkUrl(url); + if (detectedUrl) { + setBasePath(detectedUrl); + setBasePathValue(detectedUrl); + setLoading(false); + if (!firstLoad) + showToast("LM Studio URL detected successfully!", "success"); + return; + } + } + + setLoading(false); + showToast( + "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", + "info" + ); + }; + + const handleAutoDetectClick = (e) => { + e.preventDefault(); + autoDetectBasePath(); + }; + + const handleBasePathChange = (e) => { + const value = e.target.value; + setBasePathValue(value); + }; + + const handleBasePathBlur = () => { + setBasePath(basePathValue); + }; return (
@@ -28,30 +92,46 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
)} -
+
- +
+ + {loading ? ( + + ) : ( + + )} +
setBasePathValue(e.target.value)} - onBlur={() => setBasePath(basePathValue)} + onChange={handleBasePathChange} + onBlur={handleBasePathBlur} /> +

+ Enter the URL where LM Studio is running. Click "Auto-Detect" if + you're not sure. +

{!settings?.credentialsOnly && ( <>
-
)} @@ -78,14 +161,23 @@ function LMStudioModelSelection({ settings, basePath = null }) { useEffect(() => { async function findCustomModels() { - if (!basePath || !basePath.includes("/v1")) { + if (!basePath) { setCustomModels([]); setLoading(false); return; } setLoading(true); - const { models } = await System.customModels("lmstudio", null, basePath); - setCustomModels(models || []); + try { + const { models } = await System.customModels( + "lmstudio", + null, + basePath + ); + setCustomModels(models || []); + } catch (error) { + console.error("Failed to fetch custom models:", error); + setCustomModels([]); + } setLoading(false); } findCustomModels(); @@ -94,8 +186,8 @@ function LMStudioModelSelection({ settings, basePath = null }) { if (loading || customModels.length == 0) { return (
-
); } return (
-
); } diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx index a86d88487c3..89ced983586 100644 --- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx +++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx @@ -21,21 +21,35 @@ export default function OllamaLLMOptions({ settings }) { const autoDetectBasePath = async (firstLoad = false) => { setLoading(true); setAutoDetectAttempted(true); - for (const url of OLLAMA_COMMON_URLS) { + + const checkUrl = async (url) => { + const timeoutPromise = new Promise((_, reject) => + setTimeout(() => reject(new Error("Timeout")), 2000) + ); + + const fetchPromise = System.customModels("ollama", null, url); + try { - const { models } = await System.customModels("ollama", null, url); - if (models && models.length > 0) { - setBasePath(url); - setBasePathValue(url); - setLoading(false); - if (!firstLoad) - showToast("Ollama URL detected successfully!", "success"); - return; - } + const { models } = await Promise.race([fetchPromise, timeoutPromise]); + return models && models.length > 0 ? url : null; } catch (error) { console.error(`Failed to connect to ${url}:`, error); + return null; + } + }; + + for (const url of OLLAMA_COMMON_URLS) { + const detectedUrl = await checkUrl(url); + if (detectedUrl) { + setBasePath(detectedUrl); + setBasePathValue(detectedUrl); + setLoading(false); + if (!firstLoad) + showToast("Ollama URL detected successfully!", "success"); + return; } } + setLoading(false); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js index 60767ed25e7..a08439d0ba3 100644 --- a/frontend/src/utils/constants.js +++ b/frontend/src/utils/constants.js @@ -15,6 +15,14 @@ export const OLLAMA_COMMON_URLS = [ "http://host.docker.internal:11434", "http://172.17.0.1:11434", ]; + +export const LMSTUDIO_COMMON_URLS = [ + "http://localhost:1234/v1", + "http://127.0.0.1:1234/v1", + "http://host.docker.internal:1234/v1", + "http://172.17.0.1:1234/v1", +]; + export function fullApiUrl() { if (API_BASE !== "/api") return API_BASE; return `${window.location.origin}/api`; From 59d25d7e66212ff7a9b4c3eed52cf25761ff2023 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 17:12:33 -0700 Subject: [PATCH 03/11] fix modal not clearing --- .../EmbeddingSelection/LMStudioOptions/index.jsx | 9 +++++++-- .../EmbeddingSelection/OllamaOptions/index.jsx | 9 +++++++-- .../components/LLMSelection/LMStudioOptions/index.jsx | 9 +++++++-- .../components/LLMSelection/OllamaLLMOptions/index.jsx | 9 +++++++-- 4 files changed, 28 insertions(+), 8 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index 253ff6b4a7e..ae489efe794 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -45,7 +45,9 @@ export default function LMStudioEmbeddingOptions({ settings }) { setBasePathValue(detectedUrl); setLoading(false); if (!firstLoad) - showToast("LM Studio URL detected successfully!", "success"); + showToast("LM Studio URL detected successfully!", "success", { + clear: true, + }); return; } } @@ -53,7 +55,10 @@ export default function LMStudioEmbeddingOptions({ settings }) { setLoading(false); showToast( 
"Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", - "info" + "info", + { + clear: true, + } ); }; diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx index 5f67b8ea58b..4057ab3ed9f 100644 --- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx @@ -45,7 +45,9 @@ export default function OllamaEmbeddingOptions({ settings }) { setBasePathValue(detectedUrl); setLoading(false); if (!firstLoad) - showToast("Ollama URL detected successfully!", "success"); + showToast("Ollama URL detected successfully!", "success", { + clear: true, + }); return; } } @@ -53,7 +55,10 @@ export default function OllamaEmbeddingOptions({ settings }) { setLoading(false); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", - "info" + "info", + { + clear: true, + } ); }; diff --git a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx index 69c56301036..1d89a5026d7 100644 --- a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx @@ -47,7 +47,9 @@ export default function LMStudioOptions({ settings, showAlert = false }) { setBasePathValue(detectedUrl); setLoading(false); if (!firstLoad) - showToast("LM Studio URL detected successfully!", "success"); + showToast("LM Studio URL detected successfully!", "success", { + clear: true, + }); return; } } @@ -55,7 +57,10 @@ export default function LMStudioOptions({ settings, showAlert = false }) { setLoading(false); showToast( "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", - "info" + "info", + { + clear: true, + } ); }; diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx index 89ced983586..2de2dffd724 100644 --- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx +++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx @@ -45,7 +45,9 @@ export default function OllamaLLMOptions({ settings }) { setBasePathValue(detectedUrl); setLoading(false); if (!firstLoad) - showToast("Ollama URL detected successfully!", "success"); + showToast("Ollama URL detected successfully!", "success", { + clear: true, + }); return; } } @@ -53,7 +55,10 @@ export default function OllamaLLMOptions({ settings }) { setLoading(false); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. 
Please enter the URL manually or try again.", - "info" + "info", + { + clear: true, + } ); }; From ecb04b8e6b2c6d1e85a5871eadde403f96e5f290 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 17:24:13 -0700 Subject: [PATCH 04/11] fix lmstudio url check --- .../components/EmbeddingSelection/LMStudioOptions/index.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index ae489efe794..a2fe79df964 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -143,7 +143,7 @@ function LMStudioModelSelection({ settings, basePath = null }) { useEffect(() => { async function findCustomModels() { - if (!basePath) { + if (!basePath || !basePath.includes("/v1")) { setCustomModels([]); setLoading(false); return; @@ -177,7 +177,7 @@ function LMStudioModelSelection({ settings, basePath = null }) { className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5" > From d90961598ea7a099c4478f1384eec00fd0c120d8 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 17:55:22 -0700 Subject: [PATCH 05/11] improve ux for ollama llm provider option --- .../LLMSelection/OllamaLLMOptions/index.jsx | 122 +++++++++++------- 1 file changed, 75 insertions(+), 47 deletions(-) diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx index 2de2dffd724..da90a308834 100644 --- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx +++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx @@ -3,6 +3,7 @@ import System from "@/models/system"; import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { OLLAMA_COMMON_URLS } from "@/utils/constants"; +import { CaretDown, CaretUp } from "@phosphor-icons/react"; export default function OllamaLLMOptions({ settings }) { const [loading, setLoading] = useState(false); @@ -11,6 +12,10 @@ export default function OllamaLLMOptions({ settings }) { ); const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath || ""); const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(false); + const [maxTokens, setMaxTokens] = useState( + settings?.OllamaLLMTokenLimit || 4096 + ); useEffect(() => { if (!settings?.OllamaLLMBasePath && !autoDetectAttempted) { @@ -53,6 +58,7 @@ export default function OllamaLLMOptions({ settings }) { } setLoading(false); + setShowAdvanced(true); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", "info", @@ -76,67 +82,89 @@ export default function OllamaLLMOptions({ settings }) { setBasePath(basePathValue); }; + const handleMaxTokensChange = (e) => { + setMaxTokens(Number(e.target.value)); + }; + return (
+
-
- - {loading ? ( - - ) : ( - - )} -
+ e.target.blur()} required={true} autoComplete="off" - spellCheck={false} - onChange={handleBasePathChange} - onBlur={handleBasePathBlur} />

- Enter the URL where Ollama is running. Click "Auto-Detect" if you're - not sure. + Maximum number of tokens for context and response.

- {!settings?.credentialsOnly && ( - <> - -
-
+
+ +
+ {showAdvanced && ( +
+
+
+ - e.target.blur()} - defaultValue={settings?.OllamaLLMTokenLimit} - required={true} - autoComplete="off" - /> -

- Maximum number of tokens for context and response. -

+ {loading ? ( + + ) : ( + + )}
- - )} -
+ +

+ Enter the URL where Ollama is running. Click "Auto-Detect" if + you're not sure. +

+
+
+ )}
); } From 911bc78ac6798d007fc6fc6c1b156b9d335749fb Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 18:04:44 -0700 Subject: [PATCH 06/11] improve ux for lm studio llm provider option --- .../LLMSelection/LMStudioOptions/index.jsx | 124 +++++++++++------- .../LLMSelection/OllamaLLMOptions/index.jsx | 3 +- 2 files changed, 78 insertions(+), 49 deletions(-) diff --git a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx index 1d89a5026d7..51125275ded 100644 --- a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx @@ -1,5 +1,5 @@ import { useEffect, useState } from "react"; -import { Info } from "@phosphor-icons/react"; +import { Info, CaretDown, CaretUp } from "@phosphor-icons/react"; import paths from "@/utils/paths"; import System from "@/models/system"; import showToast from "@/utils/toast"; @@ -13,6 +13,10 @@ export default function LMStudioOptions({ settings, showAlert = false }) { ); const [basePath, setBasePath] = useState(settings?.LMStudioBasePath || ""); const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(true); + const [maxTokens, setMaxTokens] = useState( + settings?.LMStudioTokenLimit || 4096 + ); useEffect(() => { if (!settings?.LMStudioBasePath && !autoDetectAttempted) { @@ -50,11 +54,13 @@ export default function LMStudioOptions({ settings, showAlert = false }) { showToast("LM Studio URL detected successfully!", "success", { clear: true, }); + setShowAdvanced(false); return; } } setLoading(false); + setShowAdvanced(true); showToast( "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", "info", @@ -78,6 +84,10 @@ export default function LMStudioOptions({ settings, showAlert = false }) { setBasePath(basePathValue); }; + const handleMaxTokensChange = (e) => { + setMaxTokens(Number(e.target.value)); + }; + return (
{showAlert && ( @@ -98,64 +108,82 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
)}
+
-
- - {loading ? ( - - ) : ( - - )} -
+ e.target.blur()} required={true} autoComplete="off" - spellCheck={false} - onChange={handleBasePathChange} - onBlur={handleBasePathBlur} />

- Enter the URL where LM Studio is running. Click "Auto-Detect" if - you're not sure. + Maximum number of tokens for context and response.

- {!settings?.credentialsOnly && ( - <> - -
-
+
+ +
+ {showAdvanced && ( +
+
+
+ - e.target.blur()} - defaultValue={settings?.LMStudioTokenLimit} - required={true} - autoComplete="off" - /> -

- Maximum number of tokens for context and response. -

+ {loading ? ( + + ) : ( + + )}
- - )} -
+ +

+ Enter the URL where LM Studio is running. Click "Auto-Detect" if + you're not sure. +

+
+
+ )}
); } diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx index da90a308834..c8116f30146 100644 --- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx +++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx @@ -12,7 +12,7 @@ export default function OllamaLLMOptions({ settings }) { ); const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath || ""); const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); - const [showAdvanced, setShowAdvanced] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(true); const [maxTokens, setMaxTokens] = useState( settings?.OllamaLLMTokenLimit || 4096 ); @@ -53,6 +53,7 @@ export default function OllamaLLMOptions({ settings }) { showToast("Ollama URL detected successfully!", "success", { clear: true, }); + setShowAdvanced(false); return; } } From d49b28cefff1859ce3f0d0892ad86db8d4ed6642 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 18:08:25 -0700 Subject: [PATCH 07/11] improve ux for ollama embedder option --- .../OllamaOptions/index.jsx | 109 ++++++++++++------ 1 file changed, 72 insertions(+), 37 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx index 4057ab3ed9f..ee72daab158 100644 --- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx @@ -3,6 +3,7 @@ import System from "@/models/system"; import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { OLLAMA_COMMON_URLS } from "@/utils/constants"; +import { CaretDown, CaretUp } from "@phosphor-icons/react"; export default function OllamaEmbeddingOptions({ settings }) { const [loading, setLoading] = useState(false); @@ -11,6 +12,10 @@ export default function OllamaEmbeddingOptions({ settings }) { ); const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(true); + const [maxChunkLength, setMaxChunkLength] = useState( + settings?.EmbeddingModelMaxChunkLength || 8192 + ); useEffect(() => { if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { @@ -48,11 +53,13 @@ export default function OllamaEmbeddingOptions({ settings }) { showToast("Ollama URL detected successfully!", "success", { clear: true, }); + setShowAdvanced(false); return; } } setLoading(false); + setShowAdvanced(true); showToast( "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", "info", @@ -76,43 +83,17 @@ export default function OllamaEmbeddingOptions({ settings }) { setBasePath(basePathValue); }; + const handleMaxChunkLengthChange = (e) => { + setMaxChunkLength(Number(e.target.value)); + }; + return (
-
-
- - {loading ? ( - - ) : ( - - )} -
- -

- Enter the URL where Ollama is running. Click "Auto-Detect" if you're - not sure. -

-
- +
+
+ +
+ {showAdvanced && ( +
+
+
+ + {loading ? ( + + ) : ( + + )} +
+ +

+ Enter the URL where Ollama is running. Click "Auto-Detect" if + you're not sure. +

+
+
+ )}
); } -function OllamaLLMModelSelection({ settings, basePath = null }) { +function OllamaEmbeddingModelSelection({ settings, basePath = null }) { const [customModels, setCustomModels] = useState([]); const [loading, setLoading] = useState(true); From 7f4ae26bb7454403bd86f36e6d780d968c978930 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Mon, 24 Jun 2024 18:12:22 -0700 Subject: [PATCH 08/11] improve ux for lmstudio embedder option --- .../LMStudioOptions/index.jsx | 104 ++++++++++++------ 1 file changed, 68 insertions(+), 36 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index a2fe79df964..87055b14b9a 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -3,6 +3,7 @@ import System from "@/models/system"; import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { LMSTUDIO_COMMON_URLS } from "@/utils/constants"; +import { CaretDown, CaretUp } from "@phosphor-icons/react"; export default function LMStudioEmbeddingOptions({ settings }) { const [loading, setLoading] = useState(false); @@ -11,6 +12,10 @@ export default function LMStudioEmbeddingOptions({ settings }) { ); const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(true); + const [maxChunkLength, setMaxChunkLength] = useState( + settings?.EmbeddingModelMaxChunkLength || 8192 + ); useEffect(() => { if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { @@ -48,11 +53,13 @@ export default function LMStudioEmbeddingOptions({ settings }) { showToast("LM Studio URL detected successfully!", "success", { clear: true, }); + setShowAdvanced(false); return; } } setLoading(false); + setShowAdvanced(true); showToast( "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", "info", @@ -76,46 +83,17 @@ export default function LMStudioEmbeddingOptions({ settings }) { setBasePath(basePathValue); }; + const handleMaxChunkLengthChange = (e) => { + setMaxChunkLength(Number(e.target.value)); + }; + return (
-
-
- - {loading ? ( - - ) : ( - - )} -
- -

- Enter the URL where LM Studio is running. Click "Auto-Detect" if - you're not sure. -

-
e.target.blur()} - defaultValue={settings?.EmbeddingModelMaxChunkLength} - required={false} + required={true} autoComplete="off" />

@@ -133,6 +112,59 @@ export default function LMStudioEmbeddingOptions({ settings }) {

+
+ +
+ {showAdvanced && ( +
+
+
+ + {loading ? ( + + ) : ( + + )} +
+ +

+ Enter the URL where LM Studio is running. Click "Auto-Detect" if + you're not sure. +

+
+
+ )}
); } From 1792face9ed01e7e79362d5b0c24f58ab541e2c0 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Tue, 25 Jun 2024 09:48:02 -0700 Subject: [PATCH 09/11] ux improvement lmstudio embedder options --- .../components/EmbeddingSelection/LMStudioOptions/index.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index 87055b14b9a..0ef8fe583cc 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -175,7 +175,7 @@ function LMStudioModelSelection({ settings, basePath = null }) { useEffect(() => { async function findCustomModels() { - if (!basePath || !basePath.includes("/v1")) { + if (!basePath) { setCustomModels([]); setLoading(false); return; @@ -209,7 +209,7 @@ function LMStudioModelSelection({ settings, basePath = null }) { className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5" > From 4c779cd9277ef55724468f6aee3d2b4e9fa4d8df Mon Sep 17 00:00:00 2001 From: timothycarambat Date: Tue, 25 Jun 2024 13:24:46 -0700 Subject: [PATCH 10/11] refactor implementation to hook and use native timeout Swap to promise.any for resolving of available endpoints --- .../LLMSelection/LMStudioOptions/index.jsx | 126 +++++------------- .../hooks/useProviderEndpointAutoDiscovery.js | 99 ++++++++++++++ frontend/src/models/system.js | 15 ++- 3 files changed, 148 insertions(+), 92 deletions(-) create mode 100644 frontend/src/hooks/useProviderEndpointAutoDiscovery.js diff --git a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx index 51125275ded..d3e1df58f9e 100644 --- a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx @@ -2,88 +2,28 @@ import { useEffect, useState } from "react"; import { Info, CaretDown, CaretUp } from "@phosphor-icons/react"; import paths from "@/utils/paths"; import System from "@/models/system"; -import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { LMSTUDIO_COMMON_URLS } from "@/utils/constants"; +import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery"; export default function LMStudioOptions({ settings, showAlert = false }) { - const [loading, setLoading] = useState(false); - const [basePathValue, setBasePathValue] = useState( - settings?.LMStudioBasePath || "" - ); - const [basePath, setBasePath] = useState(settings?.LMStudioBasePath || ""); - const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); - const [showAdvanced, setShowAdvanced] = useState(true); + const { + autoDetecting: loading, + basePath, + basePathValue, + showAdvancedControls, + setShowAdvancedControls, + handleAutoDetectClick, + } = useProviderEndpointAutoDiscovery({ + provider: "lmstudio", + initialBasePath: settings?.LMStudioBasePath, + ENDPOINTS: LMSTUDIO_COMMON_URLS, + }); + const [maxTokens, setMaxTokens] = useState( settings?.LMStudioTokenLimit || 4096 ); - useEffect(() => { - if (!settings?.LMStudioBasePath && !autoDetectAttempted) { - autoDetectBasePath(true); - } - }, [settings?.LMStudioBasePath, autoDetectAttempted]); - - const autoDetectBasePath = async (firstLoad = false) => { - setLoading(true); - setAutoDetectAttempted(true); - - const checkUrl = async (url) => { - const 
timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), 2000) - ); - - const fetchPromise = System.customModels("lmstudio", null, url); - - try { - const { models } = await Promise.race([fetchPromise, timeoutPromise]); - return models && models.length > 0 ? url : null; - } catch (error) { - console.error(`Failed to connect to ${url}:`, error); - return null; - } - }; - - for (const url of LMSTUDIO_COMMON_URLS) { - const detectedUrl = await checkUrl(url); - if (detectedUrl) { - setBasePath(detectedUrl); - setBasePathValue(detectedUrl); - setLoading(false); - if (!firstLoad) - showToast("LM Studio URL detected successfully!", "success", { - clear: true, - }); - setShowAdvanced(false); - return; - } - } - - setLoading(false); - setShowAdvanced(true); - showToast( - "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", - "info", - { - clear: true, - } - ); - }; - - const handleAutoDetectClick = (e) => { - e.preventDefault(); - autoDetectBasePath(); - }; - - const handleBasePathChange = (e) => { - const value = e.target.value; - setBasePathValue(value); - }; - - const handleBasePathBlur = () => { - setBasePath(basePathValue); - }; - const handleMaxTokensChange = (e) => { setMaxTokens(Number(e.target.value)); }; @@ -108,7 +48,7 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
)}
- +
- {showAdvanced && ( + +
); } diff --git a/frontend/src/hooks/useProviderEndpointAutoDiscovery.js b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js new file mode 100644 index 00000000000..956b0907593 --- /dev/null +++ b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js @@ -0,0 +1,99 @@ +import { useEffect, useState } from "react"; +import System from "@/models/system"; +import showToast from "@/utils/toast"; + +export default function useProviderEndpointAutoDiscovery({ + provider = null, + initialBasePath = "", + ENDPOINTS = [], +}) { + const [loading, setLoading] = useState(false); + const [basePath, setBasePath] = useState(initialBasePath); + const [basePathValue, setBasePathValue] = useState(initialBasePath); + const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); + const [showAdvancedControls, setShowAdvancedControls] = useState(true); + + async function autoDetect(isInitialAttempt = false) { + setLoading(true); + setAutoDetectAttempted(true); + const possibleEndpoints = []; + ENDPOINTS.forEach((endpoint) => { + possibleEndpoints.push( + new Promise((resolve, reject) => { + System.customModels(provider, null, endpoint, 2_000) + .then((results) => { + if (!results?.models || results.models.length === 0) + throw new Error("No models"); + resolve({ endpoint, models: results.models }); + }) + .catch(() => { + reject(`${provider} @ ${endpoint} did not resolve.`); + }); + }) + ); + }); + + const { endpoint, models } = await Promise.any(possibleEndpoints) + .then((resolved) => resolved) + .catch(() => { + console.error("All endpoints failed to resolve."); + return { endpoint: null, models: null }; + }); + + if (models !== null) { + setBasePath(endpoint); + setBasePathValue(endpoint); + setLoading(false); + showToast("Provider endpoint discovered automatically.", "success", { + clear: true, + }); + setShowAdvancedControls(false); + return; + } + + setLoading(false); + setShowAdvancedControls(true); + showToast( + "Couldn't automatically discover the provider endpoint. 
Please enter it manually.", + "info", + { clear: true } + ); + } + + function handleAutoDetectClick(e) { + e.preventDefault(); + autoDetect(); + } + + function handleBasePathChange(e) { + const value = e.target.value; + setBasePathValue(value); + } + + function handleBasePathBlur() { + setBasePath(basePathValue); + } + + useEffect(() => { + if (!initialBasePath && !autoDetectAttempted) autoDetect(true); + }, [initialBasePath, autoDetectAttempted]); + + return { + autoDetecting: loading, + autoDetectAttempted, + showAdvancedControls, + setShowAdvancedControls, + basePath: { + value: basePath, + set: setBasePathValue, + onChange: handleBasePathChange, + onBlur: handleBasePathBlur, + }, + basePathValue: { + value: basePathValue, + set: setBasePathValue, + }, + handleAutoDetectClick, + runAutoDetect: autoDetect, + }; +} diff --git a/frontend/src/models/system.js b/frontend/src/models/system.js index b922457b72b..d6c724b7d1a 100644 --- a/frontend/src/models/system.js +++ b/frontend/src/models/system.js @@ -512,10 +512,23 @@ const System = { return false; }); }, - customModels: async function (provider, apiKey = null, basePath = null) { + customModels: async function ( + provider, + apiKey = null, + basePath = null, + timeout = null + ) { + const controller = new AbortController(); + if (!!timeout) { + setTimeout(() => { + controller.abort("Request timed out."); + }, timeout); + } + return fetch(`${API_BASE}/system/custom-models`, { method: "POST", headers: baseHeaders(), + signal: controller.signal, body: JSON.stringify({ provider, apiKey, From b3f80dbfa9a76bb89885fce87057c94152d3ce29 Mon Sep 17 00:00:00 2001 From: shatfield4 Date: Tue, 25 Jun 2024 15:54:00 -0700 Subject: [PATCH 11/11] implement useProviderEndpointAutoDiscovery hook for lmstudio and ollama provider options --- .../LMStudioOptions/index.jsx | 128 +++++------------ .../OllamaOptions/index.jsx | 132 +++++------------ .../LLMSelection/OllamaLLMOptions/index.jsx | 133 ++++++------------ 3 files changed, 114 insertions(+), 279 deletions(-) diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx index 0ef8fe583cc..8a18fb999eb 100644 --- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx @@ -1,88 +1,28 @@ import React, { useEffect, useState } from "react"; import System from "@/models/system"; -import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { LMSTUDIO_COMMON_URLS } from "@/utils/constants"; import { CaretDown, CaretUp } from "@phosphor-icons/react"; +import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery"; export default function LMStudioEmbeddingOptions({ settings }) { - const [loading, setLoading] = useState(false); - const [basePathValue, setBasePathValue] = useState( - settings?.EmbeddingBasePath || "" - ); - const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); - const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); - const [showAdvanced, setShowAdvanced] = useState(true); + const { + autoDetecting: loading, + basePath, + basePathValue, + showAdvancedControls, + setShowAdvancedControls, + handleAutoDetectClick, + } = useProviderEndpointAutoDiscovery({ + provider: "lmstudio", + initialBasePath: settings?.EmbeddingBasePath, + ENDPOINTS: LMSTUDIO_COMMON_URLS, + }); + const [maxChunkLength, setMaxChunkLength] = useState( 
settings?.EmbeddingModelMaxChunkLength || 8192 ); - useEffect(() => { - if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { - autoDetectBasePath(true); - } - }, [settings?.EmbeddingBasePath, autoDetectAttempted]); - - const autoDetectBasePath = async (firstLoad = false) => { - setLoading(true); - setAutoDetectAttempted(true); - - const checkUrl = async (url) => { - const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), 2000) - ); - - const fetchPromise = System.customModels("lmstudio", null, url); - - try { - const { models } = await Promise.race([fetchPromise, timeoutPromise]); - return models && models.length > 0 ? url : null; - } catch (error) { - console.error(`Failed to connect to ${url}:`, error); - return null; - } - }; - - for (const url of LMSTUDIO_COMMON_URLS) { - const detectedUrl = await checkUrl(url); - if (detectedUrl) { - setBasePath(detectedUrl); - setBasePathValue(detectedUrl); - setLoading(false); - if (!firstLoad) - showToast("LM Studio URL detected successfully!", "success", { - clear: true, - }); - setShowAdvanced(false); - return; - } - } - - setLoading(false); - setShowAdvanced(true); - showToast( - "Couldn't automatically detect LM Studio. LM Studio may not be running. Please enter the URL manually or try again.", - "info", - { - clear: true, - } - ); - }; - - const handleAutoDetectClick = (e) => { - e.preventDefault(); - autoDetectBasePath(); - }; - - const handleBasePathChange = (e) => { - const value = e.target.value; - setBasePathValue(value); - }; - - const handleBasePathBlur = () => { - setBasePath(basePathValue); - }; - const handleMaxChunkLengthChange = (e) => { setMaxChunkLength(Number(e.target.value)); }; @@ -90,7 +30,7 @@ export default function LMStudioEmbeddingOptions({ settings }) { return (
- +
-
+
- {showAdvanced && ( + +
); } diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx index ee72daab158..fca1ae7553f 100644 --- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx +++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx @@ -1,88 +1,28 @@ import React, { useEffect, useState } from "react"; import System from "@/models/system"; -import showToast from "@/utils/toast"; import PreLoader from "@/components/Preloader"; import { OLLAMA_COMMON_URLS } from "@/utils/constants"; import { CaretDown, CaretUp } from "@phosphor-icons/react"; +import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery"; export default function OllamaEmbeddingOptions({ settings }) { - const [loading, setLoading] = useState(false); - const [basePathValue, setBasePathValue] = useState( - settings?.EmbeddingBasePath || "" - ); - const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath || ""); - const [autoDetectAttempted, setAutoDetectAttempted] = useState(false); - const [showAdvanced, setShowAdvanced] = useState(true); + const { + autoDetecting: loading, + basePath, + basePathValue, + showAdvancedControls, + setShowAdvancedControls, + handleAutoDetectClick, + } = useProviderEndpointAutoDiscovery({ + provider: "ollama", + initialBasePath: settings?.EmbeddingBasePath, + ENDPOINTS: OLLAMA_COMMON_URLS, + }); + const [maxChunkLength, setMaxChunkLength] = useState( settings?.EmbeddingModelMaxChunkLength || 8192 ); - useEffect(() => { - if (!settings?.EmbeddingBasePath && !autoDetectAttempted) { - autoDetectBasePath(true); - } - }, [settings?.EmbeddingBasePath, autoDetectAttempted]); - - const autoDetectBasePath = async (firstLoad = false) => { - setLoading(true); - setAutoDetectAttempted(true); - - const checkUrl = async (url) => { - const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), 2000) - ); - - const fetchPromise = System.customModels("ollama", null, url); - - try { - const { models } = await Promise.race([fetchPromise, timeoutPromise]); - return models && models.length > 0 ? url : null; - } catch (error) { - console.error(`Failed to connect to ${url}:`, error); - return null; - } - }; - - for (const url of OLLAMA_COMMON_URLS) { - const detectedUrl = await checkUrl(url); - if (detectedUrl) { - setBasePath(detectedUrl); - setBasePathValue(detectedUrl); - setLoading(false); - if (!firstLoad) - showToast("Ollama URL detected successfully!", "success", { - clear: true, - }); - setShowAdvanced(false); - return; - } - } - - setLoading(false); - setShowAdvanced(true); - showToast( - "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", - "info", - { - clear: true, - } - ); - }; - - const handleAutoDetectClick = (e) => { - e.preventDefault(); - autoDetectBasePath(); - }; - - const handleBasePathChange = (e) => { - const value = e.target.value; - setBasePathValue(value); - }; - - const handleBasePathBlur = () => { - setBasePath(basePathValue); - }; - const handleMaxChunkLengthChange = (e) => { setMaxChunkLength(Number(e.target.value)); }; @@ -92,7 +32,7 @@ export default function OllamaEmbeddingOptions({ settings }) {
-
+
- {showAdvanced && ( + +
); } @@ -200,7 +144,7 @@ function OllamaEmbeddingModelSelection({ settings, basePath = null }) { return (
{ - if (!settings?.OllamaLLMBasePath && !autoDetectAttempted) { - autoDetectBasePath(true); - } - }, [settings?.OllamaLLMBasePath, autoDetectAttempted]); - - const autoDetectBasePath = async (firstLoad = false) => { - setLoading(true); - setAutoDetectAttempted(true); - - const checkUrl = async (url) => { - const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), 2000) - ); - - const fetchPromise = System.customModels("ollama", null, url); - - try { - const { models } = await Promise.race([fetchPromise, timeoutPromise]); - return models && models.length > 0 ? url : null; - } catch (error) { - console.error(`Failed to connect to ${url}:`, error); - return null; - } - }; - - for (const url of OLLAMA_COMMON_URLS) { - const detectedUrl = await checkUrl(url); - if (detectedUrl) { - setBasePath(detectedUrl); - setBasePathValue(detectedUrl); - setLoading(false); - if (!firstLoad) - showToast("Ollama URL detected successfully!", "success", { - clear: true, - }); - setShowAdvanced(false); - return; - } - } - - setLoading(false); - setShowAdvanced(true); - showToast( - "Couldn't automatically detect Ollama. Ollama may not be setup properly. Please enter the URL manually or try again.", - "info", - { - clear: true, - } - ); - }; - - const handleAutoDetectClick = (e) => { - e.preventDefault(); - autoDetectBasePath(); - }; - - const handleBasePathChange = (e) => { - const value = e.target.value; - setBasePathValue(value); - }; - - const handleBasePathBlur = () => { - setBasePath(basePathValue); - }; - const handleMaxTokensChange = (e) => { setMaxTokens(Number(e.target.value)); }; @@ -90,7 +30,10 @@ export default function OllamaLLMOptions({ settings }) { return (
- +
-
+
- {showAdvanced && ( + +
); }