+
+
- {!settings?.credentialsOnly && (
- <>
-
-
-
+
+
+
+
+
+
+
+
+
-
e.target.blur()}
- defaultValue={settings?.OllamaLLMTokenLimit}
- required={true}
- autoComplete="off"
- />
+ {loading ? (
+
+ ) : (
+ <>
+ {!basePathValue.value && (
+
+ )}
+ >
+ )}
- >
- )}
+
+
+ Enter the URL where Ollama is running.
+
+
+
);
@@ -65,8 +129,13 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
return;
}
setLoading(true);
- const { models } = await System.customModels("ollama", null, basePath);
- setCustomModels(models || []);
+ try {
+ const { models } = await System.customModels("ollama", null, basePath);
+ setCustomModels(models || []);
+ } catch (error) {
+ console.error("Failed to fetch custom models:", error);
+ setCustomModels([]);
+ }
setLoading(false);
}
findCustomModels();
@@ -75,8 +144,8 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
if (loading || customModels.length == 0) {
return (
-
);
}
return (
-
- Chat Model Selection
+
+ Ollama Model
+
+ Choose the Ollama model you want to use for your conversations.
+
);
}
diff --git a/frontend/src/hooks/useProviderEndpointAutoDiscovery.js b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js
new file mode 100644
index 00000000000..956b0907593
--- /dev/null
+++ b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js
@@ -0,0 +1,108 @@
+import { useEffect, useState } from "react";
+import System from "@/models/system";
+import showToast from "@/utils/toast";
+
+export default function useProviderEndpointAutoDiscovery({
+  provider = null,
+  initialBasePath = "",
+  ENDPOINTS = [],
+}) {
+  const [loading, setLoading] = useState(false);
+  const [basePath, setBasePath] = useState(initialBasePath);
+  const [basePathValue, setBasePathValue] = useState(initialBasePath);
+  const [autoDetectAttempted, setAutoDetectAttempted] = useState(false);
+  const [showAdvancedControls, setShowAdvancedControls] = useState(true);
+
+  // Probes every candidate endpoint in parallel and adopts the first one
+  // that responds with a non-empty model list. `isInitialAttempt` marks the
+  // silent probe fired on mount, which must not nag the user with a toast.
+  async function autoDetect(isInitialAttempt = false) {
+    setLoading(true);
+    setAutoDetectAttempted(true);
+    const possibleEndpoints = [];
+    ENDPOINTS.forEach((endpoint) => {
+      possibleEndpoints.push(
+        new Promise((resolve, reject) => {
+          System.customModels(provider, null, endpoint, 2_000)
+            .then((results) => {
+              if (!results?.models || results.models.length === 0)
+                throw new Error("No models");
+              resolve({ endpoint, models: results.models });
+            })
+            .catch(() => {
+              reject(`${provider} @ ${endpoint} did not resolve.`);
+            });
+        })
+      );
+    });
+
+    // Promise.any fulfills with the first reachable endpoint and only
+    // rejects when every probe failed.
+    const { endpoint, models } = await Promise.any(possibleEndpoints).catch(
+      () => {
+        console.error("All endpoints failed to resolve.");
+        return { endpoint: null, models: null };
+      }
+    );
+
+    if (models !== null) {
+      setBasePath(endpoint);
+      setBasePathValue(endpoint);
+      setLoading(false);
+      showToast("Provider endpoint discovered automatically.", "success", {
+        clear: true,
+      });
+      setShowAdvancedControls(false);
+      return;
+    }
+
+    setLoading(false);
+    setShowAdvancedControls(true);
+    // Only surface the failure toast when the user explicitly requested
+    // detection; the automatic attempt on mount fails silently.
+    if (!isInitialAttempt)
+      showToast(
+        "Couldn't automatically discover the provider endpoint. Please enter it manually.",
+        "info",
+        { clear: true }
+      );
+  }
+
+  function handleAutoDetectClick(e) {
+    e.preventDefault();
+    autoDetect();
+  }
+
+  function handleBasePathChange(e) {
+    const value = e.target.value;
+    setBasePathValue(value);
+  }
+
+  function handleBasePathBlur() {
+    setBasePath(basePathValue);
+  }
+
+  // Run a single silent discovery pass on mount when no base path is saved.
+  useEffect(() => {
+    if (!initialBasePath && !autoDetectAttempted) autoDetect(true);
+  }, [initialBasePath, autoDetectAttempted]);
+
+  return {
+    autoDetecting: loading,
+    autoDetectAttempted,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    basePath: {
+      value: basePath,
+      set: setBasePathValue,
+      onChange: handleBasePathChange,
+      onBlur: handleBasePathBlur,
+    },
+    basePathValue: {
+      value: basePathValue,
+      set: setBasePathValue,
+    },
+    handleAutoDetectClick,
+    runAutoDetect: autoDetect,
+  };
+}
diff --git a/frontend/src/models/system.js b/frontend/src/models/system.js
index b922457b72b..d6c724b7d1a 100644
--- a/frontend/src/models/system.js
+++ b/frontend/src/models/system.js
@@ -512,10 +512,18 @@ const System = {
     return false;
   });
 },
-  customModels: async function (provider, apiKey = null, basePath = null) {
+  customModels: async function (
+    provider,
+    apiKey = null,
+    basePath = null,
+    timeout = null
+  ) {
   return fetch(`${API_BASE}/system/custom-models`, {
     method: "POST",
     headers: baseHeaders(),
+      // Abort the request when a timeout (ms) is supplied. AbortSignal.timeout
+      // self-cleans, so no dangling setTimeout fires after the request settles.
+      signal: timeout !== null ? AbortSignal.timeout(timeout) : undefined,
     body: JSON.stringify({
       provider,
       apiKey,
diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js
index 3f637617f42..a08439d0ba3 100644
--- a/frontend/src/utils/constants.js
+++ b/frontend/src/utils/constants.js
@@ -10,6 +10,19 @@ export const SEEN_WATCH_ALERT = "anythingllm_watched_document_alert";
export const USER_BACKGROUND_COLOR = "bg-historical-msg-user";
export const AI_BACKGROUND_COLOR = "bg-historical-msg-system";
+export const OLLAMA_COMMON_URLS = [
+ "http://127.0.0.1:11434",
+ "http://host.docker.internal:11434",
+ "http://172.17.0.1:11434",
+];
+
+export const LMSTUDIO_COMMON_URLS = [
+ "http://localhost:1234/v1",
+ "http://127.0.0.1:1234/v1",
+ "http://host.docker.internal:1234/v1",
+ "http://172.17.0.1:1234/v1",
+];
+
export function fullApiUrl() {
if (API_BASE !== "/api") return API_BASE;
return `${window.location.origin}/api`;