From d84b6dcebede0b711d4bdf326dbb855a79165217 Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Mon, 11 Dec 2023 14:14:44 -0800
Subject: [PATCH] patch: API key to localai service calls

connect #417
---
 .../LocalAiOptions/index.jsx                  | 103 +++++++++++-------
 server/models/systemSettings.js               |   3 +-
 .../utils/EmbeddingEngines/localAi/index.js   |   5 +
 server/utils/helpers/customModels.js          |   2 +-
 4 files changed, 73 insertions(+), 40 deletions(-)

diff --git a/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
index 232c33205fb..5871e288f47 100644
--- a/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
+++ b/frontend/src/components/EmbeddingSelection/LocalAiOptions/index.jsx
@@ -6,51 +6,80 @@ export default function LocalAiOptions({ settings }) {
     settings?.EmbeddingBasePath
   );
   const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);
 
   return (
     <>
-      <div className="w-full flex flex-col gap-y-4">
-        <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            LocalAI Base URL
-          </label>
-          <input
-            type="url"
-            name="EmbeddingBasePath"
-            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="http://localhost:8080/v1"
-            defaultValue={settings?.EmbeddingBasePath}
-            onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={updateBasePath}
-            required={true}
-            autoComplete="off"
-            spellCheck={false}
-          />
-        </div>
-        <LocalAIModelSelection settings={settings} basePath={basePath} />
-        <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            Max embedding chunk length
-          </label>
-          <input
-            type="number"
-            name="EmbeddingModelMaxChunkLength"
-            className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="1000"
-            min={1}
-            onScroll={(e) => e.target.blur()}
-            defaultValue={settings?.EmbeddingModelMaxChunkLength}
-            required={false}
-            autoComplete="off"
-          />
-        </div>
-      </div>
+      <div className="w-full flex flex-col gap-y-4">
+        <div className="w-full flex items-center gap-4">
+          <div className="flex flex-col w-60">
+            <label className="text-white text-sm font-semibold block mb-4">
+              LocalAI Base URL
+            </label>
+            <input
+              type="url"
+              name="EmbeddingBasePath"
+              className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="http://localhost:8080/v1"
+              defaultValue={settings?.EmbeddingBasePath}
+              onChange={(e) => setBasePathValue(e.target.value)}
+              onBlur={() => setBasePath(basePathValue)}
+              required={true}
+              autoComplete="off"
+              spellCheck={false}
+            />
+          </div>
+          <LocalAIModelSelection
+            settings={settings}
+            apiKey={apiKey}
+            basePath={basePath}
+          />
+          <div className="flex flex-col w-60">
+            <label className="text-white text-sm font-semibold block mb-4">
+              Max embedding chunk length
+            </label>
+            <input
+              type="number"
+              name="EmbeddingModelMaxChunkLength"
+              className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="1000"
+              min={1}
+              onScroll={(e) => e.target.blur()}
+              defaultValue={settings?.EmbeddingModelMaxChunkLength}
+              required={false}
+              autoComplete="off"
+            />
+          </div>
+        </div>
+        <div className="w-full flex items-center gap-4">
+          <div className="flex flex-col w-60">
+            <div className="flex flex-col gap-y-1 mb-4">
+              <label className="text-white text-sm font-semibold block">
+                Local AI API Key
+              </label>
+              <p className="text-xs italic text-white text-opacity-60">
+                optional API key to use if running LocalAI with API keys.
+              </p>
+            </div>
+            <input
+              type="password"
+              name="LocalAiApiKey"
+              className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="sk-mysecretkey"
+              defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={(e) => setApiKeyValue(e.target.value)}
+              onBlur={() => setApiKey(apiKeyValue)}
+            />
+          </div>
+        </div>
+      </div>
     </>
   );
 }
 
-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
@@ -62,12 +91,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);
 
   if (loading || customModels.length == 0) {
     return (
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 66f7108bd83..068359bb023 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -29,6 +29,7 @@ const SystemSettings = {
     EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
     EmbeddingModelMaxChunkLength:
       process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
+    LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
     ...(vectorDB === "pinecone"
       ? {
           PineConeEnvironment: process.env.PINECONE_ENVIRONMENT,
@@ -98,13 +99,11 @@ const SystemSettings = {
           AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
         }
       : {}),
-
     ...(llmProvider === "localai"
       ? {
           LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
           LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
           LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
-          LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
 
           // For embedding credentials when localai is selected.
           OpenAiKey: !!process.env.OPEN_AI_KEY,
diff --git a/server/utils/EmbeddingEngines/localAi/index.js b/server/utils/EmbeddingEngines/localAi/index.js
index aa36b5d1330..0f43cc7dc65 100644
--- a/server/utils/EmbeddingEngines/localAi/index.js
+++ b/server/utils/EmbeddingEngines/localAi/index.js
@@ -9,6 +9,11 @@ class LocalAiEmbedder {
       throw new Error("No embedding model was set.");
     const config = new Configuration({
       basePath: process.env.EMBEDDING_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index 8d46ab6856d..03e373774c7 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -8,7 +8,7 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
     case "openai":
       return await openAiModels(apiKey);
     case "localai":
-      return await localAIModels(basePath);
+      return await localAIModels(basePath, apiKey);
     case "native-llm":
       return nativeLLMModels();
     default: