From b1936b746f4efdaed914171b10b899ab0c0d8b32 Mon Sep 17 00:00:00 2001
From: shatfield4
Date: Tue, 13 Feb 2024 17:59:22 -0800
Subject: [PATCH 1/8] WIP new settings layout

---
 frontend/src/App.jsx                          |  13 +-
 .../Sidebar/ActiveWorkspaces/index.jsx        |  62 ++--
 frontend/src/hooks/useGetProvidersModels.js   |  56 ++++
 .../AdditionalWorkspaceSettings/index.jsx     | 208 ++++++++++++
 .../WorkspaceSettings/ChatSettings/index.jsx  | 298 ++++++++++++++++++
 .../WorkspaceSettings/GeneralInfo/index.jsx   | 123 ++++++++
 .../VectorDatabase/index.jsx                  | 128 ++++++++
 .../src/pages/WorkspaceSettings/index.jsx     | 263 +++++-----------
 frontend/src/utils/paths.js                   |   2 +-
 frontend/src/utils/types.js                   |  19 ++
 10 files changed, 967 insertions(+), 205 deletions(-)
 create mode 100644 frontend/src/hooks/useGetProvidersModels.js
 create mode 100644 frontend/src/pages/AdditionalWorkspaceSettings/index.jsx
 create mode 100644 frontend/src/pages/WorkspaceSettings/ChatSettings/index.jsx
 create mode 100644 frontend/src/pages/WorkspaceSettings/GeneralInfo/index.jsx
 create mode 100644 frontend/src/pages/WorkspaceSettings/VectorDatabase/index.jsx
 create mode 100644 frontend/src/utils/types.js

diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index 7a1395f1744..1b994868471 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -43,6 +43,9 @@ const DataConnectorSetup = lazy(
   () => import("@/pages/GeneralSettings/DataConnectors/Connectors")
 );
 const WorkspaceSettings = lazy(() => import("@/pages/WorkspaceSettings"));
+const AdditionalWorkspaceSettings = lazy(
+  () => import("@/pages/AdditionalWorkspaceSettings")
+);
 const EmbedConfigSetup = lazy(
   () => import("@/pages/GeneralSettings/EmbedConfigs")
 );
@@ -57,6 +60,10 @@ export default function App() {
             } />
             } />
+
+              }
+            />
             }
@@ -69,8 +76,10 @@ export default function App() {
           {/* Admin */}
             }
+            path="/workspace/:slug/additional-settings"
+            element={
+
+            }
           />
-
+
+
+
+
+
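// Aside: a minimal, self-contained sketch of the routing pattern the App.jsx
// hunks above rely on — a lazily loaded page mounted on a slug-parameterized
// route. Only the "/workspace/:slug/additional-settings" path and the
// AdditionalWorkspaceSettings import come from this diff; the router setup,
// fallback element, and absence of a route guard are illustrative assumptions,
// not the project's actual App.jsx.
import React, { lazy, Suspense } from "react";
import { BrowserRouter, Routes, Route } from "react-router-dom";

// Lazily load the new settings page so it is code-split from the main bundle.
const AdditionalWorkspaceSettings = lazy(
  () => import("@/pages/AdditionalWorkspaceSettings")
);

export default function AppSketch() {
  return (
    <Suspense fallback={<div>Loading...</div>}>
      <BrowserRouter>
        <Routes>
          {/* ":slug" is read inside the page via useParams(), as in the diff. */}
          <Route
            path="/workspace/:slug/additional-settings"
            element={<AdditionalWorkspaceSettings />}
          />
        </Routes>
      </BrowserRouter>
    </Suspense>
  );
}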
diff --git a/frontend/src/hooks/useGetProvidersModels.js b/frontend/src/hooks/useGetProvidersModels.js
new file mode 100644
index 00000000000..1f8cce9889f
--- /dev/null
+++ b/frontend/src/hooks/useGetProvidersModels.js
@@ -0,0 +1,56 @@
+import System from "@/models/system";
+import { useEffect, useState } from "react";
+
+// Providers which cannot use this feature for workspace<>model selection
+export const DISABLED_PROVIDERS = ["azure", "lmstudio"];
+const PROVIDER_DEFAULT_MODELS = {
+  openai: [
+    "gpt-3.5-turbo",
+    "gpt-3.5-turbo-1106",
+    "gpt-4",
+    "gpt-4-turbo-preview",
+    "gpt-4-1106-preview",
+    "gpt-4-32k",
+  ],
+  gemini: ["gemini-pro"],
+  anthropic: ["claude-2", "claude-instant-1"],
+  azure: [],
+  lmstudio: [],
+  localai: [],
+  ollama: [],
+  togetherai: [],
+  native: [],
+};
+
+// For togetherAi, which has a large model list - we subgroup the options
+// by their creator organization (eg: Meta, Mistral, etc)
+// which makes selection easier to read.
+function groupModels(models) {
+  return models.reduce((acc, model) => {
+    acc[model.organization] = acc[model.organization] || [];
+    acc[model.organization].push(model);
+    return acc;
+  }, {});
+}
+
+export default function useGetProviderModels(provider = null) {
+  const [defaultModels, setDefaultModels] = useState([]);
+  const [customModels, setCustomModels] = useState([]);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function fetchProviderModels() {
+      if (!provider) return;
+      const { models = [] } = await System.customModels(provider);
+      if (PROVIDER_DEFAULT_MODELS.hasOwnProperty(provider))
+        setDefaultModels(PROVIDER_DEFAULT_MODELS[provider]);
+      provider === "togetherai"
+        ? setCustomModels(groupModels(models))
+        : setCustomModels(models);
+      setLoading(false);
+    }
+    fetchProviderModels();
+  }, [provider]);
+
+  return { defaultModels, customModels, loading };
+}
diff --git a/frontend/src/pages/AdditionalWorkspaceSettings/index.jsx b/frontend/src/pages/AdditionalWorkspaceSettings/index.jsx
new file mode 100644
index 00000000000..2cb59b0e80b
--- /dev/null
+++ b/frontend/src/pages/AdditionalWorkspaceSettings/index.jsx
@@ -0,0 +1,208 @@
+import React, { useState, useEffect } from "react";
+import { useParams } from "react-router-dom";
+import { isMobile } from "react-device-detect";
+import showToast from "@/utils/toast";
+import { ArrowUUpLeft, Plus, X } from "@phosphor-icons/react";
+import Workspace from "@/models/workspace";
+import paths from "@/utils/paths";
+
+export default function AdditionalWorkspaceSettings() {
+  const [hasChanges, setHasChanges] = useState(false);
+  const [workspace, setWorkspace] = useState(null);
+  const [suggestedMessages, setSuggestedMessages] = useState([]);
+  const [editingIndex, setEditingIndex] = useState(-1);
+  const [newMessage, setNewMessage] = useState({ heading: "", message: "" });
+  const { slug } = useParams();
+
+  useEffect(() => {
+    async function fetchWorkspace() {
+      if (!slug) return;
+      const workspace = await Workspace.bySlug(slug);
+      const suggestedMessages = await Workspace.getSuggestedMessages(slug);
+      setWorkspace(workspace);
+      setSuggestedMessages(suggestedMessages);
+    }
+    fetchWorkspace();
+  }, [slug]);
+
+  const handleSaveSuggestedMessages = async () => {
+    const validMessages = suggestedMessages.filter(
+      (msg) =>
+        msg?.heading?.trim()?.length > 0 || msg?.message?.trim()?.length > 0
+    );
+    const { success, error } = await Workspace.setSuggestedMessages(
+      slug,
+      validMessages
+    );
+    if (!success) {
+      showToast(`Failed to update welcome messages: ${error}`, "error");
+      return;
+    }
+    showToast("Successfully updated welcome messages.", "success");
+    setHasChanges(false);
+  };
+
+  const addMessage = () => {
+    setEditingIndex(-1);
+    if (suggestedMessages.length >= 4) {
+      showToast("Maximum of 4 messages allowed.", "warning");
+      return;
+    }
+    const defaultMessage = {
+      heading: "Explain to me",
+      message: "the benefits of AnythingLLM",
+    };
+    setNewMessage(defaultMessage);
+    setSuggestedMessages([...suggestedMessages, { ...defaultMessage }]);
+    setHasChanges(true);
+  };
+
+  const removeMessage = (index) => {
+    const messages = [...suggestedMessages];
+    messages.splice(index, 1);
+    setSuggestedMessages(messages);
+    setHasChanges(true);
+  };
+
+  const startEditing = (index) => {
+    setEditingIndex(index);
+    setNewMessage({ ...suggestedMessages[index] });
+  };
+
+  const handleRemoveMessage = (index) => {
+    removeMessage(index);
+    setEditingIndex(-1);
+  };
+
+  const onEditChange = (e) => {
+    const updatedNewMessage = {
+      ...newMessage,
+      [e.target.name]: e.target.value,
+    };
+    setNewMessage(updatedNewMessage);
+    const updatedMessages = suggestedMessages.map((message, index) => {
+      if (index === editingIndex) {
+        return { ...message, [e.target.name]: e.target.value };
+      }
+      return message;
+    });
+
+    setSuggestedMessages(updatedMessages);
+    setHasChanges(true);
+  };
+
+  return (
+ + + +
+
+
+
+

+ Workspace Settings ({workspace?.name}) +

+
+

+ Customize your workspace. +

+
+
+
+

+ Suggested Chat Messages +

+

+            Customize the messages that will be suggested to your workspace
+            users.
+

+
+ +
+ {suggestedMessages.map((suggestion, index) => ( +
+ + +
+ ))} +
+ {editingIndex >= 0 && ( +
+
+ + +
+
+ + +
+
+ )} + {suggestedMessages.length < 4 && ( + + )} + + {hasChanges && ( +
+ +
+ )} +
+
+
+
+  );
+}
diff --git a/frontend/src/pages/WorkspaceSettings/ChatSettings/index.jsx b/frontend/src/pages/WorkspaceSettings/ChatSettings/index.jsx
new file mode 100644
index 00000000000..c7314762fc3
--- /dev/null
+++ b/frontend/src/pages/WorkspaceSettings/ChatSettings/index.jsx
@@ -0,0 +1,298 @@
+import useGetProviderModels, {
+  DISABLED_PROVIDERS,
+} from "@/hooks/useGetProvidersModels";
+import System from "@/models/system";
+import Workspace from "@/models/workspace";
+import { chatPrompt } from "@/utils/chat";
+import showToast from "@/utils/toast";
+import { castToType } from "@/utils/types";
+import { useEffect, useRef, useState } from "react";
+
+export default function ChatSettings({ workspace }) {
+  const [settings, setSettings] = useState({});
+  const [hasChanges, setHasChanges] = useState(false);
+  const [saving, setSaving] = useState(false);
+
+  const formEl = useRef(null);
+  useEffect(() => {
+    async function fetchSettings() {
+      const _settings = await System.keys();
+      setSettings(_settings ?? {});
+    }
+    fetchSettings();
+  }, []);
+
+  const handleUpdate = async (e) => {
+    setSaving(true);
+    e.preventDefault();
+    const data = {};
+    const form = new FormData(formEl.current);
+    for (var [key, value] of form.entries()) data[key] = castToType(key, value);
+    const { workspace: updatedWorkspace, message } = await Workspace.update(
+      workspace.slug,
+      data
+    );
+    if (!!updatedWorkspace) {
+      showToast("Workspace updated!", "success", { clear: true });
+    } else {
+      showToast(`Error: ${message}`, "error", { clear: true });
+    }
+    setSaving(false);
+    setHasChanges(false);
+  };
+
+  if (!workspace) return null;
+  return (
+
+
+
+
+      {hasChanges && (
+
+      )}
+
+  );
+}
+
+function recommendedSettings(provider = null) {
+  switch (provider) {
+    case "mistral":
+      return { temp: 0 };
+    default:
+      return { temp: 0.7 };
+  }
+}
+
+function ChatModelSelection({ settings, workspace, setHasChanges }) {
+  const { defaultModels, customModels, loading } = useGetProviderModels(
+    settings?.LLMProvider
+  );
+  if (DISABLED_PROVIDERS.includes(settings?.LLMProvider)) return null;
+
+  if (loading) {
+    return (
+
+ +

+            The specific chat model that will be used for this workspace. If
+            empty, the system LLM preference will be used.
+

+
+ +
+ ); + } + + return ( +
+
+ +

+            The specific chat model that will be used for this workspace. If
+            empty, the system LLM preference will be used.
+

+
+ + +
+ ); +} + +function ChatHistorySettings({ workspace, setHasChanges }) { + return ( +
+
+ +

+          The number of previous chats that will be included in the
+          response's short-term memory.
+          We recommend 20.
+          Anything more than 45 is likely to lead to continuous chat failures
+          depending on message size.
+

+
+          e.target.blur()}
+        defaultValue={workspace?.openAiHistory ?? 20}
+        className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
+        placeholder="20"
+        required={true}
+        autoComplete="off"
+        onChange={() => setHasChanges(true)}
+      />
+ ); +} + +function ChatPromptSettings({ workspace, setHasChanges }) { + return ( +
+
+ +

+          The prompt that will be used on this workspace. Define the context and
+          instructions for the AI to generate a response. You should provide a
+          carefully crafted prompt so the AI can generate a relevant and
+          accurate response.
+

+
+