θΏ™ζ˜―indexlocζδΎ›ηš„ζœεŠ‘οΌŒδΈθ¦θΎ“ε…₯任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import useGetProviderModels, {
DISABLED_PROVIDERS,
} from "@/hooks/useGetProvidersModels";
import paths from "@/utils/paths";
import { useTranslation } from "react-i18next";
import { Link, useParams } from "react-router-dom";

// These models do NOT support function calling
function supportedModel(provider, model = "") {
Expand All @@ -18,11 +20,32 @@ export default function AgentModelSelection({
workspace,
setHasChanges,
}) {
const { slug } = useParams();
const { defaultModels, customModels, loading } =
useGetProviderModels(provider);

const { t } = useTranslation();
if (DISABLED_PROVIDERS.includes(provider)) return null;
if (DISABLED_PROVIDERS.includes(provider)) {
return (
<div className="w-full h-10 justify-center items-center flex">
<p className="text-sm font-base text-white text-opacity-60 text-center">
Multi-model support is not supported for this provider yet.
<br />
Agent's will use{" "}
<Link
to={paths.workspace.settings.chatSettings(slug)}
className="underline"
>
the model set for the workspace
</Link>{" "}
or{" "}
<Link to={paths.settings.llmPreference()} className="underline">
the model set for the system.
</Link>
</p>
</div>
);
}

if (loading) {
return (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import useGetProviderModels, {
DISABLED_PROVIDERS,
} from "@/hooks/useGetProvidersModels";
import { useTranslation } from "react-i18next";

export default function ChatModelSelection({
provider,
workspace,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,10 @@ import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import WorkspaceLLMItem from "./WorkspaceLLMItem";
import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
import ChatModelSelection from "../ChatModelSelection";
import ChatModelSelection from "./ChatModelSelection";
import { useTranslation } from "react-i18next";
import { Link } from "react-router-dom";
import paths from "@/utils/paths";

// Some providers can only be associated with a single model.
// In that case there is no selection to be made so we can just move on.
Expand Down Expand Up @@ -148,7 +150,22 @@ export default function WorkspaceLLMSelection({
</button>
)}
</div>
{!NO_MODEL_SELECTION.includes(selectedLLM) && (
{NO_MODEL_SELECTION.includes(selectedLLM) ? (
<>
{selectedLLM !== "default" && (
<div className="w-full h-10 justify-center items-center flex mt-4">
<p className="text-sm font-base text-white text-opacity-60 text-center">
Multi-model support is not supported for this provider yet.
<br />
This workspace will use{" "}
<Link to={paths.settings.llmPreference()} className="underline">
the model set for the system.
</Link>
</p>
</div>
)}
</>
) : (
<div className="mt-4 flex flex-col gap-y-1">
<ChatModelSelection
provider={selectedLLM}
Expand Down
32 changes: 29 additions & 3 deletions server/utils/agents/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,12 @@ const { USER_AGENT, WORKSPACE_AGENT } = require("./defaults");
class AgentHandler {
#invocationUUID;
#funcsToLoad = [];
// Providers that cannot reliably enumerate/load many models. Maps each
// provider to the ENV var holding the system-level model preference that
// #fetchModel reads as a fallback, or null when the provider's API ignores
// the `model` field entirely.
#noProviderModelDefault = {
azure: "OPEN_MODEL_PREF",
lmstudio: "LMSTUDIO_MODEL_PREF",
textgenwebui: null, // does not even use `model` in API req
"generic-openai": "GENERIC_OPEN_AI_MODEL_PREF",
};
invocation = null;
aibitat = null;
channel = null;
Expand Down Expand Up @@ -172,7 +178,7 @@ class AgentHandler {
case "mistral":
return "mistral-medium";
case "generic-openai":
return "gpt-3.5-turbo";
return null;
case "perplexity":
return "sonar-small-online";
case "textgenwebui":
Expand All @@ -182,10 +188,30 @@ class AgentHandler {
}
}

/**
* Finds or assumes the model preference value to use for API calls.
* If multi-model loading is supported, we use their agent model selection of the workspace
* If not supported, we attempt to fallback to the system provider value for the LLM preference
* and if that fails - we assume a reasonable base model to exist.
* @returns {string} the model preference value to use in API calls
*/
#fetchModel() {
if (!Object.keys(this.#noProviderModelDefault).includes(this.provider))
return this.invocation.workspace.agentModel || this.#providerDefault();

// Provider has no reliable default (cant load many models) - so we need to look at system
// for the model param.
const sysModelKey = this.#noProviderModelDefault[this.provider];
if (!!sysModelKey)
return process.env[sysModelKey] ?? this.#providerDefault();

// If all else fails - look at the provider default list
return this.#providerDefault();
}

#providerSetupAndCheck() {
this.provider = this.invocation.workspace.agentProvider || "openai";
this.model =
this.invocation.workspace.agentModel || this.#providerDefault();
this.model = this.#fetchModel();
this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
this.#checkSetup();
}
Expand Down