27 changes: 21 additions & 6 deletions frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -31,6 +31,7 @@ import APIPieLogo from "@/media/llmprovider/apipie.png";
import XAILogo from "@/media/llmprovider/xai.png";
import NvidiaNimLogo from "@/media/llmprovider/nvidia-nim.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import DellProAiStudioLogo from "@/media/llmprovider/dpais.png";

import PreLoader from "@/components/Preloader";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -59,6 +60,7 @@ import ApiPieLLMOptions from "@/components/LLMSelection/ApiPieOptions";
import XAILLMOptions from "@/components/LLMSelection/XAiLLMOptions";
import NvidiaNimOptions from "@/components/LLMSelection/NvidiaNimOptions";
import PPIOLLMOptions from "@/components/LLMSelection/PPIOLLMOptions";
import DellProAiStudioOptions from "@/components/LLMSelection/DPAISOptions";

import LLMItem from "@/components/LLMSelection/LLMItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@@ -128,13 +130,17 @@ export const AVAILABLE_LLM_PROVIDERS = [
requiredConfig: ["OllamaLLMBasePath"],
},
{
name: "Novita AI",
value: "novita",
logo: NovitaLogo,
options: (settings) => <NovitaLLMOptions settings={settings} />,
name: "Dell Pro AI Studio",
value: "dpais",
logo: DellProAiStudioLogo,
options: (settings) => <DellProAiStudioOptions settings={settings} />,
description:
"Reliable, Scalable, and Cost-Effective for LLMs from Novita AI",
requiredConfig: ["NovitaLLMApiKey"],
"Run powerful LLMs quickly on NPU powered by Dell Pro AI Studio.",
requiredConfig: [
"DellProAiStudioBasePath",
"DellProAiStudioModelPref",
"DellProAiStudioTokenLimit",
],
},
{
name: "LM Studio",
@@ -153,6 +159,15 @@ export const AVAILABLE_LLM_PROVIDERS = [
description: "Run LLMs locally on your own machine.",
requiredConfig: ["LocalAiApiKey", "LocalAiBasePath", "LocalAiTokenLimit"],
},
{
name: "Novita AI",
value: "novita",
logo: NovitaLogo,
options: (settings) => <NovitaLLMOptions settings={settings} />,
description:
"Reliable, Scalable, and Cost-Effective for LLMs from Novita AI",
requiredConfig: ["NovitaLLMApiKey"],
},
{
name: "Together AI",
value: "togetherai",
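The new provider entry above points the selector at a DPAISOptions component and gates saving on three requiredConfig keys. That component is not part of this excerpt; purely as an illustration of what it has to collect, here is a minimal, hypothetical sketch (input names taken from the requiredConfig keys, which match the KEY_MAPPING entries in updateENV.js later in this diff; markup, classes, and defaults are placeholders, not the shipped component):

// Hypothetical sketch only — the real DPAISOptions component will differ in markup and styling.
export default function DellProAiStudioOptionsSketch({ settings }) {
  return (
    <div className="flex gap-x-4">
      {/* OpenAI-compatible endpoint exposed by Dell Pro AI Studio, e.g. http://localhost:8553/v1 */}
      <input
        type="url"
        name="DellProAiStudioBasePath"
        defaultValue={settings?.DellProAiStudioBasePath}
        placeholder="http://localhost:8553/v1"
        required
      />
      {/* Model id reported by the local service */}
      <input
        type="text"
        name="DellProAiStudioModelPref"
        defaultValue={settings?.DellProAiStudioModelPref}
        required
      />
      {/* Context window to assume for the chosen model */}
      <input
        type="number"
        name="DellProAiStudioTokenLimit"
        defaultValue={settings?.DellProAiStudioTokenLimit ?? 4096}
        required
      />
    </div>
  );
}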
@@ -37,6 +37,7 @@ import MilvusLogo from "@/media/vectordbs/milvus.png";
import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import PGVectorLogo from "@/media/vectordbs/pgvector.png";
import DPAISLogo from "@/media/llmprovider/dpais.png";
import React, { useState, useEffect } from "react";
import paths from "@/utils/paths";
import { useNavigate } from "react-router-dom";
@@ -234,6 +235,13 @@ export const LLM_SELECTION_PRIVACY = {
],
logo: PPIOLogo,
},
dpais: {
name: "Dell Pro AI Studio",
description: [
"Your model and chat contents are only accessible on the computer running Dell Pro AI Studio",
],
logo: DPAISLogo,
},
};

export const VECTOR_DB_PRIVACY = {
20 changes: 15 additions & 5 deletions frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
@@ -26,6 +26,7 @@ import XAILogo from "@/media/llmprovider/xai.png";
import NvidiaNimLogo from "@/media/llmprovider/nvidia-nim.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import DellProAiStudioLogo from "@/media/llmprovider/dpais.png";

import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
import GenericOpenAiOptions from "@/components/LLMSelection/GenericOpenAiOptions";
@@ -53,6 +54,7 @@ import NovitaLLMOptions from "@/components/LLMSelection/NovitaLLMOptions";
import XAILLMOptions from "@/components/LLMSelection/XAiLLMOptions";
import NvidiaNimOptions from "@/components/LLMSelection/NvidiaNimOptions";
import PPIOLLMOptions from "@/components/LLMSelection/PPIOLLMOptions";
import DellProAiStudioOptions from "@/components/LLMSelection/DPAISOptions";

import LLMItem from "@/components/LLMSelection/LLMItem";
import System from "@/models/system";
@@ -114,12 +116,12 @@ const LLMS = [
description: "Run LLMs locally on your own machine.",
},
{
name: "Novita AI",
value: "novita",
logo: NovitaLogo,
options: (settings) => <NovitaLLMOptions settings={settings} />,
name: "Dell Pro AI Studio",
value: "dpais",
logo: DellProAiStudioLogo,
options: (settings) => <DellProAiStudioOptions settings={settings} />,
description:
"Reliable, Scalable, and Cost-Effective for LLMs from Novita AI",
"Run powerful LLMs quickly on NPU powered by Dell Pro AI Studio.",
},
{
name: "LM Studio",
@@ -136,6 +138,14 @@
options: (settings) => <LocalAiOptions settings={settings} />,
description: "Run LLMs locally on your own machine.",
},
{
name: "Novita AI",
value: "novita",
logo: NovitaLogo,
options: (settings) => <NovitaLLMOptions settings={settings} />,
description:
"Reliable, Scalable, and Cost-Effective for LLMs from Novita AI",
},
{
name: "KoboldCPP",
value: "koboldcpp",
7 changes: 7 additions & 0 deletions frontend/src/utils/constants.js
@@ -37,6 +37,13 @@ export const LOCALAI_COMMON_URLS = [
"http://172.17.0.1:8080/v1",
];

export const DPAIS_COMMON_URLS = [
"http://127.0.0.1:8553/v1",
"http://0.0.0.0:8553/v1",
"http://localhost:8553/v1",
"http://host.docker.internal:8553/v1",
];

export const NVIDIA_NIM_COMMON_URLS = [
"http://127.0.0.1:8000/v1/version",
"http://localhost:8000/v1/version",
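DPAIS_COMMON_URLS lists the local endpoints the UI can try when pre-filling the Dell Pro AI Studio base path, mirroring the other local providers' URL lists in this file. A minimal, hypothetical probe — not the frontend's actual auto-discovery hook — could check each candidate's OpenAI-compatible models route, the same ${origin}/v1/openai path the server-side helper later in this diff uses:

import { DPAIS_COMMON_URLS } from "@/utils/constants";

// Hypothetical helper: resolve the first reachable Dell Pro AI Studio endpoint, or null.
export async function detectDpaisBasePath(timeoutMs = 1500) {
  for (const basePath of DPAIS_COMMON_URLS) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
      // basePath already ends in /v1, so this hits {origin}/v1/openai/models.
      const res = await fetch(`${basePath}/openai/models`, {
        signal: controller.signal,
      });
      if (res.ok) return basePath;
    } catch {
      // Candidate unreachable or timed out; try the next one.
    } finally {
      clearTimeout(timer);
    }
  }
  return null;
}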
6 changes: 6 additions & 0 deletions server/models/systemSettings.js
@@ -574,6 +574,12 @@ const SystemSettings = {
// PPIO API keys
PPIOApiKey: !!process.env.PPIO_API_KEY,
PPIOModelPref: process.env.PPIO_MODEL_PREF,

// Dell Pro AI Studio Keys
DellProAiStudioBasePath: process.env.DPAIS_LLM_BASE_PATH,
DellProAiStudioModelPref: process.env.DPAIS_LLM_MODEL_PREF,
DellProAiStudioTokenLimit:
process.env.DPAIS_LLM_MODEL_TOKEN_LIMIT ?? 4096,
};
},

2 changes: 2 additions & 0 deletions server/utils/agents/aibitat/index.js
@@ -797,6 +797,8 @@ ${this.getHistory({ to: route.to })
return new Providers.PPIOProvider({ model: config.model });
case "gemini":
return new Providers.GeminiProvider({ model: config.model });
case "dpais":
return new Providers.DellProAiStudioProvider({ model: config.model });
default:
throw new Error(
`Unknown provider: ${config.provider}. Please use a valid provider.`
2 changes: 2 additions & 0 deletions server/utils/agents/aibitat/providers/index.js
@@ -22,6 +22,7 @@ const NovitaProvider = require("./novita.js");
const NvidiaNimProvider = require("./nvidiaNim.js");
const PPIOProvider = require("./ppio.js");
const GeminiProvider = require("./gemini.js");
const DellProAiStudioProvider = require("./dellProAiStudio.js");

module.exports = {
OpenAIProvider,
@@ -48,4 +49,5 @@ module.exports = {
NvidiaNimProvider,
PPIOProvider,
GeminiProvider,
DellProAiStudioProvider,
};
12 changes: 12 additions & 0 deletions server/utils/agents/index.js
@@ -189,6 +189,16 @@ class AgentHandler {
if (!process.env.GEMINI_API_KEY)
throw new Error("Gemini API key must be provided to use agents.");
break;
case "dpais":
if (!process.env.DPAIS_LLM_BASE_PATH)
throw new Error(
"Dell Pro AI Studio base path must be provided to use agents."
);
if (!process.env.DPAIS_LLM_MODEL_PREF)
throw new Error(
"Dell Pro AI Studio model must be set to use agents."
);
break;

default:
throw new Error(
@@ -256,6 +266,8 @@
return process.env.PPIO_MODEL_PREF ?? "qwen/qwen2.5-32b-instruct";
case "gemini":
return process.env.GEMINI_LLM_MODEL_PREF ?? "gemini-2.0-flash-lite";
case "dpais":
return process.env.DPAIS_LLM_MODEL_PREF;
default:
return null;
}
40 changes: 40 additions & 0 deletions server/utils/helpers/customModels.js
@@ -32,6 +32,7 @@ const SUPPORT_CUSTOM_MODELS = [
"xai",
"gemini",
"ppio",
"dpais",
];

async function getCustomModels(provider = "", apiKey = null, basePath = null) {
@@ -81,6 +82,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
return await getGeminiModels(apiKey);
case "ppio":
return await getPPIOModels(apiKey);
case "dpais":
return await getDellProAiStudioModels(basePath);
default:
return { models: [], error: "Invalid provider for custom models" };
}
@@ -635,6 +638,43 @@ async function getPPIOModels() {
return { models, error: null };
}

async function getDellProAiStudioModels(basePath = null) {
const { OpenAI: OpenAIApi } = require("openai");
try {
const { origin } = new URL(
basePath || process.env.DELL_PRO_AI_STUDIO_BASE_PATH
);
const openai = new OpenAIApi({
baseURL: `${origin}/v1/openai`,
apiKey: null,
});
const models = await openai.models
.list()
.then((results) => results.data)
.then((models) => {
return models
.filter((model) => model.capability === "TextToText") // Only include text-to-text models for this handler
.map((model) => {
return {
id: model.id,
name: model.name,
organization: model.owned_by,
};
});
})
.catch((e) => {
throw new Error(e.message);
});
return { models, error: null };
} catch (e) {
console.error(`getDellProAiStudioModels`, e.message);
return {
models: [],
error: "Could not reach Dell Pro Ai Studio from the provided base path",
};
}
}

module.exports = {
getCustomModels,
};
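Since getCustomModels already threads basePath through to the new branch, a caller can list the TextToText models a local Dell Pro AI Studio instance exposes with the existing helper. A usage sketch (require path relative to server/utils, endpoint value illustrative):

const { getCustomModels } = require("./helpers/customModels");

(async () => {
  // No API key is needed; the "dpais" branch only uses the base path.
  const { models, error } = await getCustomModels(
    "dpais",
    null,
    "http://localhost:8553/v1"
  );
  if (error) return console.error(error);
  // Each entry is { id, name, organization }, per the mapping above.
  console.log(models);
})();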
6 changes: 6 additions & 0 deletions server/utils/helpers/index.js
@@ -203,6 +203,9 @@ function getLLMProvider({ provider = null, model = null } = {}) {
case "ppio":
const { PPIOLLM } = require("../AiProviders/ppio");
return new PPIOLLM(embedder, model);
case "dpais":
const { DellProAiStudioLLM } = require("../AiProviders/dellProAiStudio");
return new DellProAiStudioLLM(embedder, model);
default:
throw new Error(
`ENV: No valid LLM_PROVIDER value found in environment! Using ${process.env.LLM_PROVIDER}`
@@ -347,6 +350,9 @@ function getLLMProviderClass({ provider = null } = {}) {
case "ppio":
const { PPIOLLM } = require("../AiProviders/ppio");
return PPIOLLM;
case "dpais":
const { DellProAiStudioLLM } = require("../AiProviders/dellProAiStudio");
return DellProAiStudioLLM;
default:
return null;
}
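With both lookups in place, selecting the new backend at runtime follows the same call path as every other provider. A brief sketch, assuming both helpers are exported from server/utils/helpers/index.js as the existing providers rely on (require path relative to server/utils):

const { getLLMProvider, getLLMProviderClass } = require("./helpers");

// Instantiate a chat connector for the configured Dell Pro AI Studio model.
const llm = getLLMProvider({
  provider: "dpais",
  model: process.env.DPAIS_LLM_MODEL_PREF,
});

// Or grab the class itself, e.g. for static checks without instantiating.
const DellProAiStudioLLM = getLLMProviderClass({ provider: "dpais" });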
15 changes: 15 additions & 0 deletions server/utils/helpers/updateENV.js
@@ -262,6 +262,20 @@ const KEY_MAPPING = {
checks: [nonZero],
},

// Dell Pro AI Studio Settings
DellProAiStudioBasePath: {
envKey: "DPAIS_LLM_BASE_PATH",
checks: [isNotEmpty, validDockerizedUrl],
},
DellProAiStudioModelPref: {
envKey: "DPAIS_LLM_MODEL_PREF",
checks: [isNotEmpty],
},
DellProAiStudioTokenLimit: {
envKey: "DPAIS_LLM_MODEL_TOKEN_LIMIT",
checks: [nonZero],
},

EmbeddingEngine: {
envKey: "EMBEDDING_ENGINE",
checks: [supportedEmbeddingModel],
@@ -765,6 +779,7 @@ function supportedLLM(input = "") {
"xai",
"nvidia-nim",
"ppio",
"dpais",
].includes(input);
return validSelection ? null : `${input} is not a valid LLM provider.`;
}
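Taken together with the supportedLLM addition, the three KEY_MAPPING entries imply a hand-written .env for this provider would carry the keys below. This assumes the standard LLM_PROVIDER selector referenced in getLLMProvider's error message; the model id is illustrative (use one returned by the /v1/openai/models listing), while the port and the 4096 default come from the constants.js and systemSettings.js changes above:

LLM_PROVIDER='dpais'
DPAIS_LLM_BASE_PATH='http://localhost:8553/v1'
# Illustrative model id — replace with one reported by the local service.
DPAIS_LLM_MODEL_PREF='<model-id-from-dpais>'
DPAIS_LLM_MODEL_TOKEN_LIMIT=4096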