feat: Add AI/ML API (aimlapi) as new LLM provider #4156

Open · wants to merge 7 commits into base: master
1 change: 1 addition & 0 deletions README.md
@@ -88,6 +88,7 @@ AnythingLLM divides your documents into objects called `workspaces`. A Workspace
- [LocalAI (all models)](https://localai.io/)
- [Together AI (chat models)](https://www.together.ai/)
- [Fireworks AI (chat models)](https://fireworks.ai/)
- [AI/ML API (chat models)](https://aimlapi.com/models/?utm_source=anythingllm&utm_medium=github&utm_campaign=integration)
- [Perplexity (chat models)](https://www.perplexity.ai/)
- [OpenRouter (chat models)](https://openrouter.ai/)
- [DeepSeek (chat models)](https://deepseek.com/)
10 changes: 9 additions & 1 deletion docker/.env.example
@@ -129,6 +129,10 @@ GID='1000'
# DEEPSEEK_API_KEY='your-deepseek-api-key-here'
# DEEPSEEK_MODEL_PREF='deepseek-chat'

# LLM_PROVIDER='aimlapi'
# AIML_LLM_API_KEY='your-aimlapi-key'
# AIML_MODEL_PREF='gpt-3.5-turbo'

# LLM_PROVIDER='ppio'
# PPIO_API_KEY='your-ppio-api-key-here'
# PPIO_MODEL_PREF=deepseek/deepseek-v3/community
@@ -182,6 +186,10 @@ GID='1000'
# GENERIC_OPEN_AI_EMBEDDING_API_KEY='sk-123abc'
# GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS=500

# EMBEDDING_ENGINE='aimlapi'
# AIML_EMBEDDER_API_KEY='your-aimlapi-key'
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'

# EMBEDDING_ENGINE='gemini'
# GEMINI_EMBEDDING_API_KEY=
# EMBEDDING_MODEL_PREF='text-embedding-004'
@@ -339,4 +347,4 @@ GID='1000'
# Specify the target languages for when using OCR to parse images and PDFs.
# This is a comma separated list of language codes as a string. Unsupported languages will be ignored.
# Default is English. See https://tesseract-ocr.github.io/tessdoc/Data-Files-in-different-versions.html for a list of valid language codes.
# TARGET_OCR_LANG=eng,deu,ita,spa,fra,por,rus,nld,tur,hun,pol,ita,spa,fra,por,rus,nld,tur,hun,pol
# TARGET_OCR_LANG=eng,deu,ita,spa,fra,por,rus,nld,tur,hun,pol,ita,spa,fra,por,rus,nld,tur,hun,pol
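For readers wiring this up outside Docker, here is a minimal sketch of how the `AIML_LLM_API_KEY` and `AIML_MODEL_PREF` variables added above might be consumed server-side. This is not code from the PR: the helper name and base URL are assumptions, and it simply presumes AI/ML API exposes an OpenAI-compatible `/chat/completions` endpoint.

```js
// Hypothetical sketch, not part of this PR.
// Assumes an OpenAI-compatible endpoint; the base URL is an assumption.
const AIML_BASE_URL = "https://api.aimlapi.com/v1";

async function aimlChatCompletion(messages) {
  const response = await fetch(`${AIML_BASE_URL}/chat/completions`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.AIML_LLM_API_KEY}`,
    },
    body: JSON.stringify({
      // Fall back to the same default model shown in .env.example above.
      model: process.env.AIML_MODEL_PREF || "gpt-3.5-turbo",
      messages, // e.g. [{ role: "user", content: "Hello" }]
    }),
  });
  if (!response.ok) {
    throw new Error(`AI/ML API request failed with status ${response.status}`);
  }
  const data = await response.json();
  return data.choices?.[0]?.message?.content ?? null;
}
```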
110 changes: 110 additions & 0 deletions frontend/src/components/EmbeddingSelection/AimlApiOptions/index.jsx
@@ -0,0 +1,110 @@
import { useState, useEffect } from "react";
import System from "@/models/system";

export default function AimlApiOptions({ settings }) {
const [inputValue, setInputValue] = useState(settings?.AimlEmbedderApiKey);
const [apiKey, setApiKey] = useState(settings?.AimlEmbedderApiKey);

return (
<div className="w-full flex flex-col gap-y-4">
<div className="w-full flex items-center gap-[36px] mt-1.5">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
API Key
</label>
<input
type="password"
name="AimlEmbedderApiKey"
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="AI/ML API Key"
defaultValue={settings?.AimlEmbedderApiKey ? "*".repeat(20) : ""}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setInputValue(e.target.value)}
onBlur={() => setApiKey(inputValue)}
/>
</div>
<AimlApiEmbeddingModelSelection apiKey={apiKey} settings={settings} />
</div>
</div>
);
}

function AimlApiEmbeddingModelSelection({ apiKey, settings }) {
const [groupedModels, setGroupedModels] = useState({});
const [loading, setLoading] = useState(true);

useEffect(() => {
async function findModels() {
if (!apiKey) {
setGroupedModels({});
setLoading(true);
return;
}
setLoading(true);
const { models } = await System.customModels(
"aimlapi-embed",
typeof apiKey === "boolean" ? null : apiKey
);
if (models?.length > 0) {
const byDev = models.reduce((acc, model) => {
acc[model.organization] = acc[model.organization] || [];
acc[model.organization].push(model);
return acc;
}, {});
setGroupedModels(byDev);
}
setLoading(false);
}
findModels();
}, [apiKey]);

if (loading || Object.keys(groupedModels).length === 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Preference
</label>
<select
name="EmbeddingModelPref"
disabled={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available models --
</option>
</select>
</div>
);
}

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Model Preference
</label>
<select
name="EmbeddingModelPref"
required={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{Object.keys(groupedModels)
.sort()
.map((organization) => (
<optgroup key={organization} label={organization}>
{groupedModels[organization].map((model) => (
<option
key={model.id}
value={model.id}
selected={settings?.EmbeddingModelPref === model.id}
>
{model.name}
</option>
))}
</optgroup>
))}
</select>
</div>
);
}
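The `reduce` above groups models by provider so the dropdown renders one `<optgroup>` per organization. As an illustration only (the entries below are placeholders, not the endpoint's actual catalog), the shape assumed for the `System.customModels("aimlapi-embed", ...)` response and the resulting grouping look like this:

```js
// Illustrative data only — the real list comes from the backend
// "aimlapi-embed" endpoint; the ids and organizations here are placeholders.
const models = [
  { id: "text-embedding-ada-002", name: "text-embedding-ada-002", organization: "OpenAI" },
  { id: "some-embedding-model", name: "Some Embedding Model", organization: "ExampleOrg" },
];

// Same grouping logic as the component: one bucket per organization.
const byDev = models.reduce((acc, model) => {
  acc[model.organization] = acc[model.organization] || [];
  acc[model.organization].push(model);
  return acc;
}, {});
// byDev => { OpenAI: [ ...one model ], ExampleOrg: [ ...one model ] }
```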
111 changes: 111 additions & 0 deletions frontend/src/components/LLMSelection/AimlApiOptions/index.jsx
@@ -0,0 +1,111 @@
import { useState, useEffect } from "react";
import System from "@/models/system";

export default function AimlApiOptions({ settings }) {
const [inputValue, setInputValue] = useState(settings?.AimlLlmApiKey);
const [apiKey, setApiKey] = useState(settings?.AimlLlmApiKey);

return (
<div className="flex gap-[36px] mt-1.5">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
API Key
</label>
<input
type="password"
name="AimlLlmApiKey"
className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="AI/ML API Key"
defaultValue={settings?.AimlLlmApiKey ? "*".repeat(20) : ""}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setInputValue(e.target.value)}
onBlur={() => setApiKey(inputValue)}
/>
</div>
{!settings?.credentialsOnly && (
<AimlApiModelSelection settings={settings} apiKey={apiKey} />
)}
</div>
);
}

function AimlApiModelSelection({ apiKey, settings }) {
const [groupedModels, setGroupedModels] = useState({});
const [loading, setLoading] = useState(true);

useEffect(() => {
async function findCustomModels() {
if (!apiKey) {
setGroupedModels({});
setLoading(true);
return;
}

setLoading(true);
const { models } = await System.customModels(
"aimlapi",
typeof apiKey === "boolean" ? null : apiKey
);
if (models?.length > 0) {
const byDev = models.reduce((acc, model) => {
acc[model.organization] = acc[model.organization] || [];
acc[model.organization].push(model);
return acc;
}, {});
setGroupedModels(byDev);
}
setLoading(false);
}
findCustomModels();
}, [apiKey]);

if (loading || Object.keys(groupedModels).length === 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="AimlModelPref"
disabled={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available models --
</option>
</select>
</div>
);
}

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="AimlModelPref"
required={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{Object.keys(groupedModels)
.sort()
.map((organization) => (
<optgroup key={organization} label={organization}>
{groupedModels[organization].map((model) => (
<option
key={model.id}
value={model.id}
selected={settings?.AimlModelPref === model.id}
>
{model.name}
</option>
))}
</optgroup>
))}
</select>
</div>
);
}
1 change: 1 addition & 0 deletions frontend/src/hooks/useGetProvidersModels.js
@@ -52,6 +52,7 @@ const groupedProviders = [
"novita",
"openrouter",
"ppio",
"aimlapi",
];
export default function useGetProviderModels(provider = null) {
const [defaultModels, setDefaultModels] = useState([]);
Binary file added frontend/src/media/llmprovider/aimlapi.png
@@ -37,6 +37,8 @@ import ModalWrapper from "@/components/ModalWrapper";
import CTAButton from "@/components/lib/CTAButton";
import { useTranslation } from "react-i18next";
import MistralAiOptions from "@/components/EmbeddingSelection/MistralAiOptions";
import AimlApiLogo from "@/media/llmprovider/aimlapi.png";
import AimlApiOptions from "@/components/EmbeddingSelection/AimlApiOptions";

const EMBEDDERS = [
{
@@ -118,6 +120,13 @@ const EMBEDDERS = [
options: (settings) => <MistralAiOptions settings={settings} />,
description: "Run powerful embedding models from Mistral AI.",
},
{
name: "AI/ML API",
value: "aimlapi",
logo: AimlApiLogo,
options: (settings) => <AimlApiOptions settings={settings} />,
description: "Use embedding models hosted on AI/ML API.",
},
{
name: "Generic OpenAI",
value: "generic-openai",
10 changes: 10 additions & 0 deletions frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -32,6 +32,7 @@ import XAILogo from "@/media/llmprovider/xai.png";
import NvidiaNimLogo from "@/media/llmprovider/nvidia-nim.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import DellProAiStudioLogo from "@/media/llmprovider/dpais.png";
import AimlApiLogo from "@/media/llmprovider/aimlapi.png";

import PreLoader from "@/components/Preloader";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -61,6 +62,7 @@ import XAILLMOptions from "@/components/LLMSelection/XAiLLMOptions";
import NvidiaNimOptions from "@/components/LLMSelection/NvidiaNimOptions";
import PPIOLLMOptions from "@/components/LLMSelection/PPIOLLMOptions";
import DellProAiStudioOptions from "@/components/LLMSelection/DPAISOptions";
import AimlApiOptions from "@/components/LLMSelection/AimlApiOptions";

import LLMItem from "@/components/LLMSelection/LLMItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@@ -263,6 +265,14 @@ export const AVAILABLE_LLM_PROVIDERS = [
description: "Run DeepSeek's powerful LLMs.",
requiredConfig: ["DeepSeekApiKey"],
},
{
name: "AI/ML API",
value: "aimlapi",
logo: AimlApiLogo,
options: (settings) => <AimlApiOptions settings={settings} />,
description: "Access 300+ AI models with enterprise uptime.",
requiredConfig: ["AimlLlmApiKey"],
},
{
name: "PPIO",
value: "ppio",
17 changes: 17 additions & 0 deletions frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -38,6 +38,7 @@ import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import PGVectorLogo from "@/media/vectordbs/pgvector.png";
import DPAISLogo from "@/media/llmprovider/dpais.png";
import AimlApiLogo from "@/media/llmprovider/aimlapi.png";
import React, { useState, useEffect } from "react";
import paths from "@/utils/paths";
import { useNavigate } from "react-router-dom";
@@ -235,6 +236,14 @@ export const LLM_SELECTION_PRIVACY = {
],
logo: PPIOLogo,
},
aimlapi: {
name: "AI/ML API",
description: [
"Your chats will not be used for training",
"Your prompts and document text used in response creation are visible to AI/ML API",
],
logo: AimlApiLogo,
},
dpais: {
name: "Dell Pro AI Studio",
description: [
@@ -379,6 +388,14 @@ export const EMBEDDING_ENGINE_PRIVACY = {
],
logo: MistralLogo,
},
aimlapi: {
name: "AI/ML API",
description: [
"Your document text is visible to AI/ML API",
"Your documents are not used for training",
],
logo: AimlApiLogo,
},
litellm: {
name: "LiteLLM",
description: [
@@ -27,6 +27,7 @@ import NvidiaNimLogo from "@/media/llmprovider/nvidia-nim.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import PPIOLogo from "@/media/llmprovider/ppio.png";
import DellProAiStudioLogo from "@/media/llmprovider/dpais.png";
import AimlApiLogo from "@/media/llmprovider/aimlapi.png";

import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
import GenericOpenAiOptions from "@/components/LLMSelection/GenericOpenAiOptions";
@@ -55,6 +56,7 @@ import XAILLMOptions from "@/components/LLMSelection/XAiLLMOptions";
import NvidiaNimOptions from "@/components/LLMSelection/NvidiaNimOptions";
import PPIOLLMOptions from "@/components/LLMSelection/PPIOLLMOptions";
import DellProAiStudioOptions from "@/components/LLMSelection/DPAISOptions";
import AimlApiOptions from "@/components/LLMSelection/AimlApiOptions";

import LLMItem from "@/components/LLMSelection/LLMItem";
import System from "@/models/system";
@@ -226,6 +228,13 @@ const LLMS = [
options: (settings) => <DeepSeekOptions settings={settings} />,
description: "Run DeepSeek's powerful LLMs.",
},
{
name: "AI/ML API",
value: "aimlapi",
logo: AimlApiLogo,
options: (settings) => <AimlApiOptions settings={settings} />,
description: "Access 300+ AI models with enterprise uptime.",
},
{
name: "PPIO",
value: "ppio",
@@ -31,6 +31,7 @@ const ENABLED_PROVIDERS = [
"xai",
"nvidia-nim",
"gemini",
"aimlapi",
// TODO: More agent support.
// "cohere", // Has tool calling and will need to build explicit support
// "huggingface" // Can be done but already has issues with no-chat templated. Needs to be tested.