θΏ™ζ˜―indexlocζδΎ›ηš„ζœεŠ‘οΌŒδΈθ¦θΎ“ε…₯任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/dev-build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ concurrency:

on:
push:
branches: ['sharp-pdf-image-converter'] # put your current branch to create a build. Core team only.
branches: ['chore/anthropic-model-endpoint'] # put your current branch to create a build. Core team only.
paths-ignore:
- '**.md'
- 'cloud-deployments/*'
Expand Down
146 changes: 113 additions & 33 deletions frontend/src/components/LLMSelection/AnthropicAiOptions/index.jsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
import { useState, useEffect } from "react";
import System from "@/models/system";

export default function AnthropicAiOptions({ settings }) {
const [inputValue, setInputValue] = useState(settings?.AnthropicApiKey);
const [anthropicApiKey, setAnthropicApiKey] = useState(
settings?.AnthropicApiKey
);

return (
<div className="w-full flex flex-col">
<div className="w-full flex items-center gap-[36px] mt-1.5">
Expand All @@ -15,45 +23,117 @@ export default function AnthropicAiOptions({ settings }) {
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setInputValue(e.target.value)}
onBlur={() => setAnthropicApiKey(inputValue)}
/>
</div>

{!settings?.credentialsOnly && (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="AnthropicModelPref"
defaultValue={settings?.AnthropicModelPref || "claude-2"}
required={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{[
"claude-instant-1.2",
"claude-2.0",
"claude-2.1",
"claude-3-haiku-20240307",
"claude-3-sonnet-20240229",
"claude-3-opus-latest",
"claude-3-5-haiku-latest",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-latest",
"claude-3-5-sonnet-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-7-sonnet-20250219",
"claude-3-7-sonnet-latest",
].map((model) => {
return (
<option key={model} value={model}>
{model}
</option>
);
})}
</select>
</div>
<AnthropicModelSelection
apiKey={anthropicApiKey}
settings={settings}
/>
)}
</div>
</div>
);
}

// Static fallback list of Anthropic chat models. Used to seed the model
// dropdown (see the useState(DEFAULT_MODELS) call below) until the live
// list is fetched via System.customModels; if that fetch returns nothing,
// this list remains in effect.
// `id` is the Anthropic API model identifier submitted as the preference;
// `name` is the human-readable label shown in the dropdown.
const DEFAULT_MODELS = [
  {
    id: "claude-3-7-sonnet-20250219",
    name: "Claude 3.7 Sonnet",
  },
  {
    id: "claude-3-5-sonnet-20241022",
    name: "Claude 3.5 Sonnet (New)",
  },
  {
    id: "claude-3-5-haiku-20241022",
    name: "Claude 3.5 Haiku",
  },
  {
    id: "claude-3-5-sonnet-20240620",
    name: "Claude 3.5 Sonnet (Old)",
  },
  {
    id: "claude-3-haiku-20240307",
    name: "Claude 3 Haiku",
  },
  {
    id: "claude-3-opus-20240229",
    name: "Claude 3 Opus",
  },
  {
    id: "claude-3-sonnet-20240229",
    name: "Claude 3 Sonnet",
  },
  {
    id: "claude-2.1",
    name: "Claude 2.1",
  },
  {
    id: "claude-2.0",
    name: "Claude 2.0",
  },
];

function AnthropicModelSelection({ apiKey, settings }) {
const [models, setModels] = useState(DEFAULT_MODELS);
const [loading, setLoading] = useState(true);

useEffect(() => {
async function findCustomModels() {
setLoading(true);
const { models } = await System.customModels(
"anthropic",
typeof apiKey === "boolean" ? null : apiKey
);
if (models.length > 0) setModels(models);
setLoading(false);
}
findCustomModels();
}, [apiKey]);

if (loading) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="AnthropicModelPref"
disabled={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available models --
</option>
</select>
</div>
);
}

return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="AnthropicModelPref"
required={true}
className="border-none bg-theme-settings-input-bg border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{models.map((model) => (
<option
key={model.id}
value={model.id}
selected={settings?.AnthropicModelPref === model.id}
>
{model.name}
</option>
))}
</select>
</div>
);
}
16 changes: 1 addition & 15 deletions frontend/src/hooks/useGetProvidersModels.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,21 +25,7 @@ const PROVIDER_DEFAULT_MODELS = {
"learnlm-1.5-pro-experimental",
"gemini-2.0-flash-exp",
],
anthropic: [
"claude-instant-1.2",
"claude-2.0",
"claude-2.1",
"claude-3-haiku-20240307",
"claude-3-sonnet-20240229",
"claude-3-opus-latest",
"claude-3-5-haiku-latest",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-latest",
"claude-3-5-sonnet-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-7-sonnet-20250219",
"claude-3-7-sonnet-latest",
],
anthropic: [],
azure: [],
lmstudio: [],
localai: [],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ export default function ChatModelSelection({
</optgroup>
)}
{Array.isArray(customModels) && customModels.length > 0 && (
<optgroup label="Custom models">
<optgroup label="Discovered models">
{customModels.map((model) => {
return (
<option
Expand Down
4 changes: 2 additions & 2 deletions server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
"seed": "node prisma/seed.js"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",
"@anthropic-ai/sdk": "^0.39.0",
"@azure/openai": "1.0.0-beta.10",
"@datastax/astra-db-ts": "^0.1.3",
"@google/generative-ai": "^0.7.1",
Expand Down Expand Up @@ -98,4 +98,4 @@
"prettier": "^3.0.3",
"cross-env": "^7.0.3"
}
}
}
38 changes: 10 additions & 28 deletions server/utils/AiProviders/anthropic/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ class AnthropicLLM {
});
this.anthropic = anthropic;
this.model =
modelPreference || process.env.ANTHROPIC_MODEL_PREF || "claude-2.0";
modelPreference ||
process.env.ANTHROPIC_MODEL_PREF ||
"claude-3-5-sonnet-20241022";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
Expand All @@ -31,6 +33,11 @@ class AnthropicLLM {

this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.log(`Initialized with ${this.model}`);
}

log(text, ...args) {
console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
}

streamingEnabled() {
Expand All @@ -45,23 +52,8 @@ class AnthropicLLM {
return MODEL_MAP.anthropic[this.model] ?? 100_000;
}

isValidChatCompletionModel(modelName = "") {
const validModels = [
"claude-instant-1.2",
"claude-2.0",
"claude-2.1",
"claude-3-haiku-20240307",
"claude-3-sonnet-20240229",
"claude-3-opus-latest",
"claude-3-5-haiku-latest",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-latest",
"claude-3-5-sonnet-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-7-sonnet-20250219",
"claude-3-7-sonnet-latest",
];
return validModels.includes(modelName);
isValidChatCompletionModel(_modelName = "") {
return true;
}

/**
Expand Down Expand Up @@ -111,11 +103,6 @@ class AnthropicLLM {
}

async getChatCompletion(messages = null, { temperature = 0.7 }) {
if (!this.isValidChatCompletionModel(this.model))
throw new Error(
`Anthropic chat: ${this.model} is not valid for chat completion!`
);

try {
const result = await LLMPerformanceMonitor.measureAsyncFunction(
this.anthropic.messages.create({
Expand Down Expand Up @@ -146,11 +133,6 @@ class AnthropicLLM {
}

async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
if (!this.isValidChatCompletionModel(this.model))
throw new Error(
`Anthropic chat: ${this.model} is not valid for chat completion!`
);

const measuredStreamRequest = await LLMPerformanceMonitor.measureStream(
this.anthropic.messages.stream({
model: this.model,
Expand Down
33 changes: 33 additions & 0 deletions server/utils/helpers/customModels.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ const { GeminiLLM } = require("../AiProviders/gemini");

const SUPPORT_CUSTOM_MODELS = [
"openai",
"anthropic",
"localai",
"ollama",
"togetherai",
Expand Down Expand Up @@ -40,6 +41,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
switch (provider) {
case "openai":
return await openAiModels(apiKey);
case "anthropic":
return await anthropicModels(apiKey);
case "localai":
return await localAIModels(basePath, apiKey);
case "ollama":
Expand Down Expand Up @@ -185,6 +188,36 @@ async function openAiModels(apiKey = null) {
return { models: [...gpts, ...customModels], error: null };
}

/**
 * Lists chat models available for the given Anthropic API key.
 * Falls back to the saved env key when `_apiKey` is `true` (masked saved
 * credential) or empty. On success the working key is persisted to the env.
 *
 * @param {string|boolean|null} _apiKey - user-supplied key, `true` for the
 *   saved key, or null to use the environment.
 * @returns {Promise<{models: Array<{id: string, name: string}>, error: null}>}
 */
async function anthropicModels(_apiKey = null) {
  const apiKey =
    _apiKey === true
      ? process.env.ANTHROPIC_API_KEY
      : _apiKey || process.env.ANTHROPIC_API_KEY || null;
  const AnthropicAI = require("@anthropic-ai/sdk");
  const client = new AnthropicAI({ apiKey });

  let models = [];
  try {
    const results = await client.models.list();
    models = results.data
      .filter((model) => model.type === "model")
      .map((model) => ({
        id: model.id,
        name: model.display_name,
      }));
  } catch (e) {
    // Listing failed (bad key, network, etc.) — log and fall through with
    // an empty list, matching the prior best-effort behavior.
    console.error(`Anthropic:listModels`, e.message);
  }

  // A non-empty result proves the key works, so persist it for future use.
  if (models.length > 0 && !!apiKey) process.env.ANTHROPIC_API_KEY = apiKey;
  return { models, error: null };
}

async function localAIModels(basePath = null, apiKey = null) {
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
Expand Down
23 changes: 1 addition & 22 deletions server/utils/helpers/updateENV.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ const KEY_MAPPING = {
},
AnthropicModelPref: {
envKey: "ANTHROPIC_MODEL_PREF",
checks: [isNotEmpty, validAnthropicModel],
checks: [isNotEmpty],
},

GeminiLLMApiKey: {
Expand Down Expand Up @@ -755,27 +755,6 @@ function validGeminiSafetySetting(input = "") {
: `Invalid Safety setting. Must be one of ${validModes.join(", ")}.`;
}

/**
 * Validates that `input` names a supported Anthropic chat model.
 *
 * @param {string} input - model identifier to check.
 * @returns {string|null} null when valid; otherwise an error message listing
 *   the accepted model identifiers.
 */
function validAnthropicModel(input = "") {
  const validModels = [
    "claude-instant-1.2",
    "claude-2.0",
    "claude-2.1",
    "claude-3-haiku-20240307",
    "claude-3-sonnet-20240229",
    "claude-3-opus-latest",
    "claude-3-5-haiku-latest",
    "claude-3-5-haiku-20241022",
    "claude-3-5-sonnet-latest",
    "claude-3-5-sonnet-20241022",
    "claude-3-5-sonnet-20240620",
    "claude-3-7-sonnet-20250219",
    "claude-3-7-sonnet-latest",
  ];
  if (validModels.includes(input)) return null;
  return `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
}

function supportedEmbeddingModel(input = "") {
const supported = [
"openai",
Expand Down
8 changes: 4 additions & 4 deletions server/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@
node-fetch "^2.6.7"
web-streams-polyfill "^3.2.1"

"@anthropic-ai/sdk@^0.32.1":
version "0.32.1"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.32.1.tgz#d22c8ebae2adccc59d78fb416e89de337ff09014"
integrity sha512-U9JwTrDvdQ9iWuABVsMLj8nJVwAyQz6QXvgLsVhryhCEPkLsbcP/MXxm+jYcAwLoV8ESbaTTjnD4kuAFa+Hyjg==
"@anthropic-ai/sdk@^0.39.0":
version "0.39.0"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.39.0.tgz#624d5b33413a9cc322febb64e9d48bdcf5a98cdc"
integrity sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
Expand Down