Merged
43 changes: 29 additions & 14 deletions frontend/src/components/LLMSelection/TogetherAiOptions/index.jsx
@@ -2,6 +2,9 @@ import System from "@/models/system";
 import { useState, useEffect } from "react";
 
 export default function TogetherAiOptions({ settings }) {
+  const [inputValue, setInputValue] = useState(settings?.TogetherAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.TogetherAiApiKey);
+
   return (
     <div className="flex gap-[36px] mt-1.5">
       <div className="flex flex-col w-60">
@@ -17,37 +20,49 @@ export default function TogetherAiOptions({ settings }) {
           required={true}
           autoComplete="off"
           spellCheck={false}
+          onChange={(e) => setInputValue(e.target.value)}
+          onBlur={() => setApiKey(inputValue)}
         />
       </div>
       {!settings?.credentialsOnly && (
-        <TogetherAiModelSelection settings={settings} />
+        <TogetherAiModelSelection settings={settings} apiKey={apiKey} />
       )}
     </div>
   );
 }
-function TogetherAiModelSelection({ settings }) {
+
+function TogetherAiModelSelection({ settings, apiKey }) {
   const [groupedModels, setGroupedModels] = useState({});
   const [loading, setLoading] = useState(true);
 
   useEffect(() => {
     async function findCustomModels() {
       setLoading(true);
-      const { models } = await System.customModels("togetherai");
-
-      if (models?.length > 0) {
-        const modelsByOrganization = models.reduce((acc, model) => {
-          acc[model.organization] = acc[model.organization] || [];
-          acc[model.organization].push(model);
-          return acc;
-        }, {});
-
-        setGroupedModels(modelsByOrganization);
-      }
+      try {
+        const key = apiKey === "*".repeat(20) ? null : apiKey;
+        const { models } = await System.customModels("togetherai", key);
+        if (models?.length > 0) {
+          const modelsByOrganization = models.reduce((acc, model) => {
+            if (model.type !== "chat") return acc; // Only show chat models in dropdown
+            const org = model.organization || "Unknown";
+            acc[org] = acc[org] || [];
+            acc[org].push({
+              id: model.id,
+              name: model.name || model.id,
+              organization: org,
+              maxLength: model.maxLength,
+            });
+            return acc;
+          }, {});
+          setGroupedModels(modelsByOrganization);
+        }
+      } catch (error) {
+        console.error("Error fetching Together AI models:", error);
+      }
 
       setLoading(false);
     }
     findCustomModels();
-  }, []);
+  }, [apiKey]);
 
   if (loading || Object.keys(groupedModels).length === 0) {
     return (
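Two details in the frontend hunk are worth calling out. Keystrokes land in `inputValue` and are only promoted to `apiKey` on blur, so the model list is refetched once per completed edit rather than on every keystroke. And the `apiKey === "*".repeat(20)` check suggests the server returns an already-saved key masked as twenty asterisks; sending `null` in that case tells the backend to fall back to its stored credential. A minimal sketch of that convention (the helper name is hypothetical, not part of the PR):

// Sketch only: mirrors the apiKey === "*".repeat(20) check in the diff above.
const MASKED_KEY = "*".repeat(20); // how a saved key appears to be masked by the server

// Hypothetical helper: decide which key to send with a models request.
function resolveRequestKey(apiKey) {
  // null means "use the key already on file" instead of treating the
  // literal asterisks as an (always-invalid) credential.
  return !apiKey || apiKey === MASKED_KEY ? null : apiKey;
}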
3 changes: 2 additions & 1 deletion server/storage/models/.gitignore
@@ -5,4 +5,5 @@ openrouter
 apipie
 novita
 mixedbread-ai*
-gemini
+gemini
+togetherAi
101 changes: 86 additions & 15 deletions server/utils/AiProviders/togetherAi/index.js
@@ -5,10 +5,76 @@ const {
 const {
   LLMPerformanceMonitor,
 } = require("../../helpers/chat/LLMPerformanceMonitor");
+const fs = require("fs");
+const path = require("path");
+const { safeJsonParse } = require("../../http");
 
-function togetherAiModels() {
-  const { MODELS } = require("./models.js");
-  return MODELS || {};
+const cacheFolder = path.resolve(
+  process.env.STORAGE_DIR
+    ? path.resolve(process.env.STORAGE_DIR, "models", "togetherAi")
+    : path.resolve(__dirname, `../../../storage/models/togetherAi`)
+);
+
+async function togetherAiModels(apiKey = null) {
+  const cacheModelPath = path.resolve(cacheFolder, "models.json");
+  const cacheAtPath = path.resolve(cacheFolder, ".cached_at");
+
+  // If cache exists and is less than 1 week old, use it
+  if (fs.existsSync(cacheModelPath) && fs.existsSync(cacheAtPath)) {
+    const now = Number(new Date());
+    const timestampMs = Number(fs.readFileSync(cacheAtPath));
+    if (now - timestampMs <= 6.048e8) {
+      // 1 Week in MS
+      return safeJsonParse(
+        fs.readFileSync(cacheModelPath, { encoding: "utf-8" }),
+        []
+      );
+    }
+  }
+
+  try {
+    const { OpenAI: OpenAIApi } = require("openai");
+    const openai = new OpenAIApi({
+      baseURL: "https://api.together.xyz/v1",
+      apiKey: apiKey || process.env.TOGETHER_AI_API_KEY || null,
+    });
+
+    const response = await openai.models.list();
+
+    // Filter and transform models into the expected format
+    // Only include chat models
+    const validModels = response.body
+      .filter((model) => ["chat"].includes(model.type))
+      .map((model) => ({
+        id: model.id,
+        name: model.display_name || model.id,
+        organization: model.organization || "Unknown",
+        type: model.type,
+        maxLength: model.context_length || 4096,
+      }));
+
+    // Cache the results
+    if (!fs.existsSync(cacheFolder))
+      fs.mkdirSync(cacheFolder, { recursive: true });
+    fs.writeFileSync(cacheModelPath, JSON.stringify(validModels), {
+      encoding: "utf-8",
+    });
+    fs.writeFileSync(cacheAtPath, String(Number(new Date())), {
+      encoding: "utf-8",
+    });
+
+    return validModels;
+  } catch (error) {
+    console.error("Error fetching Together AI models:", error);
+    // If cache exists but is stale, still use it as fallback
+    if (fs.existsSync(cacheModelPath)) {
+      return safeJsonParse(
+        fs.readFileSync(cacheModelPath, { encoding: "utf-8" }),
+        []
+      );
+    }
+    return [];
+  }
 }
 
 class TogetherAiLLM {
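The freshness check above compares against the bare literal `6.048e8`; as a quick sanity check, that is exactly one week in milliseconds:

// 7 days Γ— 24 h Γ— 60 min Γ— 60 s Γ— 1000 ms = 604,800,000 ms = 6.048e8
const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000;

// Equivalent, more readable form of the check in togetherAiModels():
const cacheIsFresh = (cachedAtMs) => Date.now() - cachedAtMs <= ONE_WEEK_MS;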
@@ -60,29 +126,34 @@ class TogetherAiLLM {
     return content.flat();
   }
 
-  allModelInformation() {
-    return togetherAiModels();
+  async allModelInformation() {
+    const models = await togetherAiModels();
+    return models.reduce((acc, model) => {
+      acc[model.id] = model;
+      return acc;
+    }, {});
   }
 
   streamingEnabled() {
     return "streamGetChatCompletion" in this;
   }
 
-  static promptWindowLimit(modelName) {
-    const availableModels = togetherAiModels();
-    return availableModels[modelName]?.maxLength || 4096;
+  static async promptWindowLimit(modelName) {
+    const models = await togetherAiModels();
+    const model = models.find((m) => m.id === modelName);
+    return model?.maxLength || 4096;
   }
 
   // Ensure the user set a value for the token limit
   // and if undefined - assume 4096 window.
-  promptWindowLimit() {
-    const availableModels = this.allModelInformation();
-    return availableModels[this.model]?.maxLength || 4096;
+  async promptWindowLimit() {
+    const models = await togetherAiModels();
+    const model = models.find((m) => m.id === this.model);
+    return model?.maxLength || 4096;
   }
 
   async isValidChatCompletionModel(model = "") {
-    const availableModels = this.allModelInformation();
-    return availableModels.hasOwnProperty(model);
+    const models = await togetherAiModels();
+    const foundModel = models.find((m) => m.id === model);
+    return foundModel && foundModel.type === "chat";
   }
 
   constructPrompt({
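Because the refactor sources model metadata from the fetched (or cached) Together AI list instead of the old bundled `models.js` table, every lookup method is now async and callers must await it. A minimal usage sketch under that assumption (`describeModel` and the `llm` instance are hypothetical, not part of the PR):

// Sketch: how the reworked async API composes, assuming `llm` is a
// constructed TogetherAiLLM instance.
async function describeModel(llm, modelName) {
  // Validity now also requires type === "chat", per isValidChatCompletionModel.
  if (!(await llm.isValidChatCompletionModel(modelName))) {
    throw new Error(`${modelName} is not a known Together AI chat model`);
  }
  const info = (await llm.allModelInformation())[modelName]; // keyed by model id
  const limit = await TogetherAiLLM.promptWindowLimit(modelName); // maxLength or 4096
  return { ...info, contextWindow: limit };
}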