@@ -24,6 +24,7 @@ const ENABLED_PROVIDERS = [
   "bedrock",
   "fireworksai",
   "deepseek",
+  "litellm",
   "apipie",
   // TODO: More agent support.
   // "cohere", // Has tool calling and will need to build explicit support
2 changes: 2 additions & 0 deletions server/utils/agents/aibitat/index.js
@@ -785,6 +785,8 @@ ${this.getHistory({ to: route.to })
         return new Providers.FireworksAIProvider({ model: config.model });
       case "deepseek":
         return new Providers.DeepSeekProvider({ model: config.model });
+      case "litellm":
+        return new Providers.LiteLLMProvider({ model: config.model });
       case "apipie":
         return new Providers.ApiPieProvider({ model: config.model });
31 changes: 20 additions & 11 deletions server/utils/agents/aibitat/providers/ai-provider.js
@@ -130,6 +130,22 @@ class Provider {
         apiKey: process.env.FIREWORKS_AI_LLM_API_KEY,
         ...config,
       });
+      case "apipie":
+        return new ChatOpenAI({
+          configuration: {
+            baseURL: "https://apipie.ai/v1",
+          },
+          apiKey: process.env.APIPIE_LLM_API_KEY ?? null,
+          ...config,
+        });
+      case "deepseek":
+        return new ChatOpenAI({
+          configuration: {
+            baseURL: "https://api.deepseek.com/v1",
+          },
+          apiKey: process.env.DEEPSEEK_API_KEY ?? null,
+          ...config,
+        });

       // OSS Model Runners
       // case "anythingllm_ollama":
@@ -174,22 +190,15 @@ class Provider {
         apiKey: process.env.TEXT_GEN_WEB_UI_API_KEY ?? "not-used",
         ...config,
       });
-      case "deepseek":
-        return new ChatOpenAI({
-          configuration: {
-            baseURL: "https://api.deepseek.com/v1",
-          },
-          apiKey: process.env.DEEPSEEK_API_KEY ?? null,
-          ...config,
-        });
-      case "apipie":
+      case "litellm":
         return new ChatOpenAI({
           configuration: {
-            baseURL: "https://apipie.ai/v1",
+            baseURL: process.env.LITE_LLM_BASE_PATH,
           },
-          apiKey: process.env.APIPIE_LLM_API_KEY ?? null,
+          apiKey: process.env.LITE_LLM_API_KEY ?? null,
           ...config,
         });

       default:
         throw new Error(`Unsupported provider ${provider} for this task.`);
     }
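
For context, the new "litellm" branch builds the same LangChain ChatOpenAI client as the other OpenAI-compatible providers, just pointed at a LiteLLM proxy. A minimal standalone sketch of that wiring follows; it assumes ChatOpenAI comes from @langchain/openai (the import is not shown in this diff) and the model name is a placeholder for whatever the proxy serves:

const { ChatOpenAI } = require("@langchain/openai");

// Same construction as the "litellm" case above; any OpenAI-compatible
// proxy endpoint works identically.
const chat = new ChatOpenAI({
  configuration: { baseURL: process.env.LITE_LLM_BASE_PATH },
  apiKey: process.env.LITE_LLM_API_KEY ?? null,
  model: "gpt-4o", // placeholder model name
});

chat.invoke("Reply with OK.").then((res) => console.log(res.content));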
2 changes: 2 additions & 0 deletions server/utils/agents/aibitat/providers/index.js
@@ -15,6 +15,7 @@ const TextWebGenUiProvider = require("./textgenwebui.js");
 const AWSBedrockProvider = require("./bedrock.js");
 const FireworksAIProvider = require("./fireworksai.js");
 const DeepSeekProvider = require("./deepseek.js");
+const LiteLLMProvider = require("./litellm.js");
 const ApiPieProvider = require("./apipie.js");

 module.exports = {
@@ -35,5 +36,6 @@ module.exports = {
   TextWebGenUiProvider,
   AWSBedrockProvider,
   FireworksAIProvider,
+  LiteLLMProvider,
   ApiPieProvider,
 };
110 changes: 110 additions & 0 deletions server/utils/agents/aibitat/providers/litellm.js
@@ -0,0 +1,110 @@
const OpenAI = require("openai");
const Provider = require("./ai-provider.js");
const InheritMultiple = require("./helpers/classes.js");
const UnTooled = require("./helpers/untooled.js");

/**
 * The agent provider for LiteLLM.
 */
class LiteLLMProvider extends InheritMultiple([Provider, UnTooled]) {
  model;

  constructor(config = {}) {
    super();
    const { model = null } = config;
    const client = new OpenAI({
      baseURL: process.env.LITE_LLM_BASE_PATH,
      apiKey: process.env.LITE_LLM_API_KEY ?? null,
      maxRetries: 3,
    });

    this._client = client;
    this.model = model || process.env.LITE_LLM_MODEL_PREF;
    this.verbose = true;
  }

  get client() {
    return this._client;
  }

  async #handleFunctionCallChat({ messages = [] }) {
    return await this.client.chat.completions
      .create({
        model: this.model,
        temperature: 0,
        messages,
      })
      .then((result) => {
        if (!result.hasOwnProperty("choices"))
          throw new Error("LiteLLM chat: No results!");
        if (result.choices.length === 0)
          throw new Error("LiteLLM chat: No results length!");
        return result.choices[0].message.content;
      })
      .catch((_) => {
        return null;
      });
  }

  /**
   * Create a completion based on the received messages.
   *
   * @param messages A list of messages to send to the API.
   * @param functions A list of function definitions the model may call.
   * @returns The completion.
   */
  async complete(messages, functions = null) {
    try {
      let completion;
      if (functions?.length > 0) {
        const { toolCall, text } = await this.functionCall(
          messages,
          functions,
          this.#handleFunctionCallChat.bind(this)
        );

        if (toolCall !== null) {
          this.providerLog(`Valid tool call found - running ${toolCall.name}.`);
          this.deduplicator.trackRun(toolCall.name, toolCall.arguments);
          return {
            result: null,
            functionCall: {
              name: toolCall.name,
              arguments: toolCall.arguments,
            },
            cost: 0,
          };
        }
        completion = { content: text };
      }

      if (!completion?.content) {
        this.providerLog(
          "Will assume chat completion without tool call inputs."
        );
        const response = await this.client.chat.completions.create({
          model: this.model,
          messages: this.cleanMsgs(messages),
        });
        completion = response.choices[0].message;
      }

      // The UnTooled class inherited Deduplicator is mostly useful to prevent the agent
      // from calling the exact same function over and over in a loop within a single chat exchange
      // _but_ we should enable it to call previously used tools in a new chat interaction.
      this.deduplicator.reset("runs");
      return {
        result: completion.content,
        cost: 0,
      };
    } catch (error) {
      throw error;
    }
  }

  getCost(_usage) {
    return 0;
  }
}

module.exports = LiteLLMProvider;
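
Two behaviors of the class above are worth calling out: #handleFunctionCallChat fails soft (its .catch returns null), so a failed tool-selection request degrades into the plain chat-completion path instead of aborting the agent turn, and getCost() always reports 0 since LiteLLM pricing depends on the upstream model. A minimal usage sketch, assuming a LiteLLM proxy is running and the LITE_LLM_* environment variables are set (the model and message are illustrative):

const LiteLLMProvider = require("./litellm.js");

// The model argument is optional; it falls back to LITE_LLM_MODEL_PREF.
const provider = new LiteLLMProvider({ model: "gpt-4o" });

provider
  .complete([{ role: "user", content: "Say hello." }], []) // no tools offered
  .then(({ result, cost }) => console.log(result, cost)); // cost is always 0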
8 changes: 8 additions & 0 deletions server/utils/agents/index.js
@@ -166,6 +166,12 @@ class AgentHandler {
         if (!process.env.DEEPSEEK_API_KEY)
           throw new Error("DeepSeek API Key must be provided to use agents.");
         break;
+      case "litellm":
+        if (!process.env.LITE_LLM_BASE_PATH)
+          throw new Error(
+            "LiteLLM API base path must be provided to use agents."
+          );
+        break;
       case "apipie":
         if (!process.env.APIPIE_LLM_API_KEY)
           throw new Error("ApiPie API Key must be provided to use agents.");
@@ -216,6 +222,8 @@ class AgentHandler {
         return null;
       case "deepseek":
         return "deepseek-chat";
+      case "litellm":
+        return null;
       case "apipie":
         return null;
       default:
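
Taken together, the two hunks above mean "litellm" validates only LITE_LLM_BASE_PATH (the API key stays optional, matching the ?? null handling in the provider) and, unlike "deepseek", ships no hardcoded default model. A hypothetical helper restating those rules (the function name is illustrative and not part of this PR):

// Hypothetical sketch; it mirrors the checks above rather than adding new ones.
function resolveLiteLLMAgentModel(workspaceModelPref = null) {
  if (!process.env.LITE_LLM_BASE_PATH)
    throw new Error("LiteLLM API base path must be provided to use agents.");
  // providerDefault() returns null for litellm, so the model must come from
  // the workspace preference or the LITE_LLM_MODEL_PREF environment variable.
  return workspaceModelPref ?? process.env.LITE_LLM_MODEL_PREF ?? null;
}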