1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -43,6 +43,7 @@
     "searxng",
     "Serper",
     "Serply",
+    "streamable",
     "textgenwebui",
     "togetherai",
     "Unembed",
37 changes: 25 additions & 12 deletions server/endpoints/api/workspace/index.js
@@ -4,19 +4,16 @@ const { Telemetry } = require("../../../models/telemetry");
 const { DocumentVectors } = require("../../../models/vectors");
 const { Workspace } = require("../../../models/workspace");
 const { WorkspaceChats } = require("../../../models/workspaceChats");
-const { chatWithWorkspace } = require("../../../utils/chats");
 const { getVectorDbClass } = require("../../../utils/helpers");
 const { multiUserMode, reqBody } = require("../../../utils/http");
 const { validApiKey } = require("../../../utils/middleware/validApiKey");
-const {
-  streamChatWithWorkspace,
-  VALID_CHAT_MODE,
-} = require("../../../utils/chats/stream");
+const { VALID_CHAT_MODE } = require("../../../utils/chats/stream");
 const { EventLogs } = require("../../../models/eventLogs");
 const {
   convertToChatHistory,
   writeResponseChunk,
 } = require("../../../utils/helpers/chat/responses");
+const { ApiChatHandler } = require("../../../utils/chats/apiChatHandler");

 function apiWorkspaceEndpoints(app) {
   if (!app) return;
@@ -584,7 +581,7 @@ function apiWorkspaceEndpoints(app) {
       try {
         const { slug } = request.params;
         const { message, mode = "query" } = reqBody(request);
-        const workspace = await Workspace.get({ slug });
+        const workspace = await Workspace.get({ slug: String(slug) });

         if (!workspace) {
           response.status(400).json({
@@ -612,9 +609,17 @@
           return;
         }

-        const result = await chatWithWorkspace(workspace, message, mode);
+        const result = await ApiChatHandler.chatSync({
+          workspace,
+          message,
+          mode,
+          user: null,
+          thread: null,
+        });
+
         await Telemetry.sendTelemetry("sent_chat", {
-          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          LLMSelection:
+            workspace.chatProvider ?? process.env.LLM_PROVIDER ?? "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
           TTSSelection: process.env.TTS_PROVIDER || "native",
@@ -623,7 +628,7 @@
           workspaceName: workspace?.name,
           chatModel: workspace?.chatModel || "System Default",
         });
-        response.status(200).json({ ...result });
+        return response.status(200).json({ ...result });
       } catch (e) {
         console.error(e.message, e);
         response.status(500).json({
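For context, a minimal client-side sketch of exercising this sync endpoint. The route path and response handling are assumptions inferred from the handler names in this diff, not confirmed by it:

```js
// Hypothetical smoke test for the refactored sync chat endpoint.
// Assumes the route is POST /api/v1/workspace/:slug/chat (not visible in
// this hunk) and that the spread result of ApiChatHandler.chatSync is
// plain JSON. Requires Node 18+ for global fetch.
const res = await fetch(
  "http://localhost:3001/api/v1/workspace/my-workspace/chat",
  {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer <api-key>", // checked by validApiKey middleware
    },
    body: JSON.stringify({ message: "Hello there", mode: "query" }),
  }
);
console.log(await res.json()); // { ...result } from chatSync
```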
@@ -702,7 +707,7 @@ function apiWorkspaceEndpoints(app) {
       try {
         const { slug } = request.params;
         const { message, mode = "query" } = reqBody(request);
-        const workspace = await Workspace.get({ slug });
+        const workspace = await Workspace.get({ slug: String(slug) });

         if (!workspace) {
           response.status(400).json({
@@ -736,9 +741,17 @@
         response.setHeader("Connection", "keep-alive");
         response.flushHeaders();

-        await streamChatWithWorkspace(response, workspace, message, mode);
+        await ApiChatHandler.streamChat({
+          response,
+          workspace,
+          message,
+          mode,
+          user: null,
+          thread: null,
+        });
         await Telemetry.sendTelemetry("sent_chat", {
-          LLMSelection: process.env.LLM_PROVIDER || "openai",
+          LLMSelection:
+            workspace.chatProvider ?? process.env.LLM_PROVIDER ?? "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
           TTSSelection: process.env.TTS_PROVIDER || "native",
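And a matching sketch for the streaming endpoint above. The chunk framing ("data: <json>" lines) is an assumption based on the writeResponseChunk import and the keep-alive headers set before the handler runs:

```js
// Hypothetical consumer for the refactored stream-chat endpoint.
// Assumes POST /api/v1/workspace/:slug/stream-chat and SSE-style
// "data: <json>" framing -- inferred, not shown in this diff.
const res = await fetch(
  "http://localhost:3001/api/v1/workspace/my-workspace/stream-chat",
  {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer <api-key>",
    },
    body: JSON.stringify({ message: "Hello there", mode: "query" }),
  }
);

const decoder = new TextDecoder();
for await (const chunk of res.body) {
  // A network chunk may carry several "data: <json>" lines at once.
  for (const line of decoder.decode(chunk, { stream: true }).split("\n")) {
    if (line.startsWith("data: ")) console.log(JSON.parse(line.slice(6)));
  }
}
```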
14 changes: 7 additions & 7 deletions server/endpoints/api/workspaceThread/index.js
@@ -3,7 +3,6 @@ const { WorkspaceThread } = require("../../../models/workspaceThread");
 const { Workspace } = require("../../../models/workspace");
 const { validApiKey } = require("../../../utils/middleware/validApiKey");
 const { reqBody, multiUserMode } = require("../../../utils/http");
-const { chatWithWorkspace } = require("../../../utils/chats");
 const {
   streamChatWithWorkspace,
   VALID_CHAT_MODE,
@@ -16,6 +15,7 @@ const {
 } = require("../../../utils/helpers/chat/responses");
 const { WorkspaceChats } = require("../../../models/workspaceChats");
 const { User } = require("../../../models/user");
+const { ApiChatHandler } = require("../../../utils/chats/apiChatHandler");

 function apiWorkspaceThreadEndpoints(app) {
   if (!app) return;
@@ -405,13 +405,13 @@ function apiWorkspaceThreadEndpoints(app) {
         }

         const user = userId ? await User.get({ id: Number(userId) }) : null;
-        const result = await chatWithWorkspace(
+        const result = await ApiChatHandler.chatSync({
           workspace,
           message,
           mode,
           user,
-          thread
-        );
+          thread,
+        });
         await Telemetry.sendTelemetry("sent_chat", {
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
@@ -556,14 +556,14 @@
         response.setHeader("Connection", "keep-alive");
         response.flushHeaders();

-        await streamChatWithWorkspace(
+        await ApiChatHandler.streamChat({
           response,
           workspace,
           message,
           mode,
           user,
-          thread
-        );
+          thread,
+        });
         await Telemetry.sendTelemetry("sent_chat", {
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
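Taken together, the four call sites above imply a single shared surface for the new handler. A hedged sketch of that contract (the real server/utils/chats/apiChatHandler.js is not part of this diff; field names come from the calls shown here):

```js
// Contract implied by the call sites in this PR -- a sketch, not the
// actual implementation.
const ApiChatHandler = {
  // Blocking chat: resolves with the full result object that the
  // endpoints spread into their JSON response.
  async chatSync({ workspace, message, mode, user = null, thread = null }) {
    /* ... */
  },

  // Streaming chat: writes chunks to an already-flushed Express
  // response instead of returning a value.
  async streamChat({
    response,
    workspace,
    message,
    mode,
    user = null,
    thread = null,
  }) {
    /* ... */
  },
};

module.exports = { ApiChatHandler };
```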