diff --git a/.gitignore b/.gitignore
index f6a7e551f78..3fe38aba94d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ v-env
 aws_cf_deploy_anything_llm.json
 yarn.lock
 *.bak
+server/app.log
\ No newline at end of file
diff --git a/server/endpoints/api/document/index.js b/server/endpoints/api/document/index.js
index 50601c21a2d..1282bfdd2b2 100644
--- a/server/endpoints/api/document/index.js
+++ b/server/endpoints/api/document/index.js
@@ -13,6 +13,7 @@ const { CollectorApi } = require("../../../utils/collectorApi");
 const fs = require("fs");
 const path = require("path");
 const { Document } = require("../../../models/documents");
+const logger = require("../../../utils/logger");
 const documentsPath =
   process.env.NODE_ENV === "development"
     ? path.resolve(__dirname, "../../../storage/documents")
@@ -115,7 +116,7 @@ function apiDocumentEndpoints(app) {
       });
       response.status(200).json({ success: true, error: null, documents });
     } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/upload" });
       response.sendStatus(500).end();
     }
   }
@@ -213,7 +214,7 @@ function apiDocumentEndpoints(app) {
       });
       response.status(200).json({ success: true, error: null, documents });
     } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/upload-link" });
       response.sendStatus(500).end();
     }
   }
@@ -346,7 +347,7 @@ function apiDocumentEndpoints(app) {
       await EventLogs.logEvent("api_raw_document_uploaded");
       response.status(200).json({ success: true, error: null, documents });
     } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/raw-text" });
       response.sendStatus(500).end();
     }
   }
@@ -391,7 +392,7 @@ function apiDocumentEndpoints(app) {
      const localFiles = await viewLocalFiles();
      response.status(200).json({ localFiles });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/documents" });
      response.sendStatus(500).end();
    }
  });
@@ -447,7 +448,7 @@ function apiDocumentEndpoints(app) {
 
      response.status(200).json({ types });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/accepted-file-types" });
      response.sendStatus(500).end();
    }
  }
@@ -497,7 +498,7 @@ function apiDocumentEndpoints(app) {
        },
      });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/metadata-schema" });
      response.sendStatus(500).end();
    }
  }
@@ -555,7 +556,7 @@ function apiDocumentEndpoints(app) {
      }
      response.status(200).json({ document });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/document/:docName" });
      response.sendStatus(500).end();
    }
  });
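Note: every file below requires a new `server/utils/logger` module that is added elsewhere in this PR and is not shown in this section. For orientation only, here is a minimal sketch of what such a module could contain — assuming winston, the `origin` metadata key used at every call site below, and a file transport matching the `server/app.log` ignore rule above. The module name and transport paths are assumptions, not the PR's actual implementation:

    // server/utils/logger/index.js (hypothetical sketch, not the PR's code)
    const winston = require("winston");

    const logger = winston.createLogger({
      level: "info",
      format: winston.format.combine(
        winston.format.timestamp(),
        // `origin` arrives via the metadata object passed at each call site.
        winston.format.printf(
          ({ timestamp, level, message, origin }) =>
            `${timestamp} [${origin ?? "server"}] ${level}: ${message}`
        )
      ),
      transports: [
        new winston.transports.Console(),
        // Written relative to the server/ working directory, which would
        // explain the new server/app.log entry in .gitignore above.
        new winston.transports.File({ filename: "app.log" }),
      ],
    });

    module.exports = logger;

Under those assumptions, call sites read `logger.info("message", { origin: "SomeModule" })`, the shape used throughout the diff below.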
diff --git a/server/endpoints/api/system/index.js b/server/endpoints/api/system/index.js
index 029fca89237..2a84c0aec2b 100644
--- a/server/endpoints/api/system/index.js
+++ b/server/endpoints/api/system/index.js
@@ -8,6 +8,7 @@ const {
 } = require("../../../utils/helpers/chat/convertTo");
 const { dumpENV, updateENV } = require("../../../utils/helpers/updateENV");
 const { reqBody } = require("../../../utils/http");
+const logger = require("../../../utils/logger");
 const { validApiKey } = require("../../../utils/middleware/validApiKey");
 
 function apiSystemEndpoints(app) {
@@ -29,7 +30,7 @@ function apiSystemEndpoints(app) {
      dumpENV();
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system/env-dump" });
      response.sendStatus(500).end();
    }
  });
@@ -66,7 +67,7 @@ function apiSystemEndpoints(app) {
      const settings = await SystemSettings.currentSettings();
      response.status(200).json({ settings });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system" });
      response.sendStatus(500).end();
    }
  });
@@ -98,7 +99,7 @@ function apiSystemEndpoints(app) {
      const vectorCount = await VectorDb.totalVectors();
      response.status(200).json({ vectorCount });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system/vector-count" });
      response.sendStatus(500).end();
    }
  });
@@ -147,7 +148,7 @@ function apiSystemEndpoints(app) {
      const { newValues, error } = await updateENV(body);
      response.status(200).json({ newValues, error });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system/update-env" });
      response.sendStatus(500).end();
    }
  }
@@ -201,7 +202,7 @@ function apiSystemEndpoints(app) {
      response.setHeader("Content-Type", contentType);
      response.status(200).send(data);
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system/export-chats" });
      response.sendStatus(500).end();
    }
  }
@@ -267,7 +268,7 @@ function apiSystemEndpoints(app) {
        .json({ success: true, message: "Documents removed successfully" })
        .end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/system/remove-documents" });
      response.sendStatus(500).end();
    }
  }
diff --git a/server/endpoints/api/userManagement/index.js b/server/endpoints/api/userManagement/index.js
index 656fc580b63..3487e014b4c 100644
--- a/server/endpoints/api/userManagement/index.js
+++ b/server/endpoints/api/userManagement/index.js
@@ -1,5 +1,6 @@
 const { User } = require("../../../models/user");
 const { multiUserMode } = require("../../../utils/http");
+const logger = require("../../../utils/logger");
 const { validApiKey } = require("../../../utils/middleware/validApiKey");
 
 function apiUserManagementEndpoints(app) {
@@ -55,7 +56,7 @@ function apiUserManagementEndpoints(app) {
      }));
      response.status(200).json({ users: filteredUsers });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/users" });
      response.sendStatus(500).end();
    }
  });
diff --git a/server/endpoints/api/workspace/index.js b/server/endpoints/api/workspace/index.js
index cbbf1f23634..cb1bffd7cbe 100644
--- a/server/endpoints/api/workspace/index.js
+++ b/server/endpoints/api/workspace/index.js
@@ -17,6 +17,7 @@ const {
   convertToChatHistory,
   writeResponseChunk,
 } = require("../../../utils/helpers/chat/responses");
+const logger = require("../../../utils/logger");
 
 function apiWorkspaceEndpoints(app) {
   if (!app) return;
@@ -79,7 +80,7 @@ function apiWorkspaceEndpoints(app) {
      });
      response.status(200).json({ workspace, message });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/new" });
      response.sendStatus(500).end();
    }
  });
@@ -121,7 +122,7 @@ function apiWorkspaceEndpoints(app) {
      const workspaces = await Workspace.where();
      response.status(200).json({ workspaces });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspaces" });
      response.sendStatus(500).end();
    }
  });
@@ -170,7 +171,7 @@ function apiWorkspaceEndpoints(app) {
      const workspace = await Workspace.get({ slug });
      response.status(200).json({ workspace });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug" });
      response.sendStatus(500).end();
    }
  });
@@ -221,7 +222,7 @@ function apiWorkspaceEndpoints(app) {
      }
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug" });
      response.sendStatus(500).end();
    }
  }
@@ -301,7 +302,7 @@ function apiWorkspaceEndpoints(app) {
      );
      response.status(200).json({ workspace, message });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug/update" });
      response.sendStatus(500).end();
    }
  }
@@ -362,7 +363,7 @@ function apiWorkspaceEndpoints(app) {
      const history = await WorkspaceChats.forWorkspace(workspace.id);
      response.status(200).json({ history: convertToChatHistory(history) });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug/chats" });
      response.sendStatus(500).end();
    }
  }
@@ -441,7 +442,9 @@ function apiWorkspaceEndpoints(app) {
      });
      response.status(200).json({ workspace: updatedWorkspace });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, {
+        origin: "/v1/workspace/:slug/update-embeddings",
+      });
      response.sendStatus(500).end();
    }
  }
@@ -603,7 +606,7 @@ function apiWorkspaceEndpoints(app) {
      });
      response.status(200).json({ ...result });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug/chat" });
      response.status(500).json({
        id: uuidv4(),
        type: "abort",
@@ -726,7 +729,7 @@ function apiWorkspaceEndpoints(app) {
      });
      response.end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v1/workspace/:slug/stream-chat" });
      writeResponseChunk(response, {
        id: uuidv4(),
        type: "abort",
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index 1849a2fc4f3..d659bb531ad 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -51,6 +51,7 @@ const {
   generateRecoveryCodes,
 } = require("../utils/PasswordRecovery");
 const { SlashCommandPresets } = require("../models/slashCommandsPresets");
+const logger = require("../utils/logger");
 
 function systemEndpoints(app) {
   if (!app) return;
@@ -75,7 +76,7 @@ function systemEndpoints(app) {
      const results = await SystemSettings.currentSettings();
      response.status(200).json({ results });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/setup-complete" });
      response.sendStatus(500).end();
    }
  });
@@ -98,7 +99,7 @@ function systemEndpoints(app) {
 
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/check-token" });
      response.sendStatus(500).end();
    }
  }
@@ -241,7 +242,7 @@ function systemEndpoints(app) {
        });
      }
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/request-token" });
      response.sendStatus(500).end();
    }
  });
@@ -307,7 +308,7 @@ function systemEndpoints(app) {
        : await VectorDb.totalVectors();
      response.status(200).json({ vectorCount });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/system-vectors" });
      response.sendStatus(500).end();
    }
  }
@@ -322,7 +323,7 @@ function systemEndpoints(app) {
      await purgeDocument(name);
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/remove-document" });
      response.sendStatus(500).end();
    }
  }
@@ -337,7 +338,7 @@ function systemEndpoints(app) {
      for await (const name of names) await purgeDocument(name);
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/remove-documents" });
      response.sendStatus(500).end();
    }
  }
@@ -352,7 +353,7 @@ function systemEndpoints(app) {
      await purgeFolder(name);
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/remove-folder" });
      response.sendStatus(500).end();
    }
  }
@@ -366,7 +367,7 @@ function systemEndpoints(app) {
      const localFiles = await viewLocalFiles();
      response.status(200).json({ localFiles });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/local-files" });
      response.sendStatus(500).end();
    }
  }
@@ -380,7 +381,9 @@ function systemEndpoints(app) {
      const online = await new CollectorApi().online();
      response.sendStatus(online ? 200 : 503);
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, {
+        origin: "/system/document-processing-status",
+      });
      response.sendStatus(500).end();
    }
  }
@@ -399,7 +402,7 @@ function systemEndpoints(app) {
 
      response.status(200).json({ types });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/accepted-document-types" });
      response.sendStatus(500).end();
    }
  }
@@ -418,7 +421,7 @@ function systemEndpoints(app) {
      );
      response.status(200).json({ newValues, error });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/update-env" });
      response.sendStatus(500).end();
    }
  }
@@ -452,7 +455,7 @@ function systemEndpoints(app) {
      }
      response.status(200).json({ success: !error, error });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/update-password" });
      response.sendStatus(500).end();
    }
  }
@@ -501,7 +504,7 @@ function systemEndpoints(app) {
        multi_user_mode: false,
      });
 
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/enable-multi-user" });
      response.sendStatus(500).end();
    }
  }
@@ -512,7 +515,7 @@ function systemEndpoints(app) {
      const multiUserMode = await SystemSettings.isMultiUserMode();
      response.status(200).json({ multiUserMode });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/system/multi-user-mode" });
      response.sendStatus(500).end();
    }
  });
diff --git a/server/endpoints/workspaceThreads.js b/server/endpoints/workspaceThreads.js
index 1c207e5230f..9e1b916cbb6 100644
--- a/server/endpoints/workspaceThreads.js
+++ b/server/endpoints/workspaceThreads.js
@@ -18,6 +18,7 @@ const {
 } = require("../utils/middleware/validWorkspace");
 const { WorkspaceChats } = require("../models/workspaceChats");
 const { convertToChatHistory } = require("../utils/helpers/chat/responses");
+const logger = require("../utils/logger");
 
 function workspaceThreadEndpoints(app) {
   if (!app) return;
@@ -53,7 +54,7 @@ function workspaceThreadEndpoints(app) {
      );
      response.status(200).json({ thread, message });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug/thread/new" });
      response.sendStatus(500).end();
    }
  }
@@ -72,7 +73,7 @@ function workspaceThreadEndpoints(app) {
      });
      response.status(200).json({ threads });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug/threads" });
      response.sendStatus(500).end();
    }
  }
@@ -91,7 +92,9 @@ function workspaceThreadEndpoints(app) {
      await WorkspaceThread.delete({ id: thread.id });
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, {
+        origin: "/workspace/:slug/thread/:threadSlug",
+      });
      response.sendStatus(500).end();
    }
  }
@@ -114,7 +117,9 @@ function workspaceThreadEndpoints(app) {
      });
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, {
origin: "/workspace/:slug/thread-bulk-delete", + }); response.sendStatus(500).end(); } } @@ -145,7 +150,9 @@ function workspaceThreadEndpoints(app) { response.status(200).json({ history: convertToChatHistory(history) }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { + origin: "/workspace/:slug/thread/:threadSlug/chats", + }); response.sendStatus(500).end(); } } @@ -168,7 +175,9 @@ function workspaceThreadEndpoints(app) { ); response.status(200).json({ thread, message }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { + origin: "/workspace/:slug/thread/:threadSlug/update", + }); response.sendStatus(500).end(); } } @@ -197,7 +206,9 @@ function workspaceThreadEndpoints(app) { response.sendStatus(200).end(); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { + origin: "/workspace/:slug/thread/:threadSlug/delete-edited-chats", + }); response.sendStatus(500).end(); } } @@ -239,7 +250,9 @@ function workspaceThreadEndpoints(app) { response.sendStatus(200).end(); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { + origin: "/workspace/:slug/thread/:threadSlug/update-chat", + }); response.sendStatus(500).end(); } } diff --git a/server/endpoints/workspaces.js b/server/endpoints/workspaces.js index 6d6f29bbd51..842379a1e98 100644 --- a/server/endpoints/workspaces.js +++ b/server/endpoints/workspaces.js @@ -31,6 +31,7 @@ const { fetchPfp, } = require("../utils/files/pfp"); const { getTTSProvider } = require("../utils/TextToSpeech"); +const logger = require("../utils/logger"); function workspaceEndpoints(app) { if (!app) return; @@ -68,7 +69,7 @@ function workspaceEndpoints(app) { response.status(200).json({ workspace, message }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/new" }); response.sendStatus(500).end(); } } @@ -97,7 +98,7 @@ function workspaceEndpoints(app) { ); response.status(200).json({ workspace, message }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/:slug/update" }); response.sendStatus(500).end(); } } @@ -147,7 +148,7 @@ function workspaceEndpoints(app) { ); response.status(200).json({ success: true, error: null }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/:slug/upload" }); response.sendStatus(500).end(); } } @@ -190,7 +191,7 @@ function workspaceEndpoints(app) { ); response.status(200).json({ success: true, error: null }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/:slug/upload-link" }); response.sendStatus(500).end(); } } @@ -234,7 +235,9 @@ function workspaceEndpoints(app) { : null, }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { + origin: "/workspace/:slug/update-embeddings", + }); response.sendStatus(500).end(); } } @@ -277,7 +280,7 @@ function workspaceEndpoints(app) { } response.sendStatus(200).end(); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/:slug" }); response.sendStatus(500).end(); } } @@ -318,7 +321,7 @@ function workspaceEndpoints(app) { } response.sendStatus(200).end(); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspace/:slug/reset-vector-db" }); response.sendStatus(500).end(); } } @@ -336,7 +339,7 @@ function workspaceEndpoints(app) { response.status(200).json({ workspaces }); } catch (e) { - console.log(e.message, e); + logger.error(e.message, { origin: "/workspaces" }); 
      response.sendStatus(500).end();
    }
  }
@@ -355,7 +358,7 @@ function workspaceEndpoints(app) {
 
      response.status(200).json({ workspace });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug" });
      response.sendStatus(500).end();
    }
  }
@@ -382,7 +385,7 @@ function workspaceEndpoints(app) {
        : await WorkspaceChats.forWorkspace(workspace.id);
      response.status(200).json({ history: convertToChatHistory(history) });
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug/chats" });
      response.sendStatus(500).end();
    }
  }
@@ -413,7 +416,7 @@ function workspaceEndpoints(app) {
 
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug/delete-chats" });
      response.sendStatus(500).end();
    }
  }
@@ -437,7 +440,9 @@ function workspaceEndpoints(app) {
 
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, {
+        origin: "/workspace/:slug/delete-edited-chats",
+      });
      response.sendStatus(500).end();
    }
  }
@@ -474,7 +479,7 @@ function workspaceEndpoints(app) {
 
      response.sendStatus(200).end();
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/workspace/:slug/update-chat" });
      response.sendStatus(500).end();
    }
  }
diff --git a/server/index.js b/server/index.js
index 987a0cab7d2..d3bd0a91801 100644
--- a/server/index.js
+++ b/server/index.js
@@ -22,6 +22,7 @@ const { bootHTTP, bootSSL } = require("./utils/boot");
 const { workspaceThreadEndpoints } = require("./endpoints/workspaceThreads");
 const { documentEndpoints } = require("./endpoints/document");
 const { agentWebsocket } = require("./endpoints/agentWebsocket");
+const logger = require("./utils/logger");
 const { experimentalEndpoints } = require("./endpoints/experimental");
 const app = express();
 const apiRouter = express.Router();
@@ -106,7 +107,7 @@ if (process.env.NODE_ENV !== "development") {
      }
      return;
    } catch (e) {
-      console.log(e.message, e);
+      logger.error(e.message, { origin: "/v/:command" });
      response.sendStatus(500).end();
    }
  });
diff --git a/server/models/eventLogs.js b/server/models/eventLogs.js
index 51240431a75..c53be284d05 100644
--- a/server/models/eventLogs.js
+++ b/server/models/eventLogs.js
@@ -1,3 +1,4 @@
+const logger = require("../utils/logger");
 const prisma = require("../utils/prisma");
 
 const EventLogs = {
@@ -11,13 +12,10 @@ const EventLogs = {
          occurredAt: new Date(),
        },
      });
-      console.log(`\x1b[32m[Event Logged]\x1b[0m - ${event}`);
+      logger.info(`[Event Logged] - ${event}`, { origin: "EventLogs" });
      return { eventLog, message: null };
    } catch (error) {
-      console.error(
-        `\x1b[31m[Event Logging Failed]\x1b[0m - ${event}`,
-        error.message
-      );
+      logger.error(error.message, { origin: "EventLogs" });
      return { eventLog: null, message: error.message };
    }
  },
diff --git a/server/models/telemetry.js b/server/models/telemetry.js
index 011da05e97f..a2dcd33ce23 100644
--- a/server/models/telemetry.js
+++ b/server/models/telemetry.js
@@ -1,5 +1,6 @@
 const { v4 } = require("uuid");
 const { SystemSettings } = require("./systemSettings");
+const logger = require("../utils/logger");
 
 const Telemetry = {
   // Write-only key. It can't read events or any of your other data, so it's safe to use in public apps.
@@ -49,11 +50,10 @@ const Telemetry = {
    // Silence some events to keep logs from being too messy in production
    // eg: Tool calls from agents spamming the logs.
    if (!silent) {
-      console.log(`\x1b[32m[TELEMETRY SENT]\x1b[0m`, {
-        event,
-        distinctId,
-        properties,
-      });
+      logger.info(
+        `${event} - ${distinctId} - ${JSON.stringify(properties)}`,
+        { origin: "Telemetry" }
+      );
    }
 
    client.capture({
diff --git a/server/swagger/init.js b/server/swagger/init.js
index 31edcf1c47c..a8812b1acf5 100644
--- a/server/swagger/init.js
+++ b/server/swagger/init.js
@@ -1,6 +1,7 @@
 const swaggerAutogen = require("swagger-autogen")({ openapi: "3.0.0" });
 const fs = require("fs");
 const path = require("path");
+const logger = require("../utils/logger");
 
 const doc = {
   info: {
@@ -73,5 +74,5 @@ swaggerAutogen(outputFile, endpointsFiles, doc).then(({ data }) => {
    encoding: "utf-8",
    flag: "w",
  });
-  console.log(`Swagger-autogen: \x1b[32mPatched servers.url ✔\x1b[0m`);
+  logger.info("Patched servers.url ✔", { origin: "Swagger-autogen" });
});
diff --git a/server/utils/AiProviders/anthropic/index.js b/server/utils/AiProviders/anthropic/index.js
index 4da56bf9af0..d8a5f8e040d 100644
--- a/server/utils/AiProviders/anthropic/index.js
+++ b/server/utils/AiProviders/anthropic/index.js
@@ -4,6 +4,7 @@ const {
   clientAbortedHandler,
 } = require("../../helpers/chat/responses");
 const { NativeEmbedder } = require("../../EmbeddingEngines/native");
+const logger = require("../../logger");
 
 class AnthropicLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -104,7 +105,9 @@ class AnthropicLLM {
 
      return response.content[0].text;
    } catch (error) {
-      console.log(error);
+      logger.error(error, {
+        origin: "AnthropicLLM",
+      });
      return error;
    }
  }
diff --git a/server/utils/AiProviders/genericOpenAi/index.js b/server/utils/AiProviders/genericOpenAi/index.js
index 8d0c0f34a7d..336f9670ecd 100644
--- a/server/utils/AiProviders/genericOpenAi/index.js
+++ b/server/utils/AiProviders/genericOpenAi/index.js
@@ -3,6 +3,7 @@ const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
 const { toValidNumber } = require("../../http");
+const logger = require("../../logger");
 
 class GenericOpenAiLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -32,11 +33,9 @@ class GenericOpenAiLLM {
 
    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
-    this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
-  }
-
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+    logger.info(`Inference API: ${this.basePath} Model: ${this.model}`, {
+      origin: "GenericOpenAiLLM",
+    });
  }
 
  #appendContext(contextTexts = []) {
diff --git a/server/utils/AiProviders/koboldCPP/index.js b/server/utils/AiProviders/koboldCPP/index.js
index 15163648421..97d323b74b7 100644
--- a/server/utils/AiProviders/koboldCPP/index.js
+++ b/server/utils/AiProviders/koboldCPP/index.js
@@ -4,6 +4,7 @@ const {
   writeResponseChunk,
 } = require("../../helpers/chat/responses");
 const { v4: uuidv4 } = require("uuid");
+const logger = require("../../logger");
 
 class KoboldCPPLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -28,11 +29,9 @@ class KoboldCPPLLM {
 
    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
-    this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
-  }
-
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+    logger.info(`Inference API: ${this.basePath} Model: ${this.model}`, {
+      origin: "KoboldCPPLLM",
+    });
  }
 
  #appendContext(contextTexts = []) {
diff --git a/server/utils/AiProviders/liteLLM/index.js b/server/utils/AiProviders/liteLLM/index.js
index 28d0b71dc43..6f744fe88b2 100644
--- a/server/utils/AiProviders/liteLLM/index.js
+++ b/server/utils/AiProviders/liteLLM/index.js
@@ -2,6 +2,7 @@ const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
+const logger = require("../../logger");
 
 class LiteLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -27,11 +28,9 @@ class LiteLLM {
 
    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
-    this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
-  }
-
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+    logger.info(`Inference API: ${this.basePath} Model: ${this.model}`, {
+      origin: "LiteLLM",
+    });
  }
 
  #appendContext(contextTexts = []) {
diff --git a/server/utils/AiProviders/openRouter/index.js b/server/utils/AiProviders/openRouter/index.js
index 301ed7046cd..520401c6d99 100644
--- a/server/utils/AiProviders/openRouter/index.js
+++ b/server/utils/AiProviders/openRouter/index.js
@@ -7,6 +7,7 @@ const {
 const fs = require("fs");
 const path = require("path");
 const { safeJsonParse } = require("../../http");
+const logger = require("../../logger");
 const cacheFolder = path.resolve(
   process.env.STORAGE_DIR
     ? path.resolve(process.env.STORAGE_DIR, "models", "openrouter")
@@ -45,10 +46,6 @@ class OpenRouterLLM {
    this.cacheAtPath = path.resolve(cacheFolder, ".cached_at");
  }
 
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
-  }
-
  // This checks if the .cached_at file has a timestamp that is more than 1Week (in millis)
  // from the current date. If it is, then we will refetch the API so that all the models are up
  // to date.
@@ -68,9 +65,11 @@ class OpenRouterLLM {
  async #syncModels() {
    if (fs.existsSync(this.cacheModelPath) && !this.#cacheIsStale())
      return false;
-
-    this.log(
-      "Model cache is not present or stale. Fetching from OpenRouter API."
+    logger.info(
+      "Model cache is not present or stale. Fetching from OpenRouter API.",
+      {
+        origin: "OpenRouterLLM",
+      }
    );
    await fetchOpenRouterModels();
    return;
@@ -193,8 +192,9 @@ class OpenRouterLLM {
        const now = Number(new Date());
        const diffMs = now - lastChunkTime;
        if (diffMs >= timeoutThresholdMs) {
-          console.log(
-            `OpenRouter stream did not self-close and has been stale for >${timeoutThresholdMs}ms. Closing response stream.`
+          logger.info(
+            `OpenRouter stream did not self-close and has been stale for >${timeoutThresholdMs}ms. Closing response stream.`,
+            { origin: "OpenRouterLLM" }
          );
          writeResponseChunk(response, {
            uuid,
@@ -300,7 +300,7 @@ async function fetchOpenRouterModels() {
      return models;
    })
    .catch((e) => {
-      console.error(e);
+      logger.error(e, { origin: "OpenRouterLLM" });
      return {};
    });
}
diff --git a/server/utils/AiProviders/textGenWebUI/index.js b/server/utils/AiProviders/textGenWebUI/index.js
index 71d50577701..5abd2c9474a 100644
--- a/server/utils/AiProviders/textGenWebUI/index.js
+++ b/server/utils/AiProviders/textGenWebUI/index.js
@@ -2,6 +2,7 @@ const { NativeEmbedder } = require("../../EmbeddingEngines/native");
 const {
   handleDefaultStreamResponseV2,
 } = require("../../helpers/chat/responses");
+const logger = require("../../logger");
 
 class TextGenWebUILLM {
   constructor(embedder = null) {
@@ -25,11 +26,9 @@ class TextGenWebUILLM {
 
    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
-    this.log(`Inference API: ${this.basePath} Model: ${this.model}`);
-  }
-
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+    logger.info(`Inference API: ${this.basePath} Model: ${this.model}`, {
+      origin: "TextGenWebUILLM",
+    });
  }
 
  #appendContext(contextTexts = []) {
diff --git a/server/utils/DocumentManager/index.js b/server/utils/DocumentManager/index.js
index 17fd9860ee2..adc36c4656d 100644
--- a/server/utils/DocumentManager/index.js
+++ b/server/utils/DocumentManager/index.js
@@ -1,5 +1,6 @@
 const fs = require("fs");
 const path = require("path");
+const logger = require("../logger");
 
 const documentsPath =
   process.env.NODE_ENV === "development"
@@ -14,7 +15,7 @@ class DocumentManager {
  }
 
  log(text, ...args) {
-    console.log(`\x1b[36m[DocumentManager]\x1b[0m ${text}`, ...args);
+    logger.info(`${text} ${args}`, { origin: "DocumentManager" });
  }
 
  async pinnedDocuments() {
diff --git a/server/utils/EmbeddingEngines/lmstudio/index.js b/server/utils/EmbeddingEngines/lmstudio/index.js
index 6874b4b3292..0bca4102fe6 100644
--- a/server/utils/EmbeddingEngines/lmstudio/index.js
+++ b/server/utils/EmbeddingEngines/lmstudio/index.js
@@ -1,4 +1,5 @@
 const { maximumChunkLength } = require("../../helpers");
+const logger = require("../../logger");
 
 class LMStudioEmbedder {
   constructor() {
@@ -15,17 +16,13 @@ class LMStudioEmbedder {
    this.embeddingMaxChunkLength = maximumChunkLength();
  }
 
-  log(text, ...args) {
-    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
-  }
-
  async #isAlive() {
    return await fetch(`${this.basePath}/models`, {
      method: "HEAD",
    })
      .then((res) => res.ok)
      .catch((e) => {
-        this.log(e.message);
+        logger.error(e, { origin: "LMStudioEmbedder" });
        return false;
      });
  }
@@ -43,8 +40,9 @@ class LMStudioEmbedder {
        `LMStudio service could not be reached. Is LMStudio running?`
      );
 
-    this.log(
-      `Embedding ${textChunks.length} chunks of text with ${this.model}.`
+    logger.info(
+      `Embedding ${textChunks.length} chunks of text with ${this.model}.`,
+      { origin: "LMStudioEmbedder" }
    );
 
    // LMStudio will drop all queued requests now? So if there are many going on
@@ -91,11 +89,15 @@ class LMStudioEmbedder {
 
    if (errors.length > 0) {
      let uniqueErrors = new Set();
-      console.log(errors);
      errors.map((error) =>
        uniqueErrors.add(`[${error.type}]: ${error.message}`)
      );
 
+      logger.error(
+        `LMStudio Failed to embed: ${Array.from(uniqueErrors).join(", ")}`,
+        { origin: "LMStudioEmbedder" }
+      );
+
      if (errors.length > 0)
        throw new Error(
          `LMStudio Failed to embed: ${Array.from(uniqueErrors).join(", ")}`
diff --git a/server/utils/EmbeddingEngines/native/index.js b/server/utils/EmbeddingEngines/native/index.js
index 5494c886956..889e5f98f63 100644
--- a/server/utils/EmbeddingEngines/native/index.js
+++ b/server/utils/EmbeddingEngines/native/index.js
@@ -2,6 +2,7 @@ const path = require("path");
 const fs = require("fs");
 const { toChunks } = require("../../helpers");
 const { v4 } = require("uuid");
+const logger = require("../../logger");
 
 class NativeEmbedder {
   // This is a folder that Mintplex Labs hosts for those who cannot capture the HF model download
@@ -30,10 +31,6 @@ class NativeEmbedder {
-    this.log("Initialized");
+    logger.info("Initialized", { origin: "NativeEmbedder" });
  }
 
-  log(text, ...args) {
-    console.log(`\x1b[36m[NativeEmbedder]\x1b[0m ${text}`, ...args);
-  }
-
  #tempfilePath() {
    const filename = `${v4()}.tmp`;
    const tmpPath = process.env.STORAGE_DIR
@@ -47,7 +44,9 @@ class NativeEmbedder {
    try {
      await fs.promises.appendFile(filePath, data, { encoding: "utf8" });
    } catch (e) {
-      console.error(`Error writing to tempfile: ${e}`);
+      logger.error(`Error writing to tempfile: ${e}`, {
+        origin: "NativeEmbedder",
+      });
    }
  }
@@ -62,7 +61,9 @@ class NativeEmbedder {
        env.remoteHost = hostOverride;
        env.remotePathTemplate = "{model}/"; // Our S3 fallback url does not support revision File structure.
      }
-      this.log(`Downloading ${this.model} from ${env.remoteHost}`);
+      logger.info(`Downloading ${this.model} from ${env.remoteHost}`, {
+        origin: "NativeEmbedder",
+      });
    }
    return pipeline(...args);
  });
@@ -74,10 +75,9 @@ class NativeEmbedder {
        // Show download progress if we need to download any files
        progress_callback: (data) => {
          if (!data.hasOwnProperty("progress")) return;
-          console.log(
-            `\x1b[36m[NativeEmbedder - Downloading model]\x1b[0m ${
-              data.file
-            } ${~~data?.progress}%`
+          logger.info(
+            `Downloading model - ${data.file} ${~~data?.progress}%`,
+            { origin: "NativeEmbedder" }
          );
        },
      }
@@ -102,8 +102,9 @@ class NativeEmbedder {
  // So to attempt to monkey-patch this we have a single fallback URL to help alleviate duplicate bug reports.
  async embedderClient() {
    if (!this.modelDownloaded)
-      this.log(
-        "The native embedding model has never been run and will be downloaded right now. Subsequent runs will be faster. (~23MB)"
+      logger.info(
(~23MB)", + { origin: "NativeEmbedder" } ); let fetchResponse = await this.#fetchWithHost(); diff --git a/server/utils/EmbeddingEngines/ollama/index.js b/server/utils/EmbeddingEngines/ollama/index.js index 2d1cea7ae12..840e455fd21 100644 --- a/server/utils/EmbeddingEngines/ollama/index.js +++ b/server/utils/EmbeddingEngines/ollama/index.js @@ -1,4 +1,5 @@ const { maximumChunkLength } = require("../../helpers"); +const logger = require("../../logger"); class OllamaEmbedder { constructor() { @@ -14,17 +15,13 @@ class OllamaEmbedder { this.embeddingMaxChunkLength = maximumChunkLength(); } - log(text, ...args) { - console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args); - } - async #isAlive() { return await fetch(process.env.EMBEDDING_BASE_PATH, { method: "HEAD", }) .then((res) => res.ok) .catch((e) => { - this.log(e.message); + logger.error(e.message, { origin: "OllamaEmbedder" }); return false; }); } @@ -43,8 +40,9 @@ class OllamaEmbedder { ); const embeddingRequests = []; - this.log( - `Embedding ${textChunks.length} chunks of text with ${this.model}.` + logger.info( + `Embedding ${textChunks.length} chunks of text with ${this.model}.`, + { origin: "OllamaEmbedder" } ); for (const chunk of textChunks) { diff --git a/server/utils/EmbeddingEngines/voyageAi/index.js b/server/utils/EmbeddingEngines/voyageAi/index.js index 65126613b65..01356a5e2b2 100644 --- a/server/utils/EmbeddingEngines/voyageAi/index.js +++ b/server/utils/EmbeddingEngines/voyageAi/index.js @@ -1,3 +1,5 @@ +const logger = require("../../logger"); + class VoyageAiEmbedder { constructor() { if (!process.env.VOYAGEAI_API_KEY) @@ -52,7 +54,7 @@ class VoyageAiEmbedder { }); return embeddings; } catch (error) { - console.error("Voyage AI Failed to embed:", error); + logger.error(error, { origin: "VoyageAiEmbedder" }); if ( error.message.includes( "Cannot read properties of undefined (reading '0')" diff --git a/server/utils/TextSplitter/index.js b/server/utils/TextSplitter/index.js index 4162fa74e9e..86496d9988a 100644 --- a/server/utils/TextSplitter/index.js +++ b/server/utils/TextSplitter/index.js @@ -1,3 +1,5 @@ +const logger = require("../logger"); + function isNullOrNaN(value) { if (value === null) return true; return isNaN(value); @@ -25,10 +27,6 @@ class TextSplitter { this.#splitter = this.#setSplitter(config); } - log(text, ...args) { - console.log(`\x1b[35m[TextSplitter]\x1b[0m ${text}`, ...args); - } - // Does a quick check to determine the text chunk length limit. // Embedder models have hard-set limits that cannot be exceeded, just like an LLM context // so here we want to allow override of the default 1000, but up to the models maximum, which is @@ -39,8 +37,9 @@ class TextSplitter { : Number(preferred); const limit = Number(embedderLimit); if (prefValue > limit) - console.log( - `\x1b[43m[WARN]\x1b[0m Text splitter chunk length of ${prefValue} exceeds embedder model max of ${embedderLimit}. Will use ${embedderLimit}.` + logger.warn( + `Text splitter chunk length of ${prefValue} exceeds embedder model max of ${embedderLimit}. Will use ${embedderLimit}.`, + { origin: "TextSplitter" } ); return prefValue > limit ? 
    return prefValue > limit ? limit : prefValue;
  }
@@ -79,7 +78,12 @@ class RecursiveSplitter {
    const {
      RecursiveCharacterTextSplitter,
    } = require("@langchain/textsplitters");
-    this.log(`Will split with`, { chunkSize, chunkOverlap });
+    logger.info(
+      `Will split with chunk size of ${chunkSize} and overlap of ${chunkOverlap}`,
+      {
+        origin: "RecursiveSplitter",
+      }
+    );
    this.chunkHeader = chunkHeader;
    this.engine = new RecursiveCharacterTextSplitter({
      chunkSize,
@@ -87,10 +91,6 @@ class RecursiveSplitter {
      chunkOverlap,
    });
  }
 
-  log(text, ...args) {
-    console.log(`\x1b[35m[RecursiveSplitter]\x1b[0m ${text}`, ...args);
-  }
-
  async _splitText(documentText) {
    if (!this.chunkHeader) return this.engine.splitText(documentText);
    const strings = await this.engine.splitText(documentText);
diff --git a/server/utils/agents/aibitat/example/websocket/websock-branding-collab.js b/server/utils/agents/aibitat/example/websocket/websock-branding-collab.js
index 7229bd882d9..dd1f2ed400e 100644
--- a/server/utils/agents/aibitat/example/websocket/websock-branding-collab.js
+++ b/server/utils/agents/aibitat/example/websocket/websock-branding-collab.js
@@ -3,7 +3,6 @@
 // Scraping is enabled, but search requires AGENT_GSE_* keys.
 
 const express = require("express");
-const chalk = require("chalk");
 const AIbitat = require("../../index.js");
 const {
   websocket,
@@ -11,6 +10,7 @@ const {
   webScraping,
 } = require("../../plugins/index.js");
 const path = require("path");
+const logger = require("../../../../logger/index.js");
 const port = 3000;
 const app = express();
 require("express-ws")(app);
@@ -31,11 +31,13 @@ app.ws("/ws", function (ws, _response) {
  });
 
  ws.on("close", function () {
-    console.log("Socket killed");
+    logger.info("Socket killed", { origin: "websock-branding-collab.js" });
    return;
  });
 
-  console.log("Socket online and waiting...");
+  logger.info("Socket online and waiting...", {
+    origin: "websock-branding-collab.js",
+  });
  runAIbitat(ws).catch((error) => {
    ws.send(
      JSON.stringify({
@@ -53,11 +55,15 @@ app.all("*", function (_, response) {
});
 
app.listen(port, () => {
-  console.log(`Testing HTTP/WSS server listening at http://localhost:${port}`);
+  logger.info(`Testing HTTP/WSS server listening at http://localhost:${port}`, {
+    origin: "websock-branding-collab.js",
+  });
});
 
async function runAIbitat(socket) {
-  console.log(chalk.blue("Booting AIbitat class & starting agent(s)"));
+  logger.info("Booting AIbitat class & starting agent(s)", {
+    origin: "websock-branding-collab.js",
+  });
 
  const aibitat = new AIbitat({
    provider: "openai",
@@ -68,19 +74,19 @@ async function runAIbitat(socket) {
    .use(webScraping.plugin())
    .agent("creativeDirector", {
      role: `You are a Creative Director. Your role is overseeing the entire branding project, ensuring
-       the client's brief is met, and maintaining consistency across all brand elements, developing the 
+       the client's brief is met, and maintaining consistency across all brand elements, developing the
       brand strategy, guiding the visual and conceptual direction, and providing overall creative leadership.`,
    })
    .agent("marketResearcher", {
-      role: `You do competitive market analysis via searching on the internet and learning about 
-      comparative products and services. You can search by using keywords and phrases that you think will lead 
+      role: `You do competitive market analysis via searching on the internet and learning about
+      comparative products and services. You can search by using keywords and phrases that you think will lead
      to competitor research that can help find the unique angle and market of the idea.`,
      functions: ["web-browsing"],
    })
    .agent("PM", {
-      role: `You are the Project Coordinator. Your role is overseeing the project's progress, timeline, 
-      and budget. Ensure effective communication and coordination among team members, client, and stakeholders. 
-      Your tasks include planning and scheduling project milestones, tracking tasks, and managing any 
+      role: `You are the Project Coordinator. Your role is overseeing the project's progress, timeline,
+      and budget. Ensure effective communication and coordination among team members, client, and stakeholders.
+      Your tasks include planning and scheduling project milestones, tracking tasks, and managing any
      risks or issues that arise.`,
      interrupt: "ALWAYS",
    })
@@ -93,8 +99,8 @@ async function runAIbitat(socket) {
  await aibitat.start({
    from: "PM",
    to: "#branding",
-    content: `I have an idea for a muslim focused meetup called Chai & Vibes. 
-    I want to focus on professionals that are muslim and are in their 18-30 year old range who live in big cities. 
+    content: `I have an idea for a muslim focused meetup called Chai & Vibes.
+    I want to focus on professionals that are muslim and are in their 18-30 year old range who live in big cities.
    Does anything like this exist? How can we differentiate?`,
  });
}
diff --git a/server/utils/agents/aibitat/example/websocket/websock-multi-turn-chat.js b/server/utils/agents/aibitat/example/websocket/websock-multi-turn-chat.js
index 6277978cc78..f9928727428 100644
--- a/server/utils/agents/aibitat/example/websocket/websock-multi-turn-chat.js
+++ b/server/utils/agents/aibitat/example/websocket/websock-multi-turn-chat.js
@@ -3,7 +3,6 @@
 // Scraping is enabled, but search requires AGENT_GSE_* keys.
 
 const express = require("express");
-const chalk = require("chalk");
 const AIbitat = require("../../index.js");
 const {
   websocket,
@@ -11,6 +10,7 @@ const {
   webScraping,
 } = require("../../plugins/index.js");
 const path = require("path");
+const logger = require("../../../../logger/index.js");
 const port = 3000;
 const app = express();
 require("express-ws")(app);
@@ -31,11 +31,13 @@ app.ws("/ws", function (ws, _response) {
  });
 
  ws.on("close", function () {
-    console.log("Socket killed");
+    logger.info("Socket killed", { origin: "websock-multi-turn-chat.js" });
    return;
  });
 
-  console.log("Socket online and waiting...");
+  logger.info("Socket online and waiting...", {
+    origin: "websock-multi-turn-chat.js",
+  });
  runAIbitat(ws).catch((error) => {
    ws.send(
      JSON.stringify({
@@ -53,7 +55,9 @@ app.all("*", function (_, response) {
});
 
app.listen(port, () => {
-  console.log(`Testing HTTP/WSS server listening at http://localhost:${port}`);
+  logger.info(`Testing HTTP/WSS server listening at http://localhost:${port}`, {
+    origin: "websock-multi-turn-chat.js",
+  });
});
 
const Agent = {
@@ -66,7 +70,9 @@ async function runAIbitat(socket) {
    throw new Error(
      "This example requires a valid OPEN_AI_KEY in the env.development file"
    );
-  console.log(chalk.blue("Booting AIbitat class & starting agent(s)"));
+  logger.info("Booting AIbitat class & starting agent(s)", {
+    origin: "websock-multi-turn-chat.js",
+  });
  const aibitat = new AIbitat({
    provider: "openai",
    model: "gpt-4o",
diff --git a/server/utils/agents/aibitat/plugins/cli.js b/server/utils/agents/aibitat/plugins/cli.js
index 06a60e98e67..9b15395b717 100644
--- a/server/utils/agents/aibitat/plugins/cli.js
+++ b/server/utils/agents/aibitat/plugins/cli.js
@@ -2,6 +2,7 @@
 const { input } = require("@inquirer/prompts");
 const chalk = require("chalk");
 const { RetryError } = require("../error");
+const logger = require("../../../logger");
 
 /**
  * Command-line Interface plugin. It prints the messages on the console and asks for feedback
@@ -19,9 +20,13 @@ const cli = {
    let printing = [];
 
    aibitat.onError(async (error) => {
-      console.error(chalk.red(`  error: ${error?.message}`));
+      logger.error(chalk.red(`  error: ${error?.message}`), {
+        origin: "cli.js",
+      });
      if (error instanceof RetryError) {
-        console.error(chalk.red(`  retrying in 60 seconds...`));
+        logger.error(chalk.red(`  retrying in 60 seconds...`), {
+          origin: "cli.js",
+        });
        setTimeout(() => {
          aibitat.retry();
        }, 60000);
@@ -30,8 +35,7 @@ const cli = {
    });
 
    aibitat.onStart(() => {
-      console.log();
-      console.log("🚀 starting chat ...\n");
+      logger.info("🚀 starting chat ...\n", { origin: "cli.js" });
      printing = [Promise.resolve()];
    });
 
@@ -46,17 +50,17 @@ const cli = {
    aibitat.onTerminate(async () => {
      await Promise.all(printing);
-      console.log("🚀 chat finished");
+      logger.info("🚀 chat finished", { origin: "cli.js" });
    });
 
    aibitat.onInterrupt(async (node) => {
      await Promise.all(printing);
      const feedback = await this.askForFeedback(node);
      // Add an extra line after the message
-      console.log();
+      logger.info("", { origin: "cli.js" });
 
      if (feedback === "exit") {
-        console.log("🚀 chat finished");
+        logger.info("🚀 chat finished", { origin: "cli.js" });
        return process.exit(0);
      }
@@ -80,10 +84,10 @@ const cli = {
    )} ${replying}:`;
 
    if (!simulateStream) {
-      console.log(reference);
-      console.log(message.content);
+      logger.info(reference, { origin: "cli.js" });
+      logger.info(message.content, { origin: "cli.js" });
      // Add an extra line after the message
-      console.log();
+      logger.info("", { origin: "cli.js" });
      return;
    }
@@ -114,8 +118,8 @@ const cli = {
    }
 
    // Add an extra line after the message
-    console.log();
-    console.log();
+    logger.info("", { origin: "cli.js" });
+    logger.info("", { origin: "cli.js" });
  },
 
  /**
diff --git a/server/utils/agents/aibitat/plugins/memory.js b/server/utils/agents/aibitat/plugins/memory.js
index d31a264d18d..200eda127d3 100644
--- a/server/utils/agents/aibitat/plugins/memory.js
+++ b/server/utils/agents/aibitat/plugins/memory.js
@@ -1,6 +1,7 @@
 const { v4 } = require("uuid");
 const { getVectorDbClass, getLLMProvider } = require("../../../helpers");
 const { Deduplicator } = require("../utils/dedupe");
+const logger = require("../../../logger");
 
 const memory = {
   name: "rag-memory",
@@ -77,7 +78,7 @@ const memory = {
          this.tracker.trackRun(this.name, { action, content });
          return response;
        } catch (error) {
-          console.log(error);
+          logger.error(error.message, { origin: "memory.handler" });
          return `There was an error while calling the function. ${error.message}`;
        }
      },
diff --git a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MSSQL.js b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MSSQL.js
index ed75aa7aa71..f2e95162ff1 100644
--- a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MSSQL.js
+++ b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MSSQL.js
@@ -1,5 +1,6 @@
 const mssql = require("mssql");
 const UrlPattern = require("url-pattern");
+const logger = require("../../../../../logger");
 
 class MSSQLConnector {
   #connected = false;
@@ -69,7 +70,7 @@ class MSSQLConnector {
      result.rows = query.recordset;
      result.count = query.rowsAffected.reduce((sum, a) => sum + a, 0);
    } catch (err) {
-      console.log(this.constructor.name, err);
+      logger.error(err.message, { origin: "MSSQLConnector.runQuery" });
      result.error = err.message;
    } finally {
      await this._client.close();
diff --git a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MySQL.js b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MySQL.js
index d9982ab318e..9c745d88673 100644
--- a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MySQL.js
+++ b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/MySQL.js
@@ -1,5 +1,6 @@
 const mysql = require("mysql2/promise");
 const UrlPattern = require("url-pattern");
+const logger = require("../../../../../logger");
 
 class MySQLConnector {
   #connected = false;
@@ -39,7 +40,7 @@ class MySQLConnector {
      result.rows = query;
      result.count = query?.length;
    } catch (err) {
-      console.log(this.constructor.name, err);
+      logger.error(err.message, { origin: this.constructor.name });
      result.error = err.message;
    } finally {
      await this._client.end();
diff --git a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/Postgresql.js b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/Postgresql.js
index 463fea51018..2c531da7222 100644
--- a/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/Postgresql.js
+++ b/server/utils/agents/aibitat/plugins/sql-agent/SQLConnectors/Postgresql.js
@@ -1,4 +1,5 @@
 const pgSql = require("pg");
+const logger = require("../../../../../logger");
 
 class PostgresSQLConnector {
   #connected = false;
@@ -32,7 +33,7 @@ class PostgresSQLConnector {
      result.rows = query.rows;
      result.count = query.rowCount;
    } catch (err) {
-      console.log(this.constructor.name, err);
+      logger.error(err.message, { origin: this.constructor.name });
      result.error = err.message;
    } finally {
      await this._client.end();
diff --git a/server/utils/agents/aibitat/plugins/web-browsing.js b/server/utils/agents/aibitat/plugins/web-browsing.js
index f4269fe139f..85edddb331c 100644
--- a/server/utils/agents/aibitat/plugins/web-browsing.js
+++ b/server/utils/agents/aibitat/plugins/web-browsing.js
@@ -1,4 +1,5 @@
 const { SystemSettings } = require("../../../../models/systemSettings");
+const logger = require("../../../logger");
 
 const webBrowsing = {
   name: "web-browsing",
@@ -118,8 +119,10 @@ const webBrowsing = {
        });
      })
      .catch((e) => {
-        console.log(e);
-        return [];
+        logger.error(e.message, {
+          origin: "web-browsing._googleSearchEngine",
+        });
+        return [];
      });
 
    if (data.length === 0)
@@ -224,7 +227,9 @@ const webBrowsing = {
        }));
      })
      .catch((e) => {
-        console.log(e);
+        logger.error(e.message, {
+          origin: "web-browsing._bingWebSearchEngine",
+        });
        return [];
      });
 
diff --git a/server/utils/agents/aibitat/plugins/websocket.js b/server/utils/agents/aibitat/plugins/websocket.js
index 8c8800ff3e2..2cd6fb2a7ee 100644
--- a/server/utils/agents/aibitat/plugins/websocket.js
+++ b/server/utils/agents/aibitat/plugins/websocket.js
@@ -1,6 +1,7 @@
 const chalk = require("chalk");
 const { RetryError } = require("../error");
 const { Telemetry } = require("../../../../models/telemetry");
+const logger = require("../../../logger");
 
 const SOCKET_TIMEOUT_MS = 300 * 1_000; // 5 mins
 /**
@@ -129,10 +130,13 @@ const websocket = {
      };
 
      socketTimeout = setTimeout(() => {
-        console.log(
+        logger.info(
          chalk.red(
            `Client took too long to respond, chat thread is dead after ${SOCKET_TIMEOUT_MS}ms`
-          )
+          ),
+          {
+            origin: "websocket.js",
+          }
        );
        resolve("exit");
        return;
diff --git a/server/utils/agents/aibitat/providers/ai-provider.js b/server/utils/agents/aibitat/providers/ai-provider.js
index b3a8b1791be..67a0617ae7a 100644
--- a/server/utils/agents/aibitat/providers/ai-provider.js
+++ b/server/utils/agents/aibitat/providers/ai-provider.js
@@ -14,6 +14,7 @@ const { ChatOpenAI } = require("@langchain/openai");
 const { ChatAnthropic } = require("@langchain/anthropic");
 const { ChatOllama } = require("@langchain/community/chat_models/ollama");
 const { toValidNumber } = require("../../../http");
+const logger = require("../../../logger");
 
 const DEFAULT_WORKSPACE_PROMPT =
   "You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.";
@@ -28,10 +29,9 @@ class Provider {
  }
 
  providerLog(text, ...args) {
-    console.log(
-      `\x1b[36m[AgentLLM${this?.model ? ` - ${this.model}` : ""}]\x1b[0m ${text}`,
-      ...args
-    );
+    logger.info(`${this?.model ? ` - ${this.model}` : ""} ${text} ${args}`, {
+      origin: "AgentLLM",
+    });
  }
 
  get client() {
diff --git a/server/utils/agents/index.js b/server/utils/agents/index.js
index cfa3d6a8ac2..05850b6b2b0 100644
--- a/server/utils/agents/index.js
+++ b/server/utils/agents/index.js
@@ -6,6 +6,7 @@ const {
 const { WorkspaceChats } = require("../../models/workspaceChats");
 const { safeJsonParse } = require("../http");
 const { USER_AGENT, WORKSPACE_AGENT } = require("./defaults");
+const logger = require("../logger");
 
 class AgentHandler {
   #invocationUUID;
@@ -21,11 +22,13 @@ class AgentHandler {
  }
 
  log(text, ...args) {
-    console.log(`\x1b[36m[AgentHandler]\x1b[0m ${text}`, ...args);
+    logger.info(text, { origin: "AgentHandler", ...args });
  }
 
  closeAlert() {
-    this.log(`End ${this.#invocationUUID}::${this.provider}:${this.model}`);
+    logger.info(`End ${this.#invocationUUID}::${this.provider}:${this.model}`, {
+      origin: "AgentHandler",
+    });
  }
 
  async #chatHistory(limit = 10) {
@@ -62,7 +65,7 @@ class AgentHandler {
      });
      return agentHistory;
    } catch (e) {
-      this.log("Error loading chat history", e.message);
+      logger.error(e.message, { origin: "AgentHandler#chatHistory" });
      return [];
    }
  }
@@ -186,7 +189,12 @@ class AgentHandler {
    this.provider = this.invocation.workspace.agentProvider || "openai";
    this.model =
      this.invocation.workspace.agentModel || this.#providerDefault();
-    this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
+    logger.info(
+      `Start ${this.#invocationUUID}::${this.provider}:${this.model}`,
+      {
+        origin: "AgentHandler#providerSetupAndCheck",
+      }
+    );
    this.#checkSetup();
  }
 
@@ -206,8 +214,9 @@ class AgentHandler {
        definition.required &&
        (!args.hasOwnProperty(param) || args[param] === null)
      ) {
-        this.log(
-          `'${param}' required parameter for '${pluginName}' plugin is missing. Plugin may not function or crash agent.`
+        logger.info(
+          `'${param}' required parameter for '${pluginName}' plugin is missing. Plugin may not function or crash agent.`,
+          { origin: "AgentHandler#parseCallOptions" }
        );
        continue;
      }
@@ -224,8 +233,9 @@ class AgentHandler {
    if (name.includes("#")) {
      const [parent, childPluginName] = name.split("#");
      if (!AgentPlugins.hasOwnProperty(parent)) {
-        this.log(
-          `${parent} is not a valid plugin. Skipping inclusion to agent cluster.`
+        logger.info(
+          `${parent} is not a valid plugin. Skipping inclusion to agent cluster.`,
+          { origin: "AgentHandler#attachPlugins" }
        );
        continue;
      }
@@ -234,8 +244,9 @@ class AgentHandler {
        (child) => child.name === childPluginName
      );
      if (!childPlugin) {
-        this.log(
-          `${parent} does not have child plugin named ${childPluginName}. Skipping inclusion to agent cluster.`
+        logger.info(
+          `${parent} does not have child plugin named ${childPluginName}. Skipping inclusion to agent cluster.`,
+          { origin: "AgentHandler#attachPlugins" }
        );
        continue;
      }
@@ -246,16 +257,18 @@ class AgentHandler {
        name
      );
      this.aibitat.use(childPlugin.plugin(callOpts));
-      this.log(
-        `Attached ${parent}:${childPluginName} plugin to Agent cluster`
+      logger.info(
+        `Attached ${parent}:${childPluginName} plugin to Agent cluster`,
+        { origin: "AgentHandler#attachPlugins" }
      );
      continue;
    }
 
    // Load single-stage plugin.
    if (!AgentPlugins.hasOwnProperty(name)) {
-      this.log(
-        `${name} is not a valid plugin. Skipping inclusion to agent cluster.`
+      logger.info(
+        `${name} is not a valid plugin. Skipping inclusion to agent cluster.`,
+        { origin: "AgentHandler#attachPlugins" }
      );
      continue;
    }
@@ -266,13 +279,17 @@ class AgentHandler {
      );
      const AIbitatPlugin = AgentPlugins[name];
      this.aibitat.use(AIbitatPlugin.plugin(callOpts));
-      this.log(`Attached ${name} plugin to Agent cluster`);
+      logger.info(`Attached ${name} plugin to Agent cluster`, {
+        origin: "AgentHandler#attachPlugins",
+      });
    }
  }
 
  async #loadAgents() {
    // Default User agent and workspace agent
-    this.log(`Attaching user and default agent to Agent cluster.`);
+    logger.info(`Attaching user and default agent to Agent cluster.`, {
+      origin: "AgentHandler#loadAgents",
+    });
    this.aibitat.agent(USER_AGENT.name, await USER_AGENT.getDefinition());
    this.aibitat.agent(
      WORKSPACE_AGENT.name,
@@ -307,7 +324,12 @@ class AgentHandler {
    });
 
    // Attach standard websocket plugin for frontend communication.
-    this.log(`Attached ${AgentPlugins.websocket.name} plugin to Agent cluster`);
+    logger.info(
+      `Attached ${AgentPlugins.websocket.name} plugin to Agent cluster`,
+      {
+        origin: "AgentHandler#createAIbitat",
+      }
+    );
    this.aibitat.use(
      AgentPlugins.websocket.plugin({
        socket: args.socket,
@@ -314,11 +336,14 @@ class AgentHandler {
        muteUserReply: true,
        introspection: true,
      })
    );
 
    // Attach standard chat-history plugin for message storage.
-    this.log(
-      `Attached ${AgentPlugins.chatHistory.name} plugin to Agent cluster`
+    logger.info(
+      `Attached ${AgentPlugins.chatHistory.name} plugin to Agent cluster`,
+      {
+        origin: "AgentHandler#createAIbitat",
+      }
    );
    this.aibitat.use(AgentPlugins.chatHistory.plugin());
 
diff --git a/server/utils/boot/index.js b/server/utils/boot/index.js
index 8a3dcbd25aa..f3afcc99d34 100644
--- a/server/utils/boot/index.js
+++ b/server/utils/boot/index.js
@@ -1,13 +1,16 @@
+const chalk = require("chalk");
 const { Telemetry } = require("../../models/telemetry");
 const { BackgroundService } = require("../BackgroundWorkers");
 const { EncryptionManager } = require("../EncryptionManager");
 const { CommunicationKey } = require("../comKey");
+const logger = require("../logger");
 const setupTelemetry = require("../telemetry");
 
 function bootSSL(app, port = 3001) {
   try {
-    console.log(
-      `\x1b[33m[SSL BOOT ENABLED]\x1b[0m Loading the certificate and key for HTTPS mode...`
+    logger.info(
+      chalk.yellow(`Loading the certificate and key for HTTPS mode...`),
+      { origin: "bootSSL" }
     );
     const fs = require("fs");
     const https = require("https");
@@ -22,21 +25,20 @@ function bootSSL(app, port = 3001) {
        new CommunicationKey(true);
        new EncryptionManager();
        new BackgroundService().boot();
-        console.log(`Primary server in HTTPS mode listening on port ${port}`);
+        logger.info(`Primary server in HTTPS mode listening on port ${port}`, {
+          origin: "bootSSL",
+        });
      })
      .on("error", catchSigTerms);
 
    require("express-ws")(app, server); // Apply same certificate + server for WSS connections
    return { app, server };
  } catch (e) {
-    console.error(
-      `\x1b[31m[SSL BOOT FAILED]\x1b[0m ${e.message} - falling back to HTTP boot.`,
-      {
-        ENABLE_HTTPS: process.env.ENABLE_HTTPS,
-        HTTPS_KEY_PATH: process.env.HTTPS_KEY_PATH,
-        HTTPS_CERT_PATH: process.env.HTTPS_CERT_PATH,
-        stacktrace: e.stack,
-      }
+    logger.error(
+      chalk.red(
+        `[SSL BOOT FAILED] ${e.message} - falling back to HTTP boot. ${JSON.stringify({ ENABLE_HTTPS: process.env.ENABLE_HTTPS, HTTPS_KEY_PATH: process.env.HTTPS_KEY_PATH, HTTPS_CERT_PATH: process.env.HTTPS_CERT_PATH })}`
+      ),
+      { origin: "bootSSL" }
    );
    return bootHTTP(app, port);
  }
@@ -51,7 +53,9 @@ function bootHTTP(app, port = 3001) {
        new CommunicationKey(true);
        new EncryptionManager();
        new BackgroundService().boot();
-        console.log(`Primary server in HTTP mode listening on port ${port}`);
+        logger.info(`Primary server in HTTP mode listening on port ${port}`, {
+          origin: "bootHTTP",
+        });
      })
      .on("error", catchSigTerms);
 
diff --git a/server/utils/chats/embed.js b/server/utils/chats/embed.js
index 8488aedd514..f8cbb1b76c9 100644
--- a/server/utils/chats/embed.js
+++ b/server/utils/chats/embed.js
@@ -7,6 +7,7 @@ const {
   writeResponseChunk,
 } = require("../helpers/chat/responses");
 const { DocumentManager } = require("../DocumentManager");
+const logger = require("../logger");
 
 async function streamChatWithForEmbed(
   response,
@@ -165,8 +166,11 @@ async function streamChatWithForEmbed(
  // If streaming is not explicitly enabled for connector
  // we do regular waiting of a response and send a single chunk.
  if (LLMConnector.streamingEnabled() !== true) {
-    console.log(
-      `\x1b[31m[STREAMING DISABLED]\x1b[0m Streaming is not available for ${LLMConnector.constructor.name}. Will use regular chat method.`
+    logger.info(
Will use regular chat method.`, + { + origin: "streamChatWithForEmbed", + } ); completeText = await LLMConnector.getChatCompletion(messages, { temperature: embed.workspace?.openAiTemp ?? LLMConnector.defaultTemp, diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js index 770e6cb6b82..b8ca103b09a 100644 --- a/server/utils/chats/stream.js +++ b/server/utils/chats/stream.js @@ -11,6 +11,8 @@ const { recentChatHistory, sourceIdentifier, } = require("./index"); +const chalk = require("chalk"); +const logger = require("../logger"); const VALID_CHAT_MODE = ["chat", "query"]; @@ -232,8 +234,13 @@ async function streamChatWithWorkspace( // If streaming is not explicitly enabled for connector // we do regular waiting of a response and send a single chunk. if (LLMConnector.streamingEnabled() !== true) { - console.log( - `\x1b[31m[STREAMING DISABLED]\x1b[0m Streaming is not available for ${LLMConnector.constructor.name}. Will use regular chat method.` + logger.info( + chalk.yellow( + `Streaming is not available for ${LLMConnector.constructor.name}. Will use regular chat method.` + ), + { + origin: "streamChatWithWorkspace", + } ); completeText = await LLMConnector.getChatCompletion(messages, { temperature: workspace?.openAiTemp ?? LLMConnector.defaultTemp, diff --git a/server/utils/collectorApi/index.js b/server/utils/collectorApi/index.js index 7f5781918d8..6ae973e3928 100644 --- a/server/utils/collectorApi/index.js +++ b/server/utils/collectorApi/index.js @@ -4,6 +4,8 @@ const { EncryptionManager } = require("../EncryptionManager"); // of docker this endpoint is not exposed so it is only on the Docker instances internal network // so no additional security is needed on the endpoint directly. Auth is done however by the express // middleware prior to leaving the node-side of the application so that is good enough >:) + +const logger = require("../logger"); class CollectorApi { constructor() { const { CommunicationKey } = require("../comKey"); @@ -12,7 +14,7 @@ class CollectorApi { } log(text, ...args) { - console.log(`\x1b[36m[CollectorApi]\x1b[0m ${text}`, ...args); + logger.info(`${text} ${args}`, { origin: "CollectorApi" }); } #attachOptions() { diff --git a/server/utils/comKey/index.js b/server/utils/comKey/index.js index 5cc6b0c056f..432273378c0 100644 --- a/server/utils/comKey/index.js +++ b/server/utils/comKey/index.js @@ -1,6 +1,7 @@ const crypto = require("crypto"); const fs = require("fs"); const path = require("path"); +const logger = require("../logger"); const keyPath = process.env.NODE_ENV === "development" ? 
path.resolve(__dirname, `../../storage/comkey`) @@ -30,7 +31,7 @@ class CommunicationKey { } log(text, ...args) { - console.log(`\x1b[36m[CommunicationKey]\x1b[0m ${text}`, ...args); + logger.info(`${text} ${args}`, { origin: "CommunicationKey" }); } #readPrivateKey() { diff --git a/server/utils/database/index.js b/server/utils/database/index.js index 75f5f7116de..fc7a34cf5a8 100644 --- a/server/utils/database/index.js +++ b/server/utils/database/index.js @@ -1,5 +1,7 @@ +const chalk = require("chalk"); const { getGitVersion } = require("../../endpoints/utils"); const { Telemetry } = require("../../models/telemetry"); +const logger = require("../logger"); function checkColumnTemplate(tablename = null, column = null) { if (!tablename || !column) @@ -35,7 +37,9 @@ async function checkForMigrations(model, db) { if (toMigrate.length === 0) return; - console.log(`Running ${toMigrate.length} migrations`, toMigrate); + logger.info(`Running ${toMigrate.length} migrations ${toMigrate}`, { + origin: "checkForMigrations", + }); await db.exec(toMigrate.join(";\n")); return; } @@ -48,9 +52,9 @@ async function checkForMigrations(model, db) { async function validateTablePragmas(force = false) { try { if (process.env.NODE_ENV !== "development" && force === false) { - console.log( - `\x1b[34m[MIGRATIONS STUBBED]\x1b[0m Please ping /migrate once server starts to run migrations` - ); + logger.info("Please ping /migrate once server starts to run migrations", { + origin: "MIGRATIONS STUBBED", + }); return; } const { SystemSettings } = require("../../models/systemSettings"); @@ -86,21 +90,31 @@ async function validateTablePragmas(force = false) { // You can see all Telemetry events by ctrl+f `Telemetry.sendEvent` calls to verify this claim. async function setupTelemetry() { if (process.env.DISABLE_TELEMETRY === "true") { - console.log( - `\x1b[31m[TELEMETRY DISABLED]\x1b[0m Telemetry is marked as disabled - no events will send. Telemetry helps Mintplex Labs Inc improve AnythingLLM.` + logger.info( + chalk.red( + "Telemetry is marked as disabled - no events will send. Telemetry helps Mintplex Labs Inc improve AnythingLLM." + ), + { + origin: "TELEMETRY DISABLED", + } ); return true; } if (Telemetry.isDev()) { - console.log( - `\x1b[33m[TELEMETRY STUBBED]\x1b[0m Anonymous Telemetry stubbed in development.` - ); + logger.info(chalk.yellow("Anonymous Telemetry stubbed in development."), { + origin: "TELEMETRY STUBBED", + }); return; } - console.log( - `\x1b[32m[TELEMETRY ENABLED]\x1b[0m Anonymous Telemetry enabled. Telemetry helps Mintplex Labs Inc improve AnythingLLM.` + logger.info( + chalk.green( + "Anonymous Telemetry enabled. Telemetry helps Mintplex Labs Inc improve AnythingLLM." 
+ ), + { + origin: "TELEMETRY ENABLED", + } ); await Telemetry.findOrCreateId(); await Telemetry.sendTelemetry("server_boot", { diff --git a/server/utils/files/index.js b/server/utils/files/index.js index 58bdf807a4e..b19f0df3581 100644 --- a/server/utils/files/index.js +++ b/server/utils/files/index.js @@ -2,6 +2,7 @@ const fs = require("fs"); const path = require("path"); const { v5: uuidv5 } = require("uuid"); const { Document } = require("../../models/documents"); +const logger = require("../logger"); const { DocumentSyncQueue } = require("../../models/documentSyncQueue"); const documentsPath = process.env.NODE_ENV === "development" @@ -100,8 +101,9 @@ async function cachedVectorInformation(filename = null, checkOnly = false) { if (checkOnly) return exists; if (!exists) return { exists, chunks: [] }; - console.log( - `Cached vectorized results of ${filename} found! Using cached data to save on embed costs.` + logger.info( + `Cached vectorized results of ${filename} found! Using cached data to save on embed costs.`, + { origin: "cachedVectorInformation" } ); const rawData = fs.readFileSync(file, "utf8"); return { exists: true, chunks: JSON.parse(rawData) }; @@ -111,8 +113,9 @@ async function cachedVectorInformation(filename = null, checkOnly = false) { // filename is the fullpath to the doc so we can compare by filename to find cached matches. async function storeVectorResult(vectorData = [], filename = null) { if (!filename) return; - console.log( - `Caching vectorized results of ${filename} to prevent duplicated embedding.` + logger.info( + `Caching vectorized results of ${filename} to prevent duplicated embedding.`, + { origin: "storeVectorResult" } ); if (!fs.existsSync(vectorCachePath)) fs.mkdirSync(vectorCachePath); @@ -134,7 +137,9 @@ async function purgeSourceDocument(filename = null) { ) return; - console.log(`Purging source document of ${filename}.`); + logger.info(`Purging source document of ${filename}.`, { + origin: "purgeSourceDocument", + }); fs.rmSync(filePath); return; } @@ -146,7 +151,9 @@ async function purgeVectorCache(filename = null) { const filePath = path.resolve(vectorCachePath, `${digest}.json`); if (!fs.existsSync(filePath) || !fs.lstatSync(filePath).isFile()) return; - console.log(`Purging vector-cache of ${filename}.`); + logger.info(`Purging vector-cache of ${filename}.`, { + origin: "purgeVectorCache", + }); fs.rmSync(filePath); return; } diff --git a/server/utils/helpers/chat/index.js b/server/utils/helpers/chat/index.js index 6f565efe14e..6c589cf0546 100644 --- a/server/utils/helpers/chat/index.js +++ b/server/utils/helpers/chat/index.js @@ -1,5 +1,6 @@ const { sourceIdentifier } = require("../../chats"); const { safeJsonParse } = require("../../http"); +const logger = require("../../logger"); const { TokenManager } = require("../tiktoken"); const { convertToPromptHistory } = require("./responses"); @@ -21,7 +22,7 @@ You may think: "Doesn't this result in massive data loss?" - yes & no. Under the use cases we expect the tool to be used, which is mostly chatting with documents, we are able to use this approach with minimal blowback on the quality of responses. -We accomplish this by taking a rate-limit approach that is proportional to the model capacity. Since we support more than openAI models, this needs to +We accomplish this by taking a rate-limit approach that is proportional to the model capacity. Since we support more than openAI models, this needs to be generic and reliance on a "better summary" model just is not a luxury we can afford. 
The added latency overhead during prompting is also unacceptable. In general: system: at best 15% of token capacity @@ -37,7 +38,7 @@ we handle overflows by taking an aggressive path for two main cases. 2. Context window is exceeded in regular use. - We do not touch prompt since it is very likely to be <70% of window. - We check system prompt is not outrageous - if it is we cannonball it and keep context if present. -- We check a sliding window of history, only allowing up to 15% of the history to pass through if it fits, with a +- We check a sliding window of history, only allowing up to 15% of the history to pass through if it fits, with a preference for recent history if we can cannonball to fit it, otherwise it is omitted. We end up with a rather large prompt that fits through a given window with a lot of room for response in most use-cases. @@ -337,10 +338,11 @@ function cannonball({ truncText + tokenManager.bytesFromTokens(rightChunks); - console.log( + logger.info( `Cannonball results ${initialInputSize} -> ${tokenManager.countFromString( truncatedText - )} tokens.` + )} tokens.`, + { origin: "cannonball" } ); return truncatedText; } @@ -395,7 +397,7 @@ function fillSourceWindow({ } const log = (text, ...args) => { - console.log(`\x1b[36m[fillSourceWindow]\x1b[0m ${text}`, ...args); + logger.info(`${text} ${args}`, { origin: "fillSourceWindow" }); }; log( diff --git a/server/utils/helpers/chat/responses.js b/server/utils/helpers/chat/responses.js index 609b18190fe..a321408bb5f 100644 --- a/server/utils/helpers/chat/responses.js +++ b/server/utils/helpers/chat/responses.js @@ -1,9 +1,16 @@ const { v4: uuidv4 } = require("uuid"); const moment = require("moment"); +const logger = require("../../logger"); +const chalk = require("chalk"); function clientAbortedHandler(resolve, fullText) { - console.log( - "\x1b[43m\x1b[34m[STREAM ABORTED]\x1b[0m Client requested to abort stream. Exiting LLM stream handler early." + logger.info( + chalk.red( + "Client requested to abort stream. 
Exiting LLM stream handler early." + ), + { + origin: "STREAM ABORTED", + } ); resolve(fullText); return; diff --git a/server/utils/logger/index.js b/server/utils/logger/index.js new file mode 100644 index 00000000000..bc4a7dbf352 --- /dev/null +++ b/server/utils/logger/index.js @@ -0,0 +1,25 @@ +const winston = require("winston"); + +const logger = winston.createLogger({ + level: "info", + transports: [ + new winston.transports.Console({ + format: winston.format.combine( + winston.format.colorize(), + winston.format.printf(({ level, message, origin }) => { + return `\x1b[36m[${origin}]\x1b[0m ${level}: ${message}`; + }) + ), + }), + new winston.transports.File({ + filename: "app.log", + format: winston.format.combine( + winston.format.printf(({ level, message, origin }) => { + return `[${origin}] ${level}: ${message}`; + }) + ), + }), + ], +}); + +module.exports = logger; diff --git a/server/utils/prisma/migrateFromSqlite.js b/server/utils/prisma/migrateFromSqlite.js index 4e5e379cf45..7f6d65391cc 100644 --- a/server/utils/prisma/migrateFromSqlite.js +++ b/server/utils/prisma/migrateFromSqlite.js @@ -2,6 +2,7 @@ const { PrismaClient } = require("@prisma/client"); const execSync = require("child_process").execSync; const fs = require("fs"); const path = require("path"); +const logger = require("../logger"); require("dotenv").config(); const DATABASE_PATH = process.env.DB_URL || "../../storage/anythingllm.db"; @@ -14,7 +15,9 @@ const BACKUP_PATH = path.join( function backupDatabase() { try { fs.copyFileSync(DATABASE_PATH, BACKUP_PATH); - console.log("Database backup created successfully."); + logger.info("Database backup created successfully.", { + origin: "migrateFromSqlite.js", + }); } catch (error) { console.error("Failed to create database backup:", error); } @@ -27,14 +30,18 @@ const prisma = new PrismaClient(); // Reset the prisma database and prepare it for migration of data from sqlite function resetAndMigrateDatabase() { try { - console.log("Resetting and migrating the database..."); + logger.info("Resetting and migrating the database...", { + origin: "migrateFromSqlite.js", + }); execSync("cd ../.. && npx prisma migrate reset --skip-seed --force", { stdio: "inherit", }); execSync("cd ../.. 
&& npx prisma migrate dev --name init", { stdio: "inherit", }); - console.log("Database reset and initial migration completed successfully"); + logger.info("Database reset and initial migration completed successfully", { + origin: "migrateFromSqlite.js", + }); } catch (error) { console.error("Failed to reset and migrate the database:", error); } @@ -45,7 +52,9 @@ resetAndMigrateDatabase(); // Migrate data from sqlite to prisma async function migrateData() { try { - console.log("Starting data migration..."); + logger.info("Starting data migration...", { + origin: "migrateFromSqlite.js", + }); var legacyMap = { users: { count: 0, @@ -234,7 +243,9 @@ async function migrateData() { }); }); - console.log("Data migration completed successfully"); + logger.info("Data migration completed successfully", { + origin: "migrateFromSqlite.js", + }); } catch (error) { console.error("Data migration failed:", error); } finally { @@ -255,8 +266,11 @@ async function migrateTable(tableName, migrateRowFunc) { `SELECT COUNT(*) as count FROM sqlite_master WHERE name='${tableName}'` ); if (count === 0) { - console.log( - `${tableName} does not exist in legacy DB - nothing to migrate - skipping.` + logger.info( + `${tableName} does not exist in legacy DB - nothing to migrate - skipping.`, + { + origin: "migrateFromSqlite.js", + } ); return; } @@ -270,8 +284,14 @@ async function migrateTable(tableName, migrateRowFunc) { upserts.push(row); } } catch (e) { - console.error(e); - console.log({ tableName, upserts }); + logger.error( + `Error migrating ${tableName}: ${e.message} - ${JSON.stringify( + e + )} - ${JSON.stringify(upserts)}`, + { + origin: "migrateFromSqlite.js", + } + ); } finally { await db.close(); } diff --git a/server/utils/telemetry/index.js b/server/utils/telemetry/index.js index 1945de3fe88..52bb7055929 100644 --- a/server/utils/telemetry/index.js +++ b/server/utils/telemetry/index.js @@ -1,5 +1,7 @@ +const chalk = require("chalk"); const { getGitVersion } = require("../../endpoints/utils"); const { Telemetry } = require("../../models/telemetry"); +const logger = require("../logger"); // Telemetry is anonymized and your data is never read. This can be disabled by setting // DISABLE_TELEMETRY=true in the `.env` of however you setup. Telemetry helps us determine use @@ -7,21 +9,35 @@ const { Telemetry } = require("../../models/telemetry"); // You can see all Telemetry events by ctrl+f `Telemetry.sendEvent` calls to verify this claim. async function setupTelemetry() { if (process.env.DISABLE_TELEMETRY === "true") { - console.log( - `\x1b[31m[TELEMETRY DISABLED]\x1b[0m Telemetry is marked as disabled - no events will send. Telemetry helps Mintplex Labs Inc improve AnythingLLM.` + logger.info( + chalk.red( + "Telemetry is marked as disabled - no events will send. Telemetry helps Mintplex Labs Inc improve AnythingLLM." + ), + { + origin: "TELEMETRY DISABLED", + } ); return true; } if (Telemetry.isDev()) { - console.log( - `\x1b[33m[TELEMETRY STUBBED]\x1b[0m Anonymous Telemetry stubbed in development.` + logger.info( + chalk.yellow( + "Telemetry is marked as stubbed in development - no events will send. Telemetry helps Mintplex Labs Inc improve AnythingLLM." + ), + { + origin: "TELEMETRY STUBBED", + } ); return; } - - console.log( - `\x1b[32m[TELEMETRY ENABLED]\x1b[0m Anonymous Telemetry enabled. Telemetry helps Mintplex Labs Inc improve AnythingLLM.` + logger.info( + chalk.green( + "Anonymous Telemetry enabled. Telemetry helps Mintplex Labs Inc improve AnythingLLM." 
+ ), + { + origin: "TELEMETRY ENABLED", + } ); await Telemetry.findOrCreateId(); await Telemetry.sendTelemetry("server_boot", { diff --git a/server/utils/vectorDbProviders/astra/index.js b/server/utils/vectorDbProviders/astra/index.js index efaaa135a70..b60d88f6a90 100644 --- a/server/utils/vectorDbProviders/astra/index.js +++ b/server/utils/vectorDbProviders/astra/index.js @@ -5,6 +5,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { v4: uuidv4 } = require("uuid"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const AstraDB = { name: "AstraDB", @@ -53,7 +54,9 @@ const AstraDB = { if (!(await this.isRealCollection(collection))) return null; const count = await collection.countDocuments().catch((e) => { - console.error("Astra::namespaceExists", e.message); + logger.error(`namespaceExists:: ${e.message}`, { + origin: "AstraDB", + }); return null; }); @@ -109,7 +112,9 @@ const AstraDB = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info(`Adding new vectorized document into namespace: ${namespace}`, { + origin: "AstraDB", + }); if (!skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -167,7 +172,9 @@ const AstraDB = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "AstraDB", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -203,7 +210,9 @@ const AstraDB = { if (vectors.length > 0) { const chunks = []; - console.log("Inserting vectorized chunks into Astra DB."); + logger.info("Inserting vectorized chunks into Astra DB.", { + origin: "AstraDB", + }); // AstraDB has maximum upsert size of 20 records per-request so we have to use a lower chunk size here // in order to do the queries - this takes a lot more time than other providers but there @@ -223,7 +232,9 @@ const AstraDB = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "AstraDB", + }); return { vectorized: false, error: e.message }; } }, @@ -319,8 +330,9 @@ const AstraDB = { responses.forEach((response) => { if (response.$similarity < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(response.metadata))) { - console.log( - "AstraDB: A source was filtered from context as it's parent document is pinned." + logger.info( + "A source was filtered from context as its parent document is pinned.", + { origin: "AstraDB" } ); return; } @@ -352,7 +364,7 @@ const AstraDB = { const collections = resp ? 
JSON.parse(resp)?.status?.collections : []; return collections; } catch (e) { - console.error("Astra::AllNamespace", e); + logger.error(`allNamespace:: ${e.message}`, { origin: "AstraDB" }); return []; } }, diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js index a79d4fc4201..8fba099bf9c 100644 --- a/server/utils/vectorDbProviders/chroma/index.js +++ b/server/utils/vectorDbProviders/chroma/index.js @@ -6,6 +6,7 @@ const { v4: uuidv4 } = require("uuid"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { parseAuthHeader } = require("../../http"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const COLLECTION_REGEX = new RegExp( /^(?!\d+\.\d+\.\d+\.\d+$)(?!.*\.\.)(?=^[a-zA-Z0-9][a-zA-Z0-9_-]{1,61}[a-zA-Z0-9]$).{3,63}$/ ); @@ -139,8 +140,9 @@ const Chroma = { if ( filterIdentifiers.includes(sourceIdentifier(response.metadatas[0][i])) ) { - console.log( - "Chroma: A source was filtered from context as it's parent document is pinned." + logger.info( + "A source was filtered from context as its parent document is pinned.", + { origin: "Chroma" } ); return; } @@ -173,7 +175,9 @@ const Chroma = { const collection = await client .getCollection({ name: this.normalize(namespace) }) .catch((e) => { - console.error("ChromaDB::namespaceExists", e.message); + logger.error(`namespaceExists:: ${e.message}`, { + origin: "Chroma", + }); return null; }); return !!collection; @@ -193,7 +197,12 @@ const Chroma = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "Chroma", + } + ); if (skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -258,7 +267,9 @@ const Chroma = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "Chroma", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -303,7 +314,9 @@ const Chroma = { if (vectors.length > 0) { const chunks = []; - console.log("Inserting vectorized chunks into Chroma collection."); + logger.info("Inserting vectorized chunks into Chroma collection.", { + origin: "Chroma", + }); for (const chunk of toChunks(vectors, 500)) chunks.push(chunk); const additionResult = await collection.add(submission); @@ -316,7 +329,9 @@ const Chroma = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "Chroma", + }); return { vectorized: false, error: e.message }; } }, diff --git a/server/utils/vectorDbProviders/lance/index.js b/server/utils/vectorDbProviders/lance/index.js index e1e6a5e6305..e7e5e72babe 100644 --- a/server/utils/vectorDbProviders/lance/index.js +++ b/server/utils/vectorDbProviders/lance/index.js @@ -5,6 +5,7 @@ const { SystemSettings } = require("../../../models/systemSettings"); const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { v4: uuidv4 } = require("uuid"); const { sourceIdentifier } = 
require("../../chats"); +const logger = require("../../logger"); const LanceDb = { uri: `${ @@ -78,8 +79,9 @@ const LanceDb = { return; const { vector: _, ...rest } = item; if (filterIdentifiers.includes(sourceIdentifier(rest))) { - console.log( - "LanceDB: A source was filtered from context as it's parent document is pinned." + logger.info( + "A source was filtered from context as it's parent document is pinned.", + { origin: "LanceDb" } ); return; } @@ -134,8 +136,9 @@ const LanceDb = { const { client } = await this.connect(); const exists = await this.namespaceExists(client, namespace); if (!exists) { - console.error( - `LanceDB:deleteDocumentFromNamespace - namespace ${namespace} does not exist.` + logger.error( + `deleteDocumentFromNamespace - namespace ${namespace} does not exist.`, + { origin: "LanceDb" } ); return; } @@ -161,7 +164,12 @@ const LanceDb = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "LanceDb", + } + ); if (!skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -208,7 +216,9 @@ const LanceDb = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "LanceDb", + }); const documentVectors = []; const vectors = []; const submissions = []; @@ -243,7 +253,9 @@ const LanceDb = { const chunks = []; for (const chunk of toChunks(vectors, 500)) chunks.push(chunk); - console.log("Inserting vectorized chunks into LanceDB collection."); + logger.info("Inserting vectorized chunks into LanceDB collection.", { + origin: "LanceDb", + }); const { client } = await this.connect(); await this.updateOrCreateCollection(client, submissions, namespace); await storeVectorResult(chunks, fullFilePath); @@ -252,7 +264,9 @@ const LanceDb = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "LanceDb", + }); return { vectorized: false, error: e.message }; } }, diff --git a/server/utils/vectorDbProviders/milvus/index.js b/server/utils/vectorDbProviders/milvus/index.js index 14d54d6ee72..944dc9fedd7 100644 --- a/server/utils/vectorDbProviders/milvus/index.js +++ b/server/utils/vectorDbProviders/milvus/index.js @@ -10,6 +10,7 @@ const { v4: uuidv4 } = require("uuid"); const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const Milvus = { name: "Milvus", @@ -80,7 +81,9 @@ const Milvus = { const { value } = await client .hasCollection({ collection_name: this.normalize(namespace) }) .catch((e) => { - console.error("MilvusDB::namespaceExists", e.message); + logger.error(`namespaceExists:: ${e.message}`, { + origin: "Milvus", + }); return { value: false }; }); return value; @@ -146,7 +149,12 @@ const Milvus = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", 
namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "Milvus", + } + ); if (skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -202,7 +210,9 @@ const Milvus = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "Milvus", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -232,7 +242,9 @@ const Milvus = { const { client } = await this.connect(); await this.getOrCreateCollection(client, namespace, vectorDimension); - console.log("Inserting vectorized chunks into Milvus."); + logger.info("Inserting vectorized chunks into Milvus.", { + origin: "Milvus", + }); for (const chunk of toChunks(vectors, 100)) { chunks.push(chunk); const insertResult = await client.insert({ @@ -259,7 +271,9 @@ const Milvus = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "Milvus", + }); return { vectorized: false, error: e.message }; } }, @@ -346,8 +360,9 @@ const Milvus = { response.results.forEach((match) => { if (match.score < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(match.metadata))) { - console.log( - "Milvus: A source was filtered from context as it's parent document is pinned." + logger.info( + "A source was filtered from context as its parent document is pinned.", + { origin: "Milvus" } ); return; } diff --git a/server/utils/vectorDbProviders/pinecone/index.js b/server/utils/vectorDbProviders/pinecone/index.js index 040f41d32f2..f3fda87d664 100644 --- a/server/utils/vectorDbProviders/pinecone/index.js +++ b/server/utils/vectorDbProviders/pinecone/index.js @@ -5,6 +5,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { v4: uuidv4 } = require("uuid"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const PineconeDB = { name: "Pinecone", @@ -60,8 +61,9 @@ const PineconeDB = { response.matches.forEach((match) => { if (match.score < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(match.metadata))) { - console.log( - "Pinecone: A source was filtered from context as it's parent document is pinned." 
+ logger.info( + "Pinecone: A source was filtered from context as it's parent document is pinned.", + { origin: "Pinecone" } ); return; } @@ -104,7 +106,12 @@ const PineconeDB = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "Pinecone", + } + ); if (!skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -153,7 +160,9 @@ const PineconeDB = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "Pinecone", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -182,7 +191,9 @@ const PineconeDB = { const chunks = []; const { pineconeIndex } = await this.connect(); const pineconeNamespace = pineconeIndex.namespace(namespace); - console.log("Inserting vectorized chunks into Pinecone."); + logger.info("Inserting vectorized chunks into Pinecone.", { + origin: "Pinecone", + }); for (const chunk of toChunks(vectors, 100)) { chunks.push(chunk); await pineconeNamespace.upsert([...chunk]); @@ -193,7 +204,9 @@ const PineconeDB = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "Pinecone", + }); return { vectorized: false, error: e.message }; } }, diff --git a/server/utils/vectorDbProviders/qdrant/index.js b/server/utils/vectorDbProviders/qdrant/index.js index 36550f098fd..fb5fc5b115d 100644 --- a/server/utils/vectorDbProviders/qdrant/index.js +++ b/server/utils/vectorDbProviders/qdrant/index.js @@ -5,6 +5,7 @@ const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { v4: uuidv4 } = require("uuid"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const QDrant = { name: "QDrant", @@ -71,8 +72,9 @@ const QDrant = { responses.forEach((response) => { if (response.score < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(response?.payload))) { - console.log( - "QDrant: A source was filtered from context as it's parent document is pinned." 
+ logger.info( + "A source was filtered from context as it's parent document is pinned.", + { origin: "QDrant" } ); return; } @@ -106,7 +108,9 @@ const QDrant = { namespaceExists: async function (client, namespace = null) { if (!namespace) throw new Error("No namespace value provided."); const collection = await client.getCollection(namespace).catch((e) => { - console.error("QDrant::namespaceExists", e.message); + logger.error(`namespaceExists:: ${e.message}`, { + origin: "QDrant", + }); return null; }); return !!collection; @@ -145,8 +149,12 @@ const QDrant = { let vectorDimension = null; const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "QDrant", + } + ); if (skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -187,8 +195,9 @@ const QDrant = { submission.vectors.push(chunk.vector); submission.payloads.push(payload); } else { - console.error( - "The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection." + logger.error( + "The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection.", + { origin: "QDrant" } ); } }); @@ -229,7 +238,9 @@ const QDrant = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "QDrant", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -278,7 +289,9 @@ const QDrant = { if (vectors.length > 0) { const chunks = []; - console.log("Inserting vectorized chunks into QDrant collection."); + logger.info("Inserting vectorized chunks into QDrant collection.", { + origin: "QDrant", + }); for (const chunk of toChunks(vectors, 500)) chunks.push(chunk); const additionResult = await client.upsert(namespace, { @@ -298,7 +311,9 @@ const QDrant = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "QDrant", + }); return { vectorized: false, error: e.message }; } }, diff --git a/server/utils/vectorDbProviders/weaviate/index.js b/server/utils/vectorDbProviders/weaviate/index.js index 176c56d634e..f31063caa21 100644 --- a/server/utils/vectorDbProviders/weaviate/index.js +++ b/server/utils/vectorDbProviders/weaviate/index.js @@ -6,6 +6,7 @@ const { v4: uuidv4 } = require("uuid"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { camelCase } = require("../../helpers/camelcase"); const { sourceIdentifier } = require("../../chats"); +const logger = require("../../logger"); const Weaviate = { name: "Weaviate", @@ -53,7 +54,9 @@ const Weaviate = { response?.data?.Aggregate?.[camelCase(namespace)]?.[0]?.meta?.count || 0 ); } catch (e) { - console.error(`Weaviate:namespaceCountWithClient`, e.message); + logger.error(`namespaceCountWithClient:: ${e.message}`, { + origin: "Weaviate", + }); return 0; } }, @@ -70,7 +73,9 @@ const Weaviate = { response?.data?.Aggregate?.[camelCase(namespace)]?.[0]?.meta?.count || 0 ); } catch (e) { - 
console.error(`Weaviate:namespaceCountWithClient`, e.message); + logger.error(`namespaceCountWithClient:: ${e.message}`, { + origin: "Weaviate", + }); return 0; } }, @@ -109,8 +114,9 @@ const Weaviate = { } = response; if (certainty < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(rest))) { - console.log( - "Weaviate: A source was filtered from context as it's parent document is pinned." + logger.info( + "A source was filtered from context as its parent document is pinned.", + { origin: "Weaviate" } ); return; } @@ -126,7 +132,7 @@ const Weaviate = { const { classes = [] } = await client.schema.getter().do(); return classes.map((classObj) => classObj.class); } catch (e) { - console.error("Weaviate::AllNamespace", e); + logger.error(`allNamespaces:: ${e.message}`, { origin: "Weaviate" }); return []; } }, @@ -192,7 +198,12 @@ const Weaviate = { } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "Weaviate", + } + ); if (skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -236,7 +247,9 @@ const Weaviate = { const { success: additionResult, errors = [] } = await this.addVectors(client, vectors); if (!additionResult) { - console.error("Weaviate::addVectors failed to insert", errors); + logger.error(`addVectors failed to insert: ${errors.join(",")}`, { + origin: "Weaviate", + }); throw new Error("Error embedding into Weaviate"); } } @@ -269,7 +282,9 @@ const Weaviate = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "Weaviate", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -324,13 +339,17 @@ const Weaviate = { const chunks = []; for (const chunk of toChunks(vectors, 500)) chunks.push(chunk); - console.log("Inserting vectorized chunks into Weaviate collection."); + logger.info("Inserting vectorized chunks into Weaviate collection.", { + origin: "Weaviate", + }); const { success: additionResult, errors = [] } = await this.addVectors( client, vectors ); if (!additionResult) { - console.error("Weaviate::addVectors failed to insert", errors); + logger.error(`addVectors failed to insert: ${errors.join(",")}`, { + origin: "Weaviate", + }); throw new Error("Error embedding into Weaviate"); } await storeVectorResult(chunks, fullFilePath); @@ -339,7 +358,9 @@ const Weaviate = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "Weaviate", + }); return { vectorized: false, error: e.message }; } }, diff --git a/server/utils/vectorDbProviders/zilliz/index.js b/server/utils/vectorDbProviders/zilliz/index.js index cb60d2e3f0f..c33e7a97eff 100644 --- a/server/utils/vectorDbProviders/zilliz/index.js +++ b/server/utils/vectorDbProviders/zilliz/index.js @@ -10,6 +10,7 @@ const { v4: uuidv4 } = require("uuid"); const { storeVectorResult, cachedVectorInformation } = require("../../files"); const { toChunks, getEmbeddingEngineSelection } = require("../../helpers"); const { sourceIdentifier } = require("../../chats"); 
+const logger = require("../../logger"); // Zilliz is basically a copy of Milvus DB class with a different constructor // to connect to the cloud @@ -81,7 +82,9 @@ const Zilliz = { const { value } = await client .hasCollection({ collection_name: this.normalize(namespace) }) .catch((e) => { - console.error("Zilliz::namespaceExists", e.message); + logger.error(`namespaceExists:: ${e.message}}`, { + origin: "Zilliz", + }); return { value: false }; }); return value; @@ -147,7 +150,12 @@ const Zilliz = { const { pageContent, docId, ...metadata } = documentData; if (!pageContent || pageContent.length == 0) return false; - console.log("Adding new vectorized document into namespace", namespace); + logger.info( + `Adding new vectorized document into namespace: ${namespace}`, + { + origin: "Zilliz", + } + ); if (skipCache) { const cacheResult = await cachedVectorInformation(fullFilePath); if (cacheResult.exists) { @@ -203,7 +211,9 @@ const Zilliz = { }); const textChunks = await textSplitter.splitText(pageContent); - console.log("Chunks created from document:", textChunks.length); + logger.info(`Chunks created from document: ${textChunks.length}`, { + origin: "Zilliz", + }); const documentVectors = []; const vectors = []; const vectorValues = await EmbedderEngine.embedChunks(textChunks); @@ -233,7 +243,9 @@ const Zilliz = { const { client } = await this.connect(); await this.getOrCreateCollection(client, namespace, vectorDimension); - console.log("Inserting vectorized chunks into Zilliz."); + logger.info("Inserting vectorized chunks into Zilliz.", { + origin: "Zilliz", + }); for (const chunk of toChunks(vectors, 100)) { chunks.push(chunk); const insertResult = await client.insert({ @@ -260,7 +272,9 @@ const Zilliz = { await DocumentVectors.bulkInsert(documentVectors); return { vectorized: true, error: null }; } catch (e) { - console.error("addDocumentToNamespace", e.message); + logger.error(`addDocumentToNamespace:: ${e.message}`, { + origin: "Zilliz", + }); return { vectorized: false, error: e.message }; } }, @@ -347,8 +361,9 @@ const Zilliz = { response.results.forEach((match) => { if (match.score < similarityThreshold) return; if (filterIdentifiers.includes(sourceIdentifier(match.metadata))) { - console.log( - "Zilliz: A source was filtered from context as it's parent document is pinned." + logger.info( + "Zilliz: A source was filtered from context as it's parent document is pinned.", + { origin: "Zilliz" } ); return; }