From 7985f389050ecc1321bb721e84b019a59c2da6bb Mon Sep 17 00:00:00 2001
From: Roman
Date: Mon, 23 Sep 2024 18:32:30 -0400
Subject: [PATCH 1/2] Refactor handleDefaultStreamResponseV2 function for
 better error handling

---
 server/utils/helpers/chat/responses.js | 100 ++++++++++++++-----------
 1 file changed, 56 insertions(+), 44 deletions(-)

diff --git a/server/utils/helpers/chat/responses.js b/server/utils/helpers/chat/responses.js
index 789b21242ca..b0cd1cb7249 100644
--- a/server/utils/helpers/chat/responses.js
+++ b/server/utils/helpers/chat/responses.js
@@ -11,52 +11,64 @@ function clientAbortedHandler(resolve, fullText) {
 
 function handleDefaultStreamResponseV2(response, stream, responseProps) {
   const { uuid = uuidv4(), sources = [] } = responseProps;
-
   return new Promise(async (resolve) => {
-    let fullText = "";
-
-    // Establish listener to early-abort a streaming response
-    // in case things go sideways or the user does not like the response.
-    // We preserve the generated text but continue as if chat was completed
-    // to preserve previously generated content.
-    const handleAbort = () => clientAbortedHandler(resolve, fullText);
-    response.on("close", handleAbort);
-
-    for await (const chunk of stream) {
-      const message = chunk?.choices?.[0];
-      const token = message?.delta?.content;
-
-      if (token) {
-        fullText += token;
-        writeResponseChunk(response, {
-          uuid,
-          sources: [],
-          type: "textResponseChunk",
-          textResponse: token,
-          close: false,
-          error: false,
-        });
-      }
-
-      // LocalAi returns '' and others return null on chunks - the last chunk is not "" or null.
-      // Either way, the key `finish_reason` must be present to determine ending chunk.
-      if (
-        message?.hasOwnProperty("finish_reason") && // Got valid message and it is an object with finish_reason
-        message.finish_reason !== "" &&
-        message.finish_reason !== null
-      ) {
-        writeResponseChunk(response, {
-          uuid,
-          sources,
-          type: "textResponseChunk",
-          textResponse: "",
-          close: true,
-          error: false,
-        });
-        response.removeListener("close", handleAbort);
-        resolve(fullText);
-        break; // Break streaming when a valid finish_reason is first encountered
+    try {
+      let fullText = "";
+
+      // Establish listener to early-abort a streaming response
+      // in case things go sideways or the user does not like the response.
+      // We preserve the generated text but continue as if chat was completed
+      // to preserve previously generated content.
+      const handleAbort = () => clientAbortedHandler(resolve, fullText);
+      response.on("close", handleAbort);
+      
+      for await (const chunk of stream) {
+        const message = chunk?.choices?.[0];
+        const token = message?.delta?.content;
+
+        if (token) {
+          fullText += token;
+          writeResponseChunk(response, {
+            uuid,
+            sources: [],
+            type: "textResponseChunk",
+            textResponse: token,
+            close: false,
+            error: false,
+          });
+        }
+
+        // LocalAi returns '' and others return null on chunks - the last chunk is not "" or null.
+        // Either way, the key `finish_reason` must be present to determine ending chunk.
+        if (
+          message?.hasOwnProperty("finish_reason") && // Got valid message and it is an object with finish_reason
+          message.finish_reason !== "" &&
+          message.finish_reason !== null
+        ) {
+          writeResponseChunk(response, {
+            uuid,
+            sources,
+            type: "textResponseChunk",
+            textResponse: "",
+            close: true,
+            error: false,
+          });
+          response.removeListener("close", handleAbort);
+          resolve(fullText);
+          break; // Break streaming when a valid finish_reason is first encountered
+        }
       }
+    } catch (e) {
+      console.error(e);
+      writeResponseChunk(response, {
+        id: uuidv4(),
+        type: "abort",
+        textResponse: null,
+        sources: [],
+        close: true,
+        error: e.message,
+      });
+      response.end();
     }
   });
 }

From d64eb938b9e6a7ffbd005e4638e53b8f844c26a8 Mon Sep 17 00:00:00 2001
From: Roman
Date: Mon, 23 Sep 2024 18:59:53 -0400
Subject: [PATCH 2/2] run yarn lint

---
 server/utils/helpers/chat/responses.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/utils/helpers/chat/responses.js b/server/utils/helpers/chat/responses.js
index b0cd1cb7249..7034a85688f 100644
--- a/server/utils/helpers/chat/responses.js
+++ b/server/utils/helpers/chat/responses.js
@@ -21,7 +21,7 @@ function handleDefaultStreamResponseV2(response, stream, responseProps) {
       // to preserve previously generated content.
       const handleAbort = () => clientAbortedHandler(resolve, fullText);
       response.on("close", handleAbort);
-      
+
       for await (const chunk of stream) {
         const message = chunk?.choices?.[0];
         const token = message?.delta?.content;
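
For reviewers, this is how handleDefaultStreamResponseV2 reads with both patches applied. It is assembled directly from the hunks above, assuming uuidv4, writeResponseChunk, and clientAbortedHandler are imported or defined elsewhere in responses.js, as in the existing module (those lines are outside the hunks shown):

function handleDefaultStreamResponseV2(response, stream, responseProps) {
  const { uuid = uuidv4(), sources = [] } = responseProps;
  return new Promise(async (resolve) => {
    try {
      let fullText = "";

      // Establish listener to early-abort a streaming response
      // in case things go sideways or the user does not like the response.
      // We preserve the generated text but continue as if chat was completed
      // to preserve previously generated content.
      const handleAbort = () => clientAbortedHandler(resolve, fullText);
      response.on("close", handleAbort);

      for await (const chunk of stream) {
        const message = chunk?.choices?.[0];
        const token = message?.delta?.content;

        if (token) {
          fullText += token;
          writeResponseChunk(response, {
            uuid,
            sources: [],
            type: "textResponseChunk",
            textResponse: token,
            close: false,
            error: false,
          });
        }

        // LocalAi returns '' and others return null on chunks - the last chunk is not "" or null.
        // Either way, the key `finish_reason` must be present to determine ending chunk.
        if (
          message?.hasOwnProperty("finish_reason") && // Got valid message and it is an object with finish_reason
          message.finish_reason !== "" &&
          message.finish_reason !== null
        ) {
          writeResponseChunk(response, {
            uuid,
            sources,
            type: "textResponseChunk",
            textResponse: "",
            close: true,
            error: false,
          });
          response.removeListener("close", handleAbort);
          resolve(fullText);
          break; // Break streaming when a valid finish_reason is first encountered
        }
      }
    } catch (e) {
      // New error path from PATCH 1/2: log, emit one abort chunk, close the stream.
      console.error(e);
      writeResponseChunk(response, {
        id: uuidv4(),
        type: "abort",
        textResponse: null,
        sources: [],
        close: true,
        error: e.message,
      });
      response.end();
    }
  });
}

Note the design choice visible in the catch path: on a thrown stream error the handler logs it, emits a single abort chunk keyed with a fresh id rather than the request uuid, and ends the response via response.end() without resolving the promise or removing the close listener.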