From 6786f4eb49d4a4684dc57b0bf08575c06d56c53a Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Mon, 13 Nov 2023 15:13:16 -0800
Subject: [PATCH 1/4] assume default model where appropriate

---
 server/utils/AiProviders/anthropic/index.js | 2 +-
 server/utils/AiProviders/openAi/index.js    | 7 +++----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/server/utils/AiProviders/anthropic/index.js b/server/utils/AiProviders/anthropic/index.js
index dca21422bd7..34f77b6e2cc 100644
--- a/server/utils/AiProviders/anthropic/index.js
+++ b/server/utils/AiProviders/anthropic/index.js
@@ -12,7 +12,7 @@ class AnthropicLLM {
       apiKey: process.env.ANTHROPIC_API_KEY,
     });
     this.anthropic = anthropic;
-    this.model = process.env.ANTHROPIC_MODEL_PREF;
+    this.model = process.env.ANTHROPIC_MODEL_PREF || "claude-2";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
diff --git a/server/utils/AiProviders/openAi/index.js b/server/utils/AiProviders/openAi/index.js
index 1a5072f2c3c..80c6c35a690 100644
--- a/server/utils/AiProviders/openAi/index.js
+++ b/server/utils/AiProviders/openAi/index.js
@@ -11,7 +11,7 @@ class OpenAiLLM extends OpenAiEmbedder {
       apiKey: process.env.OPEN_AI_KEY,
     });
     this.openai = new OpenAIApi(config);
-    this.model = process.env.OPEN_MODEL_PREF;
+    this.model = process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -103,15 +103,14 @@ Context:
   }
 
   async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
-    const model = process.env.OPEN_MODEL_PREF;
-    if (!(await this.isValidChatCompletionModel(model)))
+    if (!(await this.isValidChatCompletionModel(this.model)))
       throw new Error(
         `OpenAI chat: ${model} is not valid for chat completion!`
       );
 
     const textResponse = await this.openai
       .createChatCompletion({
-        model,
+        model: this.model,
         temperature: Number(workspace?.openAiTemp ?? 0.7),
         n: 1,
         messages: await this.compressMessages(

From 7e9f04a16fb1d9e1d6a3762a7d4a40ec171b80f4 Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Mon, 13 Nov 2023 15:16:10 -0800
Subject: [PATCH 2/4] merge with master and fix other model refs

---
 server/utils/AiProviders/lmStudio/index.js | 4 ++--
 server/utils/AiProviders/openAi/index.js   | 9 ++++-----
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/server/utils/AiProviders/lmStudio/index.js b/server/utils/AiProviders/lmStudio/index.js
index e0ccc316edb..4d9770e6651 100644
--- a/server/utils/AiProviders/lmStudio/index.js
+++ b/server/utils/AiProviders/lmStudio/index.js
@@ -73,7 +73,7 @@ Context:
   async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
     if (!this.model)
       throw new Error(
-        `LMStudio chat: ${model} is not valid or defined for chat completion!`
+        `LMStudio chat: ${this.model} is not valid or defined for chat completion!`
       );
 
     const textResponse = await this.lmstudio
@@ -110,7 +110,7 @@ Context:
   async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
     if (!this.model)
       throw new Error(
-        `LMStudio chat: ${model} is not valid or defined for chat completion!`
+        `LMStudio chat: ${this.model} is not valid or defined for chat completion!`
       );
 
     const streamRequest = await this.lmstudio.createChatCompletion(
diff --git a/server/utils/AiProviders/openAi/index.js b/server/utils/AiProviders/openAi/index.js
index b0ddda05151..0c5b7116dd2 100644
--- a/server/utils/AiProviders/openAi/index.js
+++ b/server/utils/AiProviders/openAi/index.js
@@ -109,7 +109,7 @@ Context:
   async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
     if (!(await this.isValidChatCompletionModel(this.model)))
       throw new Error(
-        `OpenAI chat: ${model} is not valid for chat completion!`
+        `OpenAI chat: ${this.model} is not valid for chat completion!`
       );
 
     const textResponse = await this.openai
@@ -144,15 +144,14 @@ Context:
   }
 
   async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
-    const model = process.env.OPEN_MODEL_PREF;
-    if (!(await this.isValidChatCompletionModel(model)))
+    if (!(await this.isValidChatCompletionModel(this.model)))
       throw new Error(
-        `OpenAI chat: ${model} is not valid for chat completion!`
+        `OpenAI chat: ${this.model} is not valid for chat completion!`
       );
 
     const streamRequest = await this.openai.createChatCompletion(
       {
-        model,
+        model: this.model,
         stream: true,
         temperature: Number(workspace?.openAiTemp ?? 0.7),
         n: 1,

From 4e4f9ad4c6d1afa4c4d2e6c1b97d83ce62c41b18 Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Mon, 13 Nov 2023 15:21:06 -0800
Subject: [PATCH 3/4] disallow robots

---
 server/index.js | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/server/index.js b/server/index.js
index 5b22cca9173..01473d111e3 100644
--- a/server/index.js
+++ b/server/index.js
@@ -77,6 +77,11 @@ if (process.env.NODE_ENV !== "development") {
   app.use("/", function (_, response) {
     response.sendFile(path.join(__dirname, "public", "index.html"));
   });
+
+  app.get("/robots.txt", function (_, response) {
+    response.type("text/plain");
+    response.send("User-agent: *\nDisallow: /").end();
+  });
 }
 
 app.use(

From 59519402360eb3259bc74791a2e4b8d5dc94d613 Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Mon, 13 Nov 2023 15:22:04 -0800
Subject: [PATCH 4/4] add public file

---
 frontend/public/robots.txt | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 frontend/public/robots.txt

diff --git a/frontend/public/robots.txt b/frontend/public/robots.txt
new file mode 100644
index 00000000000..77470cb39f0
--- /dev/null
+++ b/frontend/public/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
\ No newline at end of file