diff --git a/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx b/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
index 49e45cc6cfb..0274175beeb 100644
--- a/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
+        {/* Assumed markup (sketch): a model-type dropdown wired to the new
+            AzureOpenAiModelType setting ("default" | "reasoning"). */}
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Model Type
+          </label>
+          <select
+            name="AzureOpenAiModelType"
+            defaultValue={settings?.AzureOpenAiModelType || "default"}
+            className="bg-theme-settings-input-bg text-white text-sm rounded-lg block w-full p-2.5"
+          >
+            <option value="default">Default</option>
+            <option value="reasoning">Reasoning</option>
+          </select>
+        </div>
);
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 42ce8723bc7..7f7d0ea34de 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -442,6 +442,8 @@ const SystemSettings = {
AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
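+      // "default" or "reasoning"; "reasoning" disables streaming (see AzureOpenAiLLM).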
+ AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",
// Anthropic Keys
AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
diff --git a/server/utils/AiProviders/azureOpenAi/index.js b/server/utils/AiProviders/azureOpenAi/index.js
index 6b726c88dc0..f15d6ecfdae 100644
--- a/server/utils/AiProviders/azureOpenAi/index.js
+++ b/server/utils/AiProviders/azureOpenAi/index.js
@@ -25,6 +25,9 @@ class AzureOpenAiLLM {
}
);
this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
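+    // Model type is declared via AZURE_OPENAI_MODEL_TYPE rather than inferred
+    // from the model name, since Azure deployment names are arbitrary.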
+    this.isOTypeModel = process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
@@ -34,20 +37,10 @@ class AzureOpenAiLLM {
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.#log(
- `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
+ `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
);
}
- /**
- * Check if the model is an o# type model.
- * NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
- * It cannot be determined by the model name alone since model deployments can be named arbitrarily.
- * @returns {boolean}
- */
- get isOTypeModel() {
- return this.model.startsWith("o");
- }
-
#log(text, ...args) {
console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
}
@@ -65,7 +58,13 @@ class AzureOpenAiLLM {
}
streamingEnabled() {
- if (this.isOTypeModel && this.model !== "o3-mini") return false;
+    // Streaming is not supported for reasoning models.
+    if (this.isOTypeModel) {
+      this.#log(
+        "Streaming is disabled because AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
+      );
+      return false;
+    }
return "streamGetChatCompletion" in this;
}
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index 88e9de476b5..e3c1b91403d 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -35,6 +35,16 @@ const KEY_MAPPING = {
envKey: "EMBEDDING_MODEL_PREF",
checks: [isNotEmpty],
},
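+  // Azure model type; only "default" or "reasoning" are accepted.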
+ AzureOpenAiModelType: {
+ envKey: "AZURE_OPENAI_MODEL_TYPE",
+ checks: [
+ (input) =>
+ ["default", "reasoning"].includes(input)
+ ? null
+ : "Invalid model type. Must be one of: default, reasoning.",
+ ],
+ },
// Anthropic Settings
AnthropicApiKey: {
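
Usage sketch (not part of the diff): with this change applied, an o-series
deployment is flagged explicitly through the environment rather than inferred
from its deployment name. The variable names below all appear in the diff; the
deployment name itself is hypothetical.

    # .env (server)
    OPEN_MODEL_PREF=my-o3-mini-deployment   # arbitrary Azure deployment name
    AZURE_OPENAI_TOKEN_LIMIT=4096
    AZURE_OPENAI_MODEL_TYPE=reasoning       # disables streaming in AzureOpenAiLLM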