
Add reasoning flag for azure models with "default" fallback #3128


Merged: 1 commit, Feb 5, 2025
15 changes: 15 additions & 0 deletions frontend/src/components/LLMSelection/AzureAiOptions/index.jsx
@@ -71,6 +71,21 @@ export default function AzureAiOptions({ settings }) {
             </option>
           </select>
         </div>
+
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Model Type
+          </label>
+          <select
+            name="AzureOpenAiModelType"
+            defaultValue={settings?.AzureOpenAiModelType || "default"}
+            className="border-none bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            required={true}
+          >
+            <option value="default">Default</option>
+            <option value="reasoning">Reasoning</option>
+          </select>
+        </div>
       </div>
     </div>
   );
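
For orientation, here is a minimal sketch of how the value of this new select would reach the backend, assuming the usual pattern of posting the field name along with the other Azure settings. The endpoint path, handler name, and deployment name below are illustrative assumptions, not taken from this PR; only the `AzureOpenAiModelType` field name and its two values come from the diff above.

```js
async function saveAzureSettings() {
  // The field name matters: updateENV.js (last diff in this PR) maps
  // "AzureOpenAiModelType" onto the AZURE_OPENAI_MODEL_TYPE env key.
  const settingsPayload = {
    AzureOpenAiModelPref: "my-o3-mini-deployment", // deployment name is user-chosen
    AzureOpenAiModelType: "reasoning", // value of the new select: "default" or "reasoning"
  };

  // Hypothetical endpoint; the project's actual settings route may differ.
  await fetch("/api/system/update-env", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(settingsPayload),
  });
}
```
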
1 change: 1 addition & 0 deletions server/models/systemSettings.js
@@ -442,6 +442,7 @@ const SystemSettings = {
       AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
       AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
       AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
+      AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",

       // Anthropic Keys
       AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
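
A quick sketch of the "default" fallback this one-line change adds: with the env var unset, existing installs keep reporting "default" and nothing changes; only an explicit "reasoning" value alters behavior downstream.

```js
// Minimal illustration of the fallback expression used above.
const modelType = process.env.AZURE_OPENAI_MODEL_TYPE || "default";
console.log(modelType); // "default" when the variable is unset or empty, "reasoning" once opted in
```
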
22 changes: 10 additions & 12 deletions server/utils/AiProviders/azureOpenAi/index.js
@@ -25,6 +25,8 @@ class AzureOpenAiLLM {
       }
     );
     this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
+    this.isOTypeModel =
+      process.env.AZURE_OPENAI_MODEL_TYPE === "reasoning" || false;
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -34,20 +36,10 @@
     this.embedder = embedder ?? new NativeEmbedder();
     this.defaultTemp = 0.7;
     this.#log(
-      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens. API-Version: ${this.apiVersion}`
+      `Initialized. Model "${this.model}" @ ${this.promptWindowLimit()} tokens.\nAPI-Version: ${this.apiVersion}.\nModel Type: ${this.isOTypeModel ? "reasoning" : "default"}`
     );
   }

-  /**
-   * Check if the model is an o# type model.
-   * NOTE: This is HIGHLY dependent on if the user named their deployment "o1" or "o3-mini" or something else to match the model name.
-   * It cannot be determined by the model name alone since model deployments can be named arbitrarily.
-   * @returns {boolean}
-   */
-  get isOTypeModel() {
-    return this.model.startsWith("o");
-  }
-
   #log(text, ...args) {
     console.log(`\x1b[32m[AzureOpenAi]\x1b[0m ${text}`, ...args);
   }
@@ -65,7 +57,13 @@
   }

   streamingEnabled() {
-    if (this.isOTypeModel && this.model !== "o3-mini") return false;
+    // Streaming of reasoning models is not supported
+    if (this.isOTypeModel) {
+      this.#log(
+        "Streaming will be disabled. AZURE_OPENAI_MODEL_TYPE is set to 'reasoning'."
+      );
+      return false;
+    }
     return "streamGetChatCompletion" in this;
   }

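
A minimal sketch of how a caller would consume the new flag, assuming the provider is already configured through the usual Azure env vars. Only `streamGetChatCompletion`, `defaultTemp`, and `streamingEnabled()` appear in the diff above; the require path, `getChatCompletion`, the call signatures, and the surrounding chat flow are assumptions for illustration.

```js
// Hypothetical caller, not AnythingLLM's actual chat pipeline.
const { AzureOpenAiLLM } = require("../AiProviders/azureOpenAi"); // path assumed

async function answer(messages) {
  const llm = new AzureOpenAiLLM();

  // With AZURE_OPENAI_MODEL_TYPE=reasoning, streamingEnabled() now returns
  // false (and logs why), so the caller falls back to a blocking completion.
  if (llm.streamingEnabled()) {
    return await llm.streamGetChatCompletion(messages, { temperature: llm.defaultTemp });
  }
  // getChatCompletion is assumed to exist on the class; it is not shown in this diff.
  return await llm.getChatCompletion(messages, { temperature: llm.defaultTemp });
}
```
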
9 changes: 9 additions & 0 deletions server/utils/helpers/updateENV.js
@@ -35,6 +35,15 @@ const KEY_MAPPING = {
     envKey: "EMBEDDING_MODEL_PREF",
     checks: [isNotEmpty],
   },
+  AzureOpenAiModelType: {
+    envKey: "AZURE_OPENAI_MODEL_TYPE",
+    checks: [
+      (input) =>
+        ["default", "reasoning"].includes(input)
+          ? null
+          : "Invalid model type. Must be one of: default, reasoning.",
+    ],
+  },

   // Anthropic Settings
   AnthropicApiKey: {
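
The validator added above returns null when the value is one of the two allowed strings and an error string otherwise. A quick sketch of how it evaluates inputs; the surrounding update loop is not shown here and is assumed to reject values that yield an error string.

```js
const validModelType = (input) =>
  ["default", "reasoning"].includes(input)
    ? null
    : "Invalid model type. Must be one of: default, reasoning.";

console.log(validModelType("default"));   // null -> value accepted
console.log(validModelType("reasoning")); // null -> value accepted
console.log(validModelType("o1"));        // error string -> presumably rejected by the update flow
```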