diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
index 5a71aac1876..6405653676e 100644
--- a/server/utils/AiProviders/perplexity/models.js
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -1,39 +1,14 @@
 const MODELS = {
-  "llama-3.1-sonar-small-128k-online": {
-    id: "llama-3.1-sonar-small-128k-online",
-    name: "llama-3.1-sonar-small-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-large-128k-online": {
-    id: "llama-3.1-sonar-large-128k-online",
-    name: "llama-3.1-sonar-large-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-huge-128k-online": {
-    id: "llama-3.1-sonar-huge-128k-online",
-    name: "llama-3.1-sonar-huge-128k-online",
-    maxLength: 127072,
-  },
-  "llama-3.1-sonar-small-128k-chat": {
-    id: "llama-3.1-sonar-small-128k-chat",
-    name: "llama-3.1-sonar-small-128k-chat",
-    maxLength: 131072,
-  },
-  "llama-3.1-sonar-large-128k-chat": {
-    id: "llama-3.1-sonar-large-128k-chat",
-    name: "llama-3.1-sonar-large-128k-chat",
-    maxLength: 131072,
-  },
-  "llama-3.1-8b-instruct": {
-    id: "llama-3.1-8b-instruct",
-    name: "llama-3.1-8b-instruct",
-    maxLength: 131072,
-  },
-  "llama-3.1-70b-instruct": {
-    id: "llama-3.1-70b-instruct",
-    name: "llama-3.1-70b-instruct",
-    maxLength: 131072,
-  },
+  "sonar-pro": {
+    "id": "sonar-pro",
+    "name": "sonar-pro",
+    "maxLength": 200000
+  },
+  "sonar": {
+    "id": "sonar",
+    "name": "sonar",
+    "maxLength": 127072
+  }
 };
 
 module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
index fc3ab5b6f59..a19b067b17a 100644
--- a/server/utils/AiProviders/perplexity/scripts/chat_models.txt
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -1,9 +1,4 @@
 | Model | Parameter Count | Context Length | Model Type |
 | :---------------------------------- | :-------------- | :------------- | :-------------- |
-| `llama-3.1-sonar-small-128k-online` | 8B | 127,072 | Chat Completion |
-| `llama-3.1-sonar-large-128k-online` | 70B | 127,072 | Chat Completion |
-| `llama-3.1-sonar-huge-128k-online` | 405B | 127,072 | Chat Completion |
-| `llama-3.1-sonar-small-128k-chat` | 8B | 131,072 | Chat Completion |
-| `llama-3.1-sonar-large-128k-chat` | 70B | 131,072 | Chat Completion |
-| `llama-3.1-8b-instruct` | 8B | 131,072 | Chat Completion |
-| `llama-3.1-70b-instruct` | 70B | 131,072 | Chat Completion |
\ No newline at end of file
+| `sonar-pro` | 8B | 200,000 | Chat Completion |
+| `sonar` | 8B | 127,072 | Chat Completion |
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/parse.mjs b/server/utils/AiProviders/perplexity/scripts/parse.mjs
index 3cb11588344..a5ba3af732c 100644
--- a/server/utils/AiProviders/perplexity/scripts/parse.mjs
+++ b/server/utils/AiProviders/perplexity/scripts/parse.mjs
@@ -8,7 +8,12 @@
 // copy outputs into the export in ../models.js
 
 // Update the date below if you run this again because Perplexity added new models.
-// Last Collected: Sept 12, 2024
+// Last Collected: Jan 23, 2025
+
+// UPDATE: Jan 23, 2025
+// The table is no longer available on the website, but Perplexity has deprecated the
+// old models so now we can just update the chat_models.txt file with the new models
+// manually and then run this script to get the new models.
 
 import fs from "fs";
 