diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
index 64bd2cffa..5a71aac18 100644
--- a/server/utils/AiProviders/perplexity/models.js
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -1,59 +1,29 @@
 const MODELS = {
-  "llama-3-sonar-small-32k-online": {
-    id: "llama-3-sonar-small-32k-online",
-    name: "llama-3-sonar-small-32k-online",
-    maxLength: 28000,
-  },
-  "llama-3-sonar-small-32k-chat": {
-    id: "llama-3-sonar-small-32k-chat",
-    name: "llama-3-sonar-small-32k-chat",
-    maxLength: 32768,
-  },
-  "llama-3-sonar-large-32k-online": {
-    id: "llama-3-sonar-large-32k-online",
-    name: "llama-3-sonar-large-32k-online",
-    maxLength: 28000,
-  },
-  "llama-3-sonar-large-32k-chat": {
-    id: "llama-3-sonar-large-32k-chat",
-    name: "llama-3-sonar-large-32k-chat",
-    maxLength: 32768,
-  },
   "llama-3.1-sonar-small-128k-online": {
     id: "llama-3.1-sonar-small-128k-online",
     name: "llama-3.1-sonar-small-128k-online",
     maxLength: 127072,
   },
+  "llama-3.1-sonar-large-128k-online": {
+    id: "llama-3.1-sonar-large-128k-online",
+    name: "llama-3.1-sonar-large-128k-online",
+    maxLength: 127072,
+  },
+  "llama-3.1-sonar-huge-128k-online": {
+    id: "llama-3.1-sonar-huge-128k-online",
+    name: "llama-3.1-sonar-huge-128k-online",
+    maxLength: 127072,
+  },
   "llama-3.1-sonar-small-128k-chat": {
     id: "llama-3.1-sonar-small-128k-chat",
     name: "llama-3.1-sonar-small-128k-chat",
     maxLength: 131072,
   },
-  "llama-3.1-sonar-large-128k-online": {
-    id: "llama-3.1-sonar-large-128k-online",
-    name: "llama-3.1-sonar-large-128k-online",
-    maxLength: 127072,
-  },
   "llama-3.1-sonar-large-128k-chat": {
     id: "llama-3.1-sonar-large-128k-chat",
     name: "llama-3.1-sonar-large-128k-chat",
     maxLength: 131072,
   },
-  "llama-3-8b-instruct": {
-    id: "llama-3-8b-instruct",
-    name: "llama-3-8b-instruct",
-    maxLength: 8192,
-  },
-  "llama-3-70b-instruct": {
-    id: "llama-3-70b-instruct",
-    name: "llama-3-70b-instruct",
-    maxLength: 8192,
-  },
-  "mixtral-8x7b-instruct": {
-    id: "mixtral-8x7b-instruct",
-    name: "mixtral-8x7b-instruct",
-    maxLength: 16384,
-  },
   "llama-3.1-8b-instruct": {
     id: "llama-3.1-8b-instruct",
     name: "llama-3.1-8b-instruct",
@@ -64,11 +34,6 @@ const MODELS = {
     name: "llama-3.1-70b-instruct",
     maxLength: 131072,
   },
-  "llama-3.1-sonar-huge-128k-chat": {
-    id: "llama-3.1-sonar-huge-128k-chat",
-    name: "llama-3.1-sonar-huge-128k-chat",
-    maxLength: 127072,
-  },
 };
 
 module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
index ec9081efe..fc3ab5b6f 100644
--- a/server/utils/AiProviders/perplexity/scripts/chat_models.txt
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -1,16 +1,9 @@
 | Model                                | Parameter Count | Context Length | Model Type      |
 | :----------------------------------- | :-------------- | :------------- | :-------------- |
-| `llama-3-sonar-small-32k-online`     | 8B              | 28,000         | Chat Completion |
-| `llama-3-sonar-small-32k-chat`       | 8B              | 32,768         | Chat Completion |
-| `llama-3-sonar-large-32k-online`     | 70B             | 28,000         | Chat Completion |
-| `llama-3-sonar-large-32k-chat`       | 70B             | 32,768         | Chat Completion |
 | `llama-3.1-sonar-small-128k-online`  | 8B              | 127,072        | Chat Completion |
-| `llama-3.1-sonar-small-128k-chat`    | 8B              | 131,072        | Chat Completion |
 | `llama-3.1-sonar-large-128k-online`  | 70B             | 127,072        | Chat Completion |
+| `llama-3.1-sonar-huge-128k-online`   | 405B            | 127,072        | Chat Completion |
+| `llama-3.1-sonar-small-128k-chat`    | 8B              | 131,072        | Chat Completion |
 | `llama-3.1-sonar-large-128k-chat`    | 70B             | 131,072        | Chat Completion |
-| `llama-3-8b-instruct`                | 8B              | 8,192          | Chat Completion |
-| `llama-3-70b-instruct`               | 70B             | 8,192          | Chat Completion |
-| `mixtral-8x7b-instruct`              | 8x7B            | 16,384         | Chat Completion |
 | `llama-3.1-8b-instruct`              | 8B              | 131,072        | Chat Completion |
-| `llama-3.1-70b-instruct`             | 70B             | 131,072        | Chat Completion |
-| `llama-3.1-sonar-huge-128k-chat`     | 405B            | 127,072        | Chat Completion |
\ No newline at end of file
+| `llama-3.1-70b-instruct`             | 70B             | 131,072        | Chat Completion |
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/parse.mjs b/server/utils/AiProviders/perplexity/scripts/parse.mjs
index e4f27276f..3cb115883 100644
--- a/server/utils/AiProviders/perplexity/scripts/parse.mjs
+++ b/server/utils/AiProviders/perplexity/scripts/parse.mjs
@@ -8,7 +8,7 @@
 // copy outputs into the export in ../models.js
 // Update the date below if you run this again because Perplexity added new models.
 
-// Last Collected: Jul 31, 2024
+// Last Collected: Sept 12, 2024
 
 import fs from "fs";
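
For context, the `MODELS` map above is generated from the markdown table checked in as `chat_models.txt`. Below is a minimal sketch of that table-to-object conversion, assuming only the row format visible in this diff; it is not the repository's actual `parse.mjs` (whose body is not shown here), and the relative file path is illustrative:

```js
// Minimal sketch (not the actual parse.mjs): turn a chat_models.txt row like
//   | `llama-3.1-sonar-small-128k-online` | 8B | 127,072 | Chat Completion |
// into the { id, name, maxLength } shape exported from ../models.js.
import fs from "fs";

const rows = fs
  .readFileSync("chat_models.txt", "utf8")
  .split("\n")
  .filter((line) => line.includes("`")); // skip the header and separator rows

const MODELS = {};
for (const row of rows) {
  // cells: ["", "`model`", "param count", "context length", "model type", ""]
  const cells = row.split("|").map((cell) => cell.trim());
  const id = cells[1].replace(/`/g, "");
  MODELS[id] = {
    id,
    name: id,
    maxLength: Number(cells[3].replace(/,/g, "")), // "127,072" -> 127072
  };
}

// Copy this output into the export in ../models.js
console.log(JSON.stringify(MODELS, null, 2));
```

If run from the `scripts/` directory, the relative path resolves against the checked-in `chat_models.txt`, so the printed object should mirror the entries shown in the `models.js` hunk above.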