anything-llm/server/utils/AiProviders/perplexity/scripts/parse.mjs
Sean Hatfield 80ced5eba4
[FEAT] PerplexityAI Support (#778)
* add LLM support for perplexity

* update README & example env

* fix ENV keys in example env files

* slight changes for QA of perplexity support

* Update Perplexity AI name

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-02-22 12:48:57 -08:00


// Perplexity does not provide a simple REST API to list its models,
// so we keep a table copied from their documentation at
// https://docs.perplexity.ai/edit/model-cards that we can
// then parse to get all models in a format that makes sense.
// Why this does not exist is so bizarre, but whatever.
// To run, cd into this directory and run `node parse.mjs`,
// then copy the output into the export in ../models.js.
// Update the date below whenever you run this again, since Perplexity
// may have added new models since the last collection.
// Last Collected: Feb 22, 2024
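//
// A data row copied into chat_models.txt is expected to look like a
// two-column markdown table row; the model name, footnote marker, and
// context length below are illustrative only, not pulled from the live docs:
//
//   | `sonar-small-chat` [1] | 16384 |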

import fs from "fs";

function parseChatModels() {
  const models = {};
  const tableString = fs.readFileSync("chat_models.txt", { encoding: "utf-8" });
  // Drop the markdown table header and divider rows, keep only data rows.
  const rows = tableString.split("\n").slice(2);

  rows.forEach((row) => {
    let [model, contextLength] = row
      .split("|")
      .slice(1, -1)
      .map((text) => text.trim());
    // Skip blank or malformed rows (e.g. a trailing newline in the file).
    if (!model || !contextLength) return;
    // Strip backticks and footnote markers like [1] from the model name,
    // and footnote markers from the context length before casting to Number.
    model = model.replace(/`|\s*\[\d+\]\s*/g, "");
    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
    if (model && maxLength) {
      models[model] = {
        id: model,
        name: model,
        maxLength: maxLength,
      };
    }
  });

  fs.writeFileSync(
    "chat_models.json",
    JSON.stringify(models, null, 2),
    "utf-8"
  );
  return models;
}

parseChatModels();
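
// For reference, running `node parse.mjs` here writes chat_models.json with
// one entry per parsed row, in the shape that gets pasted into the export in
// ../models.js. The id and maxLength values below are illustrative only:
//
//   "sonar-small-chat": {
//     "id": "sonar-small-chat",
//     "name": "sonar-small-chat",
//     "maxLength": 16384
//   }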