Mirror of https://github.com/Mintplex-Labs/anything-llm.git
1173 dynamic cache openrouter (#1176)

* patch agent invocation rule
* Add dynamic model cache from OpenRouter API for context length and available models

Parent: 9c00db7d3b
Commit: ac6ca13f60
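For orientation, here is a minimal usage sketch (not part of the commit) of how the dynamic cache introduced below is consumed. It assumes OPENROUTER_API_KEY is set, a Node runtime with global fetch, and a require path matching your checkout; the model id is only an example.

// Minimal sketch, assuming this file sits at the repo root and the provider
// module lives at server/utils/AiProviders/openRouter (path assumed).
const { OpenRouterLLM } = require("./server/utils/AiProviders/openRouter");

(async () => {
  // init() runs the private #syncModels(), which (re)writes
  // storage/models/openrouter/models.json and .cached_at when the cache
  // is missing or older than one week.
  const openrouter = await new OpenRouterLLM(null, "openrouter/auto").init();

  // models() reads the cached JSON back from disk (empty object if absent).
  const available = openrouter.models();
  console.log(`${Object.keys(available).length} OpenRouter models cached`);

  // promptWindowLimit() looks the configured model up in that cache and
  // falls back to 4096 tokens when the model is not found.
  console.log("Context window:", openrouter.promptWindowLimit());
})();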
useGetProviderModels hook:
@@ -40,7 +40,7 @@ function groupModels(models) {
   }, {});
 }
 
-const groupedProviders = ["togetherai", "openai"];
+const groupedProviders = ["togetherai", "openai", "openrouter"];
 export default function useGetProviderModels(provider = null) {
   const [defaultModels, setDefaultModels] = useState([]);
   const [customModels, setCustomModels] = useState([]);
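The hunk above only shows the tail of groupModels and the provider allow-list, so the following is a hypothetical illustration (not code from this repo) of what grouping a provider's models by organization generally looks like for entries of the { id, name, organization } shape the server returns; the reducer body and field handling are assumptions.

// Hypothetical illustration only; the real groupModels body is not shown in this hunk.
function groupByOrganization(models) {
  return models.reduce((acc, model) => {
    const key = model.organization || "Other";
    if (!acc[key]) acc[key] = [];
    acc[key].push(model);
    return acc;
  }, {});
}

// groupByOrganization([{ id: "openrouter/auto", name: "Auto (best for prompt)", organization: "Openrouter" }])
// => { Openrouter: [ { id: "openrouter/auto", name: "Auto (best for prompt)", organization: "Openrouter" } ] }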
server/storage/models/.gitignore (vendored, 1 line added):
@@ -1,3 +1,4 @@
 Xenova
 downloaded/*
 !downloaded/.placeholder
+openrouter
OpenRouter provider module (AiProviders/openRouter):
@@ -5,11 +5,9 @@ const {
   writeResponseChunk,
   clientAbortedHandler,
 } = require("../../helpers/chat/responses");
-
-function openRouterModels() {
-  const { MODELS } = require("./models.js");
-  return MODELS || {};
-}
+const fs = require("fs");
+const path = require("path");
+const { safeJsonParse } = require("../../http");
 
 class OpenRouterLLM {
   constructor(embedder = null, modelPreference = null) {
@@ -17,8 +15,9 @@ class OpenRouterLLM {
     if (!process.env.OPENROUTER_API_KEY)
       throw new Error("No OpenRouter API key was set.");
 
+    this.basePath = "https://openrouter.ai/api/v1";
     const config = new Configuration({
-      basePath: "https://openrouter.ai/api/v1",
+      basePath: this.basePath,
       apiKey: process.env.OPENROUTER_API_KEY,
       baseOptions: {
         headers: {
@@ -38,6 +37,81 @@ class OpenRouterLLM {
 
     this.embedder = !embedder ? new NativeEmbedder() : embedder;
     this.defaultTemp = 0.7;
+
+    const cacheFolder = path.resolve(
+      process.env.STORAGE_DIR
+        ? path.resolve(process.env.STORAGE_DIR, "models", "openrouter")
+        : path.resolve(__dirname, `../../../storage/models/openrouter`)
+    );
+    fs.mkdirSync(cacheFolder, { recursive: true });
+    this.cacheModelPath = path.resolve(cacheFolder, "models.json");
+    this.cacheAtPath = path.resolve(cacheFolder, ".cached_at");
+  }
+
+  log(text, ...args) {
+    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
+  }
+
+  async init() {
+    await this.#syncModels();
+    return this;
+  }
+
+  // This checks if the .cached_at file has a timestamp that is more than 1Week (in millis)
+  // from the current date. If it is, then we will refetch the API so that all the models are up
+  // to date.
+  #cacheIsStale() {
+    const MAX_STALE = 6.048e8; // 1 Week in MS
+    if (!fs.existsSync(this.cacheAtPath)) return true;
+    const now = Number(new Date());
+    const timestampMs = Number(fs.readFileSync(this.cacheAtPath));
+    return now - timestampMs > MAX_STALE;
+  }
+
+  // The OpenRouter model API has a lot of models, so we cache this locally in the directory
+  // as if the cache directory JSON file is stale or does not exist we will fetch from API and store it.
+  // This might slow down the first request, but we need the proper token context window
+  // for each model and this is a constructor property - so we can really only get it if this cache exists.
+  // We used to have this as a chore, but given there is an API to get the info - this makes little sense.
+  async #syncModels() {
+    if (fs.existsSync(this.cacheModelPath) && !this.#cacheIsStale())
+      return false;
+
+    this.log(
+      "Model cache is not present or stale. Fetching from OpenRouter API."
+    );
+    await fetch(`${this.basePath}/models`, {
+      method: "GET",
+      headers: {
+        "Content-Type": "application/json",
+      },
+    })
+      .then((res) => res.json())
+      .then(({ data = [] }) => {
+        const models = {};
+        data.forEach((model) => {
+          models[model.id] = {
+            id: model.id,
+            name: model.name,
+            organization:
+              model.id.split("/")[0].charAt(0).toUpperCase() +
+              model.id.split("/")[0].slice(1),
+            maxLength: model.context_length,
+          };
+        });
+        fs.writeFileSync(this.cacheModelPath, JSON.stringify(models), {
+          encoding: "utf-8",
+        });
+        fs.writeFileSync(this.cacheAtPath, String(Number(new Date())), {
+          encoding: "utf-8",
+        });
+        return models;
+      })
+      .catch((e) => {
+        console.error(e);
+        return {};
+      });
+    return;
+  }
 
   #appendContext(contextTexts = []) {
@@ -52,8 +126,12 @@ class OpenRouterLLM {
     );
   }
 
-  allModelInformation() {
-    return openRouterModels();
+  models() {
+    if (!fs.existsSync(this.cacheModelPath)) return {};
+    return safeJsonParse(
+      fs.readFileSync(this.cacheModelPath, { encoding: "utf-8" }),
+      {}
+    );
   }
 
   streamingEnabled() {
@@ -61,12 +139,13 @@ class OpenRouterLLM {
   }
 
   promptWindowLimit() {
-    const availableModels = this.allModelInformation();
+    const availableModels = this.models();
    return availableModels[this.model]?.maxLength || 4096;
   }
 
   async isValidChatCompletionModel(model = "") {
-    const availableModels = this.allModelInformation();
+    await this.#syncModels();
+    const availableModels = this.models();
     return availableModels.hasOwnProperty(model);
   }
 
@@ -343,5 +422,4 @@ class OpenRouterLLM {
 
 module.exports = {
   OpenRouterLLM,
-  openRouterModels,
 };
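For reference, a sketch of what the on-disk cache produced by #syncModels() looks like. The shape follows the mapping in the diff above; the concrete entries and context lengths come from the live OpenRouter API at runtime and will vary, so the example values below are illustrative (taken from the static list this PR deletes).

// Shape of storage/models/openrouter/models.json after #syncModels() runs.
// One entry per model returned by GET https://openrouter.ai/api/v1/models;
// the entry below is an example only.
const exampleCache = {
  "openrouter/auto": {
    id: "openrouter/auto",
    name: "Auto (best for prompt)",
    organization: "Openrouter", // capitalized first path segment of the id
    maxLength: 128000, // model.context_length from the API
  },
  // ...
};

// The sibling .cached_at file stores Number(new Date()) at write time; the
// cache counts as stale once now - cachedAt exceeds 6.048e8 ms (one week).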
Static OpenRouter model list (models.js, entire file deleted):
@@ -1,778 +0,0 @@
The deleted file exported a hand-maintained map, const MODELS = { ... }; module.exports.MODELS = MODELS;, with one entry per model of the shape { id, name, organization, maxLength }. In every entry the organization value was the capitalized first path segment of the id (e.g. "Openrouter", "Mistralai", "Openai"), so the removed entries are listed compactly below.

id | name | maxLength
openrouter/auto | Auto (best for prompt) | 128000
nousresearch/nous-capybara-7b:free | Nous: Capybara 7B (free) | 4096
mistralai/mistral-7b-instruct:free | Mistral 7B Instruct (free) | 32768
openchat/openchat-7b:free | OpenChat 3.5 (free) | 8192
gryphe/mythomist-7b:free | MythoMist 7B (free) | 32768
undi95/toppy-m-7b:free | Toppy M 7B (free) | 4096
openrouter/cinematika-7b:free | Cinematika 7B (alpha) (free) | 8000
google/gemma-7b-it:free | Google: Gemma 7B (free) | 8192
jebcarter/psyfighter-13b | Psyfighter 13B | 4096
koboldai/psyfighter-13b-2 | Psyfighter v2 13B | 4096
intel/neural-chat-7b | Neural Chat 7B v3.1 | 4096
haotian-liu/llava-13b | Llava 13B | 2048
nousresearch/nous-hermes-2-vision-7b | Nous: Hermes 2 Vision 7B (alpha) | 4096
meta-llama/llama-2-13b-chat | Meta: Llama v2 13B Chat | 4096
migtissera/synthia-70b | Synthia 70B | 8192
pygmalionai/mythalion-13b | Pygmalion: Mythalion 13B | 8192
xwin-lm/xwin-lm-70b | Xwin 70B | 8192
alpindale/goliath-120b | Goliath 120B | 6144
neversleep/noromaid-20b | Noromaid 20B | 8192
gryphe/mythomist-7b | MythoMist 7B | 32768
sophosympatheia/midnight-rose-70b | Midnight Rose 70B | 4096
undi95/remm-slerp-l2-13b:extended | ReMM SLERP 13B (extended) | 6144
mancer/weaver | Mancer: Weaver (alpha) | 8000
nousresearch/nous-hermes-llama2-13b | Nous: Hermes 13B | 4096
nousresearch/nous-capybara-7b | Nous: Capybara 7B | 4096
meta-llama/codellama-34b-instruct | Meta: CodeLlama 34B Instruct | 8192
codellama/codellama-70b-instruct | Meta: CodeLlama 70B Instruct | 2048
phind/phind-codellama-34b | Phind: CodeLlama 34B v2 | 4096
teknium/openhermes-2-mistral-7b | OpenHermes 2 Mistral 7B | 4096
teknium/openhermes-2.5-mistral-7b | OpenHermes 2.5 Mistral 7B | 4096
undi95/remm-slerp-l2-13b | ReMM SLERP 13B | 4096
openrouter/cinematika-7b | Cinematika 7B (alpha) | 8000
01-ai/yi-34b-chat | Yi 34B Chat | 4096
01-ai/yi-34b | Yi 34B (base) | 4096
01-ai/yi-6b | Yi 6B (base) | 4096
togethercomputer/stripedhyena-nous-7b | StripedHyena Nous 7B | 32768
togethercomputer/stripedhyena-hessian-7b | StripedHyena Hessian 7B (base) | 32768
mistralai/mixtral-8x7b | Mixtral 8x7B (base) | 32768
nousresearch/nous-hermes-yi-34b | Nous: Hermes 2 Yi 34B | 4096
nousresearch/nous-hermes-2-mixtral-8x7b-sft | Nous: Hermes 2 Mixtral 8x7B SFT | 32000
nousresearch/nous-hermes-2-mistral-7b-dpo | Nous: Hermes 2 Mistral 7B DPO | 8192
open-orca/mistral-7b-openorca | Mistral OpenOrca 7B | 8192
huggingfaceh4/zephyr-7b-beta | Hugging Face: Zephyr 7B | 4096
openai/gpt-3.5-turbo | OpenAI: GPT-3.5 Turbo | 16385
openai/gpt-3.5-turbo-0125 | OpenAI: GPT-3.5 Turbo 16k | 16385
openai/gpt-3.5-turbo-1106 | OpenAI: GPT-3.5 Turbo 16k (older v1106) | 16385
openai/gpt-3.5-turbo-0613 | OpenAI: GPT-3.5 Turbo (older v0613) | 4095
openai/gpt-3.5-turbo-0301 | OpenAI: GPT-3.5 Turbo (older v0301) | 4095
openai/gpt-3.5-turbo-16k | OpenAI: GPT-3.5 Turbo 16k | 16385
openai/gpt-4-turbo | OpenAI: GPT-4 Turbo | 128000
openai/gpt-4-turbo-preview | OpenAI: GPT-4 Turbo Preview | 128000
openai/gpt-4-1106-preview | OpenAI: GPT-4 Turbo (older v1106) | 128000
openai/gpt-4 | OpenAI: GPT-4 | 8191
openai/gpt-4-0314 | OpenAI: GPT-4 (older v0314) | 8191
openai/gpt-4-32k | OpenAI: GPT-4 32k | 32767
openai/gpt-4-32k-0314 | OpenAI: GPT-4 32k (older v0314) | 32767
openai/gpt-4-vision-preview | OpenAI: GPT-4 Vision | 128000
openai/gpt-3.5-turbo-instruct | OpenAI: GPT-3.5 Turbo Instruct | 4095
google/palm-2-chat-bison | Google: PaLM 2 Chat | 25804
google/palm-2-codechat-bison | Google: PaLM 2 Code Chat | 20070
google/palm-2-chat-bison-32k | Google: PaLM 2 Chat 32k | 91750
google/palm-2-codechat-bison-32k | Google: PaLM 2 Code Chat 32k | 91750
google/gemini-pro | Google: Gemini Pro 1.0 | 91728
google/gemini-pro-vision | Google: Gemini Pro Vision 1.0 | 45875
google/gemini-pro-1.5 | Google: Gemini Pro 1.5 (preview) | 2800000
perplexity/pplx-70b-online | Perplexity: PPLX 70B Online | 4096
perplexity/pplx-7b-online | Perplexity: PPLX 7B Online | 4096
perplexity/pplx-7b-chat | Perplexity: PPLX 7B Chat | 8192
perplexity/pplx-70b-chat | Perplexity: PPLX 70B Chat | 4096
perplexity/sonar-small-chat | Perplexity: Sonar 7B | 16384
perplexity/sonar-medium-chat | Perplexity: Sonar 8x7B | 16384
perplexity/sonar-small-online | Perplexity: Sonar 7B Online | 12000
perplexity/sonar-medium-online | Perplexity: Sonar 8x7B Online | 12000
fireworks/mixtral-8x22b-instruct-preview | Fireworks Mixtral 8x22B Instruct OH (preview) | 8192
anthropic/claude-3-opus | Anthropic: Claude 3 Opus | 200000
anthropic/claude-3-sonnet | Anthropic: Claude 3 Sonnet | 200000
anthropic/claude-3-haiku | Anthropic: Claude 3 Haiku | 200000
anthropic/claude-3-opus:beta | Anthropic: Claude 3 Opus (self-moderated) | 200000
anthropic/claude-3-sonnet:beta | Anthropic: Claude 3 Sonnet (self-moderated) | 200000
anthropic/claude-3-haiku:beta | Anthropic: Claude 3 Haiku (self-moderated) | 200000
meta-llama/llama-2-70b-chat | Meta: Llama v2 70B Chat | 4096
nousresearch/nous-capybara-34b | Nous: Capybara 34B | 32768
jondurbin/airoboros-l2-70b | Airoboros 70B | 4096
jondurbin/bagel-34b | Bagel 34B v0.2 | 8000
austism/chronos-hermes-13b | Chronos Hermes 13B v2 | 4096
mistralai/mistral-7b-instruct | Mistral 7B Instruct | 32768
gryphe/mythomax-l2-13b | MythoMax 13B | 4096
openchat/openchat-7b | OpenChat 3.5 | 8192
undi95/toppy-m-7b | Toppy M 7B | 4096
lizpreciatior/lzlv-70b-fp16-hf | lzlv 70B | 4096
mistralai/mixtral-8x7b-instruct | Mixtral 8x7B Instruct | 32768
cognitivecomputations/dolphin-mixtral-8x7b | Dolphin 2.6 Mixtral 8x7B 🐬 | 32000
neversleep/noromaid-mixtral-8x7b-instruct | Noromaid Mixtral 8x7B Instruct | 8000
nousresearch/nous-hermes-2-mixtral-8x7b-dpo | Nous: Hermes 2 Mixtral 8x7B DPO | 32000
rwkv/rwkv-5-world-3b | RWKV v5 World 3B | 10000
recursal/rwkv-5-3b-ai-town | RWKV v5 3B AI Town | 10000
recursal/eagle-7b | RWKV v5: Eagle 7B | 10000
google/gemma-7b-it | Google: Gemma 7B | 8192
databricks/dbrx-instruct | Databricks: DBRX 132B Instruct | 32768
huggingfaceh4/zephyr-orpo-141b-a35b | Zephyr 141B-A35B | 65536
anthropic/claude-2 | Anthropic: Claude v2 | 200000
anthropic/claude-2.1 | Anthropic: Claude v2.1 | 200000
anthropic/claude-2.0 | Anthropic: Claude v2.0 | 100000
anthropic/claude-instant-1 | Anthropic: Claude Instant v1 | 100000
anthropic/claude-instant-1.2 | Anthropic: Claude Instant v1.2 | 100000
anthropic/claude-1 | Anthropic: Claude v1 | 100000
anthropic/claude-1.2 | Anthropic: Claude (older v1) | 100000
anthropic/claude-instant-1.0 | Anthropic: Claude Instant (older v1) | 100000
anthropic/claude-instant-1.1 | Anthropic: Claude Instant (older v1.1) | 100000
anthropic/claude-2:beta | Anthropic: Claude v2 (self-moderated) | 200000
anthropic/claude-2.1:beta | Anthropic: Claude v2.1 (self-moderated) | 200000
anthropic/claude-2.0:beta | Anthropic: Claude v2.0 (self-moderated) | 100000
anthropic/claude-instant-1:beta | Anthropic: Claude Instant v1 (self-moderated) | 100000
mistralai/mixtral-8x22b | Mistral: Mixtral 8x22B (base) | 65536
huggingfaceh4/zephyr-7b-beta:free | Hugging Face: Zephyr 7B (free) | 4096
mistralai/mixtral-8x7b-instruct:nitro | Mixtral 8x7B Instruct (nitro) | 32768
meta-llama/llama-2-70b-chat:nitro | Meta: Llama v2 70B Chat (nitro) | 4096
gryphe/mythomax-l2-13b:nitro | MythoMax 13B (nitro) | 4096
mistralai/mistral-7b-instruct:nitro | Mistral 7B Instruct (nitro) | 32768
google/gemma-7b-it:nitro | Google: Gemma 7B (nitro) | 8192
databricks/dbrx-instruct:nitro | Databricks: DBRX 132B Instruct (nitro) | 32768
gryphe/mythomax-l2-13b:extended | MythoMax 13B (extended) | 8192
mistralai/mistral-tiny | Mistral Tiny | 32000
mistralai/mistral-small | Mistral Small | 32000
mistralai/mistral-medium | Mistral Medium | 32000
mistralai/mistral-large | Mistral Large | 32000
cohere/command | Cohere: Command | 4096
cohere/command-r | Cohere: Command R | 128000
cohere/command-r-plus | Cohere: Command R+ | 128000
Deleted .gitignore (1 line):
@@ -1 +0,0 @@
-*.json
Deleted collection script parse.mjs (37 lines):
@@ -1,37 +0,0 @@
-// OpenRouter has lots of models we can use so we use this script
-// to cache all the models. We can see the list of all the models
-// here: https://openrouter.ai/docs#models
-
-// To run, cd into this directory and run `node parse.mjs`
-// copy outputs into the export in ../models.js
-
-// Update the date below if you run this again because OpenRouter added new models.
-// Last Collected: Apr 14, 2024
-
-import fs from "fs";
-
-async function parseChatModels() {
-  const models = {};
-  const response = await fetch("https://openrouter.ai/api/v1/models");
-  const data = await response.json();
-  data.data.forEach((model) => {
-    models[model.id] = {
-      id: model.id,
-      name: model.name,
-      // capitalize first letter
-      organization:
-        model.id.split("/")[0].charAt(0).toUpperCase() +
-        model.id.split("/")[0].slice(1),
-      maxLength: model.context_length,
-    };
-  });
-
-  fs.writeFileSync(
-    "chat_models.json",
-    JSON.stringify(models, null, 2),
-    "utf-8"
-  );
-  return models;
-}
-
-parseChatModels();
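With parse.mjs removed there is no manual collection step; #syncModels() now hits the same endpoint at runtime. If you still want to preview the catalog by hand, an equivalent one-off script might look like this (a sketch, assuming Node 18+ for the global fetch; the output filename is arbitrary):

// One-off preview of what the runtime cache will contain (ESM, Node 18+).
import fs from "fs";

const response = await fetch("https://openrouter.ai/api/v1/models");
const { data = [] } = await response.json();

const models = Object.fromEntries(
  data.map((model) => [
    model.id,
    {
      id: model.id,
      name: model.name,
      organization:
        model.id.split("/")[0].charAt(0).toUpperCase() +
        model.id.split("/")[0].slice(1),
      maxLength: model.context_length,
    },
  ])
);

fs.writeFileSync("chat_models.json", JSON.stringify(models, null, 2), "utf-8");
console.log(`Wrote ${Object.keys(models).length} models to chat_models.json`);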
Custom model discovery helper:
@@ -1,4 +1,4 @@
-const { openRouterModels } = require("../AiProviders/openRouter");
+const { OpenRouterLLM } = require("../AiProviders/openRouter");
 const { perplexityModels } = require("../AiProviders/perplexity");
 const { togetherAiModels } = require("../AiProviders/togetherAi");
 const SUPPORT_CUSTOM_MODELS = [
@@ -232,7 +232,8 @@ async function getPerplexityModels() {
 }
 
 async function getOpenRouterModels() {
-  const knownModels = await openRouterModels();
+  const openrouter = await new OpenRouterLLM().init();
+  const knownModels = openrouter.models();
   if (!Object.keys(knownModels).length === 0)
     return { models: [], error: null };
 
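The hunk ends before the mapping step, so here is a caller-side sketch (hypothetical, not part of the diff) that exercises the updated flow directly; it assumes OPENROUTER_API_KEY is set and that the provider module path matches your checkout.

// Hypothetical caller sketch mirroring the updated getOpenRouterModels() flow.
const { OpenRouterLLM } = require("./server/utils/AiProviders/openRouter");

async function listOpenRouterModels() {
  // init() refreshes the on-disk cache when needed; models() reads it back.
  const openrouter = await new OpenRouterLLM().init();
  const knownModels = openrouter.models(); // { [id]: { id, name, organization, maxLength } }
  return Object.values(knownModels).map(({ id, name, organization }) => ({
    id,
    name,
    organization,
  }));
}

listOpenRouterModels().then((models) =>
  console.log(`OpenRouter currently exposes ${models.length} chat models`)
);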