// Providers that expose a user-selectable custom model list.
const SUPPORT_CUSTOM_MODELS = ["openai", "localai", "native-llm"];

// Resolve the list of user-selectable models for a provider. Returns
// { models, error } where `models` is an array of model descriptors and
// `error` is null on success.
async function getCustomModels(provider = "", apiKey = null, basePath = null) {
  if (!SUPPORT_CUSTOM_MODELS.includes(provider))
    return { models: [], error: "Invalid provider for custom models" };

  switch (provider) {
    case "openai":
      return await openAiModels(apiKey);
    case "localai":
      // Forward the apiKey as well so key-protected LocalAI instances work.
      return await localAIModels(basePath, apiKey);
    case "native-llm":
      return nativeLLMModels();
    default:
      return { models: [], error: "Invalid provider for custom models" };
  }
}
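
// Illustrative usage sketch (not part of the original module): one way a
// caller might consume getCustomModels and its { models, error } result. The
// provider choice and LocalAI basePath below are assumptions for the example.
async function exampleListCustomModelIds() {
  const { models, error } = await getCustomModels(
    "localai",
    null,
    "http://localhost:8080/v1" // assumed LocalAI OpenAI-compatible endpoint
  );
  if (error) {
    console.error("Failed to list custom models:", error);
    return [];
  }
  return models.map((model) => model.id);
}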

// List models from the OpenAI API, keeping only the account's own (fine-tuned)
// models by excluding everything owned by OpenAI or the system.
async function openAiModels(apiKey = null) {
  const { Configuration, OpenAIApi } = require("openai");
  const config = new Configuration({
    apiKey: apiKey || process.env.OPEN_AI_KEY,
  });
  const openai = new OpenAIApi(config);
  const models = (
    await openai
      .listModels()
      .then((res) => res.data.data)
      .catch((e) => {
        console.error(`OpenAI:listModels`, e.message);
        return [];
      })
  ).filter(
    (model) => !model.owned_by.includes("openai") && model.owned_by !== "system"
  );

  return { models, error: null };
}
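
// For illustration (values are assumptions): an entry like
//   { id: "ft:gpt-3.5-turbo:acme::abc123", owned_by: "user-abc" }
// survives the filter above, while { id: "gpt-4", owned_by: "openai" } and
// system-owned models are dropped, leaving only the account's fine-tunes.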

// List models from a LocalAI instance. LocalAI exposes an OpenAI-compatible
// API, so we reuse the OpenAI client pointed at the given basePath and only
// attach an API key when one is supplied.
async function localAIModels(basePath = null, apiKey = null) {
  const { Configuration, OpenAIApi } = require("openai");
  const config = new Configuration({
    basePath,
    ...(!!apiKey ? { apiKey } : {}),
  });
  const openai = new OpenAIApi(config);
  const models = await openai
    .listModels()
    .then((res) => res.data.data)
    .catch((e) => {
      console.error(`LocalAI:listModels`, e.message);
      return [];
    });

  return { models, error: null };
}
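
// Assumption worth noting: basePath should point at the root of LocalAI's
// OpenAI-compatible API (e.g. "http://localhost:8080/v1"), since the client
// issues the list request against `${basePath}/models`.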

// Enumerate GGUF model files that have been downloaded for the native
// (built-in) LLM runner.
function nativeLLMModels() {
  const fs = require("fs");
  const path = require("path");
  const storageDir = path.resolve(
    process.env.STORAGE_DIR
      ? path.resolve(process.env.STORAGE_DIR, "models", "downloaded")
      : path.resolve(__dirname, `../../storage/models/downloaded`)
  );
  if (!fs.existsSync(storageDir))
    return { models: [], error: "No model/downloaded storage folder found." };

  const files = fs
    .readdirSync(storageDir)
    .filter((file) => file.toLowerCase().includes(".gguf"))
    .map((file) => {
      return { id: file, name: file };
    });
  return { models: files, error: null };
}
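
// For reference, with STORAGE_DIR set this scans a layout like the following
// (filename illustrative) and reports each file as both id and display name:
//   $STORAGE_DIR/models/downloaded/llama-2-7b.Q4_K_M.gguf
//   -> { id: "llama-2-7b.Q4_K_M.gguf", name: "llama-2-7b.Q4_K_M.gguf" }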

module.exports = {
  getCustomModels,
};
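
// Example consumer (import path illustrative):
//   const { getCustomModels } = require("./customModels");
//   const { models, error } = await getCustomModels("openai", apiKey);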