anything-llm/frontend/src/components/Modals/MangeWorkspace/Settings/ChatModelPreference/useGetProviderModels.js

import System from "@/models/system";
import { useEffect, useState } from "react";
// Providers which cannot use this feature for workspace<>model selection
export const DISABLED_PROVIDERS = ["azure", "lmstudio"];
const PROVIDER_DEFAULT_MODELS = {
  openai: [
    "gpt-3.5-turbo",
    "gpt-3.5-turbo-1106",
    "gpt-4",
    "gpt-4-turbo-preview",
    "gpt-4-1106-preview",
    "gpt-4-32k",
  ],
  gemini: ["gemini-pro"],
  anthropic: ["claude-2", "claude-instant-1"],
  azure: [],
  lmstudio: [],
  localai: [],
  ollama: [],
  togetherai: [],
  native: [],
};
// For togetherAi, which has a large model list - we subgroup the options
// by their creator organization (eg: Meta, Mistral, etc)
// which makes selection easier to read.
function groupModels(models) {
  return models.reduce((acc, model) => {
    acc[model.organization] = acc[model.organization] || [];
    acc[model.organization].push(model);
    return acc;
  }, {});
}
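// Illustrative example only (model names below are made up; the real shape
// comes from System.customModels, which for togetherai includes an
// `organization` field per model):
//
//   groupModels([
//     { id: "meta-llama/Llama-2-70b-chat-hf", organization: "Meta" },
//     { id: "mistralai/Mixtral-8x7B-Instruct-v0.1", organization: "Mistral" },
//     { id: "meta-llama/Llama-2-13b-chat-hf", organization: "Meta" },
//   ]);
//   // => { Meta: [/* 70b, 13b */], Mistral: [/* Mixtral */] }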
// Returns the static default models plus any custom (user-loaded) models for
// the given LLM provider, along with a loading flag while the custom model
// list is fetched from the server.
export default function useGetProviderModels(provider = null) {
  const [defaultModels, setDefaultModels] = useState([]);
  const [customModels, setCustomModels] = useState([]);
  const [loading, setLoading] = useState(true);

  useEffect(() => {
    async function fetchProviderModels() {
      if (!provider) return;
      const { models = [] } = await System.customModels(provider);
      if (PROVIDER_DEFAULT_MODELS.hasOwnProperty(provider))
        setDefaultModels(PROVIDER_DEFAULT_MODELS[provider]);
      // togetherai custom models are grouped by organization; all other
      // providers return a flat array.
      provider === "togetherai"
        ? setCustomModels(groupModels(models))
        : setCustomModels(models);
      setLoading(false);
    }
    fetchProviderModels();
  }, [provider]);

  return { defaultModels, customModels, loading };
}
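
// Minimal consumption sketch (hypothetical component, not part of this file —
// shown only to indicate how the hook's return values might be used; note that
// `customModels` is an object keyed by organization when provider === "togetherai"
// and a flat array otherwise):
//
//   function ModelPicker({ provider }) {
//     const { defaultModels, customModels, loading } = useGetProviderModels(provider);
//     if (loading || DISABLED_PROVIDERS.includes(provider)) return null;
//     return (
//       <select name="chatModel">
//         {defaultModels.map((model) => (
//           <option key={model} value={model}>
//             {model}
//           </option>
//         ))}
//       </select>
//     );
//   }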