Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2024-11-11 01:10:11 +01:00)
Support single-model providers for workspace LLMs (#1179)

commit 323c080b5e
parent df17fbda36
@@ -31,8 +31,6 @@ export default function GenericOpenAiOptions({ settings }) {
           spellCheck={false}
         />
       </div>
-      {!settings?.credentialsOnly && (
-        <>
       <div className="flex flex-col w-60">
         <label className="text-white text-sm font-semibold block mb-4">
           Chat Model Name
@@ -63,8 +61,6 @@ export default function GenericOpenAiOptions({ settings }) {
           autoComplete="off"
         />
       </div>
-        </>
-      )}
     </div>
   );
 }
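For context, `settings?.credentialsOnly` is the flag touched in the two hunks above: when a caller only wants credentials collected, non-credential fields such as Chat Model Name are hidden behind it. A minimal sketch of that gating pattern, with placeholder field components (only the guard expression itself comes from the diff):

```jsx
// Sketch of the credentialsOnly gating pattern; ApiKeyField and ModelFields
// are invented placeholders for the real inputs -- only the guard expression
// itself appears in the diff above.
const ApiKeyField = () => <input type="password" name="GenericOpenAiKey" />;
const ModelFields = () => <input type="text" name="GenericOpenAiModelPref" />;

export default function ExampleProviderOptions({ settings }) {
  return (
    <div className="flex gap-4 flex-wrap">
      <ApiKeyField />
      {!settings?.credentialsOnly && (
        <>
          {/* Non-credential fields stay hidden when the caller only needs
              credentials collected (e.g. a credentials-only setup flow). */}
          <ModelFields />
        </>
      )}
    </div>
  );
}
```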
@@ -159,6 +159,12 @@ export const AVAILABLE_LLM_PROVIDERS = [
     options: (settings) => <GenericOpenAiOptions settings={settings} />,
     description:
       "Connect to any OpenAi-compatible service via a custom configuration",
+    requiredConfig: [
+      "GenericOpenAiBasePath",
+      "GenericOpenAiModelPref",
+      "GenericOpenAiTokenLimit",
+      "GenericOpenAiKey",
+    ],
   },
   {
     name: "Native",
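A hypothetical sketch of how a requiredConfig list like the one added above could be consumed; the helper name and the shape of the settings object are assumptions, not part of this commit:

```js
// Returns the keys from a provider's requiredConfig that are still unset in
// the system-level settings. Helper name and settings shape are illustrative.
function missingRequiredConfig(provider, systemSettings = {}) {
  return (provider.requiredConfig || []).filter(
    (key) => systemSettings[key] === undefined || systemSettings[key] === ""
  );
}

// Example with the generic-openai entry from the hunk above:
const genericOpenAi = {
  requiredConfig: [
    "GenericOpenAiBasePath",
    "GenericOpenAiModelPref",
    "GenericOpenAiTokenLimit",
    "GenericOpenAiKey",
  ],
};

missingRequiredConfig(genericOpenAi, {
  GenericOpenAiBasePath: "http://localhost:8080/v1",
});
// => ["GenericOpenAiModelPref", "GenericOpenAiTokenLimit", "GenericOpenAiKey"]
```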
@@ -5,6 +5,9 @@ import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
 import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
 import ChatModelSelection from "../ChatModelSelection";
 
+// Some providers can only be associated with a single model.
+// In that case there is no selection to be made so we can just move on.
+const NO_MODEL_SELECTION = ["default", "huggingface", "generic-openai"];
 const DISABLED_PROVIDERS = ["azure", "lmstudio", "native"];
 const LLM_DEFAULT = {
   name: "System default",
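A minimal sketch of the idea behind NO_MODEL_SELECTION; the helper function is purely illustrative, since the component inlines this check in the hunk that follows:

```js
const NO_MODEL_SELECTION = ["default", "huggingface", "generic-openai"];

// Single-model (or fixed-model) providers offer nothing to choose, so the
// workspace settings UI can skip the ChatModelSelection dropdown for them.
function needsModelPicker(provider) {
  return !NO_MODEL_SELECTION.includes(provider);
}

needsModelPicker("openai");         // true  -- the user still picks a chat model
needsModelPicker("generic-openai"); // false -- the model comes from system settings
```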
@@ -145,7 +148,7 @@ export default function WorkspaceLLMSelection({
           </button>
         )}
       </div>
-      {selectedLLM !== "default" && (
+      {!NO_MODEL_SELECTION.includes(selectedLLM) && (
         <div className="mt-4 flex flex-col gap-y-1">
           <ChatModelSelection
             provider={selectedLLM}
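Taken together, wiring up another single-model provider under this scheme would look roughly like the following; the provider name and config keys are invented for illustration and do not appear in this commit:

```js
// 1) Register the provider with the system-level settings it depends on.
//    "acme-llm" and its keys are made up, mirroring the generic-openai entry.
const ACME_PROVIDER = {
  name: "Acme LLM",
  value: "acme-llm",
  requiredConfig: ["AcmeLlmBasePath", "AcmeLlmModelPref", "AcmeLlmApiKey"],
};

// 2) Add it to the single-model list so workspaces skip the model dropdown
//    and rely on the system-level model preference instead.
const NO_MODEL_SELECTION = ["default", "huggingface", "generic-openai", "acme-llm"];
```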