merge with master

timothycarambat 2024-02-21 15:16:01 -08:00
parent ae01785220
commit 28069040f3
3 changed files with 4 additions and 24 deletions

View File

@@ -3,7 +3,6 @@ import Sidebar from "@/components/SettingsSidebar";
 import { isMobile } from "react-device-detect";
 import System from "@/models/system";
 import showToast from "@/utils/toast";
-import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
 import OpenAiLogo from "@/media/llmprovider/openai.png";
 import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
 import AnthropicLogo from "@/media/llmprovider/anthropic.png";
@@ -20,7 +19,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
 import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
 import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
 import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
-import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
 import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
 import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
@@ -153,14 +151,6 @@ export default function GeneralLLMPreference() {
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
-    {
-      name: "Native",
-      value: "native",
-      logo: AnythingLLMIcon,
-      options: <NativeLLMOptions settings={settings} />,
-      description:
-        "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
-    },
   ];
 
   return (

View File

@@ -8,7 +8,6 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
 import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import TogetherAILogo from "@/media/llmprovider/togetherai.png";
-import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
 import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -16,7 +15,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
 import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
 import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
 import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
-import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
 import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
 import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import MistralOptions from "@/components/LLMSelection/MistralOptions";
@@ -128,14 +126,6 @@ export default function LLMPreference({
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
-    {
-      name: "Native",
-      value: "native",
-      logo: AnythingLLMIcon,
-      options: <NativeLLMOptions settings={settings} />,
-      description:
-        "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
-    },
   ];
 
   function handleForward() {
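
Both screens build their provider picker from entries with the same shape, which is why the removal is symmetric across the two files. Below is a minimal sketch of that shape, assuming a local array named LLMS and reusing the OpenAiLogo and OpenAiOptions imports shown above; the array name and the description string are illustrative assumptions, not taken from this commit.

// Illustrative only: one entry in the provider list each screen maps over.
// Field names mirror the removed "Native" entry in the hunks above; the
// array name `LLMS` and the description text are assumptions.
const LLMS = [
  {
    name: "OpenAI",   // label rendered in the provider picker
    value: "openai",  // value persisted when the user selects this provider
    logo: OpenAiLogo, // logo image imported at the top of the file
    options: <OpenAiOptions settings={settings} />, // provider-specific settings form
    description: "Placeholder description shown under the provider name.",
  },
  // ...other providers; the deleted "Native" entry followed this same shape.
];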