Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2024-11-19 20:50:09 +01:00)

merge with master

commit 28069040f3
parent ae01785220
@@ -3,7 +3,6 @@ import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import System from "@/models/system";
import showToast from "@/utils/toast";
import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import OpenAiLogo from "@/media/llmprovider/openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import AnthropicLogo from "@/media/llmprovider/anthropic.png";

@@ -20,7 +19,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";

@@ -153,14 +151,6 @@ export default function GeneralLLMPreference() {
      options: <MistralOptions settings={settings} />,
      description: "Run open source models from Mistral AI.",
    },
    {
      name: "Native",
      value: "native",
      logo: AnythingLLMIcon,
      options: <NativeLLMOptions settings={settings} />,
      description:
        "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
    },
  ];

  return (
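The last hunk above edits the array of provider entries that drives the LLM provider picker on the settings page. For orientation only, here is a hypothetical entry showing the shape those objects share; the provider name, logo, and options component below are placeholders, not code from this commit:

// Hypothetical example of the entry shape used by the provider array above.
// "ExampleAI", ExampleAiLogo, and ExampleAiOptions are placeholders for
// illustration only; they are not part of the codebase. `settings` is the
// component state object passed to every options form in the diff.
const exampleEntry = {
  name: "ExampleAI",
  value: "exampleai", // string identifier for this provider
  logo: ExampleAiLogo, // imported image asset shown on the picker card
  options: <ExampleAiOptions settings={settings} />, // provider-specific settings form
  description: "Placeholder entry illustrating the required fields.",
};

Each entry evidently pairs a display card (name, logo, description) with the provider-specific options form rendered once that provider is selected.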
@@ -8,7 +8,6 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import LocalAiLogo from "@/media/llmprovider/localai.png";
import TogetherAILogo from "@/media/llmprovider/togetherai.png";
import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import MistralLogo from "@/media/llmprovider/mistral.jpeg";
import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";

@@ -16,7 +15,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import MistralOptions from "@/components/LLMSelection/MistralOptions";

@@ -128,14 +126,6 @@ export default function LLMPreference({
      options: <MistralOptions settings={settings} />,
      description: "Run open source models from Mistral AI.",
    },
    {
      name: "Native",
      value: "native",
      logo: AnythingLLMIcon,
      options: <NativeLLMOptions settings={settings} />,
      description:
        "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
    },
  ];

  function handleForward() {
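The onboarding page above consumes the same kind of provider array. As a rough sketch of how such a list is typically rendered into a picker (the local names LLMS, selectedLLM, and setSelectedLLM and the markup are assumptions, not the commit's actual renderer):

{/* Map each provider entry to a selectable card, then render the chosen
    provider's options form underneath. All names here are assumptions. */}
{LLMS.map((llm) => (
  <button
    key={llm.value}
    type="button"
    onClick={() => setSelectedLLM(llm.value)}
    className={selectedLLM === llm.value ? "selected" : ""}
  >
    <img src={llm.logo} alt={`${llm.name} logo`} />
    <div>
      <p>{llm.name}</p>
      <p>{llm.description}</p>
    </div>
  </button>
))}
{LLMS.find((llm) => llm.value === selectedLLM)?.options}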
@@ -3,10 +3,10 @@ const fs = require("fs");
process.env.NODE_ENV === "development"
  ? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
  : require("dotenv").config({
      path: process.env.STORAGE_DIR
        ? path.resolve(process.env.STORAGE_DIR, ".env")
        : path.resolve(__dirname, ".env"),
    });
      path: process.env.STORAGE_DIR
        ? path.resolve(process.env.STORAGE_DIR, ".env")
        : path.resolve(__dirname, ".env"),
    });

const { viewLocalFiles, normalizePath } = require("../utils/files");
const { purgeDocument, purgeFolder } = require("../utils/files/purgeDocument");
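The server hunk above is the dotenv bootstrap: in development it loads `.env.development` (a relative path), otherwise it loads `.env` from STORAGE_DIR when that is set, falling back to a `.env` beside the file itself (__dirname). The same resolution order, restated as a small helper purely for readability; resolveEnvPath is an illustrative name, not part of the codebase:

const path = require("path");

// Illustrative restatement of the env-file resolution order in the hunk above.
function resolveEnvPath() {
  if (process.env.NODE_ENV === "development") {
    // dotenv resolves a relative path against process.cwd()
    return `.env.${process.env.NODE_ENV}`;
  }
  return process.env.STORAGE_DIR
    ? path.resolve(process.env.STORAGE_DIR, ".env") // persistent storage location
    : path.resolve(__dirname, ".env"); // fallback beside this file
}

require("dotenv").config({ path: resolveEnvPath() });

Passing absolute paths in the non-development branches keeps the lookup independent of the process working directory, which matters when the server is started from outside its own folder.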