diff --git a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
index 45ad5fd70..269495c03 100644
--- a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -3,7 +3,6 @@ import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import System from "@/models/system";
import showToast from "@/utils/toast";
-import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import OpenAiLogo from "@/media/llmprovider/openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import AnthropicLogo from "@/media/llmprovider/anthropic.png";
@@ -20,7 +19,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
-import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
@@ -153,14 +151,6 @@ export default function GeneralLLMPreference() {
       options: <MistralOptions settings={settings} />,
description: "Run open source models from Mistral AI.",
},
- {
- name: "Native",
- value: "native",
- logo: AnythingLLMIcon,
-      options: <NativeLLMOptions settings={settings} />,
- description:
- "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
- },
];
return (
diff --git a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
index 6970dfa1f..f2c5a95d3 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
@@ -8,7 +8,6 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import LocalAiLogo from "@/media/llmprovider/localai.png";
import TogetherAILogo from "@/media/llmprovider/togetherai.png";
-import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
import MistralLogo from "@/media/llmprovider/mistral.jpeg";
import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -16,7 +15,6 @@ import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
-import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import MistralOptions from "@/components/LLMSelection/MistralOptions";
@@ -128,14 +126,6 @@ export default function LLMPreference({
       options: <MistralOptions settings={settings} />,
description: "Run open source models from Mistral AI.",
},
- {
- name: "Native",
- value: "native",
- logo: AnythingLLMIcon,
-      options: <NativeLLMOptions settings={settings} />,
- description:
- "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
- },
];
function handleForward() {
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index b8ab8fc73..2bc3462ef 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -3,10 +3,10 @@ const fs = require("fs");
process.env.NODE_ENV === "development"
? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
: require("dotenv").config({
-    path: process.env.STORAGE_DIR
-      ? path.resolve(process.env.STORAGE_DIR, ".env")
-      : path.resolve(__dirname, ".env"),
-  });
+      path: process.env.STORAGE_DIR
+        ? path.resolve(process.env.STORAGE_DIR, ".env")
+        : path.resolve(__dirname, ".env"),
+    });
const { viewLocalFiles, normalizePath } = require("../utils/files");
const { purgeDocument, purgeFolder } = require("../utils/files/purgeDocument");