529 UI update llm embedder and vectordb selection pages (#533)

* move llm, embedder, vectordb items to components folder

* add backdrop blur to search in llm, embedder, vectordb preferences

* implement searchable llm preference in settings

* implement searchable embedder in settings

* remove unused useState from embedder preferences

* implement searchable vector database in settings

* fix save changes button not appearing on change for llm, embedder, and vectordb settings pages

* sort selected items in all settings and put selected item at top of list

* do not auto-move the selected item to the top of the list

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
Authored by Sean Hatfield on 2024-01-04 18:20:58 -08:00, committed by GitHub
parent e9f7b9b79e
commit d95d1a9dfd
12 changed files with 414 additions and 550 deletions
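
Taken together, the diffs below replace the flat grid of provider cards on the LLM, embedder, and vector database settings pages with a searchable list plus a detail panel for the selected provider. A minimal sketch of that pattern, with the AnythingLLM-specific styling and component names stripped out (the names and props here are illustrative, not the project's actual API), looks roughly like this:

import { useMemo, useState } from "react";

// Illustrative entries only. The real pages build these arrays from the
// imported option components (OpenAiOptions, LocalAiOptions, LanceDBOptions, ...)
// and their logos.
const PROVIDERS = [
  { name: "OpenAI", value: "openai", description: "Hosted provider", options: <p>OpenAI settings</p> },
  { name: "LocalAI", value: "localai", description: "Self-hosted provider", options: <p>LocalAI settings</p> },
];

export default function SearchableProviderPicker({ onChoice }) {
  const [searchQuery, setSearchQuery] = useState("");
  const [selected, setSelected] = useState(null);

  // Filter providers by name as the user types. The pages below do the same
  // with a useEffect that writes into filteredLLMs / filteredEmbedders /
  // filteredVDBs state.
  const filtered = useMemo(
    () =>
      PROVIDERS.filter((p) =>
        p.name.toLowerCase().includes(searchQuery.toLowerCase())
      ),
    [searchQuery]
  );

  return (
    <div>
      <input
        type="text"
        placeholder="Search providers"
        autoComplete="off"
        onChange={(e) => setSearchQuery(e.target.value)}
        // Enter inside the search box must not submit the surrounding settings form.
        onKeyDown={(e) => e.key === "Enter" && e.preventDefault()}
      />
      {filtered.map((p) => (
        <button
          type="button"
          key={p.value}
          onClick={() => {
            setSelected(p.value);
            onChoice?.(p.value);
          }}
        >
          {p.name}: {p.description}
        </button>
      ))}
      {/* Only the selected provider's option panel is rendered below the list. */}
      {selected && PROVIDERS.find((p) => p.value === selected)?.options}
    </div>
  );
}

Because the chosen provider now lives in React state instead of a hidden form input, the save handlers in these pages set it explicitly (for example settingsData.EmbeddingEngine = selectedEmbedder) before copying the remaining FormData entries, and the onChange={() => setHasChanges(true)} listener moves off the form and onto the wrapper around the selected provider's options, so the save button still appears when those inputs change.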

View File

@ -1,4 +1,4 @@
import { useEffect, useState } from "react";
import React, { useEffect, useState } from "react";
import System from "@/models/system";
export default function LocalAiOptions({ settings }) {
@ -12,67 +12,64 @@ export default function LocalAiOptions({ settings }) {
return (
<div className="w-full flex flex-col gap-y-4">
<div className="w-full flex items-center gap-4">
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<LocalAIModelSelection
settings={settings}
apiKey={apiKey}
basePath={basePath}
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
LocalAI Base URL
</label>
<input
type="url"
name="EmbeddingBasePath"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080/v1"
defaultValue={settings?.EmbeddingBasePath}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
required={true}
autoComplete="off"
spellCheck={false}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="1000"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
Local AI API Key{" "}
<p className="!text-xs !italic !font-thin">optional</p>
</label>
</div>
<input
type="password"
name="LocalAiApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
<LocalAIModelSelection
settings={settings}
apiKey={apiKey}
basePath={basePath}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="1000"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
Local AI API Key{" "}
<p className="!text-xs !italic !font-thin">optional</p>
</label>
</div>
<input
type="password"
name="LocalAiApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</div>

View File

@ -71,12 +71,10 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold block">
Local AI API Key
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
Local AI API Key{" "}
<p className="!text-xs !italic !font-thin">optional</p>
</label>
<p className="text-xs italic text-white/60">
optional API key to use if running LocalAI with API keys.
</p>
</div>
<input

View File

@ -1,39 +0,0 @@
import React from "react";
export default function VectorDBOption({
name,
link,
description,
value,
image,
checked = false,
onClick,
}) {
return (
<div onClick={() => onClick(value)}>
<input
type="checkbox"
value={value}
className="peer hidden"
checked={checked}
readOnly={true}
formNoValidate={true}
/>
<label className="transition-all duration-300 inline-flex flex-col h-full w-60 cursor-pointer items-start justify-between rounded-2xl bg-preference-gradient border-2 border-transparent shadow-md px-5 py-4 text-white hover:bg-selected-preference-gradient hover:text-underline hover:border-white/60 peer-checked:border-white peer-checked:border-opacity-90 peer-checked:bg-selected-preference-gradient">
<div className="flex items-center">
<img src={image} alt={name} className="h-10 w-10 rounded" />
<div className="ml-4 text-sm font-semibold">{name}</div>
</div>
<div className="mt-2 text-xs font-base text-white tracking-wide">
{description}
</div>
<a
href={`https://${link}`}
className="mt-2 text-xs text-white font-medium underline"
>
{link}
</a>
</label>
</div>
);
}

View File

@ -1,4 +1,4 @@
export default function VectorDatabaseItem({
export default function VectorDBItem({
name,
value,
image,

View File

@ -8,25 +8,28 @@ import OpenAiLogo from "@/media/llmprovider/openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import LocalAiLogo from "@/media/llmprovider/localai.png";
import PreLoader from "@/components/Preloader";
import LLMProviderOption from "@/components/LLMSelection/LLMProviderOption";
import ChangeWarningModal from "@/components/ChangeWarning";
import OpenAiOptions from "@/components/EmbeddingSelection/OpenAiOptions";
import AzureAiOptions from "@/components/EmbeddingSelection/AzureAiOptions";
import LocalAiOptions from "@/components/EmbeddingSelection/LocalAiOptions";
import NativeEmbeddingOptions from "@/components/EmbeddingSelection/NativeEmbeddingOptions";
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
import { MagnifyingGlass } from "@phosphor-icons/react";
export default function GeneralEmbeddingPreference() {
const [saving, setSaving] = useState(false);
const [hasChanges, setHasChanges] = useState(false);
const [hasEmbeddings, setHasEmbeddings] = useState(false);
const [embeddingChoice, setEmbeddingChoice] = useState("openai");
const [settings, setSettings] = useState(null);
const [loading, setLoading] = useState(true);
const [searchQuery, setSearchQuery] = useState("");
const [filteredEmbedders, setFilteredEmbedders] = useState([]);
const [selectedEmbedder, setSelectedEmbedder] = useState(null);
const handleSubmit = async (e) => {
e.preventDefault();
if (
embeddingChoice !== settings?.EmbeddingEngine &&
selectedEmbedder !== settings?.EmbeddingEngine &&
hasChanges &&
hasEmbeddings
) {
@ -38,11 +41,11 @@ export default function GeneralEmbeddingPreference() {
const handleSaveSettings = async () => {
setSaving(true);
const data = new FormData(document.getElementById("embedding-form"));
const form = document.getElementById("embedding-form");
const settingsData = {};
for (let [key, value] of data.entries()) {
settingsData[key] = value;
}
const formData = new FormData(form);
settingsData.EmbeddingEngine = selectedEmbedder;
for (var [key, value] of formData.entries()) settingsData[key] = value;
const { error } = await System.updateSystem(settingsData);
if (error) {
@ -57,7 +60,7 @@ export default function GeneralEmbeddingPreference() {
};
const updateChoice = (selection) => {
setEmbeddingChoice(selection);
setSelectedEmbedder(selection);
setHasChanges(true);
};
@ -65,13 +68,52 @@ export default function GeneralEmbeddingPreference() {
async function fetchKeys() {
const _settings = await System.keys();
setSettings(_settings);
setEmbeddingChoice(_settings?.EmbeddingEngine || "openai");
setSelectedEmbedder(_settings?.EmbeddingEngine || "native");
setHasEmbeddings(_settings?.HasExistingEmbeddings || false);
setLoading(false);
}
fetchKeys();
}, []);
const EMBEDDERS = [
{
name: "AnythingLLM Embedder",
value: "native",
logo: AnythingLLMIcon,
options: <NativeEmbeddingOptions settings={settings} />,
description:
"Use the built-in embedding engine for AnythingLLM. Zero setup!",
},
{
name: "OpenAI",
value: "openai",
logo: OpenAiLogo,
options: <OpenAiOptions settings={settings} />,
description: "The standard option for most non-commercial use.",
},
{
name: "Azure OpenAI",
value: "azure",
logo: AzureOpenAiLogo,
options: <AzureAiOptions settings={settings} />,
description: "The enterprise option of OpenAI hosted on Azure services.",
},
{
name: "Local AI",
value: "localai",
logo: LocalAiLogo,
options: <LocalAiOptions settings={settings} />,
description: "Run embedding models locally on your own machine.",
},
];
useEffect(() => {
const filtered = EMBEDDERS.filter((embedder) =>
embedder.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredEmbedders(filtered);
}, [searchQuery, selectedEmbedder]);
return (
<div className="w-screen h-screen overflow-hidden bg-sidebar flex">
<ChangeWarningModal
@ -98,7 +140,6 @@ export default function GeneralEmbeddingPreference() {
<form
id="embedding-form"
onSubmit={handleSubmit}
onChange={() => setHasChanges(true)}
className="flex w-full"
>
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
@ -132,59 +173,52 @@ export default function GeneralEmbeddingPreference() {
<div className="text-white text-sm font-medium py-4">
Embedding Providers
</div>
<div className="w-full flex md:flex-wrap overflow-x-scroll gap-4">
<input
hidden={true}
name="EmbeddingEngine"
value={embeddingChoice}
/>
<LLMProviderOption
name="AnythingLLM Embedder"
value="native"
description="Use the built-in embedding engine for AnythingLLM. Zero setup!"
checked={embeddingChoice === "native"}
image={AnythingLLMIcon}
onClick={updateChoice}
/>
<LLMProviderOption
name="OpenAI"
value="openai"
link="openai.com"
description="Use OpenAI's text-embedding-ada-002 embedding model."
checked={embeddingChoice === "openai"}
image={OpenAiLogo}
onClick={updateChoice}
/>
<LLMProviderOption
name="Azure OpenAI"
value="azure"
link="azure.microsoft.com"
description="The enterprise option of OpenAI hosted on Azure services."
checked={embeddingChoice === "azure"}
image={AzureOpenAiLogo}
onClick={updateChoice}
/>
<LLMProviderOption
name="LocalAI"
value="localai"
link="localai.io"
description="Self hosted LocalAI embedding engine."
checked={embeddingChoice === "localai"}
image={LocalAiLogo}
onClick={updateChoice}
/>
</div>
<div className="mt-10 flex flex-wrap gap-4">
{embeddingChoice === "native" && <NativeEmbeddingOptions />}
{embeddingChoice === "openai" && (
<OpenAiOptions settings={settings} />
)}
{embeddingChoice === "azure" && (
<AzureAiOptions settings={settings} />
)}
{embeddingChoice === "localai" && (
<LocalAiOptions settings={settings} />
)}
<div className="w-full">
<div className="w-full relative border-slate-300/20 shadow border-4 rounded-xl text-white">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0 z-20">
<MagnifyingGlass
size={16}
weight="bold"
className="absolute left-4 z-30 text-white"
/>
<input
type="text"
placeholder="Search Embedding providers"
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
onChange={(e) => setSearchQuery(e.target.value)}
autoComplete="off"
onKeyDown={(e) => {
if (e.key === "Enter") e.preventDefault();
}}
/>
</div>
</div>
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll pb-4">
{filteredEmbedders.map((embedder) => {
return (
<EmbedderItem
key={embedder.name}
name={embedder.name}
value={embedder.value}
image={embedder.logo}
description={embedder.description}
checked={selectedEmbedder === embedder.value}
onClick={() => updateChoice(embedder.value)}
/>
);
})}
</div>
</div>
<div
onChange={() => setHasChanges(true)}
className="mt-4 flex flex-col gap-y-1"
>
{selectedEmbedder &&
EMBEDDERS.find(
(embedder) => embedder.value === selectedEmbedder
)?.options}
</div>
</div>
</>
</div>

View File

@ -12,7 +12,6 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import LocalAiLogo from "@/media/llmprovider/localai.png";
import PreLoader from "@/components/Preloader";
import LLMProviderOption from "@/components/LLMSelection/LLMProviderOption";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
@ -21,21 +20,31 @@ import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import LLMItem from "@/components/LLMSelection/LLMItem";
import { MagnifyingGlass } from "@phosphor-icons/react";
export default function GeneralLLMPreference() {
const [saving, setSaving] = useState(false);
const [hasChanges, setHasChanges] = useState(false);
const [llmChoice, setLLMChoice] = useState("openai");
const [settings, setSettings] = useState(null);
const [loading, setLoading] = useState(true);
const [searchQuery, setSearchQuery] = useState("");
const [filteredLLMs, setFilteredLLMs] = useState([]);
const [selectedLLM, setSelectedLLM] = useState(null);
const isHosted = window.location.hostname.includes("useanything.com");
const handleSubmit = async (e) => {
e.preventDefault();
setSaving(true);
const form = e.target;
const data = {};
const form = new FormData(e.target);
for (var [key, value] of form.entries()) data[key] = value;
const formData = new FormData(form);
data.LLMProvider = selectedLLM;
for (var [key, value] of formData.entries()) data[key] = value;
const { error } = await System.updateSystem(data);
setSaving(true);
if (error) {
showToast(`Failed to save LLM settings: ${error}`, "error");
} else {
@ -46,7 +55,7 @@ export default function GeneralLLMPreference() {
};
const updateLLMChoice = (selection) => {
setLLMChoice(selection);
setSelectedLLM(selection);
setHasChanges(true);
};
@ -54,12 +63,80 @@ export default function GeneralLLMPreference() {
async function fetchKeys() {
const _settings = await System.keys();
setSettings(_settings);
setLLMChoice(_settings?.LLMProvider);
setSelectedLLM(_settings?.LLMProvider);
setLoading(false);
}
fetchKeys();
}, []);
useEffect(() => {
const filtered = LLMS.filter((llm) =>
llm.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredLLMs(filtered);
}, [searchQuery, selectedLLM]);
const LLMS = [
{
name: "OpenAI",
value: "openai",
logo: OpenAiLogo,
options: <OpenAiOptions settings={settings} />,
description: "The standard option for most non-commercial use.",
},
{
name: "Azure OpenAI",
value: "azure",
logo: AzureOpenAiLogo,
options: <AzureAiOptions settings={settings} />,
description: "The enterprise option of OpenAI hosted on Azure services.",
},
{
name: "Anthropic",
value: "anthropic",
logo: AnthropicLogo,
options: <AnthropicAiOptions settings={settings} />,
description: "A friendly AI Assistant hosted by Anthropic.",
},
{
name: "Gemini",
value: "gemini",
logo: GeminiLogo,
options: <GeminiLLMOptions settings={settings} />,
description: "Google's largest and most capable AI model",
},
{
name: "Ollama",
value: "ollama",
logo: OllamaLogo,
options: <OllamaLLMOptions settings={settings} />,
description: "Run LLMs locally on your own machine.",
},
{
name: "LM Studio",
value: "lmstudio",
logo: LMStudioLogo,
options: <LMStudioOptions settings={settings} />,
description:
"Discover, download, and run thousands of cutting edge LLMs in a few clicks.",
},
{
name: "Local AI",
value: "localai",
logo: LocalAiLogo,
options: <LocalAiOptions settings={settings} />,
description: "Run LLMs locally on your own machine.",
},
{
name: "Native",
value: "native",
logo: AnythingLLMIcon,
options: <NativeLLMOptions settings={settings} />,
description:
"Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
},
];
return (
<div className="w-screen h-screen overflow-hidden bg-sidebar flex">
{!isMobile && <Sidebar />}
@ -78,11 +155,7 @@ export default function GeneralLLMPreference() {
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[26px] bg-main-gradient w-full h-full overflow-y-scroll border-4 border-accent"
>
{isMobile && <SidebarMobileHeader />}
<form
onSubmit={handleSubmit}
onChange={() => setHasChanges(true)}
className="flex w-full"
>
<form onSubmit={handleSubmit} className="flex w-full">
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
<div className="items-center flex gap-x-4">
@ -109,107 +182,51 @@ export default function GeneralLLMPreference() {
<div className="text-white text-sm font-medium py-4">
LLM Providers
</div>
<div className="w-full flex md:flex-wrap overflow-x-scroll gap-4">
<input hidden={true} name="LLMProvider" value={llmChoice} />
<LLMProviderOption
name="OpenAI"
value="openai"
link="openai.com"
description="The standard option for most non-commercial use."
checked={llmChoice === "openai"}
image={OpenAiLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="Azure OpenAI"
value="azure"
link="azure.microsoft.com"
description="The enterprise option of OpenAI hosted on Azure services."
checked={llmChoice === "azure"}
image={AzureOpenAiLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="Anthropic Claude 2"
value="anthropic"
link="anthropic.com"
description="A friendly AI Assistant hosted by Anthropic."
checked={llmChoice === "anthropic"}
image={AnthropicLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="Google Gemini"
value="gemini"
link="ai.google.dev"
description="Google's largest and most capable AI model"
checked={llmChoice === "gemini"}
image={GeminiLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="LM Studio"
value="lmstudio"
link="lmstudio.ai"
description="Discover, download, and run thousands of cutting edge LLMs in a few clicks."
checked={llmChoice === "lmstudio"}
image={LMStudioLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="Local AI"
value="localai"
link="localai.io"
description="Run LLMs locally on your own machine."
checked={llmChoice === "localai"}
image={LocalAiLogo}
onClick={updateLLMChoice}
/>
<LLMProviderOption
name="Ollama"
value="ollama"
link="ollama.ai"
description="Run LLMs locally on your own machine."
checked={llmChoice === "ollama"}
image={OllamaLogo}
onClick={updateLLMChoice}
/>
{!window.location.hostname.includes("useanything.com") && (
<LLMProviderOption
name="Custom Llama Model"
value="native"
description="Use a downloaded custom Llama model for chatting on this AnythingLLM instance."
checked={llmChoice === "native"}
image={AnythingLLMIcon}
onClick={updateLLMChoice}
/>
)}
</div>
<div className="mt-10 flex flex-wrap gap-4 max-w-[800px]">
{llmChoice === "openai" && (
<OpenAiOptions settings={settings} />
)}
{llmChoice === "azure" && (
<AzureAiOptions settings={settings} />
)}
{llmChoice === "anthropic" && (
<AnthropicAiOptions settings={settings} showAlert={true} />
)}
{llmChoice === "gemini" && (
<GeminiLLMOptions settings={settings} />
)}
{llmChoice === "lmstudio" && (
<LMStudioOptions settings={settings} showAlert={true} />
)}
{llmChoice === "localai" && (
<LocalAiOptions settings={settings} showAlert={true} />
)}
{llmChoice === "ollama" && (
<OllamaLLMOptions settings={settings} />
)}
{llmChoice === "native" && (
<NativeLLMOptions settings={settings} />
)}
<div className="w-full">
<div className="w-full relative border-slate-300/20 shadow border-4 rounded-xl text-white">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0">
<MagnifyingGlass
size={16}
weight="bold"
className="absolute left-4 z-30 text-white"
/>
<input
type="text"
placeholder="Search LLM providers"
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
onChange={(e) => setSearchQuery(e.target.value)}
autoComplete="off"
onKeyDown={(e) => {
if (e.key === "Enter") e.preventDefault();
}}
/>
</div>
</div>
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll pb-4">
{filteredLLMs.map((llm) => {
if (llm.value === "native" && isHosted) return null;
return (
<LLMItem
key={llm.name}
name={llm.name}
value={llm.value}
image={llm.logo}
description={llm.description}
checked={selectedLLM === llm.value}
onClick={() => updateLLMChoice(llm.value)}
/>
);
})}
</div>
</div>
<div
onChange={() => setHasChanges(true)}
className="mt-4 flex flex-col gap-y-1"
>
{selectedLLM &&
LLMS.find((llm) => llm.value === selectedLLM)?.options}
</div>
</div>
</div>
</form>

View File

@ -9,36 +9,86 @@ import LanceDbLogo from "@/media/vectordbs/lancedb.png";
import WeaviateLogo from "@/media/vectordbs/weaviate.png";
import QDrantLogo from "@/media/vectordbs/qdrant.png";
import PreLoader from "@/components/Preloader";
import VectorDBOption from "@/components/VectorDBOption";
import ChangeWarningModal from "@/components/ChangeWarning";
import { MagnifyingGlass } from "@phosphor-icons/react";
import LanceDBOptions from "@/components/VectorDBSelection/LanceDBOptions";
import ChromaDBOptions from "@/components/VectorDBSelection/ChromaDBOptions";
import PineconeDBOptions from "@/components/VectorDBSelection/PineconeDBOptions";
import QDrantDBOptions from "@/components/VectorDBSelection/QDrantDBOptions";
import WeaviateDBOptions from "@/components/VectorDBSelection/WeaviateDBOptions";
import VectorDBItem from "@/components/VectorDBSelection/VectorDBItem";
export default function GeneralVectorDatabase() {
const [saving, setSaving] = useState(false);
const [hasChanges, setHasChanges] = useState(false);
const [hasEmbeddings, setHasEmbeddings] = useState(false);
const [vectorDB, setVectorDB] = useState("lancedb");
const [settings, setSettings] = useState({});
const [loading, setLoading] = useState(true);
const [searchQuery, setSearchQuery] = useState("");
const [filteredVDBs, setFilteredVDBs] = useState([]);
const [selectedVDB, setSelectedVDB] = useState(null);
useEffect(() => {
async function fetchKeys() {
const _settings = await System.keys();
console.log(_settings);
setSettings(_settings);
setVectorDB(_settings?.VectorDB || "lancedb");
setSelectedVDB(_settings?.VectorDB || "lancedb");
setHasEmbeddings(_settings?.HasExistingEmbeddings || false);
setLoading(false);
}
fetchKeys();
}, []);
const VECTOR_DBS = [
{
name: "LanceDB",
value: "lancedb",
logo: LanceDbLogo,
options: <LanceDBOptions />,
description:
"100% local vector DB that runs on the same instance as AnythingLLM.",
},
{
name: "Chroma",
value: "chroma",
logo: ChromaLogo,
options: <ChromaDBOptions settings={settings} />,
description:
"Open source vector database you can host yourself or on the cloud.",
},
{
name: "Pinecone",
value: "pinecone",
logo: PineconeLogo,
options: <PineconeDBOptions settings={settings} />,
description: "100% cloud-based vector database for enterprise use cases.",
},
{
name: "QDrant",
value: "qdrant",
logo: QDrantLogo,
options: <QDrantDBOptions settings={settings} />,
description: "Open source local and distributed cloud vector database.",
},
{
name: "Weaviate",
value: "weaviate",
logo: WeaviateLogo,
options: <WeaviateDBOptions settings={settings} />,
description:
"Open source local and cloud hosted multi-modal vector database.",
},
];
const updateVectorChoice = (selection) => {
setHasChanges(true);
setVectorDB(selection);
setSelectedVDB(selection);
};
const handleSubmit = async (e) => {
e.preventDefault();
if (vectorDB !== settings?.VectorDB && hasChanges && hasEmbeddings) {
if (selectedVDB !== settings?.VectorDB && hasChanges && hasEmbeddings) {
document.getElementById("confirmation-modal")?.showModal();
} else {
await handleSaveSettings();
@ -47,11 +97,11 @@ export default function GeneralVectorDatabase() {
const handleSaveSettings = async () => {
setSaving(true);
const data = new FormData(document.getElementById("vectordb-form"));
const form = document.getElementById("vectordb-form");
const settingsData = {};
for (let [key, value] of data.entries()) {
settingsData[key] = value;
}
const formData = new FormData(form);
settingsData.VectorDB = selectedVDB;
for (var [key, value] of formData.entries()) settingsData[key] = value;
const { error } = await System.updateSystem(settingsData);
if (error) {
@ -65,6 +115,13 @@ export default function GeneralVectorDatabase() {
document.getElementById("confirmation-modal")?.close();
};
useEffect(() => {
const filtered = VECTOR_DBS.filter((vdb) =>
vdb.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredVDBs(filtered);
}, [searchQuery, selectedVDB]);
return (
<div className="w-screen h-screen overflow-hidden bg-sidebar flex">
<ChangeWarningModal
@ -91,7 +148,6 @@ export default function GeneralVectorDatabase() {
<form
id="vectordb-form"
onSubmit={handleSubmit}
onChange={() => setHasChanges(true)}
className="flex w-full"
>
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
@ -119,236 +175,52 @@ export default function GeneralVectorDatabase() {
<div className="text-white text-sm font-medium py-4">
Select your preferred vector database provider
</div>
<div className="w-full flex md:flex-wrap overflow-x-scroll gap-4 max-w-[900px]">
<input hidden={true} name="VectorDB" value={vectorDB} />
<VectorDBOption
name="Chroma"
value="chroma"
link="trychroma.com"
description="Open source vector database you can host yourself or on the cloud."
checked={vectorDB === "chroma"}
image={ChromaLogo}
onClick={updateVectorChoice}
/>
<VectorDBOption
name="Pinecone"
value="pinecone"
link="pinecone.io"
description="100% cloud-based vector database for enterprise use cases."
checked={vectorDB === "pinecone"}
image={PineconeLogo}
onClick={updateVectorChoice}
/>
<VectorDBOption
name="QDrant"
value="qdrant"
link="qdrant.tech"
description="Open source local and distributed cloud vector database."
checked={vectorDB === "qdrant"}
image={QDrantLogo}
onClick={updateVectorChoice}
/>
<VectorDBOption
name="Weaviate"
value="weaviate"
link="weaviate.io"
description="Open source local and cloud hosted multi-modal vector database."
checked={vectorDB === "weaviate"}
image={WeaviateLogo}
onClick={updateVectorChoice}
/>
<VectorDBOption
name="LanceDB"
value="lancedb"
link="lancedb.com"
description="100% local vector DB that runs on the same instance as AnythingLLM."
checked={vectorDB === "lancedb"}
image={LanceDbLogo}
onClick={updateVectorChoice}
/>
</div>
<div className="mt-10 flex flex-wrap gap-4 max-w-[800px]">
{vectorDB === "pinecone" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Pinecone DB API Key
</label>
<input
type="password"
name="PineConeKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="Pinecone API Key"
defaultValue={
settings?.PineConeKey ? "*".repeat(20) : ""
}
required={true}
autoComplete="off"
spellCheck={false}
<div className="w-full">
<div className="w-full relative border-slate-300/20 shadow border-4 rounded-xl text-white">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0 z-20">
<MagnifyingGlass
size={16}
weight="bold"
className="absolute left-4 z-30 text-white"
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Pinecone Index Environment
</label>
<input
type="text"
name="PineConeEnvironment"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="us-gcp-west-1"
defaultValue={settings?.PineConeEnvironment}
required={true}
placeholder="Search vector databases"
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
onChange={(e) => {
e.preventDefault();
setSearchQuery(e.target.value);
}}
autoComplete="off"
spellCheck={false}
onKeyDown={(e) => {
if (e.key === "Enter") e.preventDefault();
}}
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Pinecone Index Name
</label>
<input
type="text"
name="PineConeIndex"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="my-index"
defaultValue={settings?.PineConeIndex}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
</>
)}
{vectorDB === "chroma" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Chroma Endpoint
</label>
<input
type="url"
name="ChromaEndpoint"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8000"
defaultValue={settings?.ChromaEndpoint}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
API Header
</label>
<input
name="ChromaApiHeader"
autoComplete="off"
type="text"
defaultValue={settings?.ChromaApiHeader}
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="X-Api-Key"
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
API Key
</label>
<input
name="ChromaApiKey"
autoComplete="off"
type="password"
defaultValue={
settings?.ChromaApiKey ? "*".repeat(20) : ""
}
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-myApiKeyToAccessMyChromaInstance"
/>
</div>
</>
)}
{vectorDB === "lancedb" && (
<div className="w-full h-40 items-center justify-center flex">
<p className="text-sm font-base text-white text-opacity-60">
There is no configuration needed for LanceDB.
</p>
</div>
)}
{vectorDB === "qdrant" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
QDrant API Endpoint
</label>
<input
type="url"
name="QdrantEndpoint"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:6633"
defaultValue={settings?.QdrantEndpoint}
required={true}
autoComplete="off"
spellCheck={false}
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll pb-4">
{filteredVDBs.map((vdb) => (
<VectorDBItem
key={vdb.name}
name={vdb.name}
value={vdb.value}
image={vdb.logo}
description={vdb.description}
checked={selectedVDB === vdb.value}
onClick={() => updateVectorChoice(vdb.value)}
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
API Key
</label>
<input
type="password"
name="QdrantApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="wOeqxsYP4....1244sba"
defaultValue={settings?.QdrantApiKey}
autoComplete="off"
spellCheck={false}
/>
</div>
</>
)}
{vectorDB === "weaviate" && (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Weaviate Endpoint
</label>
<input
type="url"
name="WeaviateEndpoint"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://localhost:8080"
defaultValue={settings?.WeaviateEndpoint}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
API Key
</label>
<input
type="password"
name="WeaviateApiKey"
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-123Abcweaviate"
defaultValue={settings?.WeaviateApiKey}
autoComplete="off"
spellCheck={false}
/>
</div>
</>
)}
))}
</div>
</div>
<div
onChange={() => setHasChanges(true)}
className="mt-4 flex flex-col gap-y-1"
>
{selectedVDB &&
VECTOR_DBS.find((vdb) => vdb.value === selectedVDB)
?.options}
</div>
</div>
</div>
</form>

View File

@ -8,7 +8,7 @@ import NativeEmbeddingOptions from "@/components/EmbeddingSelection/NativeEmbedd
import OpenAiOptions from "@/components/EmbeddingSelection/OpenAiOptions";
import AzureAiOptions from "@/components/EmbeddingSelection/AzureAiOptions";
import LocalAiOptions from "@/components/EmbeddingSelection/LocalAiOptions";
import EmbedderItem from "./EmbedderItem";
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
import System from "@/models/system";
import paths from "@/utils/paths";
import showToast from "@/utils/toast";
@ -108,22 +108,17 @@ export default function EmbeddingPreference({
}, []);
useEffect(() => {
if (searchQuery.trim() === "") {
setFilteredEmbedders(EMBEDDERS);
} else {
const lowercasedQuery = searchQuery.toLowerCase();
const filtered = EMBEDDERS.filter((embedder) =>
embedder.name.toLowerCase().includes(lowercasedQuery)
);
setFilteredEmbedders(filtered);
}
}, [searchQuery]);
const filtered = EMBEDDERS.filter((embedder) =>
embedder.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredEmbedders(filtered);
}, [searchQuery, selectedEmbedder]);
return (
<div>
<form ref={formRef} onSubmit={handleSubmit} className="w-full">
<div className="w-full relative border-slate-300/40 shadow border-2 rounded-lg text-white">
<div className="w-full p-4 absolute top-0 rounded-t-lg bg-accent/50">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0 z-20">
<MagnifyingGlass
size={16}

View File

@ -16,7 +16,7 @@ import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import LLMItem from "./LLMItem";
import LLMItem from "@/components/LLMSelection/LLMItem";
import System from "@/models/system";
import paths from "@/utils/paths";
import showToast from "@/utils/toast";
@ -144,23 +144,18 @@ export default function LLMPreference({
}, []);
useEffect(() => {
if (searchQuery.trim() === "") {
setFilteredLLMs(LLMS);
} else {
const lowercasedQuery = searchQuery.toLowerCase();
const filtered = LLMS.filter((llm) =>
llm.name.toLowerCase().includes(lowercasedQuery)
);
setFilteredLLMs(filtered);
}
}, [searchQuery]);
const filtered = LLMS.filter((llm) =>
llm.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredLLMs(filtered);
}, [searchQuery, selectedLLM]);
return (
<div>
<form ref={formRef} onSubmit={handleSubmit} className="w-full">
<div className="w-full relative border-slate-300/40 shadow border-2 rounded-lg text-white">
<div className="w-full p-4 absolute top-0 rounded-t-lg bg-accent/50">
<div className="w-full flex items-center sticky top-0 z-20">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0">
<MagnifyingGlass
size={16}
weight="bold"

View File

@ -6,7 +6,6 @@ import LanceDbLogo from "@/media/vectordbs/lancedb.png";
import WeaviateLogo from "@/media/vectordbs/weaviate.png";
import QDrantLogo from "@/media/vectordbs/qdrant.png";
import System from "@/models/system";
import VectorDatabaseItem from "./VectorDatabaseItem";
import paths from "@/utils/paths";
import PineconeDBOptions from "@/components/VectorDBSelection/PineconeDBOptions";
import ChromaDBOptions from "@/components/VectorDBSelection/ChromaDBOptions";
@ -15,6 +14,7 @@ import WeaviateDBOptions from "@/components/VectorDBSelection/WeaviateDBOptions"
import LanceDBOptions from "@/components/VectorDBSelection/LanceDBOptions";
import showToast from "@/utils/toast";
import { useNavigate } from "react-router-dom";
import VectorDBItem from "@/components/VectorDBSelection/VectorDBItem";
const TITLE = "Vector Database Connection";
const DESCRIPTION =
@ -118,22 +118,17 @@ export default function VectorDatabaseConnection({
}, []);
useEffect(() => {
if (searchQuery.trim() === "") {
setFilteredVDBs(VECTOR_DBS);
} else {
const lowercasedQuery = searchQuery.toLowerCase();
const filtered = VECTOR_DBS.filter((vdb) =>
vdb.name.toLowerCase().includes(lowercasedQuery)
);
setFilteredVDBs(filtered);
}
}, [searchQuery]);
const filtered = VECTOR_DBS.filter((vdb) =>
vdb.name.toLowerCase().includes(searchQuery.toLowerCase())
);
setFilteredVDBs(filtered);
}, [searchQuery, selectedVDB]);
return (
<>
<form ref={formRef} onSubmit={handleSubmit} className="w-full">
<div className="w-full relative border-slate-300/40 shadow border-2 rounded-lg text-white pb-4">
<div className="w-full p-4 absolute top-0 rounded-t-lg bg-accent/50">
<div className="w-full p-4 absolute top-0 rounded-t-lg backdrop-blur-sm">
<div className="w-full flex items-center sticky top-0 z-20">
<MagnifyingGlass
size={16}
@ -154,7 +149,7 @@ export default function VectorDatabaseConnection({
</div>
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll">
{filteredVDBs.map((vdb) => (
<VectorDatabaseItem
<VectorDBItem
key={vdb.name}
name={vdb.name}
value={vdb.value}