diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 83792da7..58c42b62 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -22,7 +22,7 @@
// Terraform support
"ghcr.io/devcontainers/features/terraform:1": {},
// Just a wrap to install needed packages
- "ghcr.io/rocker-org/devcontainer-features/apt-packages:1": {
+ "ghcr.io/devcontainers-contrib/features/apt-packages:1": {
// Dependencies copied from ../docker/Dockerfile plus some dev stuff
"packages": [
"build-essential",
diff --git a/.prettierignore b/.prettierignore
index faedf325..e3b0c14e 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -10,3 +10,7 @@ frontend/bundleinspector.html
#server
server/swagger/openapi.json
+
+#embed
+**/static/**
+embed/src/utils/chat/hljs.js
diff --git a/.prettierrc b/.prettierrc
index 3574c1df..5e2bccfe 100644
--- a/.prettierrc
+++ b/.prettierrc
@@ -17,7 +17,7 @@
}
},
{
- "files": "*.config.js",
+ "files": ["*.config.js"],
"options": {
"semi": false,
"parser": "flow",
diff --git a/collector/package.json b/collector/package.json
index 785604e3..938d65e1 100644
--- a/collector/package.json
+++ b/collector/package.json
@@ -12,7 +12,7 @@
"scripts": {
"dev": "NODE_ENV=development nodemon --ignore hotdir --ignore storage --trace-warnings index.js",
"start": "NODE_ENV=production node index.js",
- "lint": "yarn prettier --write ./processSingleFile ./processLink ./utils index.js"
+ "lint": "yarn prettier --ignore-path ../.prettierignore --write ./processSingleFile ./processLink ./utils index.js"
},
"dependencies": {
"@googleapis/youtube": "^9.0.0",
diff --git a/docker/.env.example b/docker/.env.example
index 6368a190..174a9d69 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -128,6 +128,12 @@ GID='1000'
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
+# EMBEDDING_ENGINE='litellm'
+# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
+# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
+# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
+# LITE_LLM_API_KEY='sk-123abc'
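+# Note: LITE_LLM_API_KEY is only required if your LiteLLM proxy enforces authentication.
+# Example (assumes a locally running LiteLLM proxy): litellm --model text-embedding-ada-002 --port 4000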
+
###########################################
######## Vector Database Selection ########
###########################################
diff --git a/embed/.prettierignore b/embed/.prettierignore
deleted file mode 100644
index d90a3c08..00000000
--- a/embed/.prettierignore
+++ /dev/null
@@ -1,9 +0,0 @@
-# defaults
-**/.git
-**/.svn
-**/.hg
-**/node_modules
-
-**/dist
-**/static/**
-src/utils/chat/hljs.js
diff --git a/embed/jsconfig.json b/embed/jsconfig.json
index c8cc81fd..20cd368c 100644
--- a/embed/jsconfig.json
+++ b/embed/jsconfig.json
@@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
- "@/*": [
- "./src/*"
- ],
- }
- }
-}
\ No newline at end of file
+ "@/*": ["./src/*"],
+ },
+ },
+}
diff --git a/embed/package.json b/embed/package.json
index eb399930..712af8e6 100644
--- a/embed/package.json
+++ b/embed/package.json
@@ -1,6 +1,7 @@
{
"name": "anythingllm-embedded-chat",
"private": false,
+ "license": "MIT",
"type": "module",
"scripts": {
"dev": "nodemon -e js,jsx,css --watch src --exec \"yarn run dev:preview\"",
@@ -8,7 +9,7 @@
"dev:build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js",
"build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js && npx terser --compress -o dist/anythingllm-chat-widget.min.js -- dist/anythingllm-chat-widget.js",
"build:publish": "yarn build && mkdir -p ../frontend/public/embed && cp -r dist/anythingllm-chat-widget.min.js ../frontend/public/embed/anythingllm-chat-widget.min.js",
- "lint": "yarn prettier --write ./src"
+ "lint": "yarn prettier --ignore-path ../.prettierignore --write ./src"
},
"dependencies": {
"@microsoft/fetch-event-source": "^2.0.1",
diff --git a/embed/vite.config.js b/embed/vite.config.js
index 21506422..9e23c70d 100644
--- a/embed/vite.config.js
+++ b/embed/vite.config.js
@@ -38,7 +38,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
- /@phosphor-icons\/react\/dist\/ssr/,
+ /@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {
@@ -51,7 +51,7 @@ export default defineConfig({
emptyOutDir: true,
inlineDynamicImports: true,
assetsDir: "",
- sourcemap: 'inline',
+ sourcemap: "inline"
},
optimizeDeps: {
esbuildOptions: {
@@ -60,5 +60,5 @@ export default defineConfig({
},
plugins: []
}
- },
+ }
})
diff --git a/frontend/jsconfig.json b/frontend/jsconfig.json
index c8cc81fd..e21fc376 100644
--- a/frontend/jsconfig.json
+++ b/frontend/jsconfig.json
@@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
- "@/*": [
- "./src/*"
- ],
+ "@/*": ["./src/*"]
}
}
-}
\ No newline at end of file
+}
diff --git a/frontend/package.json b/frontend/package.json
index 11e612fc..8aa4dcfa 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -7,7 +7,7 @@
"start": "vite --open",
"dev": "NODE_ENV=development vite --debug --host=0.0.0.0",
"build": "vite build",
- "lint": "yarn prettier --write ./src",
+ "lint": "yarn prettier --ignore-path ../.prettierignore --write ./src",
"preview": "vite preview"
},
"dependencies": {
@@ -63,4 +63,4 @@
"tailwindcss": "^3.3.1",
"vite": "^4.3.0"
}
-}
+}
\ No newline at end of file
diff --git a/frontend/src/components/ChatBubble/index.jsx b/frontend/src/components/ChatBubble/index.jsx
index 8d311883..c5a1f190 100644
--- a/frontend/src/components/ChatBubble/index.jsx
+++ b/frontend/src/components/ChatBubble/index.jsx
@@ -1,5 +1,5 @@
import React from "react";
-import Jazzicon from "../UserIcon";
+import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
@@ -11,8 +11,7 @@ export default function ChatBubble({ message, type, popMsg }) {
-
diff --git a/frontend/src/components/DefaultChat/index.jsx b/frontend/src/components/DefaultChat/index.jsx
index 43ae6e7a..ae52a0d2 100644
--- a/frontend/src/components/DefaultChat/index.jsx
+++ b/frontend/src/components/DefaultChat/index.jsx
@@ -13,7 +13,7 @@ import { isMobile } from "react-device-detect";
import { SidebarMobileHeader } from "../Sidebar";
import ChatBubble from "../ChatBubble";
import System from "@/models/system";
-import Jazzicon from "../UserIcon";
+import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import useUser from "@/hooks/useUser";
@@ -46,7 +46,7 @@ export default function DefaultChatContainer() {
className={`pt-10 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
-
+
-
+
-
+
-
@@ -151,7 +150,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
-
+
-
@@ -213,7 +211,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
-
+
-
@@ -275,7 +272,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
-
+
diff --git a/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx
new file mode 100644
--- /dev/null
+++ b/frontend/src/components/EmbeddingSelection/LiteLLMOptions/index.jsx
+
+
+
+ setBasePathValue(e.target.value)}
+ onBlur={() => setBasePath(basePathValue)}
+ />
+
+
+
+
+ e.target.blur()}
+ defaultValue={settings?.EmbeddingModelMaxChunkLength}
+ required={false}
+ autoComplete="off"
+ />
+
+
+
+
+
+
setApiKeyValue(e.target.value)}
+ onBlur={() => setApiKey(apiKeyValue)}
+ />
+
+
+
+ );
+}
+
+function LiteLLMModelSelection({ settings, basePath = null, apiKey = null }) {
+ const [customModels, setCustomModels] = useState([]);
+ const [loading, setLoading] = useState(true);
+
+ useEffect(() => {
+ async function findCustomModels() {
+ if (!basePath) {
+ setCustomModels([]);
+ setLoading(false);
+ return;
+ }
+ setLoading(true);
+ const { models } = await System.customModels(
+ "litellm",
+ typeof apiKey === "boolean" ? null : apiKey,
+ basePath
+ );
+ setCustomModels(models || []);
+ setLoading(false);
+ }
+ findCustomModels();
+ }, [basePath, apiKey]);
+
+ if (loading || customModels.length == 0) {
+ return (
+
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
+
+
+
+ );
+}
+
+function EmbeddingModelTooltip() {
+ return (
+
+
+
+
+ Be sure to select a valid embedding model. Chat models are not
+ embedding models. See{" "}
+
+ this page
+ {" "}
+ for more information.
+
+
+
+ );
+}
diff --git a/frontend/src/components/Modals/Password/index.jsx b/frontend/src/components/Modals/Password/index.jsx
index 8f86b611..7010fc66 100644
--- a/frontend/src/components/Modals/Password/index.jsx
+++ b/frontend/src/components/Modals/Password/index.jsx
@@ -33,12 +33,12 @@ export default function PasswordModal({ mode = "single" }) {
alt="login illustration"
/>
-
+
diff --git a/frontend/src/components/SettingsSidebar/index.jsx b/frontend/src/components/SettingsSidebar/index.jsx
index 2d59d0ff..297a1620 100644
--- a/frontend/src/components/SettingsSidebar/index.jsx
+++ b/frontend/src/components/SettingsSidebar/index.jsx
@@ -85,7 +85,7 @@ export default function SettingsSidebar() {
/>
{/* Header Information */}
@@ -109,12 +109,14 @@ export default function SettingsSidebar() {
{/* Primary Body */}
-
+
@@ -139,22 +141,21 @@ export default function SettingsSidebar() {
Instance Settings
-
diff --git a/frontend/src/components/Sidebar/index.jsx b/frontend/src/components/Sidebar/index.jsx
index a32c0a3b..d4b4e007 100644
--- a/frontend/src/components/Sidebar/index.jsx
+++ b/frontend/src/components/Sidebar/index.jsx
@@ -32,34 +32,34 @@ export default function Sidebar() {
-
-
- {(!user || user?.role !== "default") && (
-
- )}
+
+
+
+ {(!user || user?.role !== "default") && (
+
+ )}
+
+
-
-
@@ -156,12 +156,9 @@ export function SidebarMobileHeader() {
{/* Primary Body */}
-
+
-
+
{(!user || user?.role !== "default") && (
-
diff --git a/frontend/src/components/UserIcon/index.jsx b/frontend/src/components/UserIcon/index.jsx
index 6cc9b57d..7fc6b8df 100644
--- a/frontend/src/components/UserIcon/index.jsx
+++ b/frontend/src/components/UserIcon/index.jsx
@@ -2,7 +2,7 @@ import React, { useRef, useEffect } from "react";
import JAZZ from "@metamask/jazzicon";
import usePfp from "../../hooks/usePfp";
-export default function Jazzicon({ size = 10, user, role }) {
+export default function UserIcon({ size = 36, user, role }) {
const { pfp } = usePfp();
const divRef = useRef(null);
const seed = user?.uid
diff --git a/frontend/src/components/UserIcon/workspace.png b/frontend/src/components/UserIcon/workspace.png
new file mode 100644
index 00000000..537d583c
Binary files /dev/null and b/frontend/src/components/UserIcon/workspace.png differ
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx
new file mode 100644
index 00000000..f9346b26
--- /dev/null
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/EditMessage/index.jsx
@@ -0,0 +1,126 @@
+import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
+import { Pencil } from "@phosphor-icons/react";
+import { useState, useEffect, useRef } from "react";
+import { Tooltip } from "react-tooltip";
+const EDIT_EVENT = "toggle-message-edit";
+
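+// Edit state is coordinated through a window-level CustomEvent so the edit button,
+// the edit form, and the message body stay in sync without prop drilling. The
+// (chatId, role) pair in the event detail identifies which message is being toggled.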
+export function useEditMessage({ chatId, role }) {
+ const [isEditing, setIsEditing] = useState(false);
+
+ function onEditEvent(e) {
+ if (e.detail.chatId !== chatId || e.detail.role !== role) {
+ setIsEditing(false);
+ return false;
+ }
+ setIsEditing((prev) => !prev);
+ }
+
+ useEffect(() => {
+ function listenForEdits() {
+ if (!chatId || !role) return;
+ window.addEventListener(EDIT_EVENT, onEditEvent);
+ }
+ listenForEdits();
+ return () => {
+ window.removeEventListener(EDIT_EVENT, onEditEvent);
+ };
+ }, [chatId, role]);
+
+ return { isEditing, setIsEditing };
+}
+
+export function EditMessageAction({ chatId = null, role, isEditing }) {
+ function handleEditClick() {
+ window.dispatchEvent(
+ new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+ );
+ }
+
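+  // Only render the edit action for persisted messages (those with a chatId) that are not already being edited.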
+ if (!chatId || isEditing) return null;
+ return (
+
+ );
+}
+
+export function EditMessageForm({
+ role,
+ chatId,
+ message,
+ adjustTextArea,
+ saveChanges,
+}) {
+ const formRef = useRef(null);
+ function handleSaveMessage(e) {
+ e.preventDefault();
+ const form = new FormData(e.target);
+ const editedMessage = form.get("editedMessage");
+ saveChanges({ editedMessage, chatId, role });
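+    // Dispatching the edit event again toggles isEditing off and closes the form.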
+ window.dispatchEvent(
+ new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+ );
+ }
+
+ function cancelEdits() {
+ window.dispatchEvent(
+ new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+ );
+ return false;
+ }
+
+ useEffect(() => {
+ if (!formRef || !formRef.current) return;
+ formRef.current.focus();
+ adjustTextArea({ target: formRef.current });
+ }, [formRef]);
+
+ return (
+
+ );
+}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/index.jsx
index 41fd7067..85590e7f 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/Actions/index.jsx
@@ -2,14 +2,15 @@ import React, { memo, useState } from "react";
import useCopyText from "@/hooks/useCopyText";
import {
Check,
- ClipboardText,
ThumbsUp,
ThumbsDown,
ArrowsClockwise,
+ Copy,
} from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import Workspace from "@/models/workspace";
import TTSMessage from "./TTSButton";
+import { EditMessageAction } from "./EditMessage";
const Actions = ({
message,
@@ -18,9 +19,10 @@ const Actions = ({
slug,
isLastMessage,
regenerateMessage,
+ isEditing,
+ role,
}) => {
const [selectedFeedback, setSelectedFeedback] = useState(feedbackScore);
-
const handleFeedback = async (newFeedback) => {
const updatedFeedback =
selectedFeedback === newFeedback ? null : newFeedback;
@@ -32,14 +34,15 @@ const Actions = ({
- {isLastMessage && (
+
+ {isLastMessage && !isEditing && (
)}
- {chatId && (
+ {chatId && role !== "user" && !isEditing && (
<>
) : (
-
+
)}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/HistoricalMessage/index.jsx
 {
- return (
-
-
-
-
- {error ? (
+ const { isEditing } = useEditMessage({ chatId, role });
+ const adjustTextArea = (event) => {
+ const element = event.target;
+ element.style.height = "auto";
+ element.style.height = element.scrollHeight + "px";
+ };
+
+ if (!!error) {
+ return (
+
+
+
+
-
+
Could not
respond to message.
@@ -42,6 +51,30 @@ const HistoricalMessage = ({
{error}
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
+ {isEditing ? (
+
) : (
)}
- {role === "assistant" && !error && (
-
- )}
+
{role === "assistant" &&
}
@@ -84,8 +117,7 @@ function ProfileImage({ role, workspace }) {
}
return (
-
;
+ return
;
}
export default memo(PromptReply);
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
index 6e9f4e77..19b65453 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/ChatHistory/index.jsx
@@ -7,14 +7,18 @@ import { ArrowDown } from "@phosphor-icons/react";
import debounce from "lodash.debounce";
import useUser from "@/hooks/useUser";
import Chartable from "./Chartable";
+import Workspace from "@/models/workspace";
+import { useParams } from "react-router-dom";
export default function ChatHistory({
history = [],
workspace,
sendCommand,
+ updateHistory,
regenerateAssistantMessage,
}) {
const { user } = useUser();
+ const { threadSlug = null } = useParams();
const { showing, showModal, hideModal } = useManageWorkspaceModal();
const [isAtBottom, setIsAtBottom] = useState(true);
const chatHistoryRef = useRef(null);
@@ -87,6 +91,46 @@ export default function ChatHistory({
sendCommand(`${heading} ${message}`, true);
};
+ const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
+ if (!editedMessage) return; // Don't save empty edits.
+
+    // If the edit was a user message, auto-regenerate the response and delete all
+    // messages after the modified message.
+ if (role === "user") {
+      // Remove all messages after the edited message. Each prompt/response pair
+      // shares a chatId, so findIndex stops at the first entry (the user prompt).
+ const updatedHistory = history.slice(
+ 0,
+ history.findIndex((msg) => msg.chatId === chatId) + 1
+ );
+
+ // update last message in history to edited message
+ updatedHistory[updatedHistory.length - 1].content = editedMessage;
+      // Remove all messages after the edited message on the backend as well.
+ await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
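+      // Re-submit the edited prompt against the truncated history so a fresh assistant reply is generated.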
+ sendCommand(editedMessage, true, updatedHistory);
+ return;
+ }
+
+    // If the role is assistant, simply update the message content and save the edit on the backend.
+ if (role === "assistant") {
+ const updatedHistory = [...history];
+ const targetIdx = history.findIndex(
+ (msg) => msg.chatId === chatId && msg.role === role
+ );
+ if (targetIdx < 0) return;
+ updatedHistory[targetIdx].content = editedMessage;
+ updateHistory(updatedHistory);
+ await Workspace.updateChatResponse(
+ workspace.slug,
+ threadSlug,
+ chatId,
+ editedMessage
+ );
+ return;
+ }
+ };
+
if (history.length === 0) {
return (
@@ -172,6 +216,7 @@ export default function ChatHistory({
error={props.error}
regenerateMessage={regenerateAssistantMessage}
isLastMessage={isLastBotReply}
+ saveEditedMessage={saveEditedMessage}
/>
);
})}
diff --git a/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx b/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
index 494ee57d..28d87e0d 100644
--- a/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
+++ b/frontend/src/components/WorkspaceChat/ChatContainer/index.jsx
@@ -240,6 +240,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
history={chatHistory}
workspace={workspace}
sendCommand={sendCommand}
+ updateHistory={setChatHistory}
regenerateAssistantMessage={regenerateAssistantMessage}
/>
diff --git a/frontend/src/models/workspace.js b/frontend/src/models/workspace.js
--- a/frontend/src/models/workspace.js
+++ b/frontend/src/models/workspace.js
 {
+ if (res.ok) return true;
+ throw new Error("Failed to update chat.");
+ })
+ .catch((e) => {
+ console.log(e);
+ return false;
+ });
+ },
+ _deleteEditedChats: async function (slug = "", startingId) {
+ return await fetch(`${API_BASE}/workspace/${slug}/delete-edited-chats`, {
+ method: "DELETE",
+ headers: baseHeaders(),
+ body: JSON.stringify({ startingId }),
+ })
+ .then((res) => {
+ if (res.ok) return true;
+ throw new Error("Failed to delete chats.");
+ })
+ .catch((e) => {
+ console.log(e);
+ return false;
+ });
+ },
+ threads: WorkspaceThread,
};
export default Workspace;
diff --git a/frontend/src/models/workspaceThread.js b/frontend/src/models/workspaceThread.js
index 039ee186..a73006c9 100644
--- a/frontend/src/models/workspaceThread.js
+++ b/frontend/src/models/workspaceThread.js
@@ -163,6 +163,51 @@ const WorkspaceThread = {
}
);
},
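+  // Thread-scoped variant of Workspace._deleteEditedChats, used when the chat belongs to a thread.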
+ _deleteEditedChats: async function (
+ workspaceSlug = "",
+ threadSlug = "",
+ startingId
+ ) {
+ return await fetch(
+ `${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/delete-edited-chats`,
+ {
+ method: "DELETE",
+ headers: baseHeaders(),
+ body: JSON.stringify({ startingId }),
+ }
+ )
+ .then((res) => {
+ if (res.ok) return true;
+ throw new Error("Failed to delete chats.");
+ })
+ .catch((e) => {
+ console.log(e);
+ return false;
+ });
+ },
+ _updateChatResponse: async function (
+ workspaceSlug = "",
+ threadSlug = "",
+ chatId,
+ newText
+ ) {
+ return await fetch(
+ `${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/update-chat`,
+ {
+ method: "POST",
+ headers: baseHeaders(),
+ body: JSON.stringify({ chatId, newText }),
+ }
+ )
+ .then((res) => {
+ if (res.ok) return true;
+ throw new Error("Failed to update chat.");
+ })
+ .catch((e) => {
+ console.log(e);
+ return false;
+ });
+ },
};
export default WorkspaceThread;
diff --git a/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx b/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
index 5a0f51c1..4d032dc0 100644
--- a/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/EmbeddingPreference/index.jsx
@@ -11,6 +11,7 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
+import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PreLoader from "@/components/Preloader";
import ChangeWarningModal from "@/components/ChangeWarning";
@@ -22,6 +23,7 @@ import OllamaEmbeddingOptions from "@/components/EmbeddingSelection/OllamaOption
import LMStudioEmbeddingOptions from "@/components/EmbeddingSelection/LMStudioOptions";
import CohereEmbeddingOptions from "@/components/EmbeddingSelection/CohereOptions";
import VoyageAiOptions from "@/components/EmbeddingSelection/VoyageAiOptions";
+import LiteLLMOptions from "@/components/EmbeddingSelection/LiteLLMOptions";
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@@ -88,6 +90,13 @@ const EMBEDDERS = [
options: (settings) => ,
description: "Run powerful embedding models from Voyage AI.",
},
+ {
+ name: "LiteLLM",
+ value: "litellm",
+ logo: LiteLLMLogo,
+ options: (settings) => ,
+ description: "Run powerful embedding models from LiteLLM.",
+ },
];
export default function GeneralEmbeddingPreference() {
diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
index 35358636..b4fa666f 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -301,6 +301,13 @@ export const EMBEDDING_ENGINE_PRIVACY = {
],
logo: VoyageAiLogo,
},
+ litellm: {
+ name: "LiteLLM",
+ description: [
+ "Your document text is only accessible on the server running LiteLLM and to the providers you configured in LiteLLM.",
+ ],
+ logo: LiteLLMLogo,
+ },
};
export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
diff --git a/frontend/src/utils/chat/index.js b/frontend/src/utils/chat/index.js
index c5730dbe..a57b11e2 100644
--- a/frontend/src/utils/chat/index.js
+++ b/frontend/src/utils/chat/index.js
@@ -108,13 +108,10 @@ export default function handleChat(
} else if (type === "finalizeResponseStream") {
const chatIdx = _chatHistory.findIndex((chat) => chat.uuid === uuid);
if (chatIdx !== -1) {
- const existingHistory = { ..._chatHistory[chatIdx] };
- const updatedHistory = {
- ...existingHistory,
- chatId, // finalize response stream only has some specific keys for data. we are explicitly listing them here.
- };
- _chatHistory[chatIdx] = updatedHistory;
+ _chatHistory[chatIdx - 1] = { ..._chatHistory[chatIdx - 1], chatId }; // update prompt with chatID
+ _chatHistory[chatIdx] = { ..._chatHistory[chatIdx], chatId }; // update response with chatID
}
+
setChatHistory([..._chatHistory]);
setLoadingResponse(false);
} else if (type === "stopGeneration") {
diff --git a/frontend/vite.config.js b/frontend/vite.config.js
index 3785b947..ff96bdcd 100644
--- a/frontend/vite.config.js
+++ b/frontend/vite.config.js
@@ -51,7 +51,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
- /@phosphor-icons\/react\/dist\/ssr/,
+ /@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {
diff --git a/server/.env.example b/server/.env.example
index f51d6177..6148d594 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -125,6 +125,12 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
+# EMBEDDING_ENGINE='litellm'
+# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
+# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
+# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
+# LITE_LLM_API_KEY='sk-123abc'
+
###########################################
######## Vector Database Selection ########
###########################################
diff --git a/server/endpoints/workspaceThreads.js b/server/endpoints/workspaceThreads.js
index e2aead97..1c207e52 100644
--- a/server/endpoints/workspaceThreads.js
+++ b/server/endpoints/workspaceThreads.js
@@ -1,4 +1,9 @@
-const { multiUserMode, userFromSession, reqBody } = require("../utils/http");
+const {
+ multiUserMode,
+ userFromSession,
+ reqBody,
+ safeJsonParse,
+} = require("../utils/http");
const { validatedRequest } = require("../utils/middleware/validatedRequest");
const { Telemetry } = require("../models/telemetry");
const {
@@ -168,6 +173,77 @@ function workspaceThreadEndpoints(app) {
}
}
);
+
+ app.delete(
+ "/workspace/:slug/thread/:threadSlug/delete-edited-chats",
+ [
+ validatedRequest,
+ flexUserRoleValid([ROLES.all]),
+ validWorkspaceAndThreadSlug,
+ ],
+ async (request, response) => {
+ try {
+ const { startingId } = reqBody(request);
+ const user = await userFromSession(request, response);
+ const workspace = response.locals.workspace;
+ const thread = response.locals.thread;
+
+ await WorkspaceChats.delete({
+ workspaceId: Number(workspace.id),
+ thread_id: Number(thread.id),
+ user_id: user?.id,
+ id: { gte: Number(startingId) },
+ });
+
+ response.sendStatus(200).end();
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.post(
+ "/workspace/:slug/thread/:threadSlug/update-chat",
+ [
+ validatedRequest,
+ flexUserRoleValid([ROLES.all]),
+ validWorkspaceAndThreadSlug,
+ ],
+ async (request, response) => {
+ try {
+ const { chatId, newText = null } = reqBody(request);
+ if (!newText || !String(newText).trim())
+ throw new Error("Cannot save empty response");
+
+ const user = await userFromSession(request, response);
+ const workspace = response.locals.workspace;
+ const thread = response.locals.thread;
+ const existingChat = await WorkspaceChats.get({
+ workspaceId: workspace.id,
+ thread_id: thread.id,
+ user_id: user?.id,
+ id: Number(chatId),
+ });
+ if (!existingChat) throw new Error("Invalid chat.");
+
+ const chatResponse = safeJsonParse(existingChat.response, null);
+ if (!chatResponse) throw new Error("Failed to parse chat response");
+
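+        // Preserve the rest of the stored response payload (sources, type, etc.); only the visible text changes.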
+ await WorkspaceChats._update(existingChat.id, {
+ response: JSON.stringify({
+ ...chatResponse,
+ text: String(newText),
+ }),
+ });
+
+ response.sendStatus(200).end();
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
}
module.exports = { workspaceThreadEndpoints };
diff --git a/server/endpoints/workspaces.js b/server/endpoints/workspaces.js
index 2657eb97..6d6f29bb 100644
--- a/server/endpoints/workspaces.js
+++ b/server/endpoints/workspaces.js
@@ -380,7 +380,6 @@ function workspaceEndpoints(app) {
const history = multiUserMode(response)
? await WorkspaceChats.forWorkspaceByUser(workspace.id, user.id)
: await WorkspaceChats.forWorkspace(workspace.id);
-
response.status(200).json({ history: convertToChatHistory(history) });
} catch (e) {
console.log(e.message, e);
@@ -420,6 +419,67 @@ function workspaceEndpoints(app) {
}
);
+ app.delete(
+ "/workspace/:slug/delete-edited-chats",
+ [validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
+ async (request, response) => {
+ try {
+ const { startingId } = reqBody(request);
+ const user = await userFromSession(request, response);
+ const workspace = response.locals.workspace;
+
+ await WorkspaceChats.delete({
+ workspaceId: workspace.id,
+ thread_id: null,
+ user_id: user?.id,
+ id: { gte: Number(startingId) },
+ });
+
+ response.sendStatus(200).end();
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.post(
+ "/workspace/:slug/update-chat",
+ [validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
+ async (request, response) => {
+ try {
+ const { chatId, newText = null } = reqBody(request);
+ if (!newText || !String(newText).trim())
+ throw new Error("Cannot save empty response");
+
+ const user = await userFromSession(request, response);
+ const workspace = response.locals.workspace;
+ const existingChat = await WorkspaceChats.get({
+ workspaceId: workspace.id,
+ thread_id: null,
+ user_id: user?.id,
+ id: Number(chatId),
+ });
+ if (!existingChat) throw new Error("Invalid chat.");
+
+ const chatResponse = safeJsonParse(existingChat.response, null);
+ if (!chatResponse) throw new Error("Failed to parse chat response");
+
+ await WorkspaceChats._update(existingChat.id, {
+ response: JSON.stringify({
+ ...chatResponse,
+ text: String(newText),
+ }),
+ });
+
+ response.sendStatus(200).end();
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
app.post(
"/workspace/:slug/chat-feedback/:chatId",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
diff --git a/server/models/workspaceChats.js b/server/models/workspaceChats.js
index c81992ca..bda40064 100644
--- a/server/models/workspaceChats.js
+++ b/server/models/workspaceChats.js
@@ -220,6 +220,24 @@ const WorkspaceChats = {
console.error(error.message);
}
},
+
+  // Explicit update of a chat record with no field validation.
+  // Only use this method when directly setting values that
+  // take no user input for the fields being modified.
+ _update: async function (id = null, data = {}) {
+ if (!id) throw new Error("No workspace chat id provided for update");
+
+ try {
+ await prisma.workspace_chats.update({
+ where: { id },
+ data,
+ });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
};
module.exports = { WorkspaceChats };
diff --git a/server/package.json b/server/package.json
index 4f995470..1b0ba280 100644
--- a/server/package.json
+++ b/server/package.json
@@ -12,7 +12,7 @@
"scripts": {
"dev": "NODE_ENV=development nodemon --ignore documents --ignore vector-cache --ignore storage --ignore swagger --trace-warnings index.js",
"start": "NODE_ENV=production node index.js",
- "lint": "yarn prettier --write ./endpoints ./models ./utils index.js",
+ "lint": "yarn prettier --ignore-path ../.prettierignore --write ./endpoints ./models ./utils index.js",
"swagger": "node ./swagger/init.js",
"sqlite:migrate": "cd ./utils/prisma && node migrateFromSqlite.js"
},
@@ -32,7 +32,7 @@
"@langchain/textsplitters": "0.0.0",
"@pinecone-database/pinecone": "^2.0.1",
"@prisma/client": "5.3.1",
- "@qdrant/js-client-rest": "^1.4.0",
+ "@qdrant/js-client-rest": "^1.9.0",
"@xenova/transformers": "^2.14.0",
"@zilliz/milvus2-sdk-node": "^2.3.5",
"archiver": "^5.3.1",
diff --git a/server/utils/EmbeddingEngines/liteLLM/index.js b/server/utils/EmbeddingEngines/liteLLM/index.js
new file mode 100644
index 00000000..cd22480b
--- /dev/null
+++ b/server/utils/EmbeddingEngines/liteLLM/index.js
@@ -0,0 +1,93 @@
+const { toChunks, maximumChunkLength } = require("../../helpers");
+
+class LiteLLMEmbedder {
+ constructor() {
+ const { OpenAI: OpenAIApi } = require("openai");
+ if (!process.env.LITE_LLM_BASE_PATH)
+ throw new Error(
+ "LiteLLM must have a valid base path to use for the api."
+ );
+ this.basePath = process.env.LITE_LLM_BASE_PATH;
+ this.openai = new OpenAIApi({
+ baseURL: this.basePath,
+ apiKey: process.env.LITE_LLM_API_KEY ?? null,
+ });
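+    // The chosen model must correspond to a model your LiteLLM proxy is configured to serve.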
+ this.model = process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002";
+
+    // Limit of how many strings we can process in a single pass to stay within resource or network limits
+ this.maxConcurrentChunks = 500;
+ this.embeddingMaxChunkLength = maximumChunkLength();
+ }
+
+ async embedTextInput(textInput) {
+ const result = await this.embedChunks(
+ Array.isArray(textInput) ? textInput : [textInput]
+ );
+ return result?.[0] || [];
+ }
+
+ async embedChunks(textChunks = []) {
+    // Because there is a hard POST payload limit (~8mb) on how many chunks can be sent at once to LiteLLM,
+    // we concurrently execute requests in batches of at most maxConcurrentChunks text chunks.
+    // Refer to the constructor for the maxConcurrentChunks value.
+ const embeddingRequests = [];
+ for (const chunk of toChunks(textChunks, this.maxConcurrentChunks)) {
+ embeddingRequests.push(
+ new Promise((resolve) => {
+ this.openai.embeddings
+ .create({
+ model: this.model,
+ input: chunk,
+ })
+ .then((result) => {
+ resolve({ data: result?.data, error: null });
+ })
+ .catch((e) => {
+ e.type =
+ e?.response?.data?.error?.code ||
+ e?.response?.status ||
+ "failed_to_embed";
+ e.message = e?.response?.data?.error?.message || e.message;
+ resolve({ data: [], error: e });
+ });
+ })
+ );
+ }
+
+ const { data = [], error = null } = await Promise.all(
+ embeddingRequests
+ ).then((results) => {
+ // If any errors were returned from LiteLLM abort the entire sequence because the embeddings
+ // will be incomplete.
+ const errors = results
+ .filter((res) => !!res.error)
+ .map((res) => res.error)
+ .flat();
+ if (errors.length > 0) {
+ let uniqueErrors = new Set();
+ errors.map((error) =>
+ uniqueErrors.add(`[${error.type}]: ${error.message}`)
+ );
+
+ return {
+ data: [],
+ error: Array.from(uniqueErrors).join(", "),
+ };
+ }
+ return {
+ data: results.map((res) => res?.data || []).flat(),
+ error: null,
+ };
+ });
+
+ if (!!error) throw new Error(`LiteLLM Failed to embed: ${error}`);
+ return data.length > 0 &&
+ data.every((embd) => embd.hasOwnProperty("embedding"))
+ ? data.map((embd) => embd.embedding)
+ : null;
+ }
+}
+
+module.exports = {
+ LiteLLMEmbedder,
+};
diff --git a/server/utils/helpers/chat/responses.js b/server/utils/helpers/chat/responses.js
index d07eae30..609b1819 100644
--- a/server/utils/helpers/chat/responses.js
+++ b/server/utils/helpers/chat/responses.js
@@ -174,6 +174,7 @@ function convertToChatHistory(history = []) {
role: "user",
content: prompt,
sentAt: moment(createdAt).unix(),
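+        // Give the prompt entry its chatId as well so user messages can be edited from the UI.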
+ chatId: id,
},
{
type: data?.type || "chart",
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index e60202a6..8f0df126 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -128,6 +128,9 @@ function getEmbeddingEngineSelection() {
case "voyageai":
const { VoyageAiEmbedder } = require("../EmbeddingEngines/voyageAi");
return new VoyageAiEmbedder();
+ case "litellm":
+ const { LiteLLMEmbedder } = require("../EmbeddingEngines/liteLLM");
+ return new LiteLLMEmbedder();
default:
return new NativeEmbedder();
}
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index d5cdc68f..1a0e710a 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -577,6 +577,7 @@ function supportedEmbeddingModel(input = "") {
"lmstudio",
"cohere",
"voyageai",
+ "litellm",
];
return supported.includes(input)
? null
diff --git a/server/utils/vectorDbProviders/qdrant/index.js b/server/utils/vectorDbProviders/qdrant/index.js
index ff55c06f..77945915 100644
--- a/server/utils/vectorDbProviders/qdrant/index.js
+++ b/server/utils/vectorDbProviders/qdrant/index.js
@@ -95,7 +95,7 @@ const QDrant = {
return {
name: namespace,
...collection,
- vectorCount: collection.vectors_count,
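+      // vectors_count is not reliably populated on newer Qdrant releases, so request an exact point count instead.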
+ vectorCount: (await client.count(namespace, { exact: true })).count,
};
},
hasNamespace: async function (namespace = null) {
diff --git a/server/yarn.lock b/server/yarn.lock
index d274e574..c6cf4c2c 100644
--- a/server/yarn.lock
+++ b/server/yarn.lock
@@ -1036,7 +1036,7 @@
resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570"
integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==
-"@qdrant/js-client-rest@^1.4.0":
+"@qdrant/js-client-rest@^1.9.0":
version "1.9.0"
resolved "https://registry.yarnpkg.com/@qdrant/js-client-rest/-/js-client-rest-1.9.0.tgz#deef8acb520f47f9db1c1517758ccf88c12e69fe"
integrity sha512-YiX/IskbRCoAY2ujyPDI6FBcO0ygAS4pgkGaJ7DcrJFh4SZV2XHs+u0KM7mO72RWJn1eJQFF2PQwxG+401xxJg==