Compare commits


11 Commits

Author SHA1 Message Date
shatfield4
25b4e58933 implement truncated prompt as thread name 2024-06-06 18:32:00 -07:00
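The naming approach, as the server endpoint and ChatContainer diffs further down show, is to derive the thread title from the user's first prompt instead of generating one with an LLM: the prompt is simply truncated to 22 characters. A minimal sketch of that step using the truncate package added in this change (the surrounding names are illustrative):

const truncate = require("truncate");

// On the first chat in a new thread, name the thread after the prompt itself.
function threadNameFromPrompt(message) {
  return truncate(message, 22); // keep at most 22 characters of the prompt
}
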
shatfield4
b9b6ec51aa Merge branch 'master' into 1606-feat-rename-new-thread-when-thread-is-created 2024-06-06 16:29:54 -07:00
Sean Hatfield
1b8386b079
[FIX] ChromaDB namespace normalization (#1625)
* chromadb namespace normalization

* update normalization function with more clarity

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-06-06 15:38:05 -07:00
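The ChromaDB connector change itself is not among the files shown in this compare, so the exact normalization logic is not visible here. As a rough, hypothetical illustration only: normalizing a namespace for Chroma generally means coercing the workspace slug into Chroma's collection-name rules (3-63 characters, alphanumeric start and end, no consecutive dots):

// Hypothetical sketch - not the function shipped in this commit.
function normalizeChromaNamespace(namespace) {
  let name = String(namespace)
    .toLowerCase()
    .replace(/[^a-z0-9._-]/g, "-")            // drop characters Chroma rejects
    .replace(/\.\.+/g, ".")                   // collapse consecutive dots
    .replace(/^[^a-z0-9]+|[^a-z0-9]+$/g, ""); // must start/end alphanumeric
  while (name.length < 3) name += "0";        // enforce the 3-character minimum
  return name.slice(0, 63);                   // enforce the 63-character maximum
}
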
Timothy Carambat
05e40a0af4
Fix typo manageworkspace (#1624)
* chore: fix typo of ManageWorkspace

* fix typo of typo fixes

---------

Co-authored-by: jazelly <xzha4350@gmail.com>
2024-06-06 13:27:44 -07:00
Anush
771889ad7f
[FIX] Incorrect vectors count with Qdrant (#1561)
Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2024-06-06 13:18:01 -07:00
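Only the @qdrant/js-client-rest bump to ^1.9.0 appears in the files below; the Qdrant connector change itself is not shown. For orientation only, the usual way to get an exact vector count with the Qdrant JS client is a count query with exact: true, since a collection's reported vectors_count can diverge from the actual number of stored points (names here are illustrative, not the shipped code):

const { QdrantClient } = require("@qdrant/js-client-rest");

// Returns the exact number of points stored in a collection.
async function exactNamespaceCount(client, namespace) {
  const { count } = await client.count(namespace, { exact: true });
  return count;
}
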
Sean Hatfield
c139b09148
[FIX] Login screen image ui bug fix + fix footer icons when overflowing (#1602)
* login screen image ui bug fix + fixed footer icons

* fix uneven top/botton padding on footer

* patch Sidebar UI in mobile

---------

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
2024-06-06 13:12:39 -07:00
Sean Hatfield
26c220503c
[FEAT] Edit message button (#1392)
* WIP edit message feature

* WIP edit message

* WIP editing messages feature

* Fix PFPs
TODO: Fix default user profile image
Add User and Assistant workspace response

* unset PFP changes for later PR

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-06-06 12:56:11 -07:00
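As the EditMessage file added further down shows, the edit toggle is wired through a window-level CustomEvent rather than shared React state: the pencil button broadcasts which chat and role should enter edit mode, and each message only reacts when the event targets it. The core of that pattern, simplified from the diff:

const EDIT_EVENT = "toggle-message-edit";

// The edit (pencil) action broadcasts which message should toggle edit mode.
function requestEdit(chatId, role) {
  window.dispatchEvent(new CustomEvent(EDIT_EVENT, { detail: { chatId, role } }));
}

// Each rendered message subscribes and toggles only when the event targets it.
function listenForEdit(chatId, role, onToggle) {
  window.addEventListener(EDIT_EVENT, (e) => {
    if (e.detail.chatId === chatId && e.detail.role === role) onToggle();
  });
}
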
Timothy Carambat
98cef508a6
Feature/devcontv2 (#1622)
* Updated apt-packages source for devcontainer

Switched the devcontainer's package source to a different repository to
align with updated dependencies and package availability. The previous
source from 'rocker-org' is replaced with 'devcontainers-contrib', which
may offer more recent or relevant development tools.

* Subject: Centralize prettier ignores and refine config

Body:
Centralized all prettier ignore rules by removing individual
`.prettierignore` files in subprojects and updating the root
`.prettierignore` to include previously ignored patterns, ensuring
consistency across the workspace. Additionally, the prettier
configuration was refined by making the file pattern for `.config.js`
files consistent and adjusting quote styles for better readability. All
lint scripts across the project were updated to respect the centralized
ignore path, enhancing maintainability.

The consolidation simplifies the process of managing ignore rules as the
project scales, ensuring developers can focus on writing code without
worrying about divergent formatting standards. These changes also align
with introducing comprehensive linting across multiple environments to
keep the codebase clean and consistent.

This adjustment is a foundational step towards a more streamlined and
unified code base, making it easier for new contributors to adhere to
established coding standards and reducing the cognitive load associated
with managing multiple configuration files across the project.

* unset package json changes

---------

Co-authored-by: Francisco Bischoff <franzbischoff@gmail.com>
Co-authored-by: Francisco Bischoff <984592+franzbischoff@users.noreply.github.com>
2024-06-06 12:50:42 -07:00
Sean Hatfield
d29292ebd2
[FEAT] Add LiteLLM embedding provider support (#1579)
* add liteLLM embedding provider support

* update tooltip id

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-06-06 12:43:34 -07:00
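As the new LiteLLMEmbedder at the bottom of this compare shows, the provider talks to LiteLLM through its OpenAI-compatible API, configured via LITE_LLM_BASE_PATH, LITE_LLM_API_KEY, and EMBEDDING_MODEL_PREF. A minimal standalone call sketching the same idea (values are the placeholders from the .env examples below, not real credentials):

const { OpenAI } = require("openai");

const client = new OpenAI({
  baseURL: process.env.LITE_LLM_BASE_PATH || "http://127.0.0.1:4000",
  apiKey: process.env.LITE_LLM_API_KEY || "sk-123abc", // LiteLLM may not require a key
});

// Embed an array of strings through the LiteLLM proxy.
async function embedTexts(texts) {
  const res = await client.embeddings.create({
    model: process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002",
    input: texts,
  });
  return res.data.map((d) => d.embedding);
}
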
timothycarambat
5578e567ce move translated READMEs into subfolder 2024-06-06 12:15:45 -07:00
Ikko Eltociear Ashimine
dfcf32e9c0
docs: add Japanese README (#1574)
* docs: add Japanese README

* docs: update README.md
2024-06-06 12:13:15 -07:00
69 changed files with 1238 additions and 207 deletions

View File

@ -22,7 +22,7 @@
// Terraform support
"ghcr.io/devcontainers/features/terraform:1": {},
// Just a wrap to install needed packages
"ghcr.io/rocker-org/devcontainer-features/apt-packages:1": {
"ghcr.io/devcontainers-contrib/features/apt-packages:1": {
// Dependencies copied from ../docker/Dockerfile plus some dev stuff
"packages": [
"build-essential",

View File

@ -10,3 +10,7 @@ frontend/bundleinspector.html
#server
server/swagger/openapi.json
#embed
**/static/**
embed/src/utils/chat/hljs.js

View File

@ -17,7 +17,7 @@
}
},
{
"files": "*.config.js",
"files": ["*.config.js"],
"options": {
"semi": false,
"parser": "flow",

View File

@ -29,7 +29,7 @@
</p>
<p align="center">
<b>English</b> · <a href='/README.zh-CN.md'>简体中文</a>
<b>English</b> · <a href='./locales/README.zh-CN.md'>简体中文</a> · <a href='./locales/README.ja-JP.md'>日本語</a>
</p>
<p align="center">

View File

@ -12,7 +12,7 @@
"scripts": {
"dev": "NODE_ENV=development nodemon --ignore hotdir --ignore storage --trace-warnings index.js",
"start": "NODE_ENV=production node index.js",
"lint": "yarn prettier --write ./processSingleFile ./processLink ./utils index.js"
"lint": "yarn prettier --ignore-path ../.prettierignore --write ./processSingleFile ./processLink ./utils index.js"
},
"dependencies": {
"@googleapis/youtube": "^9.0.0",

View File

@ -128,6 +128,12 @@ GID='1000'
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
# EMBEDDING_ENGINE='litellm'
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
# LITE_LLM_API_KEY='sk-123abc'
###########################################
######## Vector Database Selection ########
###########################################

View File

@ -1,9 +0,0 @@
# defaults
**/.git
**/.svn
**/.hg
**/node_modules
**/dist
**/static/**
src/utils/chat/hljs.js

View File

@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
"@/*": [
"./src/*"
],
}
}
"@/*": ["./src/*"],
},
},
}

View File

@ -1,6 +1,7 @@
{
"name": "anythingllm-embedded-chat",
"private": false,
"license": "MIT",
"type": "module",
"scripts": {
"dev": "nodemon -e js,jsx,css --watch src --exec \"yarn run dev:preview\"",
@ -8,7 +9,7 @@
"dev:build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js",
"build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js && npx terser --compress -o dist/anythingllm-chat-widget.min.js -- dist/anythingllm-chat-widget.js",
"build:publish": "yarn build && mkdir -p ../frontend/public/embed && cp -r dist/anythingllm-chat-widget.min.js ../frontend/public/embed/anythingllm-chat-widget.min.js",
"lint": "yarn prettier --write ./src"
"lint": "yarn prettier --ignore-path ../.prettierignore --write ./src"
},
"dependencies": {
"@microsoft/fetch-event-source": "^2.0.1",

View File

@ -38,7 +38,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
/@phosphor-icons\/react\/dist\/ssr/,
/@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {
@ -51,7 +51,7 @@ export default defineConfig({
emptyOutDir: true,
inlineDynamicImports: true,
assetsDir: "",
sourcemap: 'inline',
sourcemap: "inline"
},
optimizeDeps: {
esbuildOptions: {
@ -60,5 +60,5 @@ export default defineConfig({
},
plugins: []
}
},
}
})

View File

@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
"@/*": [
"./src/*"
],
"@/*": ["./src/*"]
}
}
}

View File

@ -7,7 +7,7 @@
"start": "vite --open",
"dev": "NODE_ENV=development vite --debug --host=0.0.0.0",
"build": "vite build",
"lint": "yarn prettier --write ./src",
"lint": "yarn prettier --ignore-path ../.prettierignore --write ./src",
"preview": "vite preview"
},
"dependencies": {

View File

@ -1,5 +1,5 @@
import React from "react";
import Jazzicon from "../UserIcon";
import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
@ -11,8 +11,7 @@ export default function ChatBubble({ message, type, popMsg }) {
<div className={`flex justify-center items-end w-full ${backgroundColor}`}>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="flex gap-x-5">
<Jazzicon
size={36}
<UserIcon
user={{ uid: isUser ? userFromStorage()?.username : "system" }}
role={type}
/>

View File

@ -13,7 +13,7 @@ import { isMobile } from "react-device-detect";
import { SidebarMobileHeader } from "../Sidebar";
import ChatBubble from "../ChatBubble";
import System from "@/models/system";
import Jazzicon from "../UserIcon";
import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import useUser from "@/hooks/useUser";
@ -46,7 +46,7 @@ export default function DefaultChatContainer() {
className={`pt-10 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@ -70,7 +70,7 @@ export default function DefaultChatContainer() {
className={`pb-4 pt-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@ -93,7 +93,7 @@ export default function DefaultChatContainer() {
className={`pt-2 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@ -127,8 +127,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
size={36}
<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@ -151,7 +150,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@ -188,8 +187,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
size={36}
<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@ -213,7 +211,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@ -251,8 +249,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
size={36}
<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@ -275,7 +272,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}

View File

@ -0,0 +1,186 @@
import { useEffect, useState } from "react";
import System from "@/models/system";
import { Warning } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
export default function LiteLLMOptions({ settings }) {
const [basePathValue, setBasePathValue] = useState(settings?.LiteLLMBasePath);
const [basePath, setBasePath] = useState(settings?.LiteLLMBasePath);
const [apiKeyValue, setApiKeyValue] = useState(settings?.LiteLLMAPIKey);
const [apiKey, setApiKey] = useState(settings?.LiteLLMAPIKey);
return (
<div className="w-full flex flex-col gap-y-4">
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Base URL
</label>
<input
type="url"
name="LiteLLMBasePath"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://127.0.0.1:4000"
defaultValue={settings?.LiteLLMBasePath}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
/>
</div>
<LiteLLMModelSelection
settings={settings}
basePath={basePath}
apiKey={apiKey}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="8192"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
API Key <p className="!text-xs !italic !font-thin">optional</p>
</label>
</div>
<input
type="password"
name="LiteLLMAPIKey"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LiteLLMAPIKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</div>
);
}
function LiteLLMModelSelection({ settings, basePath = null, apiKey = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);
useEffect(() => {
async function findCustomModels() {
if (!basePath) {
setCustomModels([]);
setLoading(false);
return;
}
setLoading(true);
const { models } = await System.customModels(
"litellm",
typeof apiKey === "boolean" ? null : apiKey,
basePath
);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath, apiKey]);
if (loading || customModels.length == 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Selection
</label>
<select
name="EmbeddingModelPref"
disabled={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
{basePath?.includes("/v1")
? "-- loading available models --"
: "-- waiting for URL --"}
</option>
</select>
</div>
);
}
return (
<div className="flex flex-col w-60">
<div className="flex items-center">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Selection
</label>
<EmbeddingModelTooltip />
</div>
<select
name="EmbeddingModelPref"
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{customModels.length > 0 && (
<optgroup label="Your loaded models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={settings.EmbeddingModelPref === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
</select>
</div>
);
}
function EmbeddingModelTooltip() {
return (
<div className="flex items-center justify-center -mt-3 ml-1">
<Warning
size={14}
className="ml-1 text-orange-500 cursor-pointer"
data-tooltip-id="model-tooltip"
data-tooltip-place="right"
/>
<Tooltip
delayHide={300}
id="model-tooltip"
className="max-w-xs"
clickable={true}
>
<p className="text-sm">
Be sure to select a valid embedding model. Chat models are not
embedding models. See{" "}
<a
href="https://litellm.vercel.app/docs/embedding/supported_embedding"
target="_blank"
rel="noreferrer"
className="underline"
>
this page
</a>{" "}
for more information.
</p>
</Tooltip>
</div>
);
}

View File

@ -33,12 +33,12 @@ export default function PasswordModal({ mode = "single" }) {
alt="login illustration"
/>
</div>
<div className="flex flex-col items-center justify-center h-full w-full md:w-1/2 z-50 relative">
<div className="flex flex-col items-center justify-center h-full w-full md:w-1/2 z-50 relative -mt-20">
<img
src={loginLogo}
alt="Logo"
className={`hidden md:flex rounded-2xl w-fit m-4 z-30 ${
mode === "single" ? "md:top-[170px]" : "md:top-36"
className={`hidden relative md:flex rounded-2xl w-fit m-4 z-30 ${
mode === "single" ? "md:top-2" : "md:top-12"
} absolute max-h-[65px] md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)]`}
style={{ objectFit: "contain" }}
/>

View File

@ -111,10 +111,12 @@ export default function SettingsSidebar() {
{/* Primary Body */}
<div className="h-full flex flex-col w-full justify-between pt-4 overflow-y-scroll no-scroll">
<div className="h-auto md:sidebar-items md:dark:sidebar-items">
<div className=" flex flex-col gap-y-4 pb-8 overflow-y-scroll no-scroll">
<div className="flex flex-col gap-y-4 pb-[60px] overflow-y-scroll no-scroll">
<SidebarOptions user={user} />
</div>
</div>
</div>
<div className="absolute bottom-2 left-0 right-0 pt-2 bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
<Footer />
</div>
</div>
@ -139,22 +141,21 @@ export default function SettingsSidebar() {
</Link>
<div
ref={sidebarRef}
style={{ height: "calc(100% - 76px)" }}
className="transition-all duration-500 relative m-[16px] rounded-[16px] bg-sidebar border-2 border-outline min-w-[250px] p-[10px]"
className="transition-all duration-500 relative m-[16px] rounded-[16px] bg-sidebar border-2 border-outline min-w-[250px] p-[10px] h-[calc(100%-76px)]"
>
<div className="w-full h-full flex flex-col overflow-x-hidden items-between min-w-[235px]">
<div className="text-white text-opacity-60 text-sm font-medium uppercase mt-[4px] mb-0 ml-2">
Instance Settings
</div>
<div className="relative h-full flex flex-col w-full justify-between pt-[10px] overflow-y-scroll no-scroll">
<div className="relative h-[calc(100%-60px)] flex flex-col w-full justify-between pt-[10px] overflow-y-scroll no-scroll">
<div className="h-auto sidebar-items">
<div className="flex flex-col gap-y-2 h-full pb-8 overflow-y-scroll no-scroll">
<div className="flex flex-col gap-y-2 pb-[60px] overflow-y-scroll no-scroll">
<SidebarOptions user={user} />
</div>
</div>
<div className="mb-2">
<Footer />
</div>
<div className="absolute bottom-0 left-0 right-0 pt-4 pb-3 rounded-b-[16px] bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md z-50">
<Footer />
</div>
</div>
</div>

View File

@ -27,7 +27,6 @@ export default function ThreadItem({
const { slug } = useParams();
const optionsContainer = useRef(null);
const [showOptions, setShowOptions] = useState(false);
const [name, setName] = useState(thread.name);
const linkTo = !thread.slug
? paths.workspace.chat(slug)
: paths.workspace.thread(slug, thread.slug);
@ -97,7 +96,7 @@ export default function ThreadItem({
isActive ? "font-medium text-white" : "text-slate-400"
}`}
>
{truncate(name, 25)}
{truncate(thread.name, 25)}
</p>
</a>
)}
@ -133,7 +132,6 @@ export default function ThreadItem({
workspace={workspace}
thread={thread}
onRemove={onRemove}
onRename={setName}
close={() => setShowOptions(false)}
/>
)}
@ -144,14 +142,7 @@ export default function ThreadItem({
);
}
function OptionsMenu({
containerRef,
workspace,
thread,
onRename,
onRemove,
close,
}) {
function OptionsMenu({ containerRef, workspace, thread, onRemove, close }) {
const menuRef = useRef(null);
// Ref menu options
@ -208,7 +199,7 @@ function OptionsMenu({
return;
}
onRename(name);
thread.name = name;
close();
};

View File

@ -12,6 +12,26 @@ export default function ThreadContainer({ workspace }) {
const [loading, setLoading] = useState(true);
const [ctrlPressed, setCtrlPressed] = useState(false);
useEffect(() => {
const chatHandler = (event) => {
const { threadSlug, newName } = event.detail;
setThreads((prevThreads) =>
prevThreads.map((thread) => {
if (thread.slug === threadSlug) {
return { ...thread, name: newName };
}
return thread;
})
);
};
window.addEventListener("renameThread", chatHandler);
return () => {
window.removeEventListener("renameThread", chatHandler);
};
}, []);
useEffect(() => {
async function fetchThreads() {
if (!workspace.slug) return;

View File

@ -4,7 +4,7 @@ import "react-loading-skeleton/dist/skeleton.css";
import Workspace from "@/models/workspace";
import ManageWorkspace, {
useManageWorkspaceModal,
} from "../../Modals/MangeWorkspace";
} from "../../Modals/ManageWorkspace";
import paths from "@/utils/paths";
import { useParams } from "react-router-dom";
import { GearSix, SquaresFour, UploadSimple } from "@phosphor-icons/react";

View File

@ -32,18 +32,17 @@ export default function Sidebar() {
<img
src={logo}
alt="Logo"
className="rounded max-h-[24px]"
style={{ objectFit: "contain" }}
className="rounded max-h-[24px] object-contain"
/>
</Link>
<div
ref={sidebarRef}
style={{ height: "calc(100% - 76px)" }}
className="relative m-[16px] rounded-[16px] bg-sidebar border-2 border-outline min-w-[250px] p-[10px]"
className="relative m-[16px] rounded-[16px] bg-sidebar border-2 border-outline min-w-[250px] p-[10px] h-[calc(100%-76px)]"
>
<div className="flex flex-col h-full overflow-x-hidden">
<div className="flex-grow flex flex-col min-w-[235px]">
<div className="flex flex-col gap-y-2 pb-8 overflow-y-scroll no-scroll">
<div className="relative h-[calc(100%-60px)] flex flex-col w-full justify-between pt-[10px] overflow-y-scroll no-scroll">
<div className="flex flex-col gap-y-2 pb-[60px] overflow-y-scroll no-scroll">
<div className="flex gap-x-2 items-center justify-between">
{(!user || user?.role !== "default") && (
<button
@ -59,7 +58,8 @@ export default function Sidebar() {
</div>
<ActiveWorkspaces />
</div>
<div className="flex flex-col flex-grow justify-end mb-2">
</div>
<div className="absolute bottom-0 left-0 right-0 pt-4 pb-3 rounded-b-[16px] bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md z-30">
<Footer />
</div>
</div>
@ -156,12 +156,9 @@ export function SidebarMobileHeader() {
</div>
{/* Primary Body */}
<div className="h-full flex flex-col w-full justify-between pt-4 overflow-y-hidden ">
<div className="h-full flex flex-col w-full justify-between pt-4 ">
<div className="h-auto md:sidebar-items">
<div
style={{ height: "calc(100vw - -3rem)" }}
className=" flex flex-col gap-y-4 pb-8 overflow-y-scroll no-scroll"
>
<div className=" flex flex-col gap-y-4 overflow-y-scroll no-scroll pb-[60px]">
<div className="flex gap-x-2 items-center justify-between">
{(!user || user?.role !== "default") && (
<button
@ -178,7 +175,7 @@ export function SidebarMobileHeader() {
<ActiveWorkspaces />
</div>
</div>
<div>
<div className="z-99 absolute bottom-0 left-0 right-0 pt-2 pb-6 rounded-br-[26px] bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
<Footer />
</div>
</div>

View File

@ -2,7 +2,7 @@ import React, { useRef, useEffect } from "react";
import JAZZ from "@metamask/jazzicon";
import usePfp from "../../hooks/usePfp";
export default function Jazzicon({ size = 10, user, role }) {
export default function UserIcon({ size = 36, user, role }) {
const { pfp } = usePfp();
const divRef = useRef(null);
const seed = user?.uid

Binary file not shown (new image, 1.5 KiB).

View File

@ -0,0 +1,126 @@
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import { Pencil } from "@phosphor-icons/react";
import { useState, useEffect, useRef } from "react";
import { Tooltip } from "react-tooltip";
const EDIT_EVENT = "toggle-message-edit";
export function useEditMessage({ chatId, role }) {
const [isEditing, setIsEditing] = useState(false);
function onEditEvent(e) {
if (e.detail.chatId !== chatId || e.detail.role !== role) {
setIsEditing(false);
return false;
}
setIsEditing((prev) => !prev);
}
useEffect(() => {
function listenForEdits() {
if (!chatId || !role) return;
window.addEventListener(EDIT_EVENT, onEditEvent);
}
listenForEdits();
return () => {
window.removeEventListener(EDIT_EVENT, onEditEvent);
};
}, [chatId, role]);
return { isEditing, setIsEditing };
}
export function EditMessageAction({ chatId = null, role, isEditing }) {
function handleEditClick() {
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
}
if (!chatId || isEditing) return null;
return (
<div
className={`mt-3 relative ${
role === "user" && !isEditing ? "opacity-0" : ""
} group-hover:opacity-100 transition-all duration-300`}
>
<button
onClick={handleEditClick}
data-tooltip-id="edit-input-text"
data-tooltip-content={`Edit ${
role === "user" ? "Prompt" : "Response"
} `}
className="border-none text-zinc-300"
aria-label={`Edit ${role === "user" ? "Prompt" : "Response"}`}
>
<Pencil size={18} className="mb-1" />
</button>
<Tooltip
id="edit-input-text"
place="bottom"
delayShow={300}
className="tooltip !text-xs"
/>
</div>
);
}
export function EditMessageForm({
role,
chatId,
message,
adjustTextArea,
saveChanges,
}) {
const formRef = useRef(null);
function handleSaveMessage(e) {
e.preventDefault();
const form = new FormData(e.target);
const editedMessage = form.get("editedMessage");
saveChanges({ editedMessage, chatId, role });
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
}
function cancelEdits() {
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
return false;
}
useEffect(() => {
if (!formRef || !formRef.current) return;
formRef.current.focus();
adjustTextArea({ target: formRef.current });
}, [formRef]);
return (
<form onSubmit={handleSaveMessage} className="flex flex-col w-full">
<textarea
ref={formRef}
name="editedMessage"
className={`w-full rounded ${
role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
} border border-white/20 active:outline-none focus:outline-none focus:ring-0 pr-16 pl-1.5 pt-1.5 resize-y`}
defaultValue={message}
onChange={adjustTextArea}
/>
<div className="mt-3 flex justify-center">
<button
type="submit"
className="px-2 py-1 bg-gray-200 text-gray-700 font-medium rounded-md mr-2 hover:bg-gray-300 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2"
>
Save & Submit
</button>
<button
type="button"
className="px-2 py-1 bg-historical-msg-system text-white font-medium rounded-md hover:bg-historical-msg-user/90 focus:outline-none focus:ring-2 focus:ring-gray-400 focus:ring-offset-2"
onClick={cancelEdits}
>
Cancel
</button>
</div>
</form>
);
}

View File

@ -2,14 +2,15 @@ import React, { memo, useState } from "react";
import useCopyText from "@/hooks/useCopyText";
import {
Check,
ClipboardText,
ThumbsUp,
ThumbsDown,
ArrowsClockwise,
Copy,
} from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import Workspace from "@/models/workspace";
import TTSMessage from "./TTSButton";
import { EditMessageAction } from "./EditMessage";
const Actions = ({
message,
@ -18,9 +19,10 @@ const Actions = ({
slug,
isLastMessage,
regenerateMessage,
isEditing,
role,
}) => {
const [selectedFeedback, setSelectedFeedback] = useState(feedbackScore);
const handleFeedback = async (newFeedback) => {
const updatedFeedback =
selectedFeedback === newFeedback ? null : newFeedback;
@ -32,14 +34,15 @@ const Actions = ({
<div className="flex w-full justify-between items-center">
<div className="flex justify-start items-center gap-x-4">
<CopyMessage message={message} />
{isLastMessage && (
<EditMessageAction chatId={chatId} role={role} isEditing={isEditing} />
{isLastMessage && !isEditing && (
<RegenerateMessage
regenerateMessage={regenerateMessage}
slug={slug}
chatId={chatId}
/>
)}
{chatId && (
{chatId && role !== "user" && !isEditing && (
<>
<FeedbackButton
isSelected={selectedFeedback === true}
@ -111,7 +114,7 @@ function CopyMessage({ message }) {
{copied ? (
<Check size={18} className="mb-1" />
) : (
<ClipboardText size={18} className="mb-1" />
<Copy size={18} className="mb-1" />
)}
</button>
<Tooltip

View File

@ -1,6 +1,6 @@
import React, { memo } from "react";
import { Warning } from "@phosphor-icons/react";
import Jazzicon from "../../../../UserIcon";
import UserIcon from "../../../../UserIcon";
import Actions from "./Actions";
import renderMarkdown from "@/utils/chat/markdown";
import { userFromStorage } from "@/utils/request";
@ -8,6 +8,7 @@ import Citations from "../Citation";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import { v4 } from "uuid";
import createDOMPurify from "dompurify";
import { EditMessageForm, useEditMessage } from "./Actions/EditMessage";
const DOMPurify = createDOMPurify(window);
const HistoricalMessage = ({
@ -21,7 +22,16 @@ const HistoricalMessage = ({
chatId = null,
isLastMessage = false,
regenerateMessage,
saveEditedMessage,
}) => {
const { isEditing } = useEditMessage({ chatId, role });
const adjustTextArea = (event) => {
const element = event.target;
element.style.height = "auto";
element.style.height = element.scrollHeight + "px";
};
if (!!error) {
return (
<div
key={uuid}
@ -29,12 +39,11 @@ const HistoricalMessage = ({
role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
}`}
>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="py-8 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="flex gap-x-5">
<ProfileImage role={role} workspace={workspace} />
{error ? (
<div className="p-2 rounded-lg bg-red-50 text-red-500">
<span className={`inline-block `}>
<span className="inline-block">
<Warning className="h-4 w-4 mb-1 inline-block" /> Could not
respond to message.
</span>
@ -42,6 +51,30 @@ const HistoricalMessage = ({
{error}
</p>
</div>
</div>
</div>
</div>
);
}
return (
<div
key={uuid}
className={`flex justify-center items-end w-full group ${
role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
}`}
>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="flex gap-x-5">
<ProfileImage role={role} workspace={workspace} />
{isEditing ? (
<EditMessageForm
role={role}
chatId={chatId}
message={message}
adjustTextArea={adjustTextArea}
saveChanges={saveEditedMessage}
/>
) : (
<span
className={`flex flex-col gap-y-1`}
@ -51,7 +84,6 @@ const HistoricalMessage = ({
/>
)}
</div>
{role === "assistant" && !error && (
<div className="flex gap-x-5">
<div className="relative w-[35px] h-[35px] rounded-full flex-shrink-0 overflow-hidden" />
<Actions
@ -61,9 +93,10 @@ const HistoricalMessage = ({
slug={workspace?.slug}
isLastMessage={isLastMessage}
regenerateMessage={regenerateMessage}
isEditing={isEditing}
role={role}
/>
</div>
)}
{role === "assistant" && <Citations sources={sources} />}
</div>
</div>
@ -84,8 +117,7 @@ function ProfileImage({ role, workspace }) {
}
return (
<Jazzicon
size={36}
<UserIcon
user={{
uid: role === "user" ? userFromStorage()?.username : workspace.slug,
}}

View File

@ -1,6 +1,6 @@
import { memo } from "react";
import { Warning } from "@phosphor-icons/react";
import Jazzicon from "../../../../UserIcon";
import UserIcon from "../../../../UserIcon";
import renderMarkdown from "@/utils/chat/markdown";
import Citations from "../Citation";
@ -84,7 +84,7 @@ export function WorkspaceProfileImage({ workspace }) {
);
}
return <Jazzicon size={36} user={{ uid: workspace.slug }} role="assistant" />;
return <UserIcon user={{ uid: workspace.slug }} role="assistant" />;
}
export default memo(PromptReply);

View File

@ -1,20 +1,24 @@
import HistoricalMessage from "./HistoricalMessage";
import PromptReply from "./PromptReply";
import { useEffect, useRef, useState } from "react";
import { useManageWorkspaceModal } from "../../../Modals/MangeWorkspace";
import ManageWorkspace from "../../../Modals/MangeWorkspace";
import { useManageWorkspaceModal } from "../../../Modals/ManageWorkspace";
import ManageWorkspace from "../../../Modals/ManageWorkspace";
import { ArrowDown } from "@phosphor-icons/react";
import debounce from "lodash.debounce";
import useUser from "@/hooks/useUser";
import Chartable from "./Chartable";
import Workspace from "@/models/workspace";
import { useParams } from "react-router-dom";
export default function ChatHistory({
history = [],
workspace,
sendCommand,
updateHistory,
regenerateAssistantMessage,
}) {
const { user } = useUser();
const { threadSlug = null } = useParams();
const { showing, showModal, hideModal } = useManageWorkspaceModal();
const [isAtBottom, setIsAtBottom] = useState(true);
const chatHistoryRef = useRef(null);
@ -87,6 +91,46 @@ export default function ChatHistory({
sendCommand(`${heading} ${message}`, true);
};
const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
if (!editedMessage) return; // Don't save empty edits.
// if the edit was a user message, we will auto-regenerate the response and delete all
// messages post modified message
if (role === "user") {
// remove all messages after the edited message
// technically there are two chatIds per-message pair, this will split the first.
const updatedHistory = history.slice(
0,
history.findIndex((msg) => msg.chatId === chatId) + 1
);
// update last message in history to edited message
updatedHistory[updatedHistory.length - 1].content = editedMessage;
// remove all edited messages after the edited message in backend
await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
sendCommand(editedMessage, true, updatedHistory);
return;
}
// If role is an assistant we simply want to update the comment and save on the backend as an edit.
if (role === "assistant") {
const updatedHistory = [...history];
const targetIdx = history.findIndex(
(msg) => msg.chatId === chatId && msg.role === role
);
if (targetIdx < 0) return;
updatedHistory[targetIdx].content = editedMessage;
updateHistory(updatedHistory);
await Workspace.updateChatResponse(
workspace.slug,
threadSlug,
chatId,
editedMessage
);
return;
}
};
if (history.length === 0) {
return (
<div className="flex flex-col h-full md:mt-0 pb-44 md:pb-40 w-full justify-end items-center">
@ -172,6 +216,7 @@ export default function ChatHistory({
error={props.error}
regenerateMessage={regenerateAssistantMessage}
isLastMessage={isLastBotReply}
saveEditedMessage={saveEditedMessage}
/>
);
})}

View File

@ -12,6 +12,7 @@ import handleSocketResponse, {
AGENT_SESSION_END,
AGENT_SESSION_START,
} from "@/utils/chat/agent";
import truncate from "truncate";
export default function ChatContainer({ workspace, knownHistory = [] }) {
const { threadSlug = null } = useParams();
@ -39,6 +40,18 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
event.preventDefault();
if (!message || message === "") return false;
// If first message and it is a thread
// and message is not blank/whitespace,
// then send event to rename the thread
if (threadSlug && chatHistory.length === 0 && message.trim().length > 0) {
const truncatedName = truncate(message, 22);
window.dispatchEvent(
new CustomEvent("renameThread", {
detail: { threadSlug, newName: truncatedName },
})
);
}
const prevChatHistory = [
...chatHistory,
{ content: message, role: "user" },
@ -240,6 +253,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
history={chatHistory}
workspace={workspace}
sendCommand={sendCommand}
updateHistory={setChatHistory}
regenerateAssistantMessage={regenerateAssistantMessage}
/>
<PromptInput

View File

@ -22,6 +22,7 @@ export default function WorkspaceChat({ loading, workspace }) {
const chatHistory = threadSlug
? await Workspace.threads.chatHistory(workspace.slug, threadSlug)
: await Workspace.chatHistory(workspace.slug);
setHistory(chatHistory);
setLoadingHistory(false);
}

View File

@ -90,6 +90,26 @@ const Workspace = {
return false;
});
},
deleteEditedChats: async function (slug = "", threadSlug = "", startingId) {
if (!!threadSlug)
return this.threads._deleteEditedChats(slug, threadSlug, startingId);
return this._deleteEditedChats(slug, startingId);
},
updateChatResponse: async function (
slug = "",
threadSlug = "",
chatId,
newText
) {
if (!!threadSlug)
return this.threads._updateChatResponse(
slug,
threadSlug,
chatId,
newText
);
return this._updateChatResponse(slug, chatId, newText);
},
streamChat: async function ({ slug }, message, handleChat) {
const ctrl = new AbortController();
@ -287,8 +307,6 @@ const Workspace = {
return null;
});
},
threads: WorkspaceThread,
uploadPfp: async function (formData, slug) {
return await fetch(`${API_BASE}/workspace/${slug}/upload-pfp`, {
method: "POST",
@ -336,6 +354,37 @@ const Workspace = {
return { success: false, error: e.message };
});
},
_updateChatResponse: async function (slug = "", chatId, newText) {
return await fetch(`${API_BASE}/workspace/${slug}/update-chat`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ chatId, newText }),
})
.then((res) => {
if (res.ok) return true;
throw new Error("Failed to update chat.");
})
.catch((e) => {
console.log(e);
return false;
});
},
_deleteEditedChats: async function (slug = "", startingId) {
return await fetch(`${API_BASE}/workspace/${slug}/delete-edited-chats`, {
method: "DELETE",
headers: baseHeaders(),
body: JSON.stringify({ startingId }),
})
.then((res) => {
if (res.ok) return true;
throw new Error("Failed to delete chats.");
})
.catch((e) => {
console.log(e);
return false;
});
},
threads: WorkspaceThread,
};
export default Workspace;

View File

@ -163,6 +163,51 @@ const WorkspaceThread = {
}
);
},
_deleteEditedChats: async function (
workspaceSlug = "",
threadSlug = "",
startingId
) {
return await fetch(
`${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/delete-edited-chats`,
{
method: "DELETE",
headers: baseHeaders(),
body: JSON.stringify({ startingId }),
}
)
.then((res) => {
if (res.ok) return true;
throw new Error("Failed to delete chats.");
})
.catch((e) => {
console.log(e);
return false;
});
},
_updateChatResponse: async function (
workspaceSlug = "",
threadSlug = "",
chatId,
newText
) {
return await fetch(
`${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/update-chat`,
{
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ chatId, newText }),
}
)
.then((res) => {
if (res.ok) return true;
throw new Error("Failed to update chat.");
})
.catch((e) => {
console.log(e);
return false;
});
},
};
export default WorkspaceThread;

View File

@ -11,6 +11,7 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PreLoader from "@/components/Preloader";
import ChangeWarningModal from "@/components/ChangeWarning";
@ -22,6 +23,7 @@ import OllamaEmbeddingOptions from "@/components/EmbeddingSelection/OllamaOption
import LMStudioEmbeddingOptions from "@/components/EmbeddingSelection/LMStudioOptions";
import CohereEmbeddingOptions from "@/components/EmbeddingSelection/CohereOptions";
import VoyageAiOptions from "@/components/EmbeddingSelection/VoyageAiOptions";
import LiteLLMOptions from "@/components/EmbeddingSelection/LiteLLMOptions";
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@ -88,6 +90,13 @@ const EMBEDDERS = [
options: (settings) => <VoyageAiOptions settings={settings} />,
description: "Run powerful embedding models from Voyage AI.",
},
{
name: "LiteLLM",
value: "litellm",
logo: LiteLLMLogo,
options: (settings) => <LiteLLMOptions settings={settings} />,
description: "Run powerful embedding models from LiteLLM.",
},
];
export default function GeneralEmbeddingPreference() {

View File

@ -301,6 +301,13 @@ export const EMBEDDING_ENGINE_PRIVACY = {
],
logo: VoyageAiLogo,
},
litellm: {
name: "LiteLLM",
description: [
"Your document text is only accessible on the server running LiteLLM and to the providers you configured in LiteLLM.",
],
logo: LiteLLMLogo,
},
};
export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {

View File

@ -108,13 +108,10 @@ export default function handleChat(
} else if (type === "finalizeResponseStream") {
const chatIdx = _chatHistory.findIndex((chat) => chat.uuid === uuid);
if (chatIdx !== -1) {
const existingHistory = { ..._chatHistory[chatIdx] };
const updatedHistory = {
...existingHistory,
chatId, // finalize response stream only has some specific keys for data. we are explicitly listing them here.
};
_chatHistory[chatIdx] = updatedHistory;
_chatHistory[chatIdx - 1] = { ..._chatHistory[chatIdx - 1], chatId }; // update prompt with chatID
_chatHistory[chatIdx] = { ..._chatHistory[chatIdx], chatId }; // update response with chatID
}
setChatHistory([..._chatHistory]);
setLoadingResponse(false);
} else if (type === "stopGeneration") {

View File

@ -51,7 +51,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
/@phosphor-icons\/react\/dist\/ssr/,
/@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {

locales/README.ja-JP.md (new file, 235 lines)
View File

@ -0,0 +1,235 @@
<a name="readme-top"></a>
<p align="center">
<a href="https://useanything.com"><img src="https://github.com/Mintplex-Labs/anything-llm/blob/master/images/wordmark.png?raw=true" alt="AnythingLLM logo"></a>
</p>
<div align='center'>
<a href="https://trendshift.io/repositories/2415" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2415" alt="Mintplex-Labs%2Fanything-llm | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</div>
<p align="center">
<b>AnythingLLM:</b> あなたが探していたオールインワンAIアプリ。<br />
ドキュメントとチャットし、AIエージェントを使用し、高度にカスタマイズ可能で、複数ユーザー対応、面倒な設定は不要です。
</p>
<p align="center">
<a href="https://discord.gg/6UyHPeGZAC" target="_blank">
<img src="https://img.shields.io/badge/chat-mintplex_labs-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAAIGNIUk0AAHomAACAhAAA+gAAAIDoAAB1MAAA6mAAADqYAAAXcJy6UTwAAAH1UExURQAAAP////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////r6+ubn5+7u7/3+/v39/enq6urq6/v7+97f39rb26eoqT1BQ0pOT4+Rkuzs7cnKykZKS0NHSHl8fdzd3ejo6UxPUUBDRdzc3RwgIh8jJSAkJm5xcvHx8aanqB4iJFBTVezt7V5hYlJVVuLj43p9fiImKCMnKZKUlaaoqSElJ21wcfT09O3u7uvr6zE0Nr6/wCUpK5qcnf7+/nh7fEdKTHx+f0tPUOTl5aipqiouMGtubz5CRDQ4OsTGxufn515hY7a3uH1/gXBydIOFhlVYWvX29qaoqCQoKs7Pz/Pz87/AwUtOUNfY2dHR0mhrbOvr7E5RUy8zNXR2d/f39+Xl5UZJSx0hIzQ3Odra2/z8/GlsbaGjpERHSezs7L/BwScrLTQ4Odna2zM3Obm7u3x/gKSmp9jZ2T1AQu/v71pdXkVISr2+vygsLiInKTg7PaOlpisvMcXGxzk8PldaXPLy8u7u7rm6u7S1tsDBwvj4+MPExbe4ueXm5s/Q0Kyf7ewAAAAodFJOUwAABClsrNjx/QM2l9/7lhmI6jTB/kA1GgKJN+nea6vy/MLZQYeVKK3rVA5tAAAAAWJLR0QB/wIt3gAAAAd0SU1FB+cKBAAmMZBHjXIAAAISSURBVDjLY2CAAkYmZhZWNnYODnY2VhZmJkYGVMDIycXNw6sBBbw8fFycyEoYGfkFBDVQgKAAPyMjQl5IWEQDDYgIC8FUMDKKsmlgAWyiEBWMjGJY5YEqxMAqGMWFNXAAYXGgAkYJSQ2cQFKCkYFRShq3AmkpRgYJbghbU0tbB0Tr6ukbgGhDI10gySfBwCwDUWBsYmpmDqQtLK2sbTQ0bO3sHYA8GWYGWWj4WTs6Obu4ami4OTm7exhqeHp5+4DCVJZBDmqdr7ufn3+ArkZgkJ+fU3CIRmgYWFiOARYGvo5OQUHhEUAFTkF+kVHRsLBgkIeyYmLjwoOc4hMSk5JTnINS06DC8gwcEEZ6RqZGlpOfc3ZObl5+gZ+TR2ERWFyBQQFMF5eklmqUpQb5+ReU61ZUOvkFVVXXQBSAraitq29o1GiKcfLzc29u0mjxBzq0tQ0kww5xZHtHUGeXhkZhdxBYgZ4d0LI6c4gjwd7siQQraOp1AivQ6CuAKZCDBBRQQQNQgUb/BGf3cqCCiZOcnCe3QQIKHNRTpk6bDgpZjRkzg3pBQTBrdtCcuZCgluAD0vPmL1gIdvSixUuWgqNs2YJ+DUhkEYxuggkGmOQUcckrioPTJCOXEnZ5JS5YslbGnuyVERlDDFvGEUPOWvwqaH6RVkHKeuDMK6SKnHlVhTgx8jeTmqy6Eij7K6nLqiGyPwChsa1MUrnq1wAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMy0xMC0wNFQwMDozODo0OSswMDowMB9V0a8AAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjMtMTAtMDRUMDA6Mzg6NDkrMDA6MDBuCGkTAAAAKHRFWHRkYXRlOnRpbWVzdGFtcAAyMDIzLTEwLTA0VDAwOjM4OjQ5KzAwOjAwOR1IzAAAAABJRU5ErkJggg==" alt="Discord">
</a> |
<a href="https://github.com/Mintplex-Labs/anything-llm/blob/master/LICENSE" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=white" alt="ライセンス">
</a> |
<a href="https://docs.useanything.com" target="_blank">
ドキュメント
</a> |
<a href="https://my.mintplexlabs.com/aio-checkout?product=anythingllm" target="_blank">
ホストされたインスタンス
</a>
</p>
<p align="center">
<a href='../README.md'>English</a> · <a href='./README.zh-CN.md'>简体中文</a> · <b>日本語</b>
</p>
<p align="center">
👉 デスクトップ用AnythingLLMMac、Windows、Linux対応<a href="https://useanything.com/download" target="_blank">今すぐダウンロード</a>
</p>
これは、任意のドキュメント、リソース、またはコンテンツの断片を、チャット中にLLMが参照として使用できるコンテキストに変換できるフルスタックアプリケーションです。このアプリケーションを使用すると、使用するLLMまたはベクトルデータベースを選択し、マルチユーザー管理と権限をサポートできます。
![チャット](https://github.com/Mintplex-Labs/anything-llm/assets/16845892/cfc5f47c-bd91-4067-986c-f3f49621a859)
<details>
<summary><kbd>デモを見る!</kbd></summary>
[![ビデオを見る](/images/youtube.png)](https://youtu.be/f95rGD9trL0)
</details>
### 製品概要
AnythingLLMは、市販のLLMや人気のあるオープンソースLLM、およびベクトルDBソリューションを使用して、妥協のないプライベートChatGPTを構築できるフルスタックアプリケーションです。ローカルで実行することも、リモートでホストすることもでき、提供されたドキュメントと知的にチャットできます。
AnythingLLMは、ドキュメントを`ワークスペース`と呼ばれるオブジェクトに分割します。ワークスペースはスレッドのように機能しますが、ドキュメントのコンテナ化が追加されています。ワークスペースはドキュメントを共有できますが、互いに通信することはないため、各ワークスペースのコンテキストをクリーンに保つことができます。
AnythingLLMのいくつかのクールな機能
- **マルチユーザーインスタンスのサポートと権限付与**
- ワークスペース内のエージェント(ウェブを閲覧、コードを実行など)
- [ウェブサイト用のカスタム埋め込み可能なチャットウィジェット](./embed/README.md)
- 複数のドキュメントタイプのサポートPDF、TXT、DOCXなど
- シンプルなUIからベクトルデータベース内のドキュメントを管理
- 2つのチャットモード`会話`と`クエリ`。会話は以前の質問と修正を保持します。クエリはドキュメントに対するシンプルなQAです
- チャット中の引用
- 100%クラウドデプロイメント対応。
- 「独自のLLMを持参」モデル。
- 大規模なドキュメントを管理するための非常に効率的なコスト削減策。巨大なドキュメントやトランスクリプトを埋め込むために一度以上支払うことはありません。他のドキュメントチャットボットソリューションよりも90%コスト効率が良いです。
- カスタム統合のための完全な開発者API
### サポートされているLLM、埋め込みモデル、音声モデル、およびベクトルデータベース
**言語学習モデル:**
- [llama.cpp互換の任意のオープンソースモデル](/server/storage/models/README.md#text-generation-llm-selection)
- [OpenAI](https://openai.com)
- [OpenAI (汎用)](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [Anthropic](https://www.anthropic.com/)
- [Google Gemini Pro](https://ai.google.dev/)
- [Hugging Face (チャットモデル)](https://huggingface.co/)
- [Ollama (チャットモデル)](https://ollama.ai/)
- [LM Studio (すべてのモデル)](https://lmstudio.ai)
- [LocalAi (すべてのモデル)](https://localai.io/)
- [Together AI (チャットモデル)](https://www.together.ai/)
- [Perplexity (チャットモデル)](https://www.perplexity.ai/)
- [OpenRouter (チャットモデル)](https://openrouter.ai/)
- [Mistral](https://mistral.ai/)
- [Groq](https://groq.com/)
- [Cohere](https://cohere.com/)
- [KoboldCPP](https://github.com/LostRuins/koboldcpp)
**埋め込みモデル:**
- [AnythingLLMネイティブ埋め込み](/server/storage/models/README.md)(デフォルト)
- [OpenAI](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [LocalAi (すべて)](https://localai.io/)
- [Ollama (すべて)](https://ollama.ai/)
- [LM Studio (すべて)](https://lmstudio.ai)
- [Cohere](https://cohere.com/)
**音声変換モデル:**
- [AnythingLLM内蔵](https://github.com/Mintplex-Labs/anything-llm/tree/master/server/storage/models#audiovideo-transcription)(デフォルト)
- [OpenAI](https://openai.com/)
**TTSテキストから音声へサポート**
- ネイティブブラウザ内蔵(デフォルト)
- [OpenAI TTS](https://platform.openai.com/docs/guides/text-to-speech/voice-options)
- [ElevenLabs](https://elevenlabs.io/)
**STT音声からテキストへサポート**
- ネイティブブラウザ内蔵(デフォルト)
**ベクトルデータベース:**
- [LanceDB](https://github.com/lancedb/lancedb)(デフォルト)
- [Astra DB](https://www.datastax.com/products/datastax-astra)
- [Pinecone](https://pinecone.io)
- [Chroma](https://trychroma.com)
- [Weaviate](https://weaviate.io)
- [QDrant](https://qdrant.tech)
- [Milvus](https://milvus.io)
- [Zilliz](https://zilliz.com)
### 技術概要
このモレポは、主に3つのセクションで構成されています
- `frontend`: LLMが使用できるすべてのコンテンツを簡単に作成および管理できるviteJS + Reactフロントエンド。
- `server`: すべてのインタラクションを処理し、すべてのベクトルDB管理およびLLMインタラクションを行うNodeJS expressサーバー。
- `collector`: UIからドキュメントを処理および解析するNodeJS expressサーバー。
- `docker`: Dockerの指示およびビルドプロセス + ソースからのビルド情報。
- `embed`: [埋め込みウィジェット](./embed/README.md)の生成に特化したコード。
## 🛳 セルフホスティング
Mintplex Labsおよびコミュニティは、AnythingLLMをローカルで実行できる多数のデプロイメント方法、スクリプト、テンプレートを維持しています。以下の表を参照して、お好みの環境でのデプロイ方法を読むか、自動デプロイを行ってください。
| Docker | AWS | GCP | Digital Ocean | Render.com |
|----------------------------------------|----:|-----|---------------|------------|
| [![Docker上でデプロイ][docker-btn]][docker-deploy] | [![AWS上でデプロイ][aws-btn]][aws-deploy] | [![GCP上でデプロイ][gcp-btn]][gcp-deploy] | [![DigitalOcean上でデプロイ][do-btn]][do-deploy] | [![Render.com上でデプロイ][render-btn]][render-deploy] |
| Railway |
| --------------------------------------------------- |
| [![Railway上でデプロイ][railway-btn]][railway-deploy] |
[Dockerを使用せずに本番環境のAnythingLLMインスタンスを設定する →](./BARE_METAL.md)
## 開発環境のセットアップ方法
- `yarn setup` 各アプリケーションセクションに必要な`.env`ファイルを入力します(リポジトリのルートから)。
- 次に進む前にこれらを入力してください。`server/.env.development`が入力されていないと正しく動作しません。
- `yarn dev:server` ローカルでサーバーを起動します(リポジトリのルートから)。
- `yarn dev:frontend` ローカルでフロントエンドを起動します(リポジトリのルートから)。
- `yarn dev:collector` ドキュメントコレクターを実行します(リポジトリのルートから)。
[ドキュメントについて学ぶ](./server/storage/documents/DOCUMENTS.md)
[ベクトルキャッシュについて学ぶ](./server/storage/vector-cache/VECTOR_CACHE.md)
## 貢献する方法
- issueを作成する
- `<issue number>-<short name>`の形式のブランチ名でPRを作成する
- マージしましょう
## テレメトリーとプライバシー
Mintplex Labs Inc.によって開発されたAnythingLLMには、匿名の使用情報を収集するテレメトリー機能が含まれています。
<details>
<summary><kbd>AnythingLLMのテレメトリーとプライバシーについての詳細</kbd></summary>
### なぜ?
この情報を使用して、AnythingLLMの使用方法を理解し、新機能とバグ修正の優先順位を決定し、AnythingLLMのパフォーマンスと安定性を向上させるのに役立てます。
### オプトアウト
サーバーまたはdockerの.env設定で`DISABLE_TELEMETRY`を「true」に設定して、テレメトリーからオプトアウトします。アプリ内でも、サイドバー > `プライバシー`に移動してテレメトリーを無効にすることができます。
### 明示的に追跡するもの
製品およびロードマップの意思決定に役立つ使用詳細のみを追跡します。具体的には:
- インストールのタイプDockerまたはデスクトップ
- ドキュメントが追加または削除されたとき。ドキュメントについての情報はありません。イベントが発生したことのみを知ります。これにより、使用状況を把握できます。
- 使用中のベクトルデータベースのタイプ。どのベクトルデータベースプロバイダーが最も使用されているかを知り、更新があったときに優先して変更を行います。
- 使用中のLLMのタイプ。最も人気のある選択肢を知り、更新があったときに優先して変更を行います。
- チャットが送信された。これは最も一般的な「イベント」であり、すべてのインストールでのこのプロジェクトの日常的な「アクティビティ」についてのアイデアを提供します。再び、イベントのみが送信され、チャット自体の性質や内容に関する情報はありません。
これらの主張を検証するには、`Telemetry.sendTelemetry`が呼び出されるすべての場所を見つけてください。また、これらのイベントは出力ログに書き込まれるため、送信された具体的なデータも確認できます。IPアドレスやその他の識別情報は収集されません。テレメトリープロバイダーは[PostHog](https://posthog.com/)です。
[ソースコード内のすべてのテレメトリーイベントを表示](https://github.com/search?q=repo%3AMintplex-Labs%2Fanything-llm%20.sendTelemetry\(&type=code)
</details>
## 🔗 その他の製品
- **[VectorAdmin][vector-admin]**ベクトルデータベースを管理するためのオールインワンGUIおよびツールスイート。
- **[OpenAI Assistant Swarm][assistant-swarm]**単一のエージェントから指揮できるOpenAIアシスタントの軍隊に、ライブラリ全体を変換します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
---
Copyright © 2024 [Mintplex Labs][profile-link]。<br />
このプロジェクトは[MIT](./LICENSE)ライセンスの下でライセンスされています。
<!-- LINK GROUP -->
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-222628?style=flat-square
[profile-link]: https://github.com/mintplex-labs
[vector-admin]: https://github.com/mintplex-labs/vector-admin
[assistant-swarm]: https://github.com/Mintplex-Labs/openai-assistant-swarm
[docker-btn]: ./images/deployBtns/docker.png
[docker-deploy]: ./docker/HOW_TO_USE_DOCKER.md
[aws-btn]: ./images/deployBtns/aws.png
[aws-deploy]: ./cloud-deployments/aws/cloudformation/DEPLOY.md
[gcp-btn]: https://deploy.cloud.run/button.svg
[gcp-deploy]: ./cloud-deployments/gcp/deployment/DEPLOY.md
[do-btn]: https://www.deploytodo.com/do-btn-blue.svg
[do-deploy]: ./cloud-deployments/digitalocean/terraform/DEPLOY.md
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[railway-btn]: https://railway.app/button.svg
[railway-deploy]: https://railway.app/template/HNSCS1?referralCode=WFgJkn

View File

@ -25,7 +25,7 @@
</p>
<p align="center">
<a href='/README.md'>English</a> · <b>简体中文</b>
<a href='../README.md'>English</a> · <b>简体中文</b> · <a href='./README.ja-JP.md'>简体中文</a>
</p>
<p align="center">

View File

@ -125,6 +125,12 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
# EMBEDDING_ENGINE='litellm'
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
# LITE_LLM_API_KEY='sk-123abc'
###########################################
######## Vector Database Selection ########
###########################################

View File

@ -15,8 +15,8 @@ const {
validWorkspaceSlug,
} = require("../utils/middleware/validWorkspace");
const { writeResponseChunk } = require("../utils/helpers/chat/responses");
const generateThreadTitle = require("../utils/threadNames");
const { WorkspaceThread } = require("../models/workspaceThread");
const truncate = require("truncate");
function chatEndpoints(app) {
if (!app) return;
@ -206,21 +206,18 @@ function chatEndpoints(app) {
thread_id: thread.id,
});
// Generate thread name
// Set thread title to truncated message
if (chatCount === 1) {
try {
const generatedTitle = await generateThreadTitle(message);
if (generatedTitle) {
const { thread: updatedThread } = await WorkspaceThread.update(
thread,
{
name: generatedTitle,
name: truncate(message, 22),
}
);
if (!updatedThread) {
console.log("Failed to update thread name");
}
}
} catch (e) {
console.log("Error generating thread title:", e);
}

View File

@ -1,4 +1,9 @@
const { multiUserMode, userFromSession, reqBody } = require("../utils/http");
const {
multiUserMode,
userFromSession,
reqBody,
safeJsonParse,
} = require("../utils/http");
const { validatedRequest } = require("../utils/middleware/validatedRequest");
const { Telemetry } = require("../models/telemetry");
const {
@ -168,6 +173,77 @@ function workspaceThreadEndpoints(app) {
}
}
);
app.delete(
"/workspace/:slug/thread/:threadSlug/delete-edited-chats",
[
validatedRequest,
flexUserRoleValid([ROLES.all]),
validWorkspaceAndThreadSlug,
],
async (request, response) => {
try {
const { startingId } = reqBody(request);
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const thread = response.locals.thread;
await WorkspaceChats.delete({
workspaceId: Number(workspace.id),
thread_id: Number(thread.id),
user_id: user?.id,
id: { gte: Number(startingId) },
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
app.post(
"/workspace/:slug/thread/:threadSlug/update-chat",
[
validatedRequest,
flexUserRoleValid([ROLES.all]),
validWorkspaceAndThreadSlug,
],
async (request, response) => {
try {
const { chatId, newText = null } = reqBody(request);
if (!newText || !String(newText).trim())
throw new Error("Cannot save empty response");
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const thread = response.locals.thread;
const existingChat = await WorkspaceChats.get({
workspaceId: workspace.id,
thread_id: thread.id,
user_id: user?.id,
id: Number(chatId),
});
if (!existingChat) throw new Error("Invalid chat.");
const chatResponse = safeJsonParse(existingChat.response, null);
if (!chatResponse) throw new Error("Failed to parse chat response");
await WorkspaceChats._update(existingChat.id, {
response: JSON.stringify({
...chatResponse,
text: String(newText),
}),
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
}
module.exports = { workspaceThreadEndpoints };

View File

@ -380,7 +380,6 @@ function workspaceEndpoints(app) {
const history = multiUserMode(response)
? await WorkspaceChats.forWorkspaceByUser(workspace.id, user.id)
: await WorkspaceChats.forWorkspace(workspace.id);
response.status(200).json({ history: convertToChatHistory(history) });
} catch (e) {
console.log(e.message, e);
@ -420,6 +419,67 @@ function workspaceEndpoints(app) {
}
);
app.delete(
"/workspace/:slug/delete-edited-chats",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
async (request, response) => {
try {
const { startingId } = reqBody(request);
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
await WorkspaceChats.delete({
workspaceId: workspace.id,
thread_id: null,
user_id: user?.id,
id: { gte: Number(startingId) },
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
app.post(
"/workspace/:slug/update-chat",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
async (request, response) => {
try {
const { chatId, newText = null } = reqBody(request);
if (!newText || !String(newText).trim())
throw new Error("Cannot save empty response");
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const existingChat = await WorkspaceChats.get({
workspaceId: workspace.id,
thread_id: null,
user_id: user?.id,
id: Number(chatId),
});
if (!existingChat) throw new Error("Invalid chat.");
const chatResponse = safeJsonParse(existingChat.response, null);
if (!chatResponse) throw new Error("Failed to parse chat response");
await WorkspaceChats._update(existingChat.id, {
response: JSON.stringify({
...chatResponse,
text: String(newText),
}),
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
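// Note (not part of the diff): both update-chat handlers replace only the `text`
// key of the stored response JSON, so sources and any other persisted metadata
// survive an edit. A sketch of that merge with a hypothetical stored record:
const storedResponse = JSON.stringify({
  text: "original answer",
  sources: [{ title: "doc.pdf" }],
});
const parsed = JSON.parse(storedResponse); // the handlers use safeJsonParse with a null fallback
const updated = JSON.stringify({ ...parsed, text: "edited answer" });
// `updated` keeps sources (and anything else stored) untouched; only `text` changes.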
app.post(
"/workspace/:slug/chat-feedback/:chatId",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],

View File

@@ -220,6 +220,24 @@ const WorkspaceChats = {
console.error(error.message);
}
},
// Explicit update of a workspace chat record with no validation of the payload.
// Only use this method when directly setting values that take no user input
// for the keys being modified.
_update: async function (id = null, data = {}) {
if (!id) throw new Error("No workspace chat id provided for update");
try {
await prisma.workspace_chats.update({
where: { id },
data,
});
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
};
module.exports = { WorkspaceChats };
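A minimal sketch of calling the new helper directly, as the update-chat endpoints above do. The id and payload are hypothetical; because _update performs no validation, the data must already be a well-formed partial workspace_chats record.
(async () => {
  const ok = await WorkspaceChats._update(42, {
    response: JSON.stringify({ text: "edited answer", sources: [] }),
  });
  if (!ok) console.log("Failed to update workspace chat"); // _update resolves false on Prisma errors
})();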

View File

@@ -12,7 +12,7 @@
"scripts": {
"dev": "NODE_ENV=development nodemon --ignore documents --ignore vector-cache --ignore storage --ignore swagger --trace-warnings index.js",
"start": "NODE_ENV=production node index.js",
"lint": "yarn prettier --write ./endpoints ./models ./utils index.js",
"lint": "yarn prettier --ignore-path ../.prettierignore --write ./endpoints ./models ./utils index.js",
"swagger": "node ./swagger/init.js",
"sqlite:migrate": "cd ./utils/prisma && node migrateFromSqlite.js"
},
@@ -32,7 +32,7 @@
"@langchain/textsplitters": "0.0.0",
"@pinecone-database/pinecone": "^2.0.1",
"@prisma/client": "5.3.1",
"@qdrant/js-client-rest": "^1.4.0",
"@qdrant/js-client-rest": "^1.9.0",
"@xenova/transformers": "^2.14.0",
"@zilliz/milvus2-sdk-node": "^2.3.5",
"archiver": "^5.3.1",
@@ -75,6 +75,7 @@
"sqlite3": "^5.1.6",
"swagger-autogen": "^2.23.5",
"swagger-ui-express": "^5.0.0",
"truncate": "^3.0.0",
"url-pattern": "^1.0.3",
"uuid": "^9.0.0",
"uuid-apikey": "^1.5.3",

View File

@@ -0,0 +1,93 @@
const { toChunks, maximumChunkLength } = require("../../helpers");
class LiteLLMEmbedder {
constructor() {
const { OpenAI: OpenAIApi } = require("openai");
if (!process.env.LITE_LLM_BASE_PATH)
throw new Error(
"LiteLLM must have a valid base path to use for the api."
);
this.basePath = process.env.LITE_LLM_BASE_PATH;
this.openai = new OpenAIApi({
baseURL: this.basePath,
apiKey: process.env.LITE_LLM_API_KEY ?? null,
});
this.model = process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002";
// Limit of how many strings we can process in a single pass to stay within resource or network limits
this.maxConcurrentChunks = 500;
this.embeddingMaxChunkLength = maximumChunkLength();
}
async embedTextInput(textInput) {
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}
async embedChunks(textChunks = []) {
// Because there is a hard POST limit on how many chunks can be sent at once to LiteLLM (~8MB),
// we execute the requests concurrently in the largest batches possible.
// Refer to maxConcurrentChunks in the constructor for more info.
const embeddingRequests = [];
for (const chunk of toChunks(textChunks, this.maxConcurrentChunks)) {
embeddingRequests.push(
new Promise((resolve) => {
this.openai.embeddings
.create({
model: this.model,
input: chunk,
})
.then((result) => {
resolve({ data: result?.data, error: null });
})
.catch((e) => {
e.type =
e?.response?.data?.error?.code ||
e?.response?.status ||
"failed_to_embed";
e.message = e?.response?.data?.error?.message || e.message;
resolve({ data: [], error: e });
});
})
);
}
const { data = [], error = null } = await Promise.all(
embeddingRequests
).then((results) => {
// If any errors were returned from LiteLLM, abort the entire sequence because
// the embeddings will be incomplete.
const errors = results
.filter((res) => !!res.error)
.map((res) => res.error)
.flat();
if (errors.length > 0) {
let uniqueErrors = new Set();
errors.map((error) =>
uniqueErrors.add(`[${error.type}]: ${error.message}`)
);
return {
data: [],
error: Array.from(uniqueErrors).join(", "),
};
}
return {
data: results.map((res) => res?.data || []).flat(),
error: null,
};
});
if (!!error) throw new Error(`LiteLLM Failed to embed: ${error}`);
return data.length > 0 &&
data.every((embd) => embd.hasOwnProperty("embedding"))
? data.map((embd) => embd.embedding)
: null;
}
}
module.exports = {
LiteLLMEmbedder,
};
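A hedged sketch of configuring and using the embedder above directly; the env values and require path are hypothetical. In the app itself the class is reached through getEmbeddingEngineSelection() when EMBEDDING_ENGINE is set to "litellm", per the switch in a later hunk.
process.env.LITE_LLM_BASE_PATH = "http://localhost:4000"; // your LiteLLM proxy URL (assumption)
process.env.LITE_LLM_API_KEY = "sk-example"; // optional; forwarded as the OpenAI-compatible key
process.env.EMBEDDING_MODEL_PREF = "text-embedding-ada-002";

const { LiteLLMEmbedder } = require("./liteLLM"); // path is an assumption

(async () => {
  const embedder = new LiteLLMEmbedder();
  const single = await embedder.embedTextInput("hello world"); // one string -> one embedding vector
  const many = await embedder.embedChunks(["chunk one", "chunk two"]); // batched, up to maxConcurrentChunks per request
  console.log(single.length, many?.length);
})();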

View File

@@ -174,6 +174,7 @@ function convertToChatHistory(history = []) {
role: "user",
content: prompt,
sentAt: moment(createdAt).unix(),
chatId: id,
},
{
type: data?.type || "chat",

View File

@@ -128,6 +128,9 @@ function getEmbeddingEngineSelection() {
case "voyageai":
const { VoyageAiEmbedder } = require("../EmbeddingEngines/voyageAi");
return new VoyageAiEmbedder();
case "litellm":
const { LiteLLMEmbedder } = require("../EmbeddingEngines/liteLLM");
return new LiteLLMEmbedder();
default:
return new NativeEmbedder();
}

View File

@@ -577,6 +577,7 @@ function supportedEmbeddingModel(input = "") {
"lmstudio",
"cohere",
"voyageai",
"litellm",
];
return supported.includes(input)
? null

View File

@@ -1,30 +0,0 @@
const { getLLMProvider } = require("../helpers");
async function generateThreadTitle(prompt) {
const systemPrompt =
"Listen to any instructions below and do not give any description or explanation when replying. Do not return anything else other than what is asked.";
const getTitlePrompt = `Take the message below and generate a short and concise title for a thread for it (max 22 characters or less). Do not return anything else.
Message:${prompt}\n\nTitle:`;
const LLMConnector = getLLMProvider();
const messages = await LLMConnector.compressMessages(
{
systemPrompt: systemPrompt,
userPrompt: getTitlePrompt,
},
[]
);
const title = await LLMConnector.getChatCompletion(messages, {
temperature: LLMConnector.defaultTemp,
});
// truncate title to 22 characters
const maxLength = 22;
const truncatedTitle =
title.length > maxLength ? title.slice(0, maxLength - 3) + "..." : title;
return truncatedTitle;
}
module.exports = generateThreadTitle;

View File

@@ -6,9 +6,55 @@ const { v4: uuidv4 } = require("uuid");
const { toChunks, getEmbeddingEngineSelection } = require("../../helpers");
const { parseAuthHeader } = require("../../http");
const { sourceIdentifier } = require("../../chats");
const COLLECTION_REGEX = new RegExp(
/^(?!\d+\.\d+\.\d+\.\d+$)(?!.*\.\.)(?=^[a-zA-Z0-9][a-zA-Z0-9_-]{1,61}[a-zA-Z0-9]$).{3,63}$/
);
const Chroma = {
name: "Chroma",
// Chroma DB has specific requirements for collection names:
// (1) Must contain 3-63 characters
// (2) Must start and end with an alphanumeric character
// (3) Can only contain alphanumeric characters, underscores, or hyphens
// (4) Cannot contain two consecutive periods (..)
// (5) Cannot be a valid IPv4 address
// We need to enforce these rules by normalizing the collection names
// before communicating with the Chroma DB.
normalize: function (inputString) {
if (COLLECTION_REGEX.test(inputString)) return inputString;
let normalized = inputString.replace(/[^a-zA-Z0-9_-]/g, "-");
// Replace consecutive periods with a single period (if any)
normalized = normalized.replace(/\.\.+/g, ".");
// Ensure the name doesn't start with a non-alphanumeric character
if (normalized[0] && !/^[a-zA-Z0-9]$/.test(normalized[0])) {
normalized = "anythingllm-" + normalized.slice(1);
}
// Ensure the name doesn't end with a non-alphanumeric character
if (
normalized[normalized.length - 1] &&
!/^[a-zA-Z0-9]$/.test(normalized[normalized.length - 1])
) {
normalized = normalized.slice(0, -1);
}
// Ensure the length is between 3 and 63 characters
if (normalized.length < 3) {
normalized = `anythingllm-${normalized}`;
} else if (normalized.length > 63) {
// Recheck the norm'd name if sliced since its ending can still be invalid.
normalized = this.normalize(normalized.slice(0, 63));
}
// Ensure the name is not an IPv4 address
if (/^\d+\.\d+\.\d+\.\d+$/.test(normalized)) {
normalized = "-" + normalized.slice(1);
}
return normalized;
},
connect: async function () {
if (process.env.VECTOR_DB !== "chroma")
throw new Error("Chroma::Invalid ENV settings");
@@ -59,7 +105,7 @@ const Chroma = {
},
namespaceCount: async function (_namespace = null) {
const { client } = await this.connect();
const namespace = await this.namespace(client, _namespace);
const namespace = await this.namespace(client, this.normalize(_namespace));
return namespace?.vectorCount || 0;
},
similarityResponse: async function (
@@ -70,7 +116,9 @@ const Chroma = {
topN = 4,
filterIdentifiers = []
) {
const collection = await client.getCollection({ name: namespace });
const collection = await client.getCollection({
name: this.normalize(namespace),
});
const result = {
contextTexts: [],
sourceDocuments: [],
@@ -106,7 +154,7 @@ const Chroma = {
namespace: async function (client, namespace = null) {
if (!namespace) throw new Error("No namespace value provided.");
const collection = await client
.getCollection({ name: namespace })
.getCollection({ name: this.normalize(namespace) })
.catch(() => null);
if (!collection) return null;
@@ -118,12 +166,12 @@ const Chroma = {
hasNamespace: async function (namespace = null) {
if (!namespace) return false;
const { client } = await this.connect();
return await this.namespaceExists(client, namespace);
return await this.namespaceExists(client, this.normalize(namespace));
},
namespaceExists: async function (client, namespace = null) {
if (!namespace) throw new Error("No namespace value provided.");
const collection = await client
.getCollection({ name: namespace })
.getCollection({ name: this.normalize(namespace) })
.catch((e) => {
console.error("ChromaDB::namespaceExists", e.message);
return null;
@@ -131,7 +179,7 @@ const Chroma = {
return !!collection;
},
deleteVectorsInNamespace: async function (client, namespace = null) {
await client.deleteCollection({ name: namespace });
await client.deleteCollection({ name: this.normalize(namespace) });
return true;
},
addDocumentToNamespace: async function (
@@ -149,7 +197,7 @@ const Chroma = {
if (cacheResult.exists) {
const { client } = await this.connect();
const collection = await client.getOrCreateCollection({
name: namespace,
name: this.normalize(namespace),
metadata: { "hnsw:space": "cosine" },
});
const { chunks } = cacheResult;
@@ -245,7 +293,7 @@ const Chroma = {
const { client } = await this.connect();
const collection = await client.getOrCreateCollection({
name: namespace,
name: this.normalize(namespace),
metadata: { "hnsw:space": "cosine" },
});
@@ -274,7 +322,7 @@ const Chroma = {
const { client } = await this.connect();
if (!(await this.namespaceExists(client, namespace))) return;
const collection = await client.getCollection({
name: namespace,
name: this.normalize(namespace),
});
const knownDocuments = await DocumentVectors.where({ docId });
@@ -299,7 +347,7 @@ const Chroma = {
throw new Error("Invalid request to performSimilaritySearch.");
const { client } = await this.connect();
if (!(await this.namespaceExists(client, namespace))) {
if (!(await this.namespaceExists(client, this.normalize(namespace)))) {
return {
contextTexts: [],
sources: [],
@@ -330,9 +378,9 @@ const Chroma = {
const { namespace = null } = reqBody;
if (!namespace) throw new Error("namespace required");
const { client } = await this.connect();
if (!(await this.namespaceExists(client, namespace)))
if (!(await this.namespaceExists(client, this.normalize(namespace))))
throw new Error("Namespace by that name does not exist.");
const stats = await this.namespace(client, namespace);
const stats = await this.namespace(client, this.normalize(namespace));
return stats
? stats
: { message: "No stats were able to be fetched from DB for namespace" };
@@ -340,11 +388,11 @@ const Chroma = {
"delete-namespace": async function (reqBody = {}) {
const { namespace = null } = reqBody;
const { client } = await this.connect();
if (!(await this.namespaceExists(client, namespace)))
if (!(await this.namespaceExists(client, this.normalize(namespace))))
throw new Error("Namespace by that name does not exist.");
const details = await this.namespace(client, namespace);
await this.deleteVectorsInNamespace(client, namespace);
const details = await this.namespace(client, this.normalize(namespace));
await this.deleteVectorsInNamespace(client, this.normalize(namespace));
return {
message: `Namespace ${namespace} was deleted along with ${details?.vectorCount} vectors.`,
};
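Because the normalization rules above are easy to misread, here are a few worked inputs (hypothetical workspace slugs) with the outputs the normalize() shown above produces when traced through its steps:
Chroma.normalize("workspace_123");   // "workspace_123"   (already valid, returned untouched)
Chroma.normalize("my workspace #1"); // "my-workspace--1" (disallowed characters replaced with "-")
Chroma.normalize("ab");              // "anythingllm-ab"  (padded to meet the 3-character minimum)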

View File

@@ -95,7 +95,7 @@ const QDrant = {
return {
name: namespace,
...collection,
vectorCount: collection.vectors_count,
vectorCount: (await client.count(namespace, { exact: true })).count,
};
},
hasNamespace: async function (namespace = null) {
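The one-line change above stops trusting the vectors_count field from the collection info, which may not match the number of points actually stored, and asks Qdrant for an exact count instead. A hedged sketch contrasting the two with @qdrant/js-client-rest; the URL and collection name are hypothetical:
const { QdrantClient } = require("@qdrant/js-client-rest");

(async () => {
  const client = new QdrantClient({ url: "http://localhost:6333" });
  const info = await client.getCollection("my-workspace");
  console.log(info.vectors_count); // figure reported by collection info
  const { count } = await client.count("my-workspace", { exact: true });
  console.log(count); // exact number of stored points, now used as vectorCount
})();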

View File

@@ -1036,7 +1036,7 @@
resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570"
integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==
"@qdrant/js-client-rest@^1.4.0":
"@qdrant/js-client-rest@^1.9.0":
version "1.9.0"
resolved "https://registry.yarnpkg.com/@qdrant/js-client-rest/-/js-client-rest-1.9.0.tgz#deef8acb520f47f9db1c1517758ccf88c12e69fe"
integrity sha512-YiX/IskbRCoAY2ujyPDI6FBcO0ygAS4pgkGaJ7DcrJFh4SZV2XHs+u0KM7mO72RWJn1eJQFF2PQwxG+401xxJg==
@@ -6211,6 +6211,11 @@ triple-beam@^1.3.0:
resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984"
integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==
truncate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/truncate/-/truncate-3.0.0.tgz#7dbe19e2f72c614e36b79bab00fbfbeb1cbaf078"
integrity sha512-C+0Xojw7wZPl6MDq5UjMTuxZvBPK04mtdFet7k+GSZPINcvLZFCXg+15kWIL4wAqDB7CksIsKiRLbQ1wa7rKdw==
tslib@^2.2.0, tslib@^2.4.0, tslib@^2.5.3, tslib@^2.6.2:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"