Compare commits

...

7 Commits

Author SHA1 Message Date
timothycarambat
5c0c77babf patch Sidebar UI in mobile 2024-06-06 13:12:11 -07:00
Timothy Carambat
7108c38a2d
Merge branch 'master' into login-screen-footer-sidebar-ui-bug-fixes 2024-06-07 04:02:21 +08:00
Sean Hatfield
26c220503c
[FEAT] Edit message button (#1392)
* WIP edit message feature

* WIP edit message

* WIP editing messages feature

* Fix PFPs
TODO: Fix default user profile image
Add User and Assistant workspace response

* unset PFP changes for later PR

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-06-06 12:56:11 -07:00
Timothy Carambat
98cef508a6
Feature/devcontv2 (#1622)
* Updated apt-packages source for devcontainer

Switched the devcontainer's package source to a different repository to
align with updated dependencies and package availability. The previous
source from 'rocker-org' is replaced with 'devcontainers-contrib', which
may offer more recent or relevant development tools.

* Subject: Centralize prettier ignores and refine config

Body:
Centralized all prettier ignore rules by removing individual
`.prettierignore` files in subprojects and updating the root
`.prettierignore` to include previously ignored patterns, ensuring
consistency across the workspace. Additionally, the prettier
configuration was refined by making the file pattern for `.config.js`
files consistent and adjusting quote styles for better readability. All
lint scripts across the project were updated to respect the centralized
ignore path, enhancing maintainability.

The consolidation simplifies the process of managing ignore rules as the
project scales, ensuring developers can focus on writing code without
worrying about divergent formatting standards. These changes also align
with introducing comprehensive linting across multiple environments to
keep the codebase clean and consistent.

This adjustment is a foundational step towards a more streamlined and
unified code base, making it easier for new contributors to adhere to
established coding standards and reducing the cognitive load associated
with managing multiple configuration files across the project.

* unset package json changes

---------

Co-authored-by: Francisco Bischoff <franzbischoff@gmail.com>
Co-authored-by: Francisco Bischoff <984592+franzbischoff@users.noreply.github.com>
2024-06-06 12:50:42 -07:00
Sean Hatfield
d29292ebd2
[FEAT] Add LiteLLM embedding provider support (#1579)
* add liteLLM embedding provider support

* update tooltip id

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
2024-06-06 12:43:34 -07:00
timothycarambat
5578e567ce move translated READMEs into subfolder 2024-06-06 12:15:45 -07:00
Ikko Eltociear Ashimine
dfcf32e9c0
docs: add Japanese README (#1574)
* docs: add Japanese README

* docs: update README.md
2024-06-06 12:13:15 -07:00
43 changed files with 1084 additions and 96 deletions

View File

@@ -22,7 +22,7 @@
// Terraform support
"ghcr.io/devcontainers/features/terraform:1": {},
// Just a wrap to install needed packages
-"ghcr.io/rocker-org/devcontainer-features/apt-packages:1": {
+"ghcr.io/devcontainers-contrib/features/apt-packages:1": {
// Dependencies copied from ../docker/Dockerfile plus some dev stuff
"packages": [
"build-essential",

View File

@@ -10,3 +10,7 @@ frontend/bundleinspector.html
#server
server/swagger/openapi.json
+#embed
+**/static/**
+embed/src/utils/chat/hljs.js

View File

@@ -17,7 +17,7 @@
}
},
{
-"files": "*.config.js",
+"files": ["*.config.js"],
"options": {
"semi": false,
"parser": "flow",

View File

@@ -29,7 +29,7 @@
</p>
<p align="center">
-<b>English</b> · <a href='/README.zh-CN.md'>简体中文</a>
+<b>English</b> · <a href='./locales/README.zh-CN.md'>简体中文</a> · <a href='./locales/README.ja-JP.md'>日本語</a>
</p>
<p align="center">

View File

@@ -12,7 +12,7 @@
"scripts": {
"dev": "NODE_ENV=development nodemon --ignore hotdir --ignore storage --trace-warnings index.js",
"start": "NODE_ENV=production node index.js",
-"lint": "yarn prettier --write ./processSingleFile ./processLink ./utils index.js"
+"lint": "yarn prettier --ignore-path ../.prettierignore --write ./processSingleFile ./processLink ./utils index.js"
},
"dependencies": {
"@googleapis/youtube": "^9.0.0",

View File

@@ -128,6 +128,12 @@ GID='1000'
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
+# EMBEDDING_ENGINE='litellm'
+# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
+# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
+# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
+# LITE_LLM_API_KEY='sk-123abc'
###########################################
######## Vector Database Selection ########
###########################################
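The LITE_LLM_BASE_PATH and LITE_LLM_API_KEY values above point at a LiteLLM proxy, which serves an OpenAI-compatible API. As a rough illustration only (not the server's actual embedder implementation, which is not part of this diff), an embedding request against such a proxy might look like the sketch below; the /v1/embeddings path and response shape are assumptions based on LiteLLM's OpenAI compatibility.

```js
// Hypothetical sketch: request embeddings from a LiteLLM proxy via the
// OpenAI-compatible /v1/embeddings route, using the env values shown above.
const basePath = process.env.LITE_LLM_BASE_PATH || "http://127.0.0.1:4000";
const apiKey = process.env.LITE_LLM_API_KEY || "sk-123abc";
const model = process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002";

async function embedChunks(chunks) {
  const res = await fetch(`${basePath}/v1/embeddings`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`, // optional if the proxy has no key configured
    },
    body: JSON.stringify({ model, input: chunks }),
  });
  if (!res.ok) throw new Error(`Embedding request failed: ${res.status}`);
  const { data } = await res.json();
  return data.map((item) => item.embedding); // one vector per input chunk
}

// Example usage:
// const vectors = await embedChunks(["Hello world", "Another chunk of text"]);
```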

View File

@@ -1,9 +0,0 @@
-# defaults
-**/.git
-**/.svn
-**/.hg
-**/node_modules
-**/dist
-**/static/**
-src/utils/chat/hljs.js

View File

@@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
-"@/*": [
-"./src/*"
-],
-}
-}
-}
+"@/*": ["./src/*"],
+},
+},
+}

View File

@@ -1,6 +1,7 @@
{
"name": "anythingllm-embedded-chat",
"private": false,
+"license": "MIT",
"type": "module",
"scripts": {
"dev": "nodemon -e js,jsx,css --watch src --exec \"yarn run dev:preview\"",
@@ -8,7 +9,7 @@
"dev:build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js",
"build": "vite build && cat src/static/tailwind@3.4.1.js >> dist/anythingllm-chat-widget.js && npx terser --compress -o dist/anythingllm-chat-widget.min.js -- dist/anythingllm-chat-widget.js",
"build:publish": "yarn build && mkdir -p ../frontend/public/embed && cp -r dist/anythingllm-chat-widget.min.js ../frontend/public/embed/anythingllm-chat-widget.min.js",
-"lint": "yarn prettier --write ./src"
+"lint": "yarn prettier --ignore-path ../.prettierignore --write ./src"
},
"dependencies": {
"@microsoft/fetch-event-source": "^2.0.1",

View File

@@ -38,7 +38,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
-/@phosphor-icons\/react\/dist\/ssr/,
+/@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {
@@ -51,7 +51,7 @@
emptyOutDir: true,
inlineDynamicImports: true,
assetsDir: "",
-sourcemap: 'inline',
+sourcemap: "inline"
},
optimizeDeps: {
esbuildOptions: {
@@ -60,5 +60,5 @@
},
plugins: []
}
-},
+}
})

View File

@@ -4,9 +4,7 @@
"target": "esnext",
"jsx": "react",
"paths": {
-"@/*": [
-"./src/*"
-],
+"@/*": ["./src/*"]
}
}
}

View File

@@ -7,7 +7,7 @@
"start": "vite --open",
"dev": "NODE_ENV=development vite --debug --host=0.0.0.0",
"build": "vite build",
-"lint": "yarn prettier --write ./src",
+"lint": "yarn prettier --ignore-path ../.prettierignore --write ./src",
"preview": "vite preview"
},
"dependencies": {
@@ -63,4 +63,4 @@
"tailwindcss": "^3.3.1",
"vite": "^4.3.0"
}
}

View File

@@ -1,5 +1,5 @@
import React from "react";
-import Jazzicon from "../UserIcon";
+import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
@@ -11,8 +11,7 @@ export default function ChatBubble({ message, type, popMsg }) {
<div className={`flex justify-center items-end w-full ${backgroundColor}`}>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="flex gap-x-5">
-<Jazzicon
-size={36}
+<UserIcon
user={{ uid: isUser ? userFromStorage()?.username : "system" }}
role={type}
/>

View File

@@ -13,7 +13,7 @@ import { isMobile } from "react-device-detect";
import { SidebarMobileHeader } from "../Sidebar";
import ChatBubble from "../ChatBubble";
import System from "@/models/system";
-import Jazzicon from "../UserIcon";
+import UserIcon from "../UserIcon";
import { userFromStorage } from "@/utils/request";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import useUser from "@/hooks/useUser";
@@ -46,7 +46,7 @@ export default function DefaultChatContainer() {
className={`pt-10 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@@ -70,7 +70,7 @@ export default function DefaultChatContainer() {
className={`pb-4 pt-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@@ -93,7 +93,7 @@ export default function DefaultChatContainer() {
className={`pt-2 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@@ -127,8 +127,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon
-size={36}
+<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@@ -151,7 +150,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@@ -188,8 +187,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon
-size={36}
+<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@@ -213,7 +211,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
@@ -251,8 +249,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon
-size={36}
+<UserIcon
user={{ uid: userFromStorage()?.username }}
role={"user"}
/>
@@ -275,7 +272,7 @@ export default function DefaultChatContainer() {
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
-<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
+<UserIcon user={{ uid: "system" }} role={"assistant"} />
<div>
<span
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}

View File

@@ -0,0 +1,186 @@
import { useEffect, useState } from "react";
import System from "@/models/system";
import { Warning } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
export default function LiteLLMOptions({ settings }) {
const [basePathValue, setBasePathValue] = useState(settings?.LiteLLMBasePath);
const [basePath, setBasePath] = useState(settings?.LiteLLMBasePath);
const [apiKeyValue, setApiKeyValue] = useState(settings?.LiteLLMAPIKey);
const [apiKey, setApiKey] = useState(settings?.LiteLLMAPIKey);
return (
<div className="w-full flex flex-col gap-y-4">
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Base URL
</label>
<input
type="url"
name="LiteLLMBasePath"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="http://127.0.0.1:4000"
defaultValue={settings?.LiteLLMBasePath}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setBasePathValue(e.target.value)}
onBlur={() => setBasePath(basePathValue)}
/>
</div>
<LiteLLMModelSelection
settings={settings}
basePath={basePath}
apiKey={apiKey}
/>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max embedding chunk length
</label>
<input
type="number"
name="EmbeddingModelMaxChunkLength"
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="8192"
min={1}
onScroll={(e) => e.target.blur()}
defaultValue={settings?.EmbeddingModelMaxChunkLength}
required={false}
autoComplete="off"
/>
</div>
</div>
<div className="w-full flex items-center gap-4">
<div className="flex flex-col w-60">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-semibold flex items-center gap-x-2">
API Key <p className="!text-xs !italic !font-thin">optional</p>
</label>
</div>
<input
type="password"
name="LiteLLMAPIKey"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="sk-mysecretkey"
defaultValue={settings?.LiteLLMAPIKey ? "*".repeat(20) : ""}
autoComplete="off"
spellCheck={false}
onChange={(e) => setApiKeyValue(e.target.value)}
onBlur={() => setApiKey(apiKeyValue)}
/>
</div>
</div>
</div>
);
}
function LiteLLMModelSelection({ settings, basePath = null, apiKey = null }) {
const [customModels, setCustomModels] = useState([]);
const [loading, setLoading] = useState(true);
useEffect(() => {
async function findCustomModels() {
if (!basePath) {
setCustomModels([]);
setLoading(false);
return;
}
setLoading(true);
const { models } = await System.customModels(
"litellm",
typeof apiKey === "boolean" ? null : apiKey,
basePath
);
setCustomModels(models || []);
setLoading(false);
}
findCustomModels();
}, [basePath, apiKey]);
if (loading || customModels.length == 0) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Selection
</label>
<select
name="EmbeddingModelPref"
disabled={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
{basePath?.includes("/v1")
? "-- loading available models --"
: "-- waiting for URL --"}
</option>
</select>
</div>
);
}
return (
<div className="flex flex-col w-60">
<div className="flex items-center">
<label className="text-white text-sm font-semibold block mb-4">
Embedding Model Selection
</label>
<EmbeddingModelTooltip />
</div>
<select
name="EmbeddingModelPref"
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{customModels.length > 0 && (
<optgroup label="Your loaded models">
{customModels.map((model) => {
return (
<option
key={model.id}
value={model.id}
selected={settings.EmbeddingModelPref === model.id}
>
{model.id}
</option>
);
})}
</optgroup>
)}
</select>
</div>
);
}
function EmbeddingModelTooltip() {
return (
<div className="flex items-center justify-center -mt-3 ml-1">
<Warning
size={14}
className="ml-1 text-orange-500 cursor-pointer"
data-tooltip-id="model-tooltip"
data-tooltip-place="right"
/>
<Tooltip
delayHide={300}
id="model-tooltip"
className="max-w-xs"
clickable={true}
>
<p className="text-sm">
Be sure to select a valid embedding model. Chat models are not
embedding models. See{" "}
<a
href="https://litellm.vercel.app/docs/embedding/supported_embedding"
target="_blank"
rel="noreferrer"
className="underline"
>
this page
</a>{" "}
for more information.
</p>
</Tooltip>
</div>
);
}
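One detail worth noting in the component above: the base URL and API key are each tracked twice (`basePathValue`/`basePath`, `apiKeyValue`/`apiKey`), so the model list only refetches when an input loses focus rather than on every keystroke. A stripped-down sketch of that commit-on-blur pattern (hypothetical component, not part of the diff):

```js
import { useState, useEffect } from "react";

// Minimal illustration of the pattern used by LiteLLMOptions: keep a "draft"
// value that updates on every keystroke, and only promote it to the value that
// triggers side effects (like refetching models) when the field blurs.
export default function CommitOnBlurInput({ onCommitted }) {
  const [draft, setDraft] = useState("");
  const [committed, setCommitted] = useState("");

  useEffect(() => {
    if (!committed) return;
    onCommitted?.(committed); // e.g. System.customModels("litellm", apiKey, committed)
  }, [committed]);

  return (
    <input
      type="url"
      value={draft}
      onChange={(e) => setDraft(e.target.value)}
      onBlur={() => setCommitted(draft)}
    />
  );
}
```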

View File

@@ -116,7 +116,7 @@ export default function SettingsSidebar() {
</div>
</div>
</div>
-<div className="absolute bottom-6 left-0 right-0 pt-2 bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
+<div className="absolute bottom-2 left-0 right-0 pt-2 bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
<Footer />
</div>
</div>

View File

@@ -175,7 +175,7 @@ export function SidebarMobileHeader() {
<ActiveWorkspaces />
</div>
</div>
-<div className="absolute bottom-0 left-0 right-0 pt-2 pb-6 rounded-br-[26px] bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
+<div className="z-99 absolute bottom-0 left-0 right-0 pt-2 pb-6 rounded-br-[26px] bg-sidebar bg-opacity-80 backdrop-filter backdrop-blur-md">
<Footer />
</div>
</div>

View File

@@ -2,7 +2,7 @@ import React, { useRef, useEffect } from "react";
import JAZZ from "@metamask/jazzicon";
import usePfp from "../../hooks/usePfp";
-export default function Jazzicon({ size = 10, user, role }) {
+export default function UserIcon({ size = 36, user, role }) {
const { pfp } = usePfp();
const divRef = useRef(null);
const seed = user?.uid

Binary file not shown. (After: 1.5 KiB image)

View File

@@ -0,0 +1,126 @@
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import { Pencil } from "@phosphor-icons/react";
import { useState, useEffect, useRef } from "react";
import { Tooltip } from "react-tooltip";
const EDIT_EVENT = "toggle-message-edit";
export function useEditMessage({ chatId, role }) {
const [isEditing, setIsEditing] = useState(false);
function onEditEvent(e) {
if (e.detail.chatId !== chatId || e.detail.role !== role) {
setIsEditing(false);
return false;
}
setIsEditing((prev) => !prev);
}
useEffect(() => {
function listenForEdits() {
if (!chatId || !role) return;
window.addEventListener(EDIT_EVENT, onEditEvent);
}
listenForEdits();
return () => {
window.removeEventListener(EDIT_EVENT, onEditEvent);
};
}, [chatId, role]);
return { isEditing, setIsEditing };
}
export function EditMessageAction({ chatId = null, role, isEditing }) {
function handleEditClick() {
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
}
if (!chatId || isEditing) return null;
return (
<div
className={`mt-3 relative ${
role === "user" && !isEditing ? "opacity-0" : ""
} group-hover:opacity-100 transition-all duration-300`}
>
<button
onClick={handleEditClick}
data-tooltip-id="edit-input-text"
data-tooltip-content={`Edit ${
role === "user" ? "Prompt" : "Response"
} `}
className="border-none text-zinc-300"
aria-label={`Edit ${role === "user" ? "Prompt" : "Response"}`}
>
<Pencil size={18} className="mb-1" />
</button>
<Tooltip
id="edit-input-text"
place="bottom"
delayShow={300}
className="tooltip !text-xs"
/>
</div>
);
}
export function EditMessageForm({
role,
chatId,
message,
adjustTextArea,
saveChanges,
}) {
const formRef = useRef(null);
function handleSaveMessage(e) {
e.preventDefault();
const form = new FormData(e.target);
const editedMessage = form.get("editedMessage");
saveChanges({ editedMessage, chatId, role });
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
}
function cancelEdits() {
window.dispatchEvent(
new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
);
return false;
}
useEffect(() => {
if (!formRef || !formRef.current) return;
formRef.current.focus();
adjustTextArea({ target: formRef.current });
}, [formRef]);
return (
<form onSubmit={handleSaveMessage} className="flex flex-col w-full">
<textarea
ref={formRef}
name="editedMessage"
className={`w-full rounded ${
role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
} border border-white/20 active:outline-none focus:outline-none focus:ring-0 pr-16 pl-1.5 pt-1.5 resize-y`}
defaultValue={message}
onChange={adjustTextArea}
/>
<div className="mt-3 flex justify-center">
<button
type="submit"
className="px-2 py-1 bg-gray-200 text-gray-700 font-medium rounded-md mr-2 hover:bg-gray-300 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2"
>
Save & Submit
</button>
<button
type="button"
className="px-2 py-1 bg-historical-msg-system text-white font-medium rounded-md hover:bg-historical-msg-user/90 focus:outline-none focus:ring-2 focus:ring-gray-400 focus:ring-offset-2"
onClick={cancelEdits}
>
Cancel
</button>
</div>
</form>
);
}
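The edit UI above coordinates entirely through a window-level CustomEvent (`EDIT_EVENT = "toggle-message-edit"`): `EditMessageAction` dispatches it, and every mounted `useEditMessage` hook either toggles (matching `chatId`/`role`) or force-closes (everything else). A minimal standalone sketch of that handshake, outside React, for clarity:

```js
// Sketch of the event contract used by EditMessage.jsx: one dispatcher, many
// listeners, and only the listener whose chatId/role matches toggles its state.
const EDIT_EVENT = "toggle-message-edit";

function listenForEdit(chatId, role, onToggle, onClose) {
  const handler = (e) => {
    if (e.detail.chatId !== chatId || e.detail.role !== role) return onClose();
    onToggle();
  };
  window.addEventListener(EDIT_EVENT, handler);
  return () => window.removeEventListener(EDIT_EVENT, handler); // cleanup, as in the hook
}

// Dispatch side (what EditMessageAction's button click does):
function requestEdit(chatId, role) {
  window.dispatchEvent(new CustomEvent(EDIT_EVENT, { detail: { chatId, role } }));
}

// Usage: two messages listening, only the matching one toggles.
const stopA = listenForEdit(1, "user", () => console.log("A toggled"), () => console.log("A closed"));
const stopB = listenForEdit(2, "assistant", () => console.log("B toggled"), () => console.log("B closed"));
requestEdit(1, "user"); // -> "A toggled", "B closed"
stopA();
stopB();
```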

View File

@@ -2,14 +2,15 @@ import React, { memo, useState } from "react";
import useCopyText from "@/hooks/useCopyText";
import {
Check,
-ClipboardText,
ThumbsUp,
ThumbsDown,
ArrowsClockwise,
+Copy,
} from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
import Workspace from "@/models/workspace";
import TTSMessage from "./TTSButton";
+import { EditMessageAction } from "./EditMessage";
const Actions = ({
message,
@@ -18,9 +19,10 @@ const Actions = ({
slug,
isLastMessage,
regenerateMessage,
+isEditing,
+role,
}) => {
const [selectedFeedback, setSelectedFeedback] = useState(feedbackScore);
const handleFeedback = async (newFeedback) => {
const updatedFeedback =
selectedFeedback === newFeedback ? null : newFeedback;
@@ -32,14 +34,15 @@
<div className="flex w-full justify-between items-center">
<div className="flex justify-start items-center gap-x-4">
<CopyMessage message={message} />
-{isLastMessage && (
+<EditMessageAction chatId={chatId} role={role} isEditing={isEditing} />
+{isLastMessage && !isEditing && (
<RegenerateMessage
regenerateMessage={regenerateMessage}
slug={slug}
chatId={chatId}
/>
)}
-{chatId && (
+{chatId && role !== "user" && !isEditing && (
<>
<FeedbackButton
isSelected={selectedFeedback === true}
@@ -111,7 +114,7 @@ function CopyMessage({ message }) {
{copied ? (
<Check size={18} className="mb-1" />
) : (
-<ClipboardText size={18} className="mb-1" />
+<Copy size={18} className="mb-1" />
)}
</button>
<Tooltip

View File

@@ -1,6 +1,6 @@
import React, { memo } from "react";
import { Warning } from "@phosphor-icons/react";
-import Jazzicon from "../../../../UserIcon";
+import UserIcon from "../../../../UserIcon";
import Actions from "./Actions";
import renderMarkdown from "@/utils/chat/markdown";
import { userFromStorage } from "@/utils/request";
@@ -8,6 +8,7 @@ import Citations from "../Citation";
import { AI_BACKGROUND_COLOR, USER_BACKGROUND_COLOR } from "@/utils/constants";
import { v4 } from "uuid";
import createDOMPurify from "dompurify";
+import { EditMessageForm, useEditMessage } from "./Actions/EditMessage";
const DOMPurify = createDOMPurify(window);
const HistoricalMessage = ({
@@ -21,20 +22,28 @@ const HistoricalMessage = ({
chatId = null,
isLastMessage = false,
regenerateMessage,
+saveEditedMessage,
}) => {
-return (
-<div
-key={uuid}
-className={`flex justify-center items-end w-full ${
-role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
-}`}
->
-<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
-<div className="flex gap-x-5">
-<ProfileImage role={role} workspace={workspace} />
-{error ? (
+const { isEditing } = useEditMessage({ chatId, role });
+const adjustTextArea = (event) => {
+const element = event.target;
+element.style.height = "auto";
+element.style.height = element.scrollHeight + "px";
+};
+
+if (!!error) {
+return (
+<div
+key={uuid}
+className={`flex justify-center items-end w-full ${
+role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
+}`}
+>
+<div className="py-8 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
+<div className="flex gap-x-5">
+<ProfileImage role={role} workspace={workspace} />
<div className="p-2 rounded-lg bg-red-50 text-red-500">
-<span className={`inline-block `}>
+<span className="inline-block">
<Warning className="h-4 w-4 mb-1 inline-block" /> Could not
respond to message.
</span>
@@ -42,6 +51,30 @@ const HistoricalMessage = ({
{error}
</p>
</div>
+</div>
+</div>
+</div>
+);
+}
+
+return (
+<div
+key={uuid}
+className={`flex justify-center items-end w-full group ${
+role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
+}`}
+>
+<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
+<div className="flex gap-x-5">
+<ProfileImage role={role} workspace={workspace} />
+{isEditing ? (
+<EditMessageForm
+role={role}
+chatId={chatId}
+message={message}
+adjustTextArea={adjustTextArea}
+saveChanges={saveEditedMessage}
+/>
) : (
<span
className={`flex flex-col gap-y-1`}
@@ -51,19 +84,19 @@ const HistoricalMessage = ({
/>
)}
</div>
-{role === "assistant" && !error && (
<div className="flex gap-x-5">
<div className="relative w-[35px] h-[35px] rounded-full flex-shrink-0 overflow-hidden" />
<Actions
message={message}
feedbackScore={feedbackScore}
chatId={chatId}
slug={workspace?.slug}
isLastMessage={isLastMessage}
regenerateMessage={regenerateMessage}
+isEditing={isEditing}
+role={role}
/>
</div>
-)}
{role === "assistant" && <Citations sources={sources} />}
</div>
</div>
@@ -84,8 +117,7 @@ function ProfileImage({ role, workspace }) {
}
return (
-<Jazzicon
-size={36}
+<UserIcon
user={{
uid: role === "user" ? userFromStorage()?.username : workspace.slug,
}}

View File

@@ -1,6 +1,6 @@
import { memo } from "react";
import { Warning } from "@phosphor-icons/react";
-import Jazzicon from "../../../../UserIcon";
+import UserIcon from "../../../../UserIcon";
import renderMarkdown from "@/utils/chat/markdown";
import Citations from "../Citation";
@@ -84,7 +84,7 @@ export function WorkspaceProfileImage({ workspace }) {
);
}
-return <Jazzicon size={36} user={{ uid: workspace.slug }} role="assistant" />;
+return <UserIcon user={{ uid: workspace.slug }} role="assistant" />;
}
export default memo(PromptReply);

View File

@@ -7,14 +7,18 @@ import { ArrowDown } from "@phosphor-icons/react";
import debounce from "lodash.debounce";
import useUser from "@/hooks/useUser";
import Chartable from "./Chartable";
+import Workspace from "@/models/workspace";
+import { useParams } from "react-router-dom";
export default function ChatHistory({
history = [],
workspace,
sendCommand,
+updateHistory,
regenerateAssistantMessage,
}) {
const { user } = useUser();
+const { threadSlug = null } = useParams();
const { showing, showModal, hideModal } = useManageWorkspaceModal();
const [isAtBottom, setIsAtBottom] = useState(true);
const chatHistoryRef = useRef(null);
@@ -87,6 +91,46 @@
sendCommand(`${heading} ${message}`, true);
};
+const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
+if (!editedMessage) return; // Don't save empty edits.
+
+// if the edit was a user message, we will auto-regenerate the response and delete all
+// messages post modified message
+if (role === "user") {
+// remove all messages after the edited message
+// technically there are two chatIds per-message pair, this will split the first.
+const updatedHistory = history.slice(
+0,
+history.findIndex((msg) => msg.chatId === chatId) + 1
+);
+
+// update last message in history to edited message
+updatedHistory[updatedHistory.length - 1].content = editedMessage;
+
+// remove all edited messages after the edited message in backend
+await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
+sendCommand(editedMessage, true, updatedHistory);
+return;
+}
+
+// If role is an assistant we simply want to update the comment and save on the backend as an edit.
+if (role === "assistant") {
+const updatedHistory = [...history];
+const targetIdx = history.findIndex(
+(msg) => msg.chatId === chatId && msg.role === role
+);
+if (targetIdx < 0) return;
+updatedHistory[targetIdx].content = editedMessage;
+updateHistory(updatedHistory);
+await Workspace.updateChatResponse(
+workspace.slug,
+threadSlug,
+chatId,
+editedMessage
+);
+return;
+}
+};
+
if (history.length === 0) {
return (
<div className="flex flex-col h-full md:mt-0 pb-44 md:pb-40 w-full justify-end items-center">
@@ -172,6 +216,7 @@
error={props.error}
regenerateMessage={regenerateAssistantMessage}
isLastMessage={isLastBotReply}
+saveEditedMessage={saveEditedMessage}
/>
);
})}
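In `saveEditedMessage` above, editing a user prompt truncates the in-memory history at the edited pair before resending. Because each prompt/response pair shares a chatId and `findIndex` returns the first match (the prompt), the slice keeps everything up to and including the edited prompt. A small worked example with made-up data, same logic:

```js
// Same truncation logic as saveEditedMessage, applied to toy data.
const history = [
  { chatId: 10, role: "user", content: "What is RAG?" },
  { chatId: 10, role: "assistant", content: "Retrieval augmented generation..." },
  { chatId: 11, role: "user", content: "Give me an example" },
  { chatId: 11, role: "assistant", content: "Sure, for instance..." },
];

const editedChatId = 10;
const editedMessage = "What is RAG, in one sentence?";

// findIndex hits the *user* half of the pair first, so slice keeps it and drops the rest.
const updatedHistory = history.slice(
  0,
  history.findIndex((msg) => msg.chatId === editedChatId) + 1
);
updatedHistory[updatedHistory.length - 1].content = editedMessage;

console.log(updatedHistory);
// [ { chatId: 10, role: "user", content: "What is RAG, in one sentence?" } ]
// The old assistant reply and everything after it are regenerated server-side.
```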

View File

@@ -240,6 +240,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
history={chatHistory}
workspace={workspace}
sendCommand={sendCommand}
+updateHistory={setChatHistory}
regenerateAssistantMessage={regenerateAssistantMessage}
/>
<PromptInput

View File

@@ -22,6 +22,7 @@ export default function WorkspaceChat({ loading, workspace }) {
const chatHistory = threadSlug
? await Workspace.threads.chatHistory(workspace.slug, threadSlug)
: await Workspace.chatHistory(workspace.slug);
+
setHistory(chatHistory);
setLoadingHistory(false);
}

View File

@@ -90,6 +90,26 @@ const Workspace = {
return false;
});
},
+deleteEditedChats: async function (slug = "", threadSlug = "", startingId) {
+if (!!threadSlug)
+return this.threads._deleteEditedChats(slug, threadSlug, startingId);
+return this._deleteEditedChats(slug, startingId);
+},
+updateChatResponse: async function (
+slug = "",
+threadSlug = "",
+chatId,
+newText
+) {
+if (!!threadSlug)
+return this.threads._updateChatResponse(
+slug,
+threadSlug,
+chatId,
+newText
+);
+return this._updateChatResponse(slug, chatId, newText);
+},
streamChat: async function ({ slug }, message, handleChat) {
const ctrl = new AbortController();
@@ -287,8 +307,6 @@
return null;
});
},
-threads: WorkspaceThread,
-
uploadPfp: async function (formData, slug) {
return await fetch(`${API_BASE}/workspace/${slug}/upload-pfp`, {
method: "POST",
@@ -336,6 +354,37 @@
return { success: false, error: e.message };
});
},
+_updateChatResponse: async function (slug = "", chatId, newText) {
+return await fetch(`${API_BASE}/workspace/${slug}/update-chat`, {
+method: "POST",
+headers: baseHeaders(),
+body: JSON.stringify({ chatId, newText }),
+})
+.then((res) => {
+if (res.ok) return true;
+throw new Error("Failed to update chat.");
+})
+.catch((e) => {
+console.log(e);
+return false;
+});
+},
+_deleteEditedChats: async function (slug = "", startingId) {
+return await fetch(`${API_BASE}/workspace/${slug}/delete-edited-chats`, {
+method: "DELETE",
+headers: baseHeaders(),
+body: JSON.stringify({ startingId }),
+})
+.then((res) => {
+if (res.ok) return true;
+throw new Error("Failed to delete chats.");
+})
+.catch((e) => {
+console.log(e);
+return false;
+});
+},
+threads: WorkspaceThread,
};
export default Workspace;
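The two public methods added above (`deleteEditedChats`, `updateChatResponse`) both take an optional `threadSlug` and route to the thread-scoped endpoint when it is present, otherwise to the workspace-scoped one. A usage sketch (slugs and IDs are made up):

```js
import Workspace from "@/models/workspace";

// Assistant message edited in the default workspace chat (no thread):
await Workspace.updateChatResponse("my-workspace", null, 42, "Corrected answer text");
// -> POST /workspace/my-workspace/update-chat  body: { chatId: 42, newText: "..." }

// User prompt edited inside a thread: delete that chat record and everything after it.
await Workspace.deleteEditedChats("my-workspace", "thread-abc123", 42);
// -> DELETE /workspace/my-workspace/thread/thread-abc123/delete-edited-chats  body: { startingId: 42 }
```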

View File

@@ -163,6 +163,51 @@ const WorkspaceThread = {
}
);
},
+_deleteEditedChats: async function (
+workspaceSlug = "",
+threadSlug = "",
+startingId
+) {
+return await fetch(
+`${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/delete-edited-chats`,
+{
+method: "DELETE",
+headers: baseHeaders(),
+body: JSON.stringify({ startingId }),
+}
+)
+.then((res) => {
+if (res.ok) return true;
+throw new Error("Failed to delete chats.");
+})
+.catch((e) => {
+console.log(e);
+return false;
+});
+},
+_updateChatResponse: async function (
+workspaceSlug = "",
+threadSlug = "",
+chatId,
+newText
+) {
+return await fetch(
+`${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/update-chat`,
+{
+method: "POST",
+headers: baseHeaders(),
+body: JSON.stringify({ chatId, newText }),
+}
+)
+.then((res) => {
+if (res.ok) return true;
+throw new Error("Failed to update chat.");
+})
+.catch((e) => {
+console.log(e);
+return false;
+});
+},
};
export default WorkspaceThread;

View File

@@ -11,6 +11,7 @@ import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import VoyageAiLogo from "@/media/embeddingprovider/voyageai.png";
+import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import PreLoader from "@/components/Preloader";
import ChangeWarningModal from "@/components/ChangeWarning";
@@ -22,6 +23,7 @@ import OllamaEmbeddingOptions from "@/components/EmbeddingSelection/OllamaOption
import LMStudioEmbeddingOptions from "@/components/EmbeddingSelection/LMStudioOptions";
import CohereEmbeddingOptions from "@/components/EmbeddingSelection/CohereOptions";
import VoyageAiOptions from "@/components/EmbeddingSelection/VoyageAiOptions";
+import LiteLLMOptions from "@/components/EmbeddingSelection/LiteLLMOptions";
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@@ -88,6 +90,13 @@ const EMBEDDERS = [
options: (settings) => <VoyageAiOptions settings={settings} />,
description: "Run powerful embedding models from Voyage AI.",
},
+{
+name: "LiteLLM",
+value: "litellm",
+logo: LiteLLMLogo,
+options: (settings) => <LiteLLMOptions settings={settings} />,
+description: "Run powerful embedding models from LiteLLM.",
+},
];
export default function GeneralEmbeddingPreference() {
export default function GeneralEmbeddingPreference() { export default function GeneralEmbeddingPreference() {

View File

@@ -301,6 +301,13 @@ export const EMBEDDING_ENGINE_PRIVACY = {
],
logo: VoyageAiLogo,
},
+litellm: {
+name: "LiteLLM",
+description: [
+"Your document text is only accessible on the server running LiteLLM and to the providers you configured in LiteLLM.",
+],
+logo: LiteLLMLogo,
+},
};
export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {

View File

@@ -108,13 +108,10 @@ export default function handleChat(
} else if (type === "finalizeResponseStream") {
const chatIdx = _chatHistory.findIndex((chat) => chat.uuid === uuid);
if (chatIdx !== -1) {
-const existingHistory = { ..._chatHistory[chatIdx] };
-const updatedHistory = {
-...existingHistory,
-chatId, // finalize response stream only has some specific keys for data. we are explicitly listing them here.
-};
-_chatHistory[chatIdx] = updatedHistory;
+_chatHistory[chatIdx - 1] = { ..._chatHistory[chatIdx - 1], chatId }; // update prompt with chatID
+_chatHistory[chatIdx] = { ..._chatHistory[chatIdx], chatId }; // update response with chatID
}
setChatHistory([..._chatHistory]);
setLoadingResponse(false);
} else if (type === "stopGeneration") {
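The change above back-fills the `chatId` onto both halves of the streamed exchange once the stream finalizes: the prompt sits immediately before the response in `_chatHistory`, so `chatIdx - 1` is the user message and `chatIdx` is the assistant message. This is what lets the new edit buttons act on a message that was just streamed (they render nothing without a `chatId`). A toy illustration of the indexing, with made-up data:

```js
// After streaming, the pending exchange looks roughly like this (no chatId yet):
const _chatHistory = [
  { uuid: "p-1", role: "user", content: "Hi" },
  { uuid: "r-1", role: "assistant", content: "Hello!" },
];

// finalizeResponseStream arrives with the persisted chatId for this exchange.
const uuid = "r-1";
const chatId = 42;
const chatIdx = _chatHistory.findIndex((chat) => chat.uuid === uuid); // 1

if (chatIdx !== -1) {
  _chatHistory[chatIdx - 1] = { ..._chatHistory[chatIdx - 1], chatId }; // prompt gets chatId 42
  _chatHistory[chatIdx] = { ..._chatHistory[chatIdx], chatId }; // response gets chatId 42
}
```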

View File

@@ -51,7 +51,7 @@ export default defineConfig({
rollupOptions: {
external: [
// Reduces transformation time by 50% and we don't even use this variant, so we can ignore.
-/@phosphor-icons\/react\/dist\/ssr/,
+/@phosphor-icons\/react\/dist\/ssr/
]
},
commonjsOptions: {

locales/README.ja-JP.md (new file, 235 lines)
View File

@@ -0,0 +1,235 @@
<a name="readme-top"></a>
<p align="center">
<a href="https://useanything.com"><img src="https://github.com/Mintplex-Labs/anything-llm/blob/master/images/wordmark.png?raw=true" alt="AnythingLLM logo"></a>
</p>
<div align='center'>
<a href="https://trendshift.io/repositories/2415" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2415" alt="Mintplex-Labs%2Fanything-llm | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</div>
<p align="center">
<b>AnythingLLM:</b> あなたが探していたオールインワンAIアプリ。<br />
ドキュメントとチャットし、AIエージェントを使用し、高度にカスタマイズ可能で、複数ユーザー対応、面倒な設定は不要です。
</p>
<p align="center">
<a href="https://discord.gg/6UyHPeGZAC" target="_blank">
<img src="https://img.shields.io/badge/chat-mintplex_labs-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAAIGNIUk0AAHomAACAhAAA+gAAAIDoAAB1MAAA6mAAADqYAAAXcJy6UTwAAAH1UExURQAAAP////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////r6+ubn5+7u7/3+/v39/enq6urq6/v7+97f39rb26eoqT1BQ0pOT4+Rkuzs7cnKykZKS0NHSHl8fdzd3ejo6UxPUUBDRdzc3RwgIh8jJSAkJm5xcvHx8aanqB4iJFBTVezt7V5hYlJVVuLj43p9fiImKCMnKZKUlaaoqSElJ21wcfT09O3u7uvr6zE0Nr6/wCUpK5qcnf7+/nh7fEdKTHx+f0tPUOTl5aipqiouMGtubz5CRDQ4OsTGxufn515hY7a3uH1/gXBydIOFhlVYWvX29qaoqCQoKs7Pz/Pz87/AwUtOUNfY2dHR0mhrbOvr7E5RUy8zNXR2d/f39+Xl5UZJSx0hIzQ3Odra2/z8/GlsbaGjpERHSezs7L/BwScrLTQ4Odna2zM3Obm7u3x/gKSmp9jZ2T1AQu/v71pdXkVISr2+vygsLiInKTg7PaOlpisvMcXGxzk8PldaXPLy8u7u7rm6u7S1tsDBwvj4+MPExbe4ueXm5s/Q0Kyf7ewAAAAodFJOUwAABClsrNjx/QM2l9/7lhmI6jTB/kA1GgKJN+nea6vy/MLZQYeVKK3rVA5tAAAAAWJLR0QB/wIt3gAAAAd0SU1FB+cKBAAmMZBHjXIAAAISSURBVDjLY2CAAkYmZhZWNnYODnY2VhZmJkYGVMDIycXNw6sBBbw8fFycyEoYGfkFBDVQgKAAPyMjQl5IWEQDDYgIC8FUMDKKsmlgAWyiEBWMjGJY5YEqxMAqGMWFNXAAYXGgAkYJSQ2cQFKCkYFRShq3AmkpRgYJbghbU0tbB0Tr6ukbgGhDI10gySfBwCwDUWBsYmpmDqQtLK2sbTQ0bO3sHYA8GWYGWWj4WTs6Obu4ami4OTm7exhqeHp5+4DCVJZBDmqdr7ufn3+ArkZgkJ+fU3CIRmgYWFiOARYGvo5OQUHhEUAFTkF+kVHRsLBgkIeyYmLjwoOc4hMSk5JTnINS06DC8gwcEEZ6RqZGlpOfc3ZObl5+gZ+TR2ERWFyBQQFMF5eklmqUpQb5+ReU61ZUOvkFVVXXQBSAraitq29o1GiKcfLzc29u0mjxBzq0tQ0kww5xZHtHUGeXhkZhdxBYgZ4d0LI6c4gjwd7siQQraOp1AivQ6CuAKZCDBBRQQQNQgUb/BGf3cqCCiZOcnCe3QQIKHNRTpk6bDgpZjRkzg3pBQTBrdtCcuZCgluAD0vPmL1gIdvSixUuWgqNs2YJ+DUhkEYxuggkGmOQUcckrioPTJCOXEnZ5JS5YslbGnuyVERlDDFvGEUPOWvwqaH6RVkHKeuDMK6SKnHlVhTgx8jeTmqy6Eij7K6nLqiGyPwChsa1MUrnq1wAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyMy0xMC0wNFQwMDozODo0OSswMDowMB9V0a8AAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjMtMTAtMDRUMDA6Mzg6NDkrMDA6MDBuCGkTAAAAKHRFWHRkYXRlOnRpbWVzdGFtcAAyMDIzLTEwLTA0VDAwOjM4OjQ5KzAwOjAwOR1IzAAAAABJRU5ErkJggg==" alt="Discord">
</a> |
<a href="https://github.com/Mintplex-Labs/anything-llm/blob/master/LICENSE" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=white" alt="ライセンス">
</a> |
<a href="https://docs.useanything.com" target="_blank">
ドキュメント
</a> |
<a href="https://my.mintplexlabs.com/aio-checkout?product=anythingllm" target="_blank">
ホストされたインスタンス
</a>
</p>
<p align="center">
<a href='../README.md'>English</a> · <a href='./README.zh-CN.md'>简体中文</a> · <b>日本語</b>
</p>
<p align="center">
👉 デスクトップ用AnythingLLMMac、Windows、Linux対応<a href="https://useanything.com/download" target="_blank">今すぐダウンロード</a>
</p>
これは、任意のドキュメント、リソース、またはコンテンツの断片を、チャット中にLLMが参照として使用できるコンテキストに変換できるフルスタックアプリケーションです。このアプリケーションを使用すると、使用するLLMまたはベクトルデータベースを選択し、マルチユーザー管理と権限をサポートできます。
![チャット](https://github.com/Mintplex-Labs/anything-llm/assets/16845892/cfc5f47c-bd91-4067-986c-f3f49621a859)
<details>
<summary><kbd>デモを見る!</kbd></summary>
[![ビデオを見る](/images/youtube.png)](https://youtu.be/f95rGD9trL0)
</details>
### 製品概要
AnythingLLMは、市販のLLMや人気のあるオープンソースLLM、およびベクトルDBソリューションを使用して、妥協のないプライベートChatGPTを構築できるフルスタックアプリケーションです。ローカルで実行することも、リモートでホストすることもでき、提供されたドキュメントと知的にチャットできます。
AnythingLLMは、ドキュメントを`ワークスペース`と呼ばれるオブジェクトに分割します。ワークスペースはスレッドのように機能しますが、ドキュメントのコンテナ化が追加されています。ワークスペースはドキュメントを共有できますが、互いに通信することはないため、各ワークスペースのコンテキストをクリーンに保つことができます。
AnythingLLMのいくつかのクールな機能
- **マルチユーザーインスタンスのサポートと権限付与**
- ワークスペース内のエージェント(ウェブを閲覧、コードを実行など)
- [ウェブサイト用のカスタム埋め込み可能なチャットウィジェット](./embed/README.md)
- 複数のドキュメントタイプのサポートPDF、TXT、DOCXなど
- シンプルなUIからベクトルデータベース内のドキュメントを管理
- 2つのチャットモード`会話`と`クエリ`。会話は以前の質問と修正を保持します。クエリはドキュメントに対するシンプルなQAです
- チャット中の引用
- 100%クラウドデプロイメント対応。
- 「独自のLLMを持参」モデル。
- 大規模なドキュメントを管理するための非常に効率的なコスト削減策。巨大なドキュメントやトランスクリプトを埋め込むために一度以上支払うことはありません。他のドキュメントチャットボットソリューションよりも90%コスト効率が良いです。
- カスタム統合のための完全な開発者API
### サポートされているLLM、埋め込みモデル、音声モデル、およびベクトルデータベース
**言語学習モデル:**
- [llama.cpp互換の任意のオープンソースモデル](/server/storage/models/README.md#text-generation-llm-selection)
- [OpenAI](https://openai.com)
- [OpenAI (汎用)](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [Anthropic](https://www.anthropic.com/)
- [Google Gemini Pro](https://ai.google.dev/)
- [Hugging Face (チャットモデル)](https://huggingface.co/)
- [Ollama (チャットモデル)](https://ollama.ai/)
- [LM Studio (すべてのモデル)](https://lmstudio.ai)
- [LocalAi (すべてのモデル)](https://localai.io/)
- [Together AI (チャットモデル)](https://www.together.ai/)
- [Perplexity (チャットモデル)](https://www.perplexity.ai/)
- [OpenRouter (チャットモデル)](https://openrouter.ai/)
- [Mistral](https://mistral.ai/)
- [Groq](https://groq.com/)
- [Cohere](https://cohere.com/)
- [KoboldCPP](https://github.com/LostRuins/koboldcpp)
**埋め込みモデル:**
- [AnythingLLMネイティブ埋め込み](/server/storage/models/README.md)(デフォルト)
- [OpenAI](https://openai.com)
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
- [LocalAi (すべて)](https://localai.io/)
- [Ollama (すべて)](https://ollama.ai/)
- [LM Studio (すべて)](https://lmstudio.ai)
- [Cohere](https://cohere.com/)
**音声変換モデル:**
- [AnythingLLM内蔵](https://github.com/Mintplex-Labs/anything-llm/tree/master/server/storage/models#audiovideo-transcription)(デフォルト)
- [OpenAI](https://openai.com/)
**TTSテキストから音声へサポート**
- ネイティブブラウザ内蔵(デフォルト)
- [OpenAI TTS](https://platform.openai.com/docs/guides/text-to-speech/voice-options)
- [ElevenLabs](https://elevenlabs.io/)
**STT音声からテキストへサポート**
- ネイティブブラウザ内蔵(デフォルト)
**ベクトルデータベース:**
- [LanceDB](https://github.com/lancedb/lancedb)(デフォルト)
- [Astra DB](https://www.datastax.com/products/datastax-astra)
- [Pinecone](https://pinecone.io)
- [Chroma](https://trychroma.com)
- [Weaviate](https://weaviate.io)
- [QDrant](https://qdrant.tech)
- [Milvus](https://milvus.io)
- [Zilliz](https://zilliz.com)
### 技術概要
このモノレポは、主に3つのセクションで構成されています
- `frontend`: LLMが使用できるすべてのコンテンツを簡単に作成および管理できるviteJS + Reactフロントエンド。
- `server`: すべてのインタラクションを処理し、すべてのベクトルDB管理およびLLMインタラクションを行うNodeJS expressサーバー。
- `collector`: UIからドキュメントを処理および解析するNodeJS expressサーバー。
- `docker`: Dockerの指示およびビルドプロセス + ソースからのビルド情報。
- `embed`: [埋め込みウィジェット](./embed/README.md)の生成に特化したコード。
## 🛳 セルフホスティング
Mintplex Labsおよびコミュニティは、AnythingLLMをローカルで実行できる多数のデプロイメント方法、スクリプト、テンプレートを維持しています。以下の表を参照して、お好みの環境でのデプロイ方法を読むか、自動デプロイを行ってください。
| Docker | AWS | GCP | Digital Ocean | Render.com |
|----------------------------------------|----:|-----|---------------|------------|
| [![Docker上でデプロイ][docker-btn]][docker-deploy] | [![AWS上でデプロイ][aws-btn]][aws-deploy] | [![GCP上でデプロイ][gcp-btn]][gcp-deploy] | [![DigitalOcean上でデプロイ][do-btn]][do-deploy] | [![Render.com上でデプロイ][render-btn]][render-deploy] |
| Railway |
| --------------------------------------------------- |
| [![Railway上でデプロイ][railway-btn]][railway-deploy] |
[Dockerを使用せずに本番環境のAnythingLLMインスタンスを設定する →](./BARE_METAL.md)
## 開発環境のセットアップ方法
- `yarn setup` 各アプリケーションセクションに必要な`.env`ファイルを入力します(リポジトリのルートから)。
- 次に進む前にこれらを入力してください。`server/.env.development`が入力されていないと正しく動作しません。
- `yarn dev:server` ローカルでサーバーを起動します(リポジトリのルートから)。
- `yarn dev:frontend` ローカルでフロントエンドを起動します(リポジトリのルートから)。
- `yarn dev:collector` ドキュメントコレクターを実行します(リポジトリのルートから)。
[ドキュメントについて学ぶ](./server/storage/documents/DOCUMENTS.md)
[ベクトルキャッシュについて学ぶ](./server/storage/vector-cache/VECTOR_CACHE.md)
## 貢献する方法
- issueを作成する
- `<issue number>-<short name>`の形式のブランチ名でPRを作成する
- マージしましょう
## テレメトリーとプライバシー
Mintplex Labs Inc.によって開発されたAnythingLLMには、匿名の使用情報を収集するテレメトリー機能が含まれています。
<details>
<summary><kbd>AnythingLLMのテレメトリーとプライバシーについての詳細</kbd></summary>
### なぜ?
この情報を使用して、AnythingLLMの使用方法を理解し、新機能とバグ修正の優先順位を決定し、AnythingLLMのパフォーマンスと安定性を向上させるのに役立てます。
### オプトアウト
サーバーまたはdockerの.env設定で`DISABLE_TELEMETRY`を「true」に設定して、テレメトリーからオプトアウトします。アプリ内でも、サイドバー > `プライバシー`に移動してテレメトリーを無効にすることができます。
### 明示的に追跡するもの
製品およびロードマップの意思決定に役立つ使用詳細のみを追跡します。具体的には:
- インストールのタイプDockerまたはデスクトップ
- ドキュメントが追加または削除されたとき。ドキュメントについての情報はありません。イベントが発生したことのみを知ります。これにより、使用状況を把握できます。
- 使用中のベクトルデータベースのタイプ。どのベクトルデータベースプロバイダーが最も使用されているかを知り、更新があったときに優先して変更を行います。
- 使用中のLLMのタイプ。最も人気のある選択肢を知り、更新があったときに優先して変更を行います。
- チャットが送信された。これは最も一般的な「イベント」であり、すべてのインストールでのこのプロジェクトの日常的な「アクティビティ」についてのアイデアを提供します。再び、イベントのみが送信され、チャット自体の性質や内容に関する情報はありません。
これらの主張を検証するには、`Telemetry.sendTelemetry`が呼び出されるすべての場所を見つけてください。また、これらのイベントは出力ログに書き込まれるため、送信された具体的なデータも確認できます。IPアドレスやその他の識別情報は収集されません。テレメトリープロバイダーは[PostHog](https://posthog.com/)です。
[ソースコード内のすべてのテレメトリーイベントを表示](https://github.com/search?q=repo%3AMintplex-Labs%2Fanything-llm%20.sendTelemetry\(&type=code)
</details>
## 🔗 その他の製品
- **[VectorAdmin][vector-admin]**ベクトルデータベースを管理するためのオールインワンGUIおよびツールスイート。
- **[OpenAI Assistant Swarm][assistant-swarm]**単一のエージェントから指揮できるOpenAIアシスタントの軍隊に、ライブラリ全体を変換します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
---
Copyright © 2024 [Mintplex Labs][profile-link]。<br />
このプロジェクトは[MIT](./LICENSE)ライセンスの下でライセンスされています。
<!-- LINK GROUP -->
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-222628?style=flat-square
[profile-link]: https://github.com/mintplex-labs
[vector-admin]: https://github.com/mintplex-labs/vector-admin
[assistant-swarm]: https://github.com/Mintplex-Labs/openai-assistant-swarm
[docker-btn]: ./images/deployBtns/docker.png
[docker-deploy]: ./docker/HOW_TO_USE_DOCKER.md
[aws-btn]: ./images/deployBtns/aws.png
[aws-deploy]: ./cloud-deployments/aws/cloudformation/DEPLOY.md
[gcp-btn]: https://deploy.cloud.run/button.svg
[gcp-deploy]: ./cloud-deployments/gcp/deployment/DEPLOY.md
[do-btn]: https://www.deploytodo.com/do-btn-blue.svg
[do-deploy]: ./cloud-deployments/digitalocean/terraform/DEPLOY.md
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[render-btn]: https://render.com/images/deploy-to-render-button.svg
[render-deploy]: https://render.com/deploy?repo=https://github.com/Mintplex-Labs/anything-llm&branch=render
[railway-btn]: https://railway.app/button.svg
[railway-deploy]: https://railway.app/template/HNSCS1?referralCode=WFgJkn

View File

@@ -25,7 +25,7 @@
</p>
<p align="center">
-<a href='/README.md'>English</a> · <b>简体中文</b>
+<a href='../README.md'>English</a> · <b>简体中文</b> · <a href='./README.ja-JP.md'>简体中文</a>
</p>
<p align="center">

View File

@@ -125,6 +125,12 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# VOYAGEAI_API_KEY=
# EMBEDDING_MODEL_PREF='voyage-large-2-instruct'
+# EMBEDDING_ENGINE='litellm'
+# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
+# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
+# LITE_LLM_BASE_PATH='http://127.0.0.1:4000'
+# LITE_LLM_API_KEY='sk-123abc'
###########################################
######## Vector Database Selection ########
###########################################

View File

@ -1,4 +1,9 @@
const { multiUserMode, userFromSession, reqBody } = require("../utils/http"); const {
multiUserMode,
userFromSession,
reqBody,
safeJsonParse,
} = require("../utils/http");
const { validatedRequest } = require("../utils/middleware/validatedRequest"); const { validatedRequest } = require("../utils/middleware/validatedRequest");
const { Telemetry } = require("../models/telemetry"); const { Telemetry } = require("../models/telemetry");
const { const {
@ -168,6 +173,77 @@ function workspaceThreadEndpoints(app) {
} }
} }
); );
app.delete(
"/workspace/:slug/thread/:threadSlug/delete-edited-chats",
[
validatedRequest,
flexUserRoleValid([ROLES.all]),
validWorkspaceAndThreadSlug,
],
async (request, response) => {
try {
const { startingId } = reqBody(request);
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const thread = response.locals.thread;
await WorkspaceChats.delete({
workspaceId: Number(workspace.id),
thread_id: Number(thread.id),
user_id: user?.id,
id: { gte: Number(startingId) },
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
app.post(
"/workspace/:slug/thread/:threadSlug/update-chat",
[
validatedRequest,
flexUserRoleValid([ROLES.all]),
validWorkspaceAndThreadSlug,
],
async (request, response) => {
try {
const { chatId, newText = null } = reqBody(request);
if (!newText || !String(newText).trim())
throw new Error("Cannot save empty response");
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const thread = response.locals.thread;
const existingChat = await WorkspaceChats.get({
workspaceId: workspace.id,
thread_id: thread.id,
user_id: user?.id,
id: Number(chatId),
});
if (!existingChat) throw new Error("Invalid chat.");
const chatResponse = safeJsonParse(existingChat.response, null);
if (!chatResponse) throw new Error("Failed to parse chat response");
await WorkspaceChats._update(existingChat.id, {
response: JSON.stringify({
...chatResponse,
text: String(newText),
}),
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
} }
module.exports = { workspaceThreadEndpoints }; module.exports = { workspaceThreadEndpoints };
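Note: the two thread routes above are the server side of message editing. update-chat rewrites only the text field of a chat record's stored response, and delete-edited-chats removes every chat in the thread whose id is greater than or equal to startingId. A minimal client sketch with hypothetical fetch calls follows; the base URL, auth header, slugs and ids are placeholders, and the real frontend goes through its own API wrapper:

// Hypothetical usage of the routes above (sketch only, Node 18+ or browser fetch).
async function editThreadMessage() {
  const baseUrl = "http://localhost:3001/api"; // placeholder mount point
  const slug = "my-workspace";
  const threadSlug = "my-thread";
  const headers = {
    "Content-Type": "application/json",
    Authorization: "Bearer <session-token>", // assumed auth checked by validatedRequest
  };

  // Rewrite the stored response text for chat record 42.
  await fetch(`${baseUrl}/workspace/${slug}/thread/${threadSlug}/update-chat`, {
    method: "POST",
    headers,
    body: JSON.stringify({ chatId: 42, newText: "Edited response text" }),
  });

  // Remove chat 42 and everything after it (id >= startingId) from the thread.
  await fetch(`${baseUrl}/workspace/${slug}/thread/${threadSlug}/delete-edited-chats`, {
    method: "DELETE",
    headers,
    body: JSON.stringify({ startingId: 42 }),
  });
}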

View File

@@ -380,7 +380,6 @@ function workspaceEndpoints(app) {
const history = multiUserMode(response) const history = multiUserMode(response)
? await WorkspaceChats.forWorkspaceByUser(workspace.id, user.id) ? await WorkspaceChats.forWorkspaceByUser(workspace.id, user.id)
: await WorkspaceChats.forWorkspace(workspace.id); : await WorkspaceChats.forWorkspace(workspace.id);
response.status(200).json({ history: convertToChatHistory(history) }); response.status(200).json({ history: convertToChatHistory(history) });
} catch (e) { } catch (e) {
console.log(e.message, e); console.log(e.message, e);
@@ -420,6 +419,67 @@
} }
); );
app.delete(
"/workspace/:slug/delete-edited-chats",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
async (request, response) => {
try {
const { startingId } = reqBody(request);
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
await WorkspaceChats.delete({
workspaceId: workspace.id,
thread_id: null,
user_id: user?.id,
id: { gte: Number(startingId) },
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
app.post(
"/workspace/:slug/update-chat",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
async (request, response) => {
try {
const { chatId, newText = null } = reqBody(request);
if (!newText || !String(newText).trim())
throw new Error("Cannot save empty response");
const user = await userFromSession(request, response);
const workspace = response.locals.workspace;
const existingChat = await WorkspaceChats.get({
workspaceId: workspace.id,
thread_id: null,
user_id: user?.id,
id: Number(chatId),
});
if (!existingChat) throw new Error("Invalid chat.");
const chatResponse = safeJsonParse(existingChat.response, null);
if (!chatResponse) throw new Error("Failed to parse chat response");
await WorkspaceChats._update(existingChat.id, {
response: JSON.stringify({
...chatResponse,
text: String(newText),
}),
});
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
}
);
app.post( app.post(
"/workspace/:slug/chat-feedback/:chatId", "/workspace/:slug/chat-feedback/:chatId",
[validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug], [validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
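Note: these workspace-level routes mirror the thread routes earlier in the diff but target the default history (thread_id: null). Because the handler spreads the parsed response and only replaces text, any other fields stored in the response JSON survive an edit. A rough illustration follows; the sources and type fields are examples of what a stored response may contain, not a schema guaranteed by this PR:

// What the update-chat handler above effectively does to one workspace_chats row.
const existingResponse = JSON.stringify({
  text: "Original answer...",
  sources: [{ title: "doc.pdf" }], // illustrative extra fields
  type: "chat",
});

const chatResponse = JSON.parse(existingResponse); // the server uses safeJsonParse
const updated = JSON.stringify({
  ...chatResponse,
  text: "Edited answer...", // only the text changes; everything else carries over
});

console.log(updated);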

View File

@@ -220,6 +220,24 @@ const WorkspaceChats = {
console.error(error.message); console.error(error.message);
} }
}, },
// Explicit, unvalidated update of a chat record.
// Only use this method when directly setting values that take
// no user input for the keys being modified.
_update: async function (id = null, data = {}) {
if (!id) throw new Error("No workspace chat id provided for update");
try {
await prisma.workspace_chats.update({
where: { id },
data,
});
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
}; };
module.exports = { WorkspaceChats }; module.exports = { WorkspaceChats };
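Note: _update is intentionally narrow: it forwards the data object to Prisma with no validation, which is why the comment restricts it to server-controlled values, and it returns true or false rather than throwing. A hedged sketch matching how the endpoints above call it (the require path and helper name are illustrative):

// Illustrative server-side helper built on WorkspaceChats._update.
const { WorkspaceChats } = require("../../models/workspaceChats"); // path assumed

async function overwriteResponseText(chat, newText) {
  const parsed = JSON.parse(chat.response); // the endpoints use safeJsonParse instead
  const ok = await WorkspaceChats._update(chat.id, {
    response: JSON.stringify({ ...parsed, text: String(newText) }),
  });
  return ok; // true on success, false if the Prisma update failed
}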

View File

@@ -12,7 +12,7 @@
"scripts": { "scripts": {
"dev": "NODE_ENV=development nodemon --ignore documents --ignore vector-cache --ignore storage --ignore swagger --trace-warnings index.js", "dev": "NODE_ENV=development nodemon --ignore documents --ignore vector-cache --ignore storage --ignore swagger --trace-warnings index.js",
"start": "NODE_ENV=production node index.js", "start": "NODE_ENV=production node index.js",
"lint": "yarn prettier --write ./endpoints ./models ./utils index.js", "lint": "yarn prettier --ignore-path ../.prettierignore --write ./endpoints ./models ./utils index.js",
"swagger": "node ./swagger/init.js", "swagger": "node ./swagger/init.js",
"sqlite:migrate": "cd ./utils/prisma && node migrateFromSqlite.js" "sqlite:migrate": "cd ./utils/prisma && node migrateFromSqlite.js"
}, },

View File

@@ -0,0 +1,93 @@
const { toChunks, maximumChunkLength } = require("../../helpers");
class LiteLLMEmbedder {
constructor() {
const { OpenAI: OpenAIApi } = require("openai");
if (!process.env.LITE_LLM_BASE_PATH)
throw new Error(
"LiteLLM must have a valid base path to use for the api."
);
this.basePath = process.env.LITE_LLM_BASE_PATH;
this.openai = new OpenAIApi({
baseURL: this.basePath,
apiKey: process.env.LITE_LLM_API_KEY ?? null,
});
this.model = process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002";
// Limit of how many strings we can process in a single pass to stay within resource or network limits
this.maxConcurrentChunks = 500;
this.embeddingMaxChunkLength = maximumChunkLength();
}
async embedTextInput(textInput) {
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}
async embedChunks(textChunks = []) {
// Because there is a hard POST limit on how many chunks can be sent at once to LiteLLM (~8mb),
// we concurrently execute requests in batches of at most maxConcurrentChunks text chunks.
// Refer to the constructor's maxConcurrentChunks for more info.
const embeddingRequests = [];
for (const chunk of toChunks(textChunks, this.maxConcurrentChunks)) {
embeddingRequests.push(
new Promise((resolve) => {
this.openai.embeddings
.create({
model: this.model,
input: chunk,
})
.then((result) => {
resolve({ data: result?.data, error: null });
})
.catch((e) => {
e.type =
e?.response?.data?.error?.code ||
e?.response?.status ||
"failed_to_embed";
e.message = e?.response?.data?.error?.message || e.message;
resolve({ data: [], error: e });
});
})
);
}
const { data = [], error = null } = await Promise.all(
embeddingRequests
).then((results) => {
// If any errors were returned from LiteLLM, abort the entire sequence because the embeddings
// will be incomplete.
const errors = results
.filter((res) => !!res.error)
.map((res) => res.error)
.flat();
if (errors.length > 0) {
let uniqueErrors = new Set();
errors.map((error) =>
uniqueErrors.add(`[${error.type}]: ${error.message}`)
);
return {
data: [],
error: Array.from(uniqueErrors).join(", "),
};
}
return {
data: results.map((res) => res?.data || []).flat(),
error: null,
};
});
if (!!error) throw new Error(`LiteLLM Failed to embed: ${error}`);
return data.length > 0 &&
data.every((embd) => embd.hasOwnProperty("embedding"))
? data.map((embd) => embd.embedding)
: null;
}
}
module.exports = {
LiteLLMEmbedder,
};
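Note: like the other embedding engines, this class is normally obtained through getEmbeddingEngineSelection() (see the helpers change further down) rather than constructed by hand, but a direct sketch shows the contract. It assumes a reachable LiteLLM proxy and the env vars from the .env example above; the require path is assumed:

// Hedged usage sketch for the embedder above.
process.env.LITE_LLM_BASE_PATH ??= "http://127.0.0.1:4000"; // constructor throws without this
process.env.EMBEDDING_ENGINE = "litellm"; // only needed when going through the factory

const { LiteLLMEmbedder } = require("./utils/EmbeddingEngines/liteLLM"); // path assumed

async function demo() {
  const embedder = new LiteLLMEmbedder();

  // Single string in, single vector out.
  const vector = await embedder.embedTextInput("What is this workspace about?");
  console.log("dimensions:", vector.length);

  // Many chunks in, array of vectors out (sent in batches of maxConcurrentChunks).
  const vectors = await embedder.embedChunks(["chunk one", "chunk two"]);
  console.log("embedded", vectors?.length, "chunks");
}

demo().catch(console.error);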

View File

@@ -174,6 +174,7 @@ function convertToChatHistory(history = []) {
role: "user", role: "user",
content: prompt, content: prompt,
sentAt: moment(createdAt).unix(), sentAt: moment(createdAt).unix(),
chatId: id,
}, },
{ {
type: data?.type || "chart", type: data?.type || "chart",
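Note: the added chatId line threads the database id of the prompt record into the serialized history, which is what the update-chat and delete-edited-chats routes earlier in this diff expect from the client. After the change a user turn looks roughly like this (values are illustrative):

// Illustrative shape of one user turn returned by convertToChatHistory after this change.
const userTurn = {
  role: "user",
  content: "How do I deploy with Docker?",
  sentAt: 1717700000, // unix seconds from moment(createdAt).unix()
  chatId: 42, // new: the workspace_chats row id referenced by the edit endpoints
};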

View File

@@ -128,6 +128,9 @@ function getEmbeddingEngineSelection() {
case "voyageai": case "voyageai":
const { VoyageAiEmbedder } = require("../EmbeddingEngines/voyageAi"); const { VoyageAiEmbedder } = require("../EmbeddingEngines/voyageAi");
return new VoyageAiEmbedder(); return new VoyageAiEmbedder();
case "litellm":
const { LiteLLMEmbedder } = require("../EmbeddingEngines/liteLLM");
return new LiteLLMEmbedder();
default: default:
return new NativeEmbedder(); return new NativeEmbedder();
} }

View File

@@ -577,6 +577,7 @@ function supportedEmbeddingModel(input = "") {
"lmstudio", "lmstudio",
"cohere", "cohere",
"voyageai", "voyageai",
"litellm",
]; ];
return supported.includes(input) return supported.includes(input)
? null ? null