Merge branch 'master' of github.com:Mintplex-Labs/anything-llm into edit-message-button

This commit is contained in:
timothycarambat 2024-05-23 14:15:18 -07:00
commit 956eeccfce
53 changed files with 934 additions and 282 deletions

View File

@ -0,0 +1,214 @@
---
apiVersion: v1
kind: PersistentVolume
metadata:
  name: anything-llm-volume
  annotations:
    pv.beta.kubernetes.io/uid: "1000"
    pv.beta.kubernetes.io/gid: "1000"
spec:
  storageClassName: gp2
  capacity:
    storage: 5Gi
  accessModes:
    - ReadWriteOnce
  awsElasticBlockStore:
    # This is the volume UUID from AWS EC2 EBS Volumes list.
    volumeID: "{{ anythingllm_awsElasticBlockStore_volumeID }}"
    fsType: ext4
  nodeAffinity:
    required:
      nodeSelectorTerms:
        - matchExpressions:
            - key: topology.kubernetes.io/zone
              operator: In
              values:
                - us-east-1c
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: anything-llm-volume-claim
  namespace: "{{ namespace }}"
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 5Gi
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: anything-llm
  namespace: "{{ namespace }}"
  labels:
    anything-llm: "true"
spec:
  selector:
    matchLabels:
      k8s-app: anything-llm
  replicas: 1
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxSurge: 0%
      maxUnavailable: 100%
  template:
    metadata:
      labels:
        anything-llm: "true"
        k8s-app: anything-llm
        app.kubernetes.io/name: anything-llm
        app.kubernetes.io/part-of: anything-llm
      annotations:
        prometheus.io/scrape: "true"
        prometheus.io/path: /metrics
        prometheus.io/port: "9090"
    spec:
      serviceAccountName: "default"
      terminationGracePeriodSeconds: 10
      securityContext:
        fsGroup: 1000
        runAsNonRoot: true
        runAsGroup: 1000
        runAsUser: 1000
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: topology.kubernetes.io/zone
                    operator: In
                    values:
                      - us-east-1c
      containers:
        - name: anything-llm
          resources:
            limits:
              memory: "1Gi"
              cpu: "500m"
            requests:
              memory: "512Mi"
              cpu: "250m"
          imagePullPolicy: IfNotPresent
          image: "mintplexlabs/anythingllm:render"
          securityContext:
            allowPrivilegeEscalation: true
            capabilities:
              add:
                - SYS_ADMIN
            runAsNonRoot: true
            runAsGroup: 1000
            runAsUser: 1000
          command:
            # Specify a command to override the Dockerfile's ENTRYPOINT.
            - /bin/bash
            - -c
            - |
              set -x -e
              sleep 3
              echo "AWS_REGION: $AWS_REGION"
              echo "SERVER_PORT: $SERVER_PORT"
              echo "NODE_ENV: $NODE_ENV"
              echo "STORAGE_DIR: $STORAGE_DIR"
              {
                cd /app/server/ &&
                npx prisma generate --schema=./prisma/schema.prisma &&
                npx prisma migrate deploy --schema=./prisma/schema.prisma &&
                node /app/server/index.js
                echo "Server process exited with status $?"
              } &
              {
                node /app/collector/index.js
                echo "Collector process exited with status $?"
              } &
              wait -n
              exit $?
          readinessProbe:
            httpGet:
              path: /v1/api/health
              port: 8888
            initialDelaySeconds: 15
            periodSeconds: 5
            successThreshold: 2
          livenessProbe:
            httpGet:
              path: /v1/api/health
              port: 8888
            initialDelaySeconds: 15
            periodSeconds: 5
            failureThreshold: 3
          env:
            - name: AWS_REGION
              value: "{{ aws_region }}"
            - name: AWS_ACCESS_KEY_ID
              value: "{{ aws_access_id }}"
            - name: AWS_SECRET_ACCESS_KEY
              value: "{{ aws_access_secret }}"
            - name: SERVER_PORT
              value: "3001"
            - name: JWT_SECRET
              value: "my-random-string-for-seeding" # Please generate a random string at least 12 chars long.
            - name: STORAGE_DIR
              value: "/storage"
            - name: NODE_ENV
              value: "production"
            - name: UID
              value: "1000"
            - name: GID
              value: "1000"
          volumeMounts:
            - name: anything-llm-server-storage-volume-mount
              mountPath: /storage
      volumes:
        - name: anything-llm-server-storage-volume-mount
          persistentVolumeClaim:
            claimName: anything-llm-volume-claim
---
# This serves the UI and the backend.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: anything-llm-ingress
  namespace: "{{ namespace }}"
  annotations:
    external-dns.alpha.kubernetes.io/hostname: "{{ namespace }}-chat.{{ base_domain }}"
    kubernetes.io/ingress.class: "internal-ingress"
    nginx.ingress.kubernetes.io/rewrite-target: /
    ingress.kubernetes.io/ssl-redirect: "false"
spec:
  rules:
    - host: "{{ namespace }}-chat.{{ base_domain }}"
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: anything-llm-svc
                port:
                  number: 3001
  tls: # Placing a host in the TLS config will indicate a cert should be created.
    - hosts:
        - "{{ namespace }}-chat.{{ base_domain }}"
      secretName: letsencrypt-prod
---
apiVersion: v1
kind: Service
metadata:
  labels:
    kubernetes.io/name: anything-llm
  name: anything-llm-svc
  namespace: "{{ namespace }}"
spec:
  ports:
    # "port" is the external port, and "targetPort" is the internal one.
    - port: 3301
      targetPort: 3001
      name: traffic
    - port: 9090
      targetPort: 9090
      name: metrics
  selector:
    k8s-app: anything-llm
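
Editor's note: the container command above boots the API server (after the Prisma generate/migrate steps) and the document collector as sibling background jobs, and `wait -n; exit $?` ends the container as soon as either process dies so Kubernetes restarts the pod. A rough Node re-statement of just that supervision behavior, purely illustrative and omitting the migration steps:

// Illustrative sketch of the "run both, exit when either dies" pattern above.
const { spawn } = require("child_process");

function supervise(name, scriptPath, options = {}) {
  const child = spawn("node", [scriptPath], { stdio: "inherit", ...options });
  child.on("exit", (code) => {
    console.log(`${name} process exited with status ${code}`);
    process.exit(code ?? 1); // mirrors `wait -n; exit $?`
  });
  return child;
}

supervise("Server", "/app/server/index.js", { cwd: "/app/server" });
supervise("Collector", "/app/collector/index.js");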

View File

@ -37,7 +37,10 @@ export default function App() {
return (
<>
<Head />
<div className={`fixed inset-0 z-50 ${isChatOpen ? "block" : "hidden"}`}>
<div
id="anything-llm-embed-chat-container"
className={`fixed inset-0 z-50 ${isChatOpen ? "block" : "hidden"}`}
>
<div
className={`${windowHeight} ${windowWidth} h-full w-full bg-white fixed bottom-0 right-0 mb-4 md:mr-4 rounded-2xl border border-gray-300 shadow-[0_4px_14px_rgba(0,0,0,0.25)] ${positionClasses[position]}`}
id="anything-llm-chat"
@ -53,6 +56,7 @@ export default function App() {
</div>
{!isChatOpen && (
<div
id="anything-llm-embed-chat-button-container"
className={`fixed bottom-0 ${positionClasses[position]} mb-4 z-50`}
>
<OpenButton

View File

@ -23,6 +23,7 @@ export default function OpenButton({ settings, isOpen, toggleOpen }) {
: CHAT_ICONS.plus;
return (
<button
id="anything-llm-embed-chat-button"
onClick={toggleOpen}
className={`flex items-center justify-center p-4 rounded-full bg-[${settings.buttonColor}] text-white text-2xl`}
aria-label="Toggle Menu"

File diff suppressed because one or more lines are too long

View File

@ -1,27 +1,41 @@
import { createContext, useEffect, useState } from "react";
import AnythingLLM from "./media/logo/anything-llm.png";
import DefaultLoginLogo from "./media/illustrations/login-logo.svg";
import System from "./models/system";
export const LogoContext = createContext();
export function LogoProvider({ children }) {
const [logo, setLogo] = useState("");
const [loginLogo, setLoginLogo] = useState("");
const [isCustomLogo, setIsCustomLogo] = useState(false);
useEffect(() => {
async function fetchInstanceLogo() {
try {
const logoURL = await System.fetchLogo();
logoURL ? setLogo(logoURL) : setLogo(AnythingLLM);
const { isCustomLogo, logoURL } = await System.fetchLogo();
if (logoURL) {
setLogo(logoURL);
setLoginLogo(isCustomLogo ? logoURL : DefaultLoginLogo);
setIsCustomLogo(isCustomLogo);
} else {
setLogo(AnythingLLM);
setLoginLogo(DefaultLoginLogo);
setIsCustomLogo(false);
}
} catch (err) {
setLogo(AnythingLLM);
setLoginLogo(DefaultLoginLogo);
setIsCustomLogo(false);
console.error("Failed to fetch logo:", err);
}
}
fetchInstanceLogo();
}, []);
return (
<LogoContext.Provider value={{ logo, setLogo }}>
<LogoContext.Provider value={{ logo, setLogo, loginLogo, isCustomLogo }}>
{children}
</LogoContext.Provider>
);

View File

@ -9,9 +9,7 @@ export default function ChatBubble({ message, type, popMsg }) {
return (
<div className={`flex justify-center items-end w-full ${backgroundColor}`}>
<div
className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="flex gap-x-5">
<Jazzicon
size={36}

View File

@ -43,7 +43,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR} md:mt-0 mt-[40px]`}
>
<div
className={`pt-10 pb-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`pt-10 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
@ -67,7 +67,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR}`}
>
<div
className={`pb-4 pt-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`pb-4 pt-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
@ -90,7 +90,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR}`}
>
<div
className={`pt-2 pb-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`pt-2 pb-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
@ -124,7 +124,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${USER_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
@ -148,7 +148,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
@ -185,7 +185,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${USER_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
@ -210,7 +210,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />
@ -248,7 +248,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${USER_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon
@ -272,7 +272,7 @@ export default function DefaultChatContainer() {
className={`flex justify-center items-end w-full ${AI_BACKGROUND_COLOR}`}
>
<div
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
className={`py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}
>
<div className="flex gap-x-5">
<Jazzicon size={36} user={{ uid: "system" }} role={"assistant"} />

View File

@ -108,10 +108,13 @@ export default function Footer() {
rel="noreferrer"
className="transition-all duration-300 p-2 rounded-full text-white bg-sidebar-button hover:bg-menu-item-selected-gradient hover:border-slate-100 hover:border-opacity-50 border-transparent border"
>
{React.createElement(ICON_COMPONENTS[item.icon], {
weight: "fill",
className: "h-5 w-5",
})}
{React.createElement(
ICON_COMPONENTS?.[item.icon] ?? ICON_COMPONENTS.Info,
{
weight: "fill",
className: "h-5 w-5",
}
)}
</a>
))}
{!isMobile && <SettingsButton />}

View File

@ -30,7 +30,11 @@ export default function GeminiLLMOptions({ settings }) {
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{["gemini-pro", "gemini-1.5-pro-latest"].map((model) => {
{[
"gemini-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
].map((model) => {
return (
<option key={model} value={model}>
{model}

View File

@ -168,6 +168,7 @@ export default function MultiUserAuth() {
const [token, setToken] = useState(null);
const [showRecoveryForm, setShowRecoveryForm] = useState(false);
const [showResetPasswordForm, setShowResetPasswordForm] = useState(false);
const [customAppName, setCustomAppName] = useState(null);
const {
isOpen: isRecoveryCodeModalOpen,
@ -250,6 +251,15 @@ export default function MultiUserAuth() {
}
}, [downloadComplete, user, token]);
useEffect(() => {
const fetchCustomAppName = async () => {
const { appName } = await System.fetchCustomAppName();
setCustomAppName(appName || "");
setLoading(false);
};
fetchCustomAppName();
}, []);
if (showRecoveryForm) {
return (
<RecoveryForm
@ -272,11 +282,11 @@ export default function MultiUserAuth() {
Welcome to
</h3>
<p className="text-4xl md:text-2xl font-bold bg-gradient-to-r from-[#75D6FF] via-[#FFFFFF] to-[#FFFFFF] bg-clip-text text-transparent">
AnythingLLM
{customAppName || "AnythingLLM"}
</p>
</div>
<p className="text-sm text-white/90 text-center">
Sign in to your AnythingLLM account.
Sign in to your {customAppName || "AnythingLLM"} account.
</p>
</div>
</div>

View File

@ -1,7 +1,6 @@
import React, { useEffect, useState } from "react";
import System from "../../../models/system";
import { AUTH_TOKEN } from "../../../utils/constants";
import useLogo from "../../../hooks/useLogo";
import paths from "../../../utils/paths";
import ModalWrapper from "@/components/ModalWrapper";
import { useModal } from "@/hooks/useModal";
@ -10,10 +9,10 @@ import RecoveryCodeModal from "@/components/Modals/DisplayRecoveryCodeModal";
export default function SingleUserAuth() {
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
const { logo: _initLogo } = useLogo();
const [recoveryCodes, setRecoveryCodes] = useState([]);
const [downloadComplete, setDownloadComplete] = useState(false);
const [token, setToken] = useState(null);
const [customAppName, setCustomAppName] = useState(null);
const {
isOpen: isRecoveryCodeModalOpen,
@ -57,6 +56,15 @@ export default function SingleUserAuth() {
}
}, [downloadComplete, token]);
useEffect(() => {
const fetchCustomAppName = async () => {
const { appName } = await System.fetchCustomAppName();
setCustomAppName(appName || "");
setLoading(false);
};
fetchCustomAppName();
}, []);
return (
<>
<form onSubmit={handleLogin}>
@ -68,11 +76,11 @@ export default function SingleUserAuth() {
Welcome to
</h3>
<p className="text-4xl md:text-2xl font-bold bg-gradient-to-r from-[#75D6FF] via-[#FFFFFF] to-[#FFFFFF] bg-clip-text text-transparent">
AnythingLLM
{customAppName || "AnythingLLM"}
</p>
</div>
<p className="text-sm text-white/90 text-center">
Sign in to your AnythingLLM instance.
Sign in to your {customAppName || "AnythingLLM"} instance.
</p>
</div>
</div>

View File

@ -9,10 +9,9 @@ import {
} from "../../../utils/constants";
import useLogo from "../../../hooks/useLogo";
import illustration from "@/media/illustrations/login-illustration.svg";
import loginLogo from "@/media/illustrations/login-logo.svg";
export default function PasswordModal({ mode = "single" }) {
const { logo: _initLogo } = useLogo();
const { loginLogo } = useLogo();
return (
<div className="fixed top-0 left-0 right-0 z-50 w-full overflow-x-hidden overflow-y-auto md:inset-0 h-[calc(100%-1rem)] h-full bg-[#25272C] flex flex-col md:flex-row items-center justify-center">
<div
@ -37,10 +36,11 @@ export default function PasswordModal({ mode = "single" }) {
<div className="flex flex-col items-center justify-center h-full w-full md:w-1/2 z-50 relative">
<img
src={loginLogo}
className={`mb-8 w-[84px] h-[84px] absolute ${
mode === "single" ? "md:top-50" : "md:top-36"
} top-44 z-30`}
alt="logo"
alt="Logo"
className={`hidden md:flex rounded-2xl w-fit m-4 z-30 ${
mode === "single" ? "md:top-[170px]" : "md:top-36"
} absolute max-h-[65px] md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)]`}
style={{ objectFit: "contain" }}
/>
{mode === "single" ? <SingleUserAuth /> : <MultiUserAuth />}
</div>

View File

@ -368,7 +368,7 @@ export function Chartable({ props, workspace }) {
if (!!props.chatId) {
return (
<div className="flex justify-center items-end w-full">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="flex gap-x-5">
<WorkspaceProfileImage workspace={workspace} />
<div className="relative w-full">
@ -389,7 +389,7 @@ export function Chartable({ props, workspace }) {
return (
<div className="flex justify-center items-end w-full">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="relative w-full">
<DownloadGraph onClick={handleDownload} />
<div ref={ref}>{renderChart()}</div>

View File

@ -64,9 +64,7 @@ const HistoricalMessage = ({
role === "user" ? USER_BACKGROUND_COLOR : AI_BACKGROUND_COLOR
}`}
>
<div
className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col`}
>
<div className={`py-8 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col`}>
<div className="flex gap-x-5">
<ProfileImage role={role} workspace={workspace} />
{isEditing ? (

View File

@ -21,7 +21,7 @@ const PromptReply = ({
<div
className={`flex justify-center items-end w-full ${assistantBackgroundColor}`}
>
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="flex gap-x-5">
<WorkspaceProfileImage workspace={workspace} />
<div className="mt-3 ml-5 dot-falling"></div>
@ -36,7 +36,7 @@ const PromptReply = ({
<div
className={`flex justify-center items-end w-full ${assistantBackgroundColor}`}
>
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="flex gap-x-5">
<WorkspaceProfileImage workspace={workspace} />
<span
@ -57,7 +57,7 @@ const PromptReply = ({
key={uuid}
className={`flex justify-center items-end w-full ${assistantBackgroundColor}`}
>
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-6 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="flex gap-x-5">
<WorkspaceProfileImage workspace={workspace} />
<span

View File

@ -229,7 +229,7 @@ export default function ChatHistory({
function StatusResponse({ props }) {
return (
<div className="flex justify-center items-end w-full">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[80%] flex-col">
<div className="flex gap-x-5">
<span
className={`text-xs inline-block p-2 rounded-lg text-white/60 font-mono whitespace-pre-line`}

View File

@ -8,7 +8,7 @@ export default function LoadingChat() {
return (
<div
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
className="transition-all duration-500 relative md:ml-[2px] md:mr-[8px] md:my-[16px] md:rounded-[26px] bg-main-gradient w-full h-full overflow-y-scroll"
className="p-4 transition-all duration-500 relative md:ml-[2px] md:mr-[8px] md:my-[16px] md:rounded-[26px] bg-main-gradient w-full h-full overflow-y-scroll"
>
<Skeleton.default
height="100px"
@ -16,7 +16,7 @@ export default function LoadingChat() {
highlightColor={highlightColor}
baseColor={baseColor}
count={1}
className="max-w-full md:max-w-[75%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
className="max-w-full md:max-w-[80%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex justify-start"
/>
<Skeleton.default
@ -25,7 +25,7 @@ export default function LoadingChat() {
baseColor={baseColor}
highlightColor={highlightColor}
count={1}
className="max-w-full md:max-w-[75%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
className="max-w-full md:max-w-[80%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex justify-end"
/>
<Skeleton.default
@ -34,7 +34,7 @@ export default function LoadingChat() {
baseColor={baseColor}
highlightColor={highlightColor}
count={1}
className="max-w-full md:max-w-[75%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
className="max-w-full md:max-w-[80%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex justify-start"
/>
<Skeleton.default
@ -43,7 +43,7 @@ export default function LoadingChat() {
baseColor={baseColor}
highlightColor={highlightColor}
count={1}
className="max-w-full md:max-w-[75%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
className="max-w-full md:max-w-[80%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex justify-end"
/>
<Skeleton.default
@ -52,7 +52,7 @@ export default function LoadingChat() {
baseColor={baseColor}
highlightColor={highlightColor}
count={1}
className="max-w-full md:max-w-[75%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
className="max-w-full md:max-w-[80%] p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex justify-start"
/>
</div>

View File

@ -10,7 +10,7 @@ export const DISABLED_PROVIDERS = [
];
const PROVIDER_DEFAULT_MODELS = {
openai: [],
gemini: ["gemini-pro", "gemini-1.5-pro-latest"],
gemini: ["gemini-pro", "gemini-1.5-pro-latest", "gemini-1.5-flash-latest"],
anthropic: [
"claude-instant-1.2",
"claude-2.0",

View File

@ -2,6 +2,6 @@ import { useContext } from "react";
import { LogoContext } from "../LogoContext";
export default function useLogo() {
const { logo, setLogo } = useContext(LogoContext);
return { logo, setLogo };
const { logo, setLogo, loginLogo, isCustomLogo } = useContext(LogoContext);
return { logo, setLogo, loginLogo, isCustomLogo };
}

View File

@ -6,6 +6,7 @@ const System = {
cacheKeys: {
footerIcons: "anythingllm_footer_links",
supportEmail: "anythingllm_support_email",
customAppName: "anythingllm_custom_app_name",
},
ping: async function () {
return await fetch(`${API_BASE}/ping`)
@ -305,19 +306,58 @@ const System = {
);
return { email: supportEmail, error: null };
},
fetchCustomAppName: async function () {
const cache = window.localStorage.getItem(this.cacheKeys.customAppName);
const { appName, lastFetched } = cache
? safeJsonParse(cache, { appName: "", lastFetched: 0 })
: { appName: "", lastFetched: 0 };
if (!!appName && Date.now() - lastFetched < 3_600_000)
return { appName: appName, error: null };
const { customAppName, error } = await fetch(
`${API_BASE}/system/custom-app-name`,
{
method: "GET",
cache: "no-cache",
headers: baseHeaders(),
}
)
.then((res) => res.json())
.catch((e) => {
console.log(e);
return { customAppName: "", error: e.message };
});
if (!customAppName || !!error) {
window.localStorage.removeItem(this.cacheKeys.customAppName);
return { appName: "", error: null };
}
window.localStorage.setItem(
this.cacheKeys.customAppName,
JSON.stringify({ appName: customAppName, lastFetched: Date.now() })
);
return { appName: customAppName, error: null };
},
fetchLogo: async function () {
return await fetch(`${API_BASE}/system/logo`, {
method: "GET",
cache: "no-cache",
})
.then((res) => {
if (res.ok && res.status !== 204) return res.blob();
.then(async (res) => {
if (res.ok && res.status !== 204) {
const isCustomLogo = res.headers.get("X-Is-Custom-Logo") === "true";
const blob = await res.blob();
const logoURL = URL.createObjectURL(blob);
return { isCustomLogo, logoURL };
}
throw new Error("Failed to fetch logo!");
})
.then((blob) => URL.createObjectURL(blob))
.catch((e) => {
console.log(e);
return null;
return { isCustomLogo: false, logoURL: null };
});
},
fetchPfp: async function (id) {
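
Editor's note: `fetchCustomAppName` above is a read-through cache over `/system/custom-app-name`, keyed by `anythingllm_custom_app_name` in localStorage with a one-hour TTL, and it clears the cached entry whenever the server has no custom name set. A standalone sketch of that pattern (the `fetchFromServer` callback is a stand-in, not part of the model):

// Read-through localStorage cache, mirroring fetchCustomAppName above.
const CACHE_KEY = "anythingllm_custom_app_name";
const ONE_HOUR_MS = 3_600_000;

async function cachedAppName(fetchFromServer) {
  const raw = window.localStorage.getItem(CACHE_KEY);
  if (raw) {
    try {
      const { appName, lastFetched } = JSON.parse(raw);
      // Serve from cache while the entry is fresh and non-empty.
      if (appName && Date.now() - lastFetched < ONE_HOUR_MS) return appName;
    } catch {
      window.localStorage.removeItem(CACHE_KEY); // corrupt entry - drop it
    }
  }

  const appName = (await fetchFromServer()) || "";
  if (!appName) {
    // No custom name configured - clear the cache so default branding is used.
    window.localStorage.removeItem(CACHE_KEY);
    return "";
  }

  window.localStorage.setItem(
    CACHE_KEY,
    JSON.stringify({ appName, lastFetched: Date.now() })
  );
  return appName;
}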

View File

@ -0,0 +1,100 @@
import Admin from "@/models/admin";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useEffect, useState } from "react";
export default function CustomAppName() {
const [loading, setLoading] = useState(true);
const [hasChanges, setHasChanges] = useState(false);
const [customAppName, setCustomAppName] = useState("");
const [originalAppName, setOriginalAppName] = useState("");
const [canCustomize, setCanCustomize] = useState(false);
useEffect(() => {
const fetchInitialParams = async () => {
const settings = await System.keys();
if (!settings?.MultiUserMode && !settings?.RequiresAuth) {
setCanCustomize(false);
return false;
}
const { appName } = await System.fetchCustomAppName();
setCustomAppName(appName || "");
setOriginalAppName(appName || "");
setCanCustomize(true);
setLoading(false);
};
fetchInitialParams();
}, []);
const updateCustomAppName = async (e, newValue = null) => {
e.preventDefault();
let custom_app_name = newValue;
if (newValue === null) {
const form = new FormData(e.target);
custom_app_name = form.get("customAppName");
}
const { success, error } = await Admin.updateSystemPreferences({
custom_app_name,
});
if (!success) {
showToast(`Failed to update custom app name: ${error}`, "error");
return;
} else {
showToast("Successfully updated custom app name.", "success");
window.localStorage.removeItem(System.cacheKeys.customAppName);
setCustomAppName(custom_app_name);
setOriginalAppName(custom_app_name);
setHasChanges(false);
}
};
const handleChange = (e) => {
setCustomAppName(e.target.value);
setHasChanges(true);
};
if (!canCustomize || loading) return null;
return (
<form className="mb-6" onSubmit={updateCustomAppName}>
<div className="flex flex-col gap-y-1">
<h2 className="text-base leading-6 font-bold text-white">
Custom App Name
</h2>
<p className="text-xs leading-[18px] font-base text-white/60">
Set a custom app name that is displayed on the login page.
</p>
</div>
<div className="flex items-center gap-x-4">
<input
name="customAppName"
type="text"
className="bg-zinc-900 mt-3 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 max-w-[275px] placeholder:text-white/20"
placeholder="AnythingLLM"
required={true}
autoComplete="off"
onChange={handleChange}
value={customAppName}
/>
{originalAppName !== "" && (
<button
type="button"
onClick={(e) => updateCustomAppName(e, "")}
className="mt-4 text-white text-base font-medium hover:text-opacity-60"
>
Clear
</button>
)}
</div>
{hasChanges && (
<button
type="submit"
className="transition-all mt-6 w-fit duration-300 border border-slate-200 px-5 py-2.5 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
>
Save
</button>
)}
</form>
);
}

View File

@ -2,7 +2,6 @@ import useLogo from "@/hooks/useLogo";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { useEffect, useRef, useState } from "react";
import AnythingLLM from "@/media/logo/anything-llm.png";
import { Plus } from "@phosphor-icons/react";
export default function CustomLogo() {
@ -36,7 +35,7 @@ export default function CustomLogo() {
return;
}
const logoURL = await System.fetchLogo();
const { logoURL } = await System.fetchLogo();
_setLogo(logoURL);
showToast("Image uploaded successfully.", "success");
@ -51,13 +50,13 @@ export default function CustomLogo() {
if (!success) {
console.error("Failed to remove logo:", error);
showToast(`Failed to remove logo: ${error}`, "error");
const logoURL = await System.fetchLogo();
const { logoURL } = await System.fetchLogo();
setLogo(logoURL);
setIsDefaultLogo(false);
return;
}
const logoURL = await System.fetchLogo();
const { logoURL } = await System.fetchLogo();
_setLogo(logoURL);
showToast("Image successfully removed.", "success");

View File

@ -4,6 +4,7 @@ import FooterCustomization from "./FooterCustomization";
import SupportEmail from "./SupportEmail";
import CustomLogo from "./CustomLogo";
import CustomMessages from "./CustomMessages";
import CustomAppName from "./CustomAppName";
export default function Appearance() {
return (
@ -25,6 +26,7 @@ export default function Appearance() {
</p>
</div>
<CustomLogo />
<CustomAppName />
<CustomMessages />
<FooterCustomization />
<SupportEmail />

View File

@ -7,7 +7,7 @@ import useQuery from "@/hooks/useQuery";
import ChatRow from "./ChatRow";
import showToast from "@/utils/toast";
import System from "@/models/system";
import { CaretDown, Download } from "@phosphor-icons/react";
import { CaretDown, Download, Trash } from "@phosphor-icons/react";
import { saveAs } from "file-saver";
const exportOptions = {
@ -49,6 +49,12 @@ export default function WorkspaceChats() {
const [showMenu, setShowMenu] = useState(false);
const menuRef = useRef();
const openMenuButton = useRef();
const query = useQuery();
const [loading, setLoading] = useState(true);
const [chats, setChats] = useState([]);
const [offset, setOffset] = useState(Number(query.get("offset") || 0));
const [canNext, setCanNext] = useState(false);
const handleDumpChats = async (exportType) => {
const chats = await System.exportChats(exportType);
if (!!chats) {
@ -62,6 +68,18 @@ export default function WorkspaceChats() {
}
};
const handleClearAllChats = async () => {
if (
!window.confirm(
`Are you sure you want to clear all chats?\n\nThis action is irreversible.`
)
)
return false;
await System.deleteChat(-1);
setChats([]);
showToast("Cleared all chats.", "success");
};
const toggleMenu = () => {
setShowMenu(!showMenu);
};
@ -83,6 +101,16 @@ export default function WorkspaceChats() {
};
}, []);
useEffect(() => {
async function fetchChats() {
const { chats: _chats, hasPages = false } = await System.chats(offset);
setChats(_chats);
setCanNext(hasPages);
setLoading(false);
}
fetchChats();
}, [offset]);
return (
<div className="w-screen h-screen overflow-hidden bg-sidebar flex">
<Sidebar />
@ -100,7 +128,7 @@ export default function WorkspaceChats() {
<button
ref={openMenuButton}
onClick={toggleMenu}
className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
className="flex items-center gap-x-2 px-4 py-1 rounded-lg bg-[#46C8FF] hover:text-white text-xs font-semibold hover:bg-[#2C2F36] shadow-[0_4px_14px_rgba(0,0,0,0.25)] h-[34px] w-fit"
>
<Download size={18} weight="bold" />
Export
@ -128,26 +156,43 @@ export default function WorkspaceChats() {
</div>
</div>
</div>
{chats.length > 0 && (
<button
onClick={handleClearAllChats}
className="flex items-center gap-x-2 px-4 py-1 border hover:border-transparent border-white/40 text-white/40 rounded-lg bg-transparent hover:text-white text-xs font-semibold hover:bg-red-500 shadow-[0_4px_14px_rgba(0,0,0,0.25)] h-[34px] w-fit"
>
<Trash size={18} weight="bold" />
Clear Chats
</button>
)}
</div>
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
These are all the recorded chats and messages that have been sent
by users ordered by their creation date.
</p>
</div>
<ChatsContainer />
<ChatsContainer
loading={loading}
chats={chats}
setChats={setChats}
offset={offset}
setOffset={setOffset}
canNext={canNext}
/>
</div>
</div>
</div>
);
}
function ChatsContainer() {
const query = useQuery();
const [loading, setLoading] = useState(true);
const [chats, setChats] = useState([]);
const [offset, setOffset] = useState(Number(query.get("offset") || 0));
const [canNext, setCanNext] = useState(false);
function ChatsContainer({
loading,
chats,
setChats,
offset,
setOffset,
canNext,
}) {
const handlePrevious = () => {
setOffset(Math.max(offset - 1, 0));
};
@ -155,20 +200,11 @@ function ChatsContainer() {
setOffset(offset + 1);
};
const handleDeleteChat = (chatId) => {
const handleDeleteChat = async (chatId) => {
await System.deleteChat(chatId);
setChats((prevChats) => prevChats.filter((chat) => chat.id !== chatId));
};
useEffect(() => {
async function fetchChats() {
const { chats: _chats, hasPages = false } = await System.chats(offset);
setChats(_chats);
setCanNext(hasPages);
setLoading(false);
}
fetchChats();
}, [offset]);
if (loading) {
return (
<Skeleton.default

View File

@ -394,16 +394,17 @@ export default function GeneralLLMPreference() {
>
<div className="flex gap-x-4 items-center">
<img
src={selectedLLMObject.logo}
alt={`${selectedLLMObject.name} logo`}
src={selectedLLMObject?.logo || AnythingLLMIcon}
alt={`${selectedLLMObject?.name} logo`}
className="w-10 h-10 rounded-md"
/>
<div className="flex flex-col text-left">
<div className="text-sm font-semibold text-white">
{selectedLLMObject.name}
{selectedLLMObject?.name || "None selected"}
</div>
<div className="mt-1 text-xs text-[#D2D5DB]">
{selectedLLMObject.description}
{selectedLLMObject?.description ||
"You need to select an LLM"}
</div>
</div>
</div>

View File

@ -33,10 +33,7 @@ function adminEndpoints(app) {
[validatedRequest, strictMultiUserRoleValid([ROLES.admin, ROLES.manager])],
async (_request, response) => {
try {
const users = (await User.where()).map((user) => {
const { password, ...rest } = user;
return rest;
});
const users = await User.where();
response.status(200).json({ users });
} catch (e) {
console.error(e);
@ -358,6 +355,9 @@ function adminEndpoints(app) {
?.value,
[]
) || [],
custom_app_name:
(await SystemSettings.get({ label: "custom_app_name" }))?.value ||
null,
};
response.status(200).json({ settings });
} catch (e) {

View File

@ -73,10 +73,7 @@ function apiAdminEndpoints(app) {
return;
}
const users = (await User.where()).map((user) => {
const { password, ...rest } = user;
return rest;
});
const users = await User.where();
response.status(200).json({ users });
} catch (e) {
console.error(e);

View File

@ -5,6 +5,7 @@ const {
viewLocalFiles,
findDocumentInDocuments,
normalizePath,
isWithin,
} = require("../../../utils/files");
const { reqBody } = require("../../../utils/http");
const { EventLogs } = require("../../../models/eventLogs");
@ -603,6 +604,8 @@ function apiDocumentEndpoints(app) {
try {
const { name } = reqBody(request);
const storagePath = path.join(documentsPath, normalizePath(name));
if (!isWithin(path.resolve(documentsPath), path.resolve(storagePath)))
throw new Error("Invalid path name");
if (fs.existsSync(storagePath)) {
response.status(500).json({
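
Editor's note: `isWithin` itself is not part of this diff, but the endpoints above now call it with `(path.resolve(parentDir), path.resolve(childPath))` before touching the filesystem. A plausible sketch of such a containment guard, assuming it only checks that the resolved child path stays inside the parent directory (a guess at the helper, not its actual source):

const path = require("path");

// Hypothetical isWithin(outer, inner): true only when `inner` resolves to a
// location strictly beneath `outer`, which defeats "../" traversal in names.
function isWithin(outer, inner) {
  if (outer === inner) return false;
  const rel = path.relative(outer, inner);
  return !rel.startsWith("..") && !path.isAbsolute(rel);
}

// isWithin("/storage/documents", "/storage/documents/folder/file.json") === true
// isWithin("/storage/documents", "/etc/passwd") === false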

View File

@ -1,5 +1,5 @@
const { Document } = require("../models/documents");
const { normalizePath, documentsPath } = require("../utils/files");
const { normalizePath, documentsPath, isWithin } = require("../utils/files");
const { reqBody } = require("../utils/http");
const {
flexUserRoleValid,
@ -18,6 +18,8 @@ function documentEndpoints(app) {
try {
const { name } = reqBody(request);
const storagePath = path.join(documentsPath, normalizePath(name));
if (!isWithin(path.resolve(documentsPath), path.resolve(storagePath)))
throw new Error("Invalid folder name.");
if (fs.existsSync(storagePath)) {
response.status(500).json({

View File

@ -1,7 +1,7 @@
process.env.NODE_ENV === "development"
? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
: require("dotenv").config();
const { viewLocalFiles, normalizePath } = require("../utils/files");
const { viewLocalFiles, normalizePath, isWithin } = require("../utils/files");
const { purgeDocument, purgeFolder } = require("../utils/files/purgeDocument");
const { getVectorDbClass } = require("../utils/helpers");
const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
@ -110,7 +110,7 @@ function systemEndpoints(app) {
if (await SystemSettings.isMultiUserMode()) {
const { username, password } = reqBody(request);
const existingUser = await User.get({ username: String(username) });
const existingUser = await User._get({ username: String(username) });
if (!existingUser) {
await EventLogs.logEvent(
@ -188,7 +188,7 @@ function systemEndpoints(app) {
// Return recovery codes to frontend
response.status(200).json({
valid: true,
user: existingUser,
user: User.filterFields(existingUser),
token: makeJWT(
{ id: existingUser.id, username: existingUser.username },
"30d"
@ -201,7 +201,7 @@ function systemEndpoints(app) {
response.status(200).json({
valid: true,
user: existingUser,
user: User.filterFields(existingUser),
token: makeJWT(
{ id: existingUser.id, username: existingUser.username },
"30d"
@ -436,14 +436,22 @@ function systemEndpoints(app) {
return;
}
let error = null;
const { usePassword, newPassword } = reqBody(request);
const { error } = await updateENV(
{
AuthToken: usePassword ? newPassword : "",
JWTSecret: usePassword ? v4() : "",
},
true
);
if (!usePassword) {
// Password is being disabled so directly unset everything to bypass validation.
process.env.AUTH_TOKEN = "";
process.env.JWT_SECRET = "";
} else {
error = await updateENV(
{
AuthToken: newPassword,
JWTSecret: v4(),
},
true
)?.error;
}
if (process.env.NODE_ENV === "production") await dumpENV();
response.status(200).json({ success: !error, error });
} catch (e) {
@ -518,17 +526,24 @@ function systemEndpoints(app) {
const defaultFilename = getDefaultFilename();
const logoPath = await determineLogoFilepath(defaultFilename);
const { found, buffer, size, mime } = fetchLogo(logoPath);
if (!found) {
response.sendStatus(204).end();
return;
}
const currentLogoFilename = await SystemSettings.currentLogoFilename();
response.writeHead(200, {
"Access-Control-Expose-Headers":
"Content-Disposition,X-Is-Custom-Logo,Content-Type,Content-Length",
"Content-Type": mime || "image/png",
"Content-Disposition": `attachment; filename=${path.basename(
logoPath
)}`,
"Content-Length": size,
"X-Is-Custom-Logo":
currentLogoFilename !== null &&
currentLogoFilename !== defaultFilename,
});
response.end(Buffer.from(buffer, "base64"));
return;
@ -565,6 +580,22 @@ function systemEndpoints(app) {
}
});
// No middleware protection in order to get this on the login page
app.get("/system/custom-app-name", async (_, response) => {
try {
const customAppName =
(
await SystemSettings.get({
label: "custom_app_name",
})
)?.value ?? null;
response.status(200).json({ customAppName: customAppName });
} catch (error) {
console.error("Error fetching custom app name:", error);
response.status(500).json({ message: "Internal server error" });
}
});
app.get(
"/system/pfp/:id",
[validatedRequest, flexUserRoleValid([ROLES.all])],
@ -614,11 +645,13 @@ function systemEndpoints(app) {
const userRecord = await User.get({ id: user.id });
const oldPfpFilename = userRecord.pfpFilename;
if (oldPfpFilename) {
const storagePath = path.join(__dirname, "../storage/assets/pfp");
const oldPfpPath = path.join(
__dirname,
`../storage/assets/pfp/${normalizePath(userRecord.pfpFilename)}`
storagePath,
normalizePath(userRecord.pfpFilename)
);
if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))
throw new Error("Invalid path name");
if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
}
@ -647,13 +680,14 @@ function systemEndpoints(app) {
const userRecord = await User.get({ id: user.id });
const oldPfpFilename = userRecord.pfpFilename;
console.log("oldPfpFilename", oldPfpFilename);
if (oldPfpFilename) {
const storagePath = path.join(__dirname, "../storage/assets/pfp");
const oldPfpPath = path.join(
__dirname,
`../storage/assets/pfp/${normalizePath(oldPfpFilename)}`
storagePath,
normalizePath(oldPfpFilename)
);
if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))
throw new Error("Invalid path name");
if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
}
@ -976,7 +1010,9 @@ function systemEndpoints(app) {
async (request, response) => {
try {
const { id } = request.params;
await WorkspaceChats.delete({ id: Number(id) });
Number(id) === -1
? await WorkspaceChats.delete({}, true)
: await WorkspaceChats.delete({ id: Number(id) });
response.json({ success: true, error: null });
} catch (e) {
console.error(e);
@ -1024,7 +1060,7 @@ function systemEndpoints(app) {
const updates = {};
if (username) {
updates.username = String(username);
updates.username = User.validations.username(String(username));
}
if (password) {
updates.password = String(password);
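
Editor's note: the chat-deletion handler above now treats an id of `-1` as a "clear everything" sentinel; the frontend's Clear Chats button calls `System.deleteChat(-1)`, and the route switches from deleting one row to `WorkspaceChats.delete({}, true)`. That model method is not shown in this diff; a hedged sketch of the shape implied by the call site (the second parameter's name, the `prisma` require path, and the `workspace_chats` table name are assumptions):

// Hypothetical shape of WorkspaceChats.delete(clause, includeMany) as called above.
const prisma = require("../utils/prisma"); // assumed shared Prisma client

const WorkspaceChats = {
  delete: async function (clause = {}, includeMany = false) {
    try {
      if (includeMany) {
        // delete({}, true) - the -1 "clear all chats" branch lands here.
        await prisma.workspace_chats.deleteMany({ where: clause });
      } else {
        await prisma.workspace_chats.delete({ where: clause });
      }
      return true;
    } catch (error) {
      console.error(error.message);
      return false;
    }
  },
};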

View File

@ -6,7 +6,7 @@ const {
userFromSession,
safeJsonParse,
} = require("../utils/http");
const { normalizePath } = require("../utils/files");
const { normalizePath, isWithin } = require("../utils/files");
const { Workspace } = require("../models/workspace");
const { Document } = require("../models/documents");
const { DocumentVectors } = require("../models/vectors");
@ -111,39 +111,45 @@ function workspaceEndpoints(app) {
handleFileUpload,
],
async function (request, response) {
const Collector = new CollectorApi();
const { originalname } = request.file;
const processingOnline = await Collector.online();
try {
const Collector = new CollectorApi();
const { originalname } = request.file;
const processingOnline = await Collector.online();
if (!processingOnline) {
response
.status(500)
.json({
success: false,
error: `Document processing API is not online. Document ${originalname} will not be processed automatically.`,
})
.end();
return;
if (!processingOnline) {
response
.status(500)
.json({
success: false,
error: `Document processing API is not online. Document ${originalname} will not be processed automatically.`,
})
.end();
return;
}
const { success, reason } =
await Collector.processDocument(originalname);
if (!success) {
response.status(500).json({ success: false, error: reason }).end();
return;
}
Collector.log(
`Document ${originalname} uploaded and processed successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("document_uploaded");
await EventLogs.logEvent(
"document_uploaded",
{
documentName: originalname,
},
response.locals?.user?.id
);
response.status(200).json({ success: true, error: null });
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
const { success, reason } = await Collector.processDocument(originalname);
if (!success) {
response.status(500).json({ success: false, error: reason }).end();
return;
}
Collector.log(
`Document ${originalname} uploaded and processed successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("document_uploaded");
await EventLogs.logEvent(
"document_uploaded",
{
documentName: originalname,
},
response.locals?.user?.id
);
response.status(200).json({ success: true, error: null });
}
);
@ -151,37 +157,42 @@ function workspaceEndpoints(app) {
"/workspace/:slug/upload-link",
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
async (request, response) => {
const Collector = new CollectorApi();
const { link = "" } = reqBody(request);
const processingOnline = await Collector.online();
try {
const Collector = new CollectorApi();
const { link = "" } = reqBody(request);
const processingOnline = await Collector.online();
if (!processingOnline) {
response
.status(500)
.json({
success: false,
error: `Document processing API is not online. Link ${link} will not be processed automatically.`,
})
.end();
return;
if (!processingOnline) {
response
.status(500)
.json({
success: false,
error: `Document processing API is not online. Link ${link} will not be processed automatically.`,
})
.end();
return;
}
const { success, reason } = await Collector.processLink(link);
if (!success) {
response.status(500).json({ success: false, error: reason }).end();
return;
}
Collector.log(
`Link ${link} uploaded and processed successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("link_uploaded");
await EventLogs.logEvent(
"link_uploaded",
{ link },
response.locals?.user?.id
);
response.status(200).json({ success: true, error: null });
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
const { success, reason } = await Collector.processLink(link);
if (!success) {
response.status(500).json({ success: false, error: reason }).end();
return;
}
Collector.log(
`Link ${link} uploaded and processed successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("link_uploaded");
await EventLogs.logEvent(
"link_uploaded",
{ link },
response.locals?.user?.id
);
response.status(200).json({ success: true, error: null });
}
);
@ -648,13 +659,13 @@ function workspaceEndpoints(app) {
const oldPfpFilename = workspaceRecord.pfpFilename;
if (oldPfpFilename) {
const storagePath = path.join(__dirname, "../storage/assets/pfp");
const oldPfpPath = path.join(
__dirname,
`../storage/assets/pfp/${normalizePath(
workspaceRecord.pfpFilename
)}`
storagePath,
normalizePath(workspaceRecord.pfpFilename)
);
if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))
throw new Error("Invalid path name");
if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
}
@ -689,11 +700,13 @@ function workspaceEndpoints(app) {
const oldPfpFilename = workspaceRecord.pfpFilename;
if (oldPfpFilename) {
const storagePath = path.join(__dirname, "../storage/assets/pfp");
const oldPfpPath = path.join(
__dirname,
`../storage/assets/pfp/${normalizePath(oldPfpFilename)}`
storagePath,
normalizePath(oldPfpFilename)
);
if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))
throw new Error("Invalid path name");
if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
}

View File

@ -27,6 +27,7 @@ const SystemSettings = {
"agent_search_provider",
"default_agent_skills",
"agent_sql_connections",
"custom_app_name",
],
validations: {
footer_data: (updates) => {

View File

@ -10,6 +10,20 @@ const User = {
"role",
"suspended",
],
validations: {
username: (newValue = "") => {
try {
if (String(newValue).length > 100)
throw new Error("Username cannot be longer than 100 characters");
if (String(newValue).length < 2)
throw new Error("Username must be at least 2 characters");
return String(newValue);
} catch (e) {
throw new Error(e.message);
}
},
},
// validations for the above writable fields.
castColumnValue: function (key, value) {
switch (key) {
@ -19,6 +33,12 @@ const User = {
return String(value);
}
},
filterFields: function (user = {}) {
const { password, ...rest } = user;
return { ...rest };
},
create: async function ({ username, password, role = "default" }) {
const passwordCheck = this.checkPasswordComplexity(password);
if (!passwordCheck.checkedOK) {
@ -30,12 +50,12 @@ const User = {
const hashedPassword = bcrypt.hashSync(password, 10);
const user = await prisma.users.create({
data: {
username,
username: this.validations.username(username),
password: hashedPassword,
role,
role: String(role),
},
});
return { user, error: null };
return { user: this.filterFields(user), error: null };
} catch (error) {
console.error("FAILED TO CREATE USER.", error.message);
return { user: null, error: error.message };
@ -69,7 +89,13 @@ const User = {
// and force-casts to the proper type;
Object.entries(updates).forEach(([key, value]) => {
if (this.writable.includes(key)) {
updates[key] = this.castColumnValue(key, value);
if (this.validations.hasOwnProperty(key)) {
updates[key] = this.validations[key](
this.castColumnValue(key, value)
);
} else {
updates[key] = this.castColumnValue(key, value);
}
return;
}
delete updates[key];
@ -127,6 +153,17 @@ const User = {
},
get: async function (clause = {}) {
try {
const user = await prisma.users.findFirst({ where: clause });
return user ? this.filterFields({ ...user }) : null;
} catch (error) {
console.error(error.message);
return null;
}
},
// Returns user object with all fields
_get: async function (clause = {}) {
try {
const user = await prisma.users.findFirst({ where: clause });
return user ? { ...user } : null;
@ -162,7 +199,7 @@ const User = {
where: clause,
...(limit !== null ? { take: limit } : {}),
});
return users;
return users.map((usr) => this.filterFields(usr));
} catch (error) {
console.error(error.message);
return [];
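
Editor's note: the net effect of `filterFields` and the `get`/`_get` split above is that every public read path (`get`, `where`, `create`, and the login responses in the system endpoints) strips `password` before a user record leaves the model, while `_get` keeps the full row for internal credential checks. A tiny standalone illustration of the behavior (the record values are made up):

// Same password-stripping behavior as User.filterFields above, in isolation.
function filterFields(user = {}) {
  const { password, ...rest } = user;
  return { ...rest };
}

const record = { id: 1, username: "admin", password: "$2b$10$hash", role: "admin" };
console.log(filterFields(record)); // { id: 1, username: 'admin', role: 'admin' }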

View File

@ -17,8 +17,12 @@ class GeminiLLM {
this.gemini = genAI.getGenerativeModel(
{ model: this.model },
{
// Gemini-1.5-pro is only available on the v1beta API.
apiVersion: this.model === "gemini-1.5-pro-latest" ? "v1beta" : "v1",
// Gemini-1.5-pro and Gemini-1.5-flash are only available on the v1beta API.
apiVersion:
this.model === "gemini-1.5-pro-latest" ||
this.model === "gemini-1.5-flash-latest"
? "v1beta"
: "v1",
}
);
this.limits = {
@ -95,7 +99,11 @@ class GeminiLLM {
}
isValidChatCompletionModel(modelName = "") {
const validModels = ["gemini-pro", "gemini-1.5-pro-latest"];
const validModels = [
"gemini-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
];
return validModels.includes(modelName);
}

View File

@ -97,7 +97,7 @@ class GenericOpenAiLLM {
max_tokens: this.maxTokens,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -103,7 +103,7 @@ class GroqLLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -92,7 +92,7 @@ class KoboldCPPLLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -1,7 +1,6 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const {
writeResponseChunk,
clientAbortedHandler,
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class LiteLLM {
@ -93,7 +92,7 @@ class LiteLLM {
max_tokens: parseInt(this.maxTokens), // LiteLLM requires int
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
@ -113,45 +112,7 @@ class LiteLLM {
}
handleStream(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise(async (resolve) => {
let fullText = "";
const handleAbort = () => clientAbortedHandler(resolve, fullText);
response.on("close", handleAbort);
for await (const chunk of stream) {
const message = chunk?.choices?.[0];
const token = message?.delta?.content;
if (token) {
fullText += token;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
// LiteLLM does not give a finish reason in stream until the final chunk
if (message.finish_reason || message.finish_reason === "stop") {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
});
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -130,7 +130,7 @@ class OpenAiLLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -142,7 +142,7 @@ class OpenRouterLLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -93,7 +93,7 @@ class PerplexityLLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -89,7 +89,7 @@ class TextGenWebUILLM {
temperature,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)

View File

@ -15,7 +15,22 @@ class VoyageAiEmbedder {
// Limit of how many strings we can process in a single pass to stay within resource or network limits
this.batchSize = 128; // Voyage AI's limit per request is 128 https://docs.voyageai.com/docs/rate-limits#use-larger-batches
this.embeddingMaxChunkLength = 4000; // https://docs.voyageai.com/docs/embeddings - assume a token is roughly 4 letters with some padding
this.embeddingMaxChunkLength = this.#getMaxEmbeddingLength();
}
// https://docs.voyageai.com/docs/embeddings
#getMaxEmbeddingLength() {
switch (this.model) {
case "voyage-large-2-instruct":
case "voyage-law-2":
case "voyage-code-2":
case "voyage-large-2":
return 16_000;
case "voyage-2":
return 4_000;
default:
return 4_000;
}
}
async embedTextInput(textInput) {
@ -23,7 +38,10 @@ class VoyageAiEmbedder {
Array.isArray(textInput) ? textInput : [textInput],
{ modelName: this.model }
);
return result || [];
// If given an array return the native Array[Array] format since that should be the outcome.
// But if given a single string, we need to flatten it so that we have a 1D array.
return (Array.isArray(textInput) ? result : result.flat()) || [];
}
async embedChunks(textChunks = []) {
@ -35,6 +53,12 @@ class VoyageAiEmbedder {
return embeddings;
} catch (error) {
console.error("Voyage AI Failed to embed:", error);
if (
error.message.includes(
"Cannot read properties of undefined (reading '0')"
)
)
throw new Error("Voyage AI failed to embed: Rate limit reached");
throw error;
}
}
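
Editor's note: the return-value change in `embedTextInput` above is purely about shape: `embedChunks` always yields an array of vectors, but callers that pass a single string expect one flat vector back. A small sketch of that normalization with made-up vector values:

// Shape normalization matching the embedTextInput change above.
function normalizeEmbedding(textInput, result) {
  // Array in  -> array of vectors out (one per input string).
  // String in -> flatten so the caller receives a single 1-D vector.
  return (Array.isArray(textInput) ? result : result.flat()) || [];
}

const vectors = [[0.11, -0.42, 0.07]]; // what the embedder returns for one string
console.log(normalizeEmbedding("hello", vectors));   // [0.11, -0.42, 0.07]
console.log(normalizeEmbedding(["hello"], vectors)); // [[0.11, -0.42, 0.07]]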

View File

@ -20,11 +20,11 @@ const webScraping = {
examples: [
{
prompt: "What is useanything.com about?",
call: JSON.stringify({ uri: "https://useanything.com" }),
call: JSON.stringify({ url: "https://useanything.com" }),
},
{
prompt: "Scrape https://example.com",
call: JSON.stringify({ uri: "https://example.com" }),
call: JSON.stringify({ url: "https://example.com" }),
},
],
parameters: {

View File

@ -151,16 +151,27 @@ async function chatWithWorkspace(
};
}
contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
const { fillSourceWindow } = require("../helpers/chat");
const filledSources = fillSourceWindow({
nDocs: workspace?.topN || 4,
searchResults: vectorSearchResults.sources,
history: rawHistory,
filterIdentifiers: pinnedDocIdentifiers,
});
// Why does contextTexts get all the info, but sources only get current search?
// This is to give the ability of the LLM to "comprehend" a contextual response without
// populating the Citations under a response with documents the user "thinks" are irrelevant
// due to how we manage backfilling of the context to keep chats with the LLM more correct in responses.
// If a past citation was used to answer the question - that is visible in the history so it logically makes sense
// and does not appear to the user that a new response used information that is otherwise irrelevant for a given prompt.
// TLDR; reduces GitHub issues for "LLM citing document that has no answer in it" while keeping answers highly accurate.
contextTexts = [...contextTexts, ...filledSources.contextTexts];
sources = [...sources, ...vectorSearchResults.sources];
// If in query mode and no sources are found from the vector search and no pinned documents, do not
// If in query mode and no context chunks are found from search, backfill, or pins - do not
// let the LLM try to hallucinate a response or use general knowledge and exit early
if (
chatMode === "query" &&
vectorSearchResults.sources.length === 0 &&
pinnedDocIdentifiers.length === 0
) {
if (chatMode === "query" && contextTexts.length === 0) {
return {
id: uuid,
type: "textResponse",
@ -224,9 +235,7 @@ async function recentChatHistory({
workspace,
thread = null,
messageLimit = 20,
chatMode = null,
}) {
if (chatMode === "query") return { rawHistory: [], chatHistory: [] };
const rawHistory = (
await WorkspaceChats.where(
{
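
Editor's note: `fillSourceWindow` is required from `../helpers/chat`, and its body is only implied by the call site above: it tops up the fresh vector-search hits to `nDocs` with sources recovered from prior chat history, skipping anything already pinned, and returns the combined sources along with their context texts. A rough sketch under those assumptions (the history and source field names are guesses, not the actual helper):

// Hypothetical fillSourceWindow({ nDocs, searchResults, history, filterIdentifiers })
// reconstructed from how it is consumed above.
function fillSourceWindow({
  nDocs = 4,
  searchResults = [],
  history = [],
  filterIdentifiers = [],
} = {}) {
  const sources = [...searchResults];

  // Backfill from the most recent history entries until nDocs sources exist,
  // skipping pinned documents and stopping once the window is full.
  for (const chat of [...history].reverse()) {
    if (sources.length >= nDocs) break;
    for (const source of chat?.sources || []) {
      if (sources.length >= nDocs) break;
      if (filterIdentifiers.includes(source?.identifier)) continue;
      sources.push(source);
    }
  }

  return {
    sources,
    contextTexts: sources.map((source) => source?.text).filter(Boolean),
  };
}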

View File

@ -100,7 +100,6 @@ async function streamChatWithWorkspace(
workspace,
thread,
messageLimit,
chatMode,
});
// Look for pinned documents and see if the user decided to use this feature. We will also do a vector search
@ -157,16 +156,27 @@ async function streamChatWithWorkspace(
return;
}
contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
const { fillSourceWindow } = require("../helpers/chat");
const filledSources = fillSourceWindow({
nDocs: workspace?.topN || 4,
searchResults: vectorSearchResults.sources,
history: rawHistory,
filterIdentifiers: pinnedDocIdentifiers,
});
// Why does contextTexts get all the info, but sources only get current search?
// This is to give the ability of the LLM to "comprehend" a contextual response without
// populating the Citations under a response with documents the user "thinks" are irrelevant
// due to how we manage backfilling of the context to keep chats with the LLM more correct in responses.
// If a past citation was used to answer the question - that is visible in the history so it logically makes sense
// and does not appear to the user that a new response used information that is otherwise irrelevant for a given prompt.
// TLDR; reduces GitHub issues for "LLM citing document that has no answer in it" while keeping answers highly accurate.
contextTexts = [...contextTexts, ...filledSources.contextTexts];
sources = [...sources, ...vectorSearchResults.sources];
// If in query mode and no sources are found from the vector search and no pinned documents, do not
// If in query mode and no context chunks are found from search, backfill, or pins - do not
// let the LLM try to hallucinate a response or use general knowledge and exit early
if (
chatMode === "query" &&
sources.length === 0 &&
pinnedDocIdentifiers.length === 0
) {
if (chatMode === "query" && contextTexts.length === 0) {
writeResponseChunk(response, {
id: uuid,
type: "textResponse",

View File

@ -3,7 +3,7 @@ const fs = require("fs");
const { getType } = require("mime");
const { v4 } = require("uuid");
const { SystemSettings } = require("../../models/systemSettings");
const { normalizePath } = require(".");
const { normalizePath, isWithin } = require(".");
const LOGO_FILENAME = "anything-llm.png";
function validFilename(newFilename = "") {
@ -23,6 +23,8 @@ async function determineLogoFilepath(defaultFilename = LOGO_FILENAME) {
if (currentLogoFilename && validFilename(currentLogoFilename)) {
customLogoPath = path.join(basePath, normalizePath(currentLogoFilename));
if (!isWithin(path.resolve(basePath), path.resolve(customLogoPath)))
return defaultFilepath;
return fs.existsSync(customLogoPath) ? customLogoPath : defaultFilepath;
}
@ -52,17 +54,17 @@ function fetchLogo(logoPath) {
async function renameLogoFile(originalFilename = null) {
const extname = path.extname(originalFilename) || ".png";
const newFilename = `${v4()}${extname}`;
const originalFilepath = process.env.STORAGE_DIR
? path.join(
process.env.STORAGE_DIR,
"assets",
normalizePath(originalFilename)
)
: path.join(
__dirname,
`../../storage/assets`,
normalizePath(originalFilename)
);
const assetsDirectory = process.env.STORAGE_DIR
? path.join(process.env.STORAGE_DIR, "assets")
: path.join(__dirname, `../../storage/assets`);
const originalFilepath = path.join(
assetsDirectory,
normalizePath(originalFilename)
);
if (!isWithin(path.resolve(assetsDirectory), path.resolve(originalFilepath)))
throw new Error("Invalid file path.");
// The output always uses a random filename.
const outputFilepath = process.env.STORAGE_DIR
? path.join(process.env.STORAGE_DIR, "assets", normalizePath(newFilename))
: path.join(__dirname, `../../storage/assets`, normalizePath(newFilename));
@ -73,9 +75,13 @@ async function renameLogoFile(originalFilename = null) {
async function removeCustomLogo(logoFilename = LOGO_FILENAME) {
if (!logoFilename || !validFilename(logoFilename)) return false;
const logoPath = process.env.STORAGE_DIR
? path.join(process.env.STORAGE_DIR, `assets`, normalizePath(logoFilename))
: path.join(__dirname, `../../storage/assets`, normalizePath(logoFilename));
const assetsDirectory = process.env.STORAGE_DIR
? path.join(process.env.STORAGE_DIR, "assets")
: path.join(__dirname, `../../storage/assets`);
const logoPath = path.join(assetsDirectory, normalizePath(logoFilename));
if (!isWithin(path.resolve(assetsDirectory), path.resolve(logoPath)))
throw new Error("Invalid file path.");
if (fs.existsSync(logoPath)) fs.unlinkSync(logoPath);
return true;
}
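Every joined path in the hunks above and below is now checked with `isWithin` before the filesystem is touched. The helper itself is imported from the shared utils index and is not part of this diff, so the following is only a rough sketch of what such a guard typically looks like; the real implementation may differ.

```js
// Hypothetical sketch of an isWithin(outer, inner) guard - not the code from this PR.
const path = require("path");

function isWithin(outer, inner) {
  // A child path is "within" the parent when the relative path from parent to
  // child never climbs upward and does not jump to another absolute location.
  if (outer === inner) return false;
  const rel = path.relative(outer, inner);
  return !rel.startsWith("..") && !path.isAbsolute(rel);
}

// e.g. blocks traversal such as "../../etc/passwd" slipping past normalizePath:
// isWithin("/storage/assets", path.resolve("/storage/assets", "../../etc/passwd")) === false
```

The pfp helpers further down apply the same guard, so a single shared helper keeps the traversal check consistent across logo and profile-picture handling.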

View File

@ -2,7 +2,7 @@ const path = require("path");
const fs = require("fs");
const { getType } = require("mime");
const { User } = require("../../models/user");
const { normalizePath } = require(".");
const { normalizePath, isWithin } = require(".");
const { Workspace } = require("../../models/workspace");
function fetchPfp(pfpPath) {
@ -35,6 +35,8 @@ async function determinePfpFilepath(id) {
? path.join(process.env.STORAGE_DIR, "assets/pfp")
: path.join(__dirname, "../../storage/assets/pfp");
const pfpFilepath = path.join(basePath, normalizePath(pfpFilename));
if (!isWithin(path.resolve(basePath), path.resolve(pfpFilepath))) return null;
if (!fs.existsSync(pfpFilepath)) return null;
return pfpFilepath;
}
@ -48,6 +50,8 @@ async function determineWorkspacePfpFilepath(slug) {
? path.join(process.env.STORAGE_DIR, "assets/pfp")
: path.join(__dirname, "../../storage/assets/pfp");
const pfpFilepath = path.join(basePath, normalizePath(pfpFilename));
if (!isWithin(path.resolve(basePath), path.resolve(pfpFilepath))) return null;
if (!fs.existsSync(pfpFilepath)) return null;
return pfpFilepath;
}

View File

@ -1,3 +1,5 @@
const { sourceIdentifier } = require("../../chats");
const { safeJsonParse } = require("../../http");
const { TokenManager } = require("../tiktoken");
const { convertToPromptHistory } = require("./responses");
@ -343,7 +345,104 @@ function cannonball({
return truncatedText;
}
/**
* Fill the sources window with the priority of
* 1. Pinned documents (handled prior to function)
* 2. VectorSearch results
* 3. prevSources in chat history - starting from most recent.
*
* Ensures the window always has the desired number of sources so that follow-up questions
* in any chat mode have relevant sources, but not an unbounded amount. This function is used during chatting
* and allows follow-up questions within a query chat that would otherwise have zero sources and fail.
* The added benefit is that during regular RAG chat we get better coherence of citations for prompts that would
* otherwise yield no results, with no need for a re-ranker to run and slow the response down.
*
* The side effect is that unrelated follow-up questions may now show citations that look irrelevant; however,
* we would rather optimize for the correctness of a response than avoid showing a few extraneous sources. Since fresh search
* results always take priority, an unrelated question that produces good RAG results still works as desired, and because
* backfill sources come from previous history, "changing context" mid-chat is handled appropriately.
* example:
* ---previous implementation---
* prompt 1: "What is anythingllm?" -> possibly get 4 good sources
* prompt 2: "Tell me some features" -> possibly get 0-1 marginally relevant sources + previous answer response -> bad response due to poor context mgmt
* ---next implementation---
* prompt 1: "What is anythingllm?" -> possibly get 4 good sources
* prompt 2: "Tell me some features" -> possibly get 0-1 marginally relevant sources + previous answer response -> backfill with 3 good sources from the previous turn -> much better response
*
* @param {Object} config - params to call
* @param {number} config.nDocs - fill size of the source window
* @param {object[]} config.searchResults - vector similarity search results (.sources)
* @param {object[]} config.history - rawHistory of chat containing sources
* @param {string[]} config.filterIdentifiers - Pinned document identifiers to prevent duplicate context
* @returns {{
* contextTexts: string[],
* sources: object[],
* }} - The backfilled sources and their corresponding context texts.
*/
function fillSourceWindow({
nDocs = 4, // Number of documents
searchResults = [], // Sources from similarity search
history = [], // Raw history
filterIdentifiers = [], // pinned document sources
} = {}) { // default to an empty object so the destructuring defaults apply when called without args
const sources = [...searchResults];
if (sources.length >= nDocs || history.length === 0) {
return {
sources,
contextTexts: sources.map((src) => src.text),
};
}
const log = (text, ...args) => {
console.log(`\x1b[36m[fillSourceWindow]\x1b[0m ${text}`, ...args);
};
log(
`Need to backfill ${nDocs - searchResults.length} chunks to fill in the source window for RAG!`
);
const seenChunks = new Set(searchResults.map((source) => source.id));
// We need to reverse again because we need to iterate from bottom of array (most recent chats)
// Looking at this function by itself you may think this loop could be expensive for long chat histories,
// but that is already handled where `history` is derived. It comes from `recentChatHistory`, which
// caps the history length (default: 20), so this loop is not as extreme as it looks at first glance.
for (const chat of history.reverse()) {
if (sources.length >= nDocs) {
log(
`Citations backfilled to ${nDocs} references from ${searchResults.length} original citations.`
);
break;
}
const chatSources =
safeJsonParse(chat.response, { sources: [] })?.sources || [];
if (!chatSources?.length || !Array.isArray(chatSources)) continue;
const validSources = chatSources.filter((source) => {
return (
filterIdentifiers.includes(sourceIdentifier(source)) == false && // source cannot be in current pins
source.hasOwnProperty("score") && // source must have a score - pinned documents have no score, so previously pinned chunks are excluded
source.hasOwnProperty("text") && // source has a valid text property we can use
seenChunks.has(source.id) == false // is unique
);
});
for (const validSource of validSources) {
if (sources.length >= nDocs) break;
sources.push(validSource);
seenChunks.add(validSource.id);
}
}
return {
sources,
contextTexts: sources.map((src) => src.text),
};
}
module.exports = {
messageArrayCompressor,
messageStringCompressor,
fillSourceWindow,
};
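To make the intended behavior of `fillSourceWindow` concrete, here is a toy, self-contained sketch (values and the require path are illustrative): one fresh vector hit plus a prior chat whose stored response carries two reusable chunks, with a window size of four.

```js
// Toy demonstration only - data is made up to show the backfill behavior.
const { fillSourceWindow } = require("../helpers/chat");

const freshHit = { id: "chunk-1", text: "AnythingLLM is ...", score: 0.82 };
const priorChat = {
  response: JSON.stringify({
    sources: [
      { id: "chunk-2", text: "Feature overview ...", score: 0.74 },
      { id: "chunk-3", text: "Deployment notes ...", score: 0.69 },
    ],
  }),
};

const { sources, contextTexts } = fillSourceWindow({
  nDocs: 4,                  // desired window size (workspace?.topN || 4 in the handlers)
  searchResults: [freshHit], // only one fresh hit this turn
  history: [priorChat],      // recent chats provide the rest
  filterIdentifiers: [],     // nothing pinned in this example
});

console.log(sources.length, contextTexts.length); // 3 3 - one fresh hit + two backfilled chunks
```

Note that the helper reverses `history` in place; since `recentChatHistory` builds a fresh array per request this is harmless here, but it is worth keeping in mind if the helper is reused elsewhere.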

View File

@ -38,8 +38,13 @@ function handleDefaultStreamResponseV2(response, stream, responseProps) {
});
}
// LocalAi returns '' and others return null.
if (message.finish_reason !== "" && message.finish_reason !== null) {
// LocalAI returns '' and other providers return null on intermediate chunks - the final chunk is neither "" nor null.
// Either way, the `finish_reason` key must be present to identify the ending chunk.
if (
message?.hasOwnProperty("finish_reason") && // Got valid message and it is an object with finish_reason
message.finish_reason !== "" &&
message.finish_reason !== null
) {
writeResponseChunk(response, {
uuid,
sources,
@ -50,6 +55,7 @@ function handleDefaultStreamResponseV2(response, stream, responseProps) {
});
response.removeListener("close", handleAbort);
resolve(fullText);
break; // Break streaming when a valid finish_reason is first encountered
}
}
});
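As a quick illustration of why the stricter guard above matters, here is a hedged sketch of the chunk shapes it is written against; the payloads are made up and do not come from any specific provider response.

```js
// Illustrative chunk payloads only.
const localAiIntermediate = { content: "Hello", finish_reason: "" };    // LocalAI-style intermediate chunk
const openAiIntermediate  = { content: " there", finish_reason: null }; // OpenAI-style intermediate chunk
const finalChunk          = { content: "", finish_reason: "stop" };     // terminating chunk
const malformedChunk      = undefined;                                  // e.g. an empty or dropped frame

const isFinished = (message) =>
  Boolean(
    message?.hasOwnProperty("finish_reason") && // must be an object actually carrying the key
      message.finish_reason !== "" &&
      message.finish_reason !== null
  );

[localAiIntermediate, openAiIntermediate, finalChunk, malformedChunk].forEach((m) =>
  console.log(isFinished(m)) // false, false, true, false
);
```

Under the previous check, a chunk missing the `finish_reason` key entirely would have counted as finished; the added `hasOwnProperty` guard prevents that, and the new `break` stops iterating once the true ending chunk is seen.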

View File

@ -526,7 +526,11 @@ function supportedTranscriptionProvider(input = "") {
}
function validGeminiModel(input = "") {
const validModels = ["gemini-pro", "gemini-1.5-pro-latest"];
const validModels = [
"gemini-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
];
return validModels.includes(input)
? null
: `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
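For completeness, a tiny usage sketch of the validator after the new model is added; the return convention (null on success, error string otherwise) is taken directly from the code above.

```js
// Illustrative usage of validGeminiModel (sketch, not part of the diff).
console.log(validGeminiModel("gemini-1.5-flash-latest")); // null - now accepted
console.log(validGeminiModel("gemini-ultra"));
// "Invalid Model type. Must be one of gemini-pro, gemini-1.5-pro-latest, gemini-1.5-flash-latest."
```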

View File

@ -10,7 +10,7 @@
**Instructions**
- [Create an Astra account or sign in to an existing Astra account](astra.datastax.com)
- [Create an Astra account or sign in to an existing Astra account](https://astra.datastax.com)
- Create an Astra Serverless (Vector) Database.
- Make sure the DB is in an active state.
- Get the `API ENDPOINT` and `Application Token` from the Overview screen

View File

@ -1,6 +1,6 @@
# How to setup a local (or remote) Chroma Vector Database
[Official Chroma Docs](https://docs.trychroma.com/usage-guide#running-chroma-in-clientserver-mode) for reference.
[Official Chroma Docs](https://docs.trychroma.com/guides#running-chroma-in-clientserver-mode) for reference.
### How to get started