Merge branch 'master' of github.com:Mintplex-Labs/anything-llm into render

timothycarambat 2024-05-01 13:33:07 -07:00
commit 6150ff41ea
91 changed files with 5879 additions and 2833 deletions

View File

@ -1,11 +1,14 @@
{
"cSpell.words": [
"AIbitat",
"adoc",
"aibitat",
"AIbitat",
"anythingllm",
"Astra",
"Chartable",
"comkey",
"cooldown",
"cooldowns",
"Deduplicator",
"Dockerized",
"Embeddable",
@ -17,6 +20,7 @@
"mbox",
"Milvus",
"Mintplex",
"moderations",
"Ollama",
"openai",
"opendocument",

View File

@ -4,69 +4,112 @@ const { reqBody } = require("../utils/http");
function extensions(app) {
if (!app) return;
app.post("/ext/github-repo", [verifyPayloadIntegrity], async function (request, response) {
try {
const loadGithubRepo = require("../utils/extensions/GithubRepo");
const { success, reason, data } = await loadGithubRepo(reqBody(request));
response.status(200).json({
success,
reason,
data
});
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
reason: e.message || "A processing error occurred.",
data: {},
});
app.post(
"/ext/github-repo",
[verifyPayloadIntegrity],
async function (request, response) {
try {
const loadGithubRepo = require("../utils/extensions/GithubRepo");
const { success, reason, data } = await loadGithubRepo(
reqBody(request)
);
response.status(200).json({
success,
reason,
data,
});
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
reason: e.message || "A processing error occurred.",
data: {},
});
}
return;
}
return;
});
);
// gets all branches for a specific repo
app.post("/ext/github-repo/branches", [verifyPayloadIntegrity], async function (request, response) {
try {
const GithubRepoLoader = require("../utils/extensions/GithubRepo/RepoLoader");
const allBranches = await (new GithubRepoLoader(reqBody(request))).getRepoBranches()
response.status(200).json({
success: true,
reason: null,
data: {
branches: allBranches
}
});
} catch (e) {
console.error(e);
response.status(400).json({
success: false,
reason: e.message,
data: {
branches: []
}
});
app.post(
"/ext/github-repo/branches",
[verifyPayloadIntegrity],
async function (request, response) {
try {
const GithubRepoLoader = require("../utils/extensions/GithubRepo/RepoLoader");
const allBranches = await new GithubRepoLoader(
reqBody(request)
).getRepoBranches();
response.status(200).json({
success: true,
reason: null,
data: {
branches: allBranches,
},
});
} catch (e) {
console.error(e);
response.status(400).json({
success: false,
reason: e.message,
data: {
branches: [],
},
});
}
return;
}
return;
});
);
app.post("/ext/youtube-transcript", [verifyPayloadIntegrity], async function (request, response) {
try {
const loadYouTubeTranscript = require("../utils/extensions/YoutubeTranscript");
const { success, reason, data } = await loadYouTubeTranscript(reqBody(request));
response.status(200).json({ success, reason, data });
} catch (e) {
console.error(e);
response.status(400).json({
success: false,
reason: e.message,
data: {
title: null,
author: null
}
});
app.post(
"/ext/youtube-transcript",
[verifyPayloadIntegrity],
async function (request, response) {
try {
const loadYouTubeTranscript = require("../utils/extensions/YoutubeTranscript");
const { success, reason, data } = await loadYouTubeTranscript(
reqBody(request)
);
response.status(200).json({ success, reason, data });
} catch (e) {
console.error(e);
response.status(400).json({
success: false,
reason: e.message,
data: {
title: null,
author: null,
},
});
}
return;
}
return;
});
);
app.post(
"/ext/confluence",
[verifyPayloadIntegrity],
async function (request, response) {
try {
const loadConfluence = require("../utils/extensions/Confluence");
const { success, reason, data } = await loadConfluence(
reqBody(request)
);
response.status(200).json({ success, reason, data });
} catch (e) {
console.error(e);
response.status(400).json({
success: false,
reason: e.message,
data: {
title: null,
author: null,
},
});
}
return;
}
);
}
module.exports = extensions;
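
For reference, a minimal sketch of how a caller might exercise the new /ext/confluence route. The port, integrity header name, and payload values are assumptions for illustration; verifyPayloadIntegrity defines the real signing scheme.

// Hypothetical client call; header name and values are illustrative only.
const response = await fetch("http://localhost:8888/ext/confluence", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "X-Integrity": "<signed-body-digest>", // assumed header, see verifyPayloadIntegrity
  },
  body: JSON.stringify({
    pageUrl: "https://example.atlassian.net/wiki/spaces/DOCS/overview",
    username: "jdoe@example.com",
    accessToken: "<atlassian-api-token>",
  }),
});
const { success, reason, data } = await response.json();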

View File

@ -28,7 +28,7 @@
"html-to-text": "^9.0.5",
"ignore": "^5.3.0",
"js-tiktoken": "^1.0.8",
"langchain": "0.0.201",
"langchain": "0.1.36",
"mammoth": "^1.6.0",
"mbox-parser": "^1.0.1",
"mime": "^3.0.0",
@ -36,7 +36,7 @@
"multer": "^1.4.5-lts.1",
"node-html-parser": "^6.1.13",
"officeparser": "^4.0.5",
"openai": "^3.2.1",
"openai": "4.38.5",
"pdf-parse": "^1.1.1",
"puppeteer": "~21.5.2",
"slugify": "^1.6.6",

View File

@ -2,13 +2,12 @@ const fs = require("fs");
class OpenAiWhisper {
constructor({ options }) {
const { Configuration, OpenAIApi } = require("openai");
const { OpenAI: OpenAIApi } = require("openai");
if (!options.openAiKey) throw new Error("No OpenAI API key was set.");
const config = new Configuration({
this.openai = new OpenAIApi({
apiKey: options.openAiKey,
});
this.openai = new OpenAIApi(config);
this.model = "whisper-1";
this.temperature = 0;
this.#log("Initialized.");
@ -19,22 +18,30 @@ class OpenAiWhisper {
}
async processFile(fullFilePath) {
return await this.openai
.createTranscription(
fs.createReadStream(fullFilePath),
this.model,
undefined,
"text",
this.temperature
)
.then((res) => {
if (res.hasOwnProperty("data"))
return { content: res.data, error: null };
return { content: "", error: "No content was able to be transcribed." };
return await this.openai.audio.transcriptions
.create({
file: fs.createReadStream(fullFilePath),
model: this.model,
model: "whisper-1",
response_format: "text",
temperature: this.temperature,
})
.catch((e) => {
this.#log(`Could not get any response from openai whisper`, e.message);
return { content: "", error: e.message };
.then((response) => {
if (!response) {
return {
content: "",
error: "No content was able to be transcribed.",
};
}
return { content: response, error: null };
})
.catch((error) => {
this.#log(
`Could not get any response from openai whisper`,
error.message
);
return { content: "", error: error.message };
});
}
}
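
Worth noting about the rewrite above: with response_format: "text", the v4 SDK resolves to the transcript string itself rather than a { data } wrapper, which is why the new .then treats response as the content directly. A standalone sketch (file path illustrative):

const fs = require("fs");
const { OpenAI } = require("openai");
const client = new OpenAI({ apiKey: process.env.OPEN_AI_KEY }); // env var name assumed
const transcript = await client.audio.transcriptions.create({
  file: fs.createReadStream("sample.mp3"), // illustrative path
  model: "whisper-1",
  response_format: "text",
  temperature: 0,
});
console.log(transcript); // plain transcript string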

View File

@ -0,0 +1,110 @@
const fs = require("fs");
const path = require("path");
const { default: slugify } = require("slugify");
const { v4 } = require("uuid");
const { writeToServerDocuments } = require("../../files");
const { tokenizeString } = require("../../tokenizer");
const {
ConfluencePagesLoader,
} = require("langchain/document_loaders/web/confluence");
function validSpaceUrl(spaceUrl = "") {
const UrlPattern = require("url-pattern");
const pattern = new UrlPattern(
"https\\://(:subdomain).atlassian.net/wiki/spaces/(:spaceKey)*"
);
const match = pattern.match(spaceUrl);
if (!match) return { valid: false, result: null };
return { valid: true, result: match };
}
async function loadConfluence({ pageUrl, username, accessToken }) {
if (!pageUrl || !username || !accessToken) {
return {
success: false,
reason:
"You need either a username and access token, or a personal access token (PAT), to use the Confluence connector.",
};
}
const validSpace = validSpaceUrl(pageUrl);
if (!validSpace.result) {
return {
success: false,
reason:
"Confluence space URL is not in the expected format of https://domain.atlassian.net/wiki/space/~SPACEID/*",
};
}
const { subdomain, spaceKey } = validSpace.result;
console.log(`-- Working Confluence ${subdomain}.atlassian.net --`);
const loader = new ConfluencePagesLoader({
baseUrl: `https://${subdomain}.atlassian.net/wiki`,
spaceKey,
username,
accessToken,
});
const { docs, error } = await loader
.load()
.then((docs) => {
return { docs, error: null };
})
.catch((e) => {
return {
docs: [],
error: e.message?.split("Error:")?.[1] || e.message,
};
});
if (!docs.length || !!error) {
return {
success: false,
reason: error ?? "No pages found for that Confluence space.",
};
}
const outFolder = slugify(
`${subdomain}-confluence-${v4().slice(0, 4)}`
).toLowerCase();
const outFolderPath = path.resolve(
__dirname,
`../../../../server/storage/documents/${outFolder}`
);
fs.mkdirSync(outFolderPath);
docs.forEach((doc) => {
const data = {
id: v4(),
url: doc.metadata.url + ".page",
title: doc.metadata.title || doc.metadata.source,
docAuthor: subdomain,
description: doc.metadata.title,
docSource: `${subdomain} Confluence`,
chunkSource: `confluence://${doc.metadata.url}`,
published: new Date().toLocaleString(),
wordCount: doc.pageContent.split(" ").length,
pageContent: doc.pageContent,
token_count_estimate: tokenizeString(doc.pageContent).length,
};
console.log(
`[Confluence Loader]: Saving ${doc.metadata.title} to ${outFolder}`
);
writeToServerDocuments(
data,
`${slugify(doc.metadata.title)}-${data.id}`,
outFolderPath
);
});
return {
success: true,
reason: null,
data: {
spaceKey,
destination: outFolder,
},
};
}
module.exports = loadConfluence;
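
The space detection above leans on url-pattern's named segments; a quick sketch of what validSpaceUrl gets back for a typical space URL (values illustrative):

const UrlPattern = require("url-pattern");
const pattern = new UrlPattern(
  "https\\://(:subdomain).atlassian.net/wiki/spaces/(:spaceKey)*"
);
pattern.match("https://acme.atlassian.net/wiki/spaces/DOCS/pages/851969/Home");
// => { subdomain: "acme", spaceKey: "DOCS", _: "/pages/851969/Home" }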

File diff suppressed because it is too large.

View File

@ -64,7 +64,7 @@ GID='1000'
# LLM_PROVIDER='groq'
# GROQ_API_KEY=gsk_abcxyz
# GROQ_MODEL_PREF=llama2-70b-4096
# GROQ_MODEL_PREF=llama3-8b-8192
# LLM_PROVIDER='generic-openai'
# GENERIC_OPEN_AI_BASE_PATH='http://proxy.url.openai.com/v1'

View File

@ -14,6 +14,7 @@
"@metamask/jazzicon": "^2.0.0",
"@microsoft/fetch-event-source": "^2.0.1",
"@phosphor-icons/react": "^2.0.13",
"@tremor/react": "^3.15.1",
"dompurify": "^3.0.8",
"file-saver": "^2.0.5",
"he": "^1.2.0",
@ -30,6 +31,8 @@
"react-tag-input-component": "^2.0.2",
"react-toastify": "^9.1.3",
"react-tooltip": "^5.25.2",
"recharts": "^2.12.5",
"recharts-to-png": "^2.3.1",
"text-case": "^1.0.9",
"truncate": "^3.0.0",
"uuid": "^9.0.0"

Binary file not shown (new image added, 5.5 KiB).

View File

@ -1,9 +1,11 @@
import Github from "./github.svg";
import YouTube from "./youtube.svg";
import Confluence from "./confluence.jpeg";
const ConnectorImages = {
github: Github,
youtube: YouTube,
confluence: Confluence,
};
export default ConnectorImages;

View File

@ -24,12 +24,11 @@ export default function GroqAiOptions({ settings }) {
</label>
<select
name="GroqModelPref"
defaultValue={settings?.GroqModelPref || "llama2-70b-4096"}
defaultValue={settings?.GroqModelPref || "llama3-8b-8192"}
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{[
"llama2-70b-4096",
"mixtral-8x7b-32768",
"llama3-8b-8192",
"llama3-70b-8192",

View File

@ -86,7 +86,7 @@ function TogetherAiModelSelection({ settings }) {
<option
key={model.id}
value={model.id}
selected={settings?.OpenRouterModelPref === model.id}
selected={settings?.TogetherAiModelPref === model.id}
>
{model.name}
</option>

View File

@ -0,0 +1,86 @@
import showToast from "@/utils/toast";
import { DownloadSimple, Key } from "@phosphor-icons/react";
import { saveAs } from "file-saver";
import { useState } from "react";
export default function RecoveryCodeModal({
recoveryCodes,
onDownloadComplete,
onClose,
}) {
const [downloadClicked, setDownloadClicked] = useState(false);
const downloadRecoveryCodes = () => {
const blob = new Blob([recoveryCodes.join("\n")], { type: "text/plain" });
saveAs(blob, "recovery_codes.txt");
setDownloadClicked(true);
};
const handleClose = () => {
if (downloadClicked) {
onDownloadComplete();
onClose();
}
};
const handleCopyToClipboard = () => {
navigator.clipboard.writeText(recoveryCodes.join(",\n")).then(() => {
showToast("Recovery codes copied to clipboard", "success", {
clear: true,
});
});
};
return (
<div className="inline-block bg-[#2C2F36] rounded-lg text-left overflow-hidden shadow-xl transform transition-all border-2 border-[#BCC9DB]/10 w-[600px] mx-4">
<div className="md:py-[35px] md:px-[50px] py-[28px] px-[20px]">
<div className="flex gap-x-2">
<Key size={24} className="text-white" weight="bold" />
<h3
className="text-lg leading-6 font-medium text-white"
id="modal-headline"
>
Recovery Codes
</h3>
</div>
<div className="mt-4">
<p className="text-sm text-white flex flex-col">
In order to reset your password in the future, you will need these
recovery codes. Download or copy your recovery codes to save them.{" "}
<br />
<b className="mt-4">These recovery codes are only shown once!</b>
</p>
<div
className="bg-[#1C1E21] text-white hover:text-[#46C8FF]
flex items-center justify-center rounded-md mt-6 cursor-pointer"
onClick={handleCopyToClipboard}
>
<ul className="space-y-2 md:p-6 p-4">
{recoveryCodes.map((code, index) => (
<li key={index} className="md:text-sm text-xs">
{code}
</li>
))}
</ul>
</div>
</div>
</div>
<div className="flex w-full justify-center items-center p-3 space-x-2 rounded-b border-gray-500/50 -mt-4 mb-4">
<button
type="button"
className="transition-all duration-300 text-xs md:w-[500px] md:h-[34px] h-[48px] w-full m-2 font-semibold rounded-lg bg-[#46C8FF] hover:bg-[#2C2F36] border-2 border-transparent hover:border-[#46C8FF] hover:text-white whitespace-nowrap shadow-[0_4px_14px_rgba(0,0,0,0.25)] flex justify-center items-center gap-x-2"
onClick={downloadClicked ? handleClose : downloadRecoveryCodes}
>
{downloadClicked ? (
"Close"
) : (
<>
<DownloadSimple weight="bold" size={18} />
<p>Download</p>
</>
)}
</button>
</div>
</div>
);
}

View File

@ -0,0 +1,164 @@
import { useState } from "react";
import System from "@/models/system";
import showToast from "@/utils/toast";
import { Warning } from "@phosphor-icons/react";
import { Tooltip } from "react-tooltip";
export default function ConfluenceOptions() {
const [loading, setLoading] = useState(false);
const handleSubmit = async (e) => {
e.preventDefault();
const form = new FormData(e.target);
try {
setLoading(true);
showToast(
"Fetching all pages for Confluence space - this may take a while.",
"info",
{
clear: true,
autoClose: false,
}
);
const { data, error } = await System.dataConnectors.confluence.collect({
pageUrl: form.get("pageUrl"),
username: form.get("username"),
accessToken: form.get("accessToken"),
});
if (!!error) {
showToast(error, "error", { clear: true });
setLoading(false);
return;
}
showToast(
`Pages collected from Confluence space ${data.spaceKey}. Output folder is ${data.destination}.`,
"success",
{ clear: true }
);
e.target.reset();
setLoading(false);
} catch (e) {
console.error(e);
showToast(e.message, "error", { clear: true });
setLoading(false);
}
};
return (
<div className="flex w-full">
<div className="flex flex-col w-full px-1 md:pb-6 pb-16">
<form className="w-full" onSubmit={handleSubmit}>
<div className="w-full flex flex-col py-2">
<div className="w-full flex flex-col gap-4">
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-white">Confluence Page URL</p>
</label>
<p className="text-xs font-normal text-white/50">
URL of a page in the Confluence space.
</p>
</div>
<input
type="url"
name="pageUrl"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="https://example.atlassian.net/wiki/spaces/~7120208c08555d52224113949698b933a3bb56/pages/851969/Test+anythingLLM+page"
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold">
Confluence Username
</label>
<p className="text-xs font-normal text-white/50">
Your Confluence username.
</p>
</div>
<input
type="email"
name="username"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="jdoe@example.com"
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-white">
Confluence Access Token
</p>
<Warning
size={14}
className="ml-1 text-orange-500 cursor-pointer"
data-tooltip-id="access-token-tooltip"
data-tooltip-place="right"
/>
<Tooltip
delayHide={300}
id="access-token-tooltip"
className="max-w-xs"
clickable={true}
>
<p className="text-sm">
You need to provide an access token for authentication.
You can generate an access token{" "}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
rel="noopener noreferrer"
className="underline"
onClick={(e) => e.stopPropagation()}
>
here
</a>
.
</p>
</Tooltip>
</label>
<p className="text-xs font-normal text-white/50">
Access token for authentication.
</p>
</div>
<input
type="password"
name="accessToken"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="abcd1234"
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
</div>
</div>
<div className="flex flex-col gap-y-2 w-full pr-10">
<button
type="submit"
disabled={loading}
className="mt-2 w-full justify-center border border-slate-200 px-4 py-2 rounded-lg text-[#222628] text-sm font-bold items-center flex gap-x-2 bg-slate-200 hover:bg-slate-300 hover:text-slate-800 disabled:bg-slate-300 disabled:cursor-not-allowed"
>
{loading ? "Collecting pages..." : "Submit"}
</button>
{loading && (
<p className="text-xs text-white/50">
Once complete, all pages will be available for embedding into
workspaces.
</p>
)}
</div>
</form>
</div>
</div>
);
}

View File

@ -2,6 +2,7 @@ import ConnectorImages from "@/components/DataConnectorOption/media";
import { MagnifyingGlass } from "@phosphor-icons/react";
import GithubOptions from "./Connectors/Github";
import YoutubeOptions from "./Connectors/Youtube";
import ConfluenceOptions from "./Connectors/Confluence";
import { useState } from "react";
import ConnectorOption from "./ConnectorOption";
@ -20,6 +21,12 @@ export const DATA_CONNECTORS = {
"Import the transcription of an entire YouTube video from a link.",
options: <YoutubeOptions />,
},
confluence: {
name: "Confluence",
image: ConnectorImages.confluence,
description: "Import an entire Confluence page in a single click.",
options: <ConfluenceOptions />,
},
};
export default function DataConnectors() {

View File

@ -261,8 +261,8 @@ function Directory({
)}
</div>
{amountSelected !== 0 && (
<div className="absolute bottom-[12px] left-0 right-0 flex justify-center">
<div className="mx-auto bg-white/40 rounded-lg py-1 px-2">
<div className="absolute bottom-[12px] left-0 right-0 flex justify-center pointer-events-none">
<div className="mx-auto bg-white/40 rounded-lg py-1 px-2 pointer-events-auto">
<div className="flex flex-row items-center gap-x-2">
<button
onClick={moveToWorkspace}
@ -306,6 +306,7 @@ function Directory({
workspace={workspace}
fetchKeys={fetchKeys}
setLoading={setLoading}
setLoadingMessage={setLoadingMessage}
/>
</div>
</div>

View File

@ -7,18 +7,37 @@ import PreLoader from "../../../../../Preloader";
function FileUploadProgressComponent({
slug,
uuid,
file,
setFiles,
rejected = false,
reason = null,
onUploadSuccess,
onUploadError,
setLoading,
setLoadingMessage,
}) {
const [timerMs, setTimerMs] = useState(10);
const [status, setStatus] = useState("pending");
const [error, setError] = useState("");
const [isFadingOut, setIsFadingOut] = useState(false);
const fadeOut = (cb) => {
setIsFadingOut(true);
cb?.();
};
const beginFadeOut = () => {
setIsFadingOut(false);
setFiles((prev) => {
return prev.filter((item) => item.uid !== uuid);
});
};
useEffect(() => {
async function uploadFile() {
setLoading(true);
setLoadingMessage("Uploading file...");
const start = Number(new Date());
const formData = new FormData();
formData.append("file", file, file.name);
@ -34,17 +53,28 @@ function FileUploadProgressComponent({
onUploadError(data.error);
setError(data.error);
} else {
setLoading(false);
setLoadingMessage("");
setStatus("complete");
clearInterval(timer);
onUploadSuccess();
}
// Begin fadeout timer to clear uploader queue.
setTimeout(() => {
fadeOut(() => setTimeout(() => beginFadeOut(), 300));
}, 5000);
}
!!file && !rejected && uploadFile();
}, []);
if (rejected) {
return (
<div className="h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40">
<div
className={`${
isFadingOut ? "file-upload-fadeout" : "file-upload"
} h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40`}
>
<div className="w-6 h-6 flex-shrink-0">
<XCircle className="w-6 h-6 stroke-white bg-red-500 rounded-full p-1 w-full h-full" />
</div>
@ -60,7 +90,11 @@ function FileUploadProgressComponent({
if (status === "failed") {
return (
<div className="h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40 overflow-y-auto">
<div
className={`${
isFadingOut ? "file-upload-fadeout" : "file-upload"
} h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40 overflow-y-auto`}
>
<div className="w-6 h-6 flex-shrink-0">
<XCircle className="w-6 h-6 stroke-white bg-red-500 rounded-full p-1 w-full h-full" />
</div>
@ -75,7 +109,11 @@ function FileUploadProgressComponent({
}
return (
<div className="h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40">
<div
className={`${
isFadingOut ? "file-upload-fadeout" : "file-upload"
} h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40`}
>
<div className="w-6 h-6 flex-shrink-0">
{status !== "complete" ? (
<div className="flex items-center justify-center">

View File

@ -6,8 +6,14 @@ import { useDropzone } from "react-dropzone";
import { v4 } from "uuid";
import FileUploadProgress from "./FileUploadProgress";
import Workspace from "../../../../../models/workspace";
import debounce from "lodash.debounce";
export default function UploadFile({ workspace, fetchKeys, setLoading }) {
export default function UploadFile({
workspace,
fetchKeys,
setLoading,
setLoadingMessage,
}) {
const [ready, setReady] = useState(false);
const [files, setFiles] = useState([]);
const [fetchingUrl, setFetchingUrl] = useState(false);
@ -15,6 +21,7 @@ export default function UploadFile({ workspace, fetchKeys, setLoading }) {
const handleSendLink = async (e) => {
e.preventDefault();
setLoading(true);
setLoadingMessage("Scraping link...");
setFetchingUrl(true);
const formEl = e.target;
const form = new FormData(formEl);
@ -33,14 +40,9 @@ export default function UploadFile({ workspace, fetchKeys, setLoading }) {
setFetchingUrl(false);
};
const handleUploadSuccess = () => {
fetchKeys(true);
showToast("File uploaded successfully", "success", { clear: true });
};
const handleUploadError = (message) => {
showToast(`Error uploading file: ${message}`, "error");
};
// Don't spam fetchKeys, wait 1s between calls at least.
const handleUploadSuccess = debounce(() => fetchKeys(true), 1000);
const handleUploadError = (_msg) => null; // stubbed.
const onDrop = async (acceptedFiles, rejections) => {
const newAccepted = acceptedFiles.map((file) => {
@ -109,11 +111,15 @@ export default function UploadFile({ workspace, fetchKeys, setLoading }) {
<FileUploadProgress
key={file.uid}
file={file.file}
uuid={file.uid}
setFiles={setFiles}
slug={workspace.slug}
rejected={file?.rejected}
reason={file?.reason}
onUploadSuccess={handleUploadSuccess}
onUploadError={handleUploadError}
setLoading={setLoading}
setLoadingMessage={setLoadingMessage}
/>
))}
</div>
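
On the debounced handleUploadSuccess above: lodash.debounce fires on the trailing edge once the wait passes with no new calls, so a burst of parallel upload completions collapses into a single fetchKeys(true). A sketch of that semantics:

import debounce from "lodash.debounce";

const refresh = debounce(() => console.log("fetchKeys(true) fired"), 1000);
refresh();
refresh();
refresh();
// logs once, roughly one second after the last call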

View File

@ -80,7 +80,7 @@ export default function DocumentSettings({ workspace, systemSettings }) {
}
useEffect(() => {
fetchKeys();
fetchKeys(true);
}, []);
const updateWorkspace = async (e) => {

View File

@ -1,26 +1,203 @@
import React, { useState } from "react";
import React, { useEffect, useState } from "react";
import System from "../../../models/system";
import { AUTH_TOKEN, AUTH_USER } from "../../../utils/constants";
import useLogo from "../../../hooks/useLogo";
import paths from "../../../utils/paths";
import showToast from "@/utils/toast";
import ModalWrapper from "@/components/ModalWrapper";
import { useModal } from "@/hooks/useModal";
import RecoveryCodeModal from "@/components/Modals/DisplayRecoveryCodeModal";
const RecoveryForm = ({ onSubmit, setShowRecoveryForm }) => {
const [username, setUsername] = useState("");
const [recoveryCodeInputs, setRecoveryCodeInputs] = useState(
Array(2).fill("")
);
const handleRecoveryCodeChange = (index, value) => {
const updatedCodes = [...recoveryCodeInputs];
updatedCodes[index] = value;
setRecoveryCodeInputs(updatedCodes);
};
const handleSubmit = (e) => {
e.preventDefault();
const recoveryCodes = recoveryCodeInputs.filter(
(code) => code.trim() !== ""
);
onSubmit(username, recoveryCodes);
};
return (
<form
onSubmit={handleSubmit}
className="flex flex-col justify-center items-center relative rounded-2xl md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)] md:px-8 px-0 py-4 w-full md:w-fit mt-10 md:mt-0"
>
<div className="flex items-start justify-between pt-11 pb-9 w-screen md:w-full md:px-12 px-6 ">
<div className="flex flex-col gap-y-4 w-full">
<h3 className="text-4xl md:text-lg font-bold text-white text-center md:text-left">
Password Reset
</h3>
<p className="text-sm text-white/90 md:text-left md:max-w-[300px] px-4 md:px-0 text-center">
Provide the necessary information below to reset your password.
</p>
</div>
</div>
<div className="md:px-12 px-6 space-y-6 flex h-full w-full">
<div className="w-full flex flex-col gap-y-4">
<div className="flex flex-col gap-y-2">
<label className="text-white text-sm font-bold">Username</label>
<input
name="username"
type="text"
placeholder="Username"
value={username}
onChange={(e) => setUsername(e.target.value)}
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required
/>
</div>
<div className="flex flex-col gap-y-2">
<label className="text-white text-sm font-bold">
Recovery Codes
</label>
{recoveryCodeInputs.map((code, index) => (
<div key={index}>
<input
type="text"
name={`recoveryCode${index + 1}`}
placeholder={`Recovery Code ${index + 1}`}
value={code}
onChange={(e) =>
handleRecoveryCodeChange(index, e.target.value)
}
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required
/>
</div>
))}
</div>
</div>
</div>
<div className="flex items-center md:p-12 md:px-0 px-6 mt-12 md:mt-0 space-x-2 border-gray-600 w-full flex-col gap-y-8">
<button
type="submit"
className="md:text-[#46C8FF] md:bg-transparent md:w-[300px] text-[#222628] text-sm font-bold focus:ring-4 focus:outline-none rounded-md border-[1.5px] border-[#46C8FF] md:h-[34px] h-[48px] md:hover:text-white md:hover:bg-[#46C8FF] bg-[#46C8FF] focus:z-10 w-full"
>
Reset Password
</button>
<button
type="button"
className="text-white text-sm flex gap-x-1 hover:text-[#46C8FF] hover:underline -mb-8"
onClick={() => setShowRecoveryForm(false)}
>
Back to Login
</button>
</div>
</form>
);
};
const ResetPasswordForm = ({ onSubmit }) => {
const [newPassword, setNewPassword] = useState("");
const [confirmPassword, setConfirmPassword] = useState("");
const handleSubmit = (e) => {
e.preventDefault();
onSubmit(newPassword, confirmPassword);
};
return (
<form
onSubmit={handleSubmit}
className="flex flex-col justify-center items-center relative rounded-2xl md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)] md:px-12 px-0 py-4 w-full md:w-fit -mt-24 md:-mt-28"
>
<div className="flex items-start justify-between pt-11 pb-9 w-screen md:w-full md:px-12 px-6">
<div className="flex flex-col gap-y-4 w-full">
<h3 className="text-4xl md:text-2xl font-bold text-white text-center md:text-left">
Reset Password
</h3>
<p className="text-sm text-white/90 md:text-left md:max-w-[300px] px-4 md:px-0 text-center">
Enter your new password.
</p>
</div>
</div>
<div className="md:px-12 px-6 space-y-6 flex h-full w-full">
<div className="w-full flex flex-col gap-y-4">
<div>
<input
type="password"
name="newPassword"
placeholder="New Password"
value={newPassword}
onChange={(e) => setNewPassword(e.target.value)}
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required
/>
</div>
<div>
<input
type="password"
name="confirmPassword"
placeholder="Confirm Password"
value={confirmPassword}
onChange={(e) => setConfirmPassword(e.target.value)}
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required
/>
</div>
</div>
</div>
<div className="flex items-center md:p-12 md:px-0 px-6 mt-12 md:mt-0 space-x-2 border-gray-600 w-full flex-col gap-y-8">
<button
type="submit"
className="md:text-[#46C8FF] md:bg-transparent md:w-[300px] text-[#222628] text-sm font-bold focus:ring-4 focus:outline-none rounded-md border-[1.5px] border-[#46C8FF] md:h-[34px] h-[48px] md:hover:text-white md:hover:bg-[#46C8FF] bg-[#46C8FF] focus:z-10 w-full"
>
Reset Password
</button>
</div>
</form>
);
};
export default function MultiUserAuth() {
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
const { logo: _initLogo } = useLogo();
const [recoveryCodes, setRecoveryCodes] = useState([]);
const [downloadComplete, setDownloadComplete] = useState(false);
const [user, setUser] = useState(null);
const [token, setToken] = useState(null);
const [showRecoveryForm, setShowRecoveryForm] = useState(false);
const [showResetPasswordForm, setShowResetPasswordForm] = useState(false);
const {
isOpen: isRecoveryCodeModalOpen,
openModal: openRecoveryCodeModal,
closeModal: closeRecoveryCodeModal,
} = useModal();
const handleLogin = async (e) => {
setError(null);
setLoading(true);
e.preventDefault();
const data = {};
const form = new FormData(e.target);
for (var [key, value] of form.entries()) data[key] = value;
const { valid, user, token, message } = await System.requestToken(data);
const { valid, user, token, message, recoveryCodes } =
await System.requestToken(data);
if (valid && !!token && !!user) {
window.localStorage.setItem(AUTH_USER, JSON.stringify(user));
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
setUser(user);
setToken(token);
if (recoveryCodes) {
setRecoveryCodes(recoveryCodes);
openRecoveryCodeModal();
} else {
window.localStorage.setItem(AUTH_USER, JSON.stringify(user));
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
}
} else {
setError(message);
setLoading(false);
@ -28,57 +205,134 @@ export default function MultiUserAuth() {
setLoading(false);
};
const handleDownloadComplete = () => setDownloadComplete(true);
const handleResetPassword = () => setShowRecoveryForm(true);
const handleRecoverySubmit = async (username, recoveryCodes) => {
const { success, resetToken, error } = await System.recoverAccount(
username,
recoveryCodes
);
if (success && resetToken) {
window.localStorage.setItem("resetToken", resetToken);
setShowRecoveryForm(false);
setShowResetPasswordForm(true);
} else {
showToast(error, "error", { clear: true });
}
};
const handleResetSubmit = async (newPassword, confirmPassword) => {
const resetToken = window.localStorage.getItem("resetToken");
if (resetToken) {
const { success, error } = await System.resetPassword(
resetToken,
newPassword,
confirmPassword
);
if (success) {
window.localStorage.removeItem("resetToken");
setShowResetPasswordForm(false);
showToast("Password reset successful", "success", { clear: true });
} else {
showToast(error, "error", { clear: true });
}
} else {
showToast("Invalid reset token", "error", { clear: true });
}
};
useEffect(() => {
if (downloadComplete && user && token) {
window.localStorage.setItem(AUTH_USER, JSON.stringify(user));
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
}
}, [downloadComplete, user, token]);
if (showRecoveryForm) {
return (
<RecoveryForm
onSubmit={handleRecoverySubmit}
setShowRecoveryForm={setShowRecoveryForm}
/>
);
}
if (showResetPasswordForm)
return <ResetPasswordForm onSubmit={handleResetSubmit} />;
return (
<form onSubmit={handleLogin}>
<div className="flex flex-col justify-center items-center relative rounded-2xl shadow border-2 border-slate-300 border-opacity-20 w-[400px] login-input-gradient">
<div className="flex items-start justify-between pt-11 pb-9 rounded-t">
<div className="flex items-center flex-col">
<h3 className="text-md md:text-2xl font-bold text-gray-900 dark:text-white text-center">
Sign In
</h3>
</div>
</div>
<div className="px-12 space-y-6 flex h-full w-full">
<div className="w-full flex flex-col gap-y-4">
<div>
<input
name="username"
type="text"
placeholder="Username"
className="bg-opacity-40 border-gray-300 text-sm rounded-lg block w-full p-2.5 bg-[#222628] placeholder-[#FFFFFF99] text-white focus:ring-blue-500 focus:border-blue-500"
required={true}
autoComplete="off"
/>
</div>
<div>
<input
name="password"
type="password"
placeholder="Password"
className="bg-opacity-40 border-gray-300 text-sm rounded-lg block w-full p-2.5 bg-[#222628] placeholder-[#FFFFFF99] text-white focus:ring-blue-500 focus:border-blue-500"
required={true}
autoComplete="off"
/>
</div>
{error && (
<p className="text-red-600 dark:text-red-400 text-sm">
Error: {error}
<>
<form onSubmit={handleLogin}>
<div className="flex flex-col justify-center items-center relative rounded-2xl md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)] md:px-12 py-12 -mt-4 md:mt-0">
<div className="flex items-start justify-between pt-11 pb-9 rounded-t">
<div className="flex items-center flex-col gap-y-4">
<div className="flex gap-x-1">
<h3 className="text-md md:text-2xl font-bold text-white text-center white-space-nowrap hidden md:block">
Welcome to
</h3>
<p className="text-4xl md:text-2xl font-bold bg-gradient-to-r from-[#75D6FF] via-[#FFFFFF] to-[#FFFFFF] bg-clip-text text-transparent">
AnythingLLM
</p>
</div>
<p className="text-sm text-white/90 text-center">
Sign in to your AnythingLLM account.
</p>
)}
</div>
</div>
<div className="w-full px-4 md:px-12">
<div className="w-full flex flex-col gap-y-4">
<div className="w-screen md:w-full md:px-0 px-6">
<input
name="username"
type="text"
placeholder="Username"
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required={true}
autoComplete="off"
/>
</div>
<div className="w-screen md:w-full md:px-0 px-6">
<input
name="password"
type="password"
placeholder="Password"
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required={true}
autoComplete="off"
/>
</div>
{error && <p className="text-red-400 text-sm">Error: {error}</p>}
</div>
</div>
<div className="flex items-center md:p-12 px-10 mt-12 md:mt-0 space-x-2 border-gray-600 w-full flex-col gap-y-8">
<button
disabled={loading}
type="submit"
className="md:text-[#46C8FF] md:bg-transparent text-[#222628] text-sm font-bold focus:ring-4 focus:outline-none rounded-md border-[1.5px] border-[#46C8FF] md:h-[34px] h-[48px] md:hover:text-white md:hover:bg-[#46C8FF] bg-[#46C8FF] focus:z-10 w-full"
>
{loading ? "Validating..." : "Login"}
</button>
<button
type="button"
className="text-white text-sm flex gap-x-1 hover:text-[#46C8FF] hover:underline"
onClick={handleResetPassword}
>
Forgot password?<b>Reset</b>
</button>
</div>
</div>
<div className="flex items-center p-12 space-x-2 border-gray-200 rounded-b dark:border-gray-600 w-full">
<button
disabled={loading}
type="submit"
className="text-gray-500 bg-white hover:bg-gray-100 focus:ring-4 focus:outline-none focus:ring-blue-300 rounded-lg border border-white text-sm font-bold px-5 py-2.5 hover:text-gray-900 focus:z-10 dark:bg-white dark:text-neutral-700 dark:border-white dark:hover:text-white dark:hover:bg-slate-600 dark:focus:ring-gray-600 w-full"
>
{loading ? "Validating..." : "Login"}
</button>
</div>
</div>
</form>
</form>
<ModalWrapper isOpen={isRecoveryCodeModalOpen}>
<RecoveryCodeModal
recoveryCodes={recoveryCodes}
onDownloadComplete={handleDownloadComplete}
onClose={closeRecoveryCodeModal}
/>
</ModalWrapper>
</>
);
}
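
The conditional above implies System.requestToken can now include a recoveryCodes array, presumably only when codes are first generated (an inference from this diff, not confirmed elsewhere). A sketch of the two response shapes the handler distinguishes, with illustrative values:

const { valid, user, token, message, recoveryCodes } =
  await System.requestToken(data);
// First login: recoveryCodes = ["a1b2c3", "d4e5f6"]
//   -> modal opens; AUTH_USER/AUTH_TOKEN writes wait for handleDownloadComplete.
// Later logins: recoveryCodes is undefined
//   -> credentials are stored and the app redirects to paths.home() immediately.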

View File

@ -1,25 +1,44 @@
import React, { useState } from "react";
import React, { useEffect, useState } from "react";
import System from "../../../models/system";
import { AUTH_TOKEN } from "../../../utils/constants";
import useLogo from "../../../hooks/useLogo";
import paths from "../../../utils/paths";
import ModalWrapper from "@/components/ModalWrapper";
import { useModal } from "@/hooks/useModal";
import RecoveryCodeModal from "@/components/Modals/DisplayRecoveryCodeModal";
export default function SingleUserAuth() {
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
const { logo: _initLogo } = useLogo();
const [recoveryCodes, setRecoveryCodes] = useState([]);
const [downloadComplete, setDownloadComplete] = useState(false);
const [token, setToken] = useState(null);
const {
isOpen: isRecoveryCodeModalOpen,
openModal: openRecoveryCodeModal,
closeModal: closeRecoveryCodeModal,
} = useModal();
const handleLogin = async (e) => {
setError(null);
setLoading(true);
e.preventDefault();
const data = {};
const form = new FormData(e.target);
for (var [key, value] of form.entries()) data[key] = value;
const { valid, token, message } = await System.requestToken(data);
const { valid, token, message, recoveryCodes } =
await System.requestToken(data);
if (valid && !!token) {
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
setToken(token);
if (recoveryCodes) {
setRecoveryCodes(recoveryCodes);
openRecoveryCodeModal();
} else {
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
}
} else {
setError(message);
setLoading(false);
@ -27,45 +46,71 @@ export default function SingleUserAuth() {
setLoading(false);
};
const handleDownloadComplete = () => {
setDownloadComplete(true);
};
useEffect(() => {
if (downloadComplete && token) {
window.localStorage.setItem(AUTH_TOKEN, token);
window.location = paths.home();
}
}, [downloadComplete, token]);
return (
<form onSubmit={handleLogin}>
<div className="flex flex-col justify-center items-center relative bg-white rounded-2xl shadow dark:bg-stone-700 border-2 border-slate-300 border-opacity-20 w-[400px] login-input-gradient">
<div className="flex items-start justify-between pt-11 pb-9 rounded-t dark:border-gray-600">
<div className="flex items-center flex-col">
<h3 className="text-md md:text-2xl font-bold text-gray-900 dark:text-white text-center">
Sign In
</h3>
</div>
</div>
<div className="px-12 space-y-6 flex h-full w-full">
<div className="w-full flex flex-col gap-y-4">
<div>
<input
name="password"
type="password"
placeholder="Password"
className="bg-neutral-800 bg-opacity-40 border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-[#222628] dark:bg-opacity-40 dark:placeholder-[#FFFFFF99] dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"
required={true}
autoComplete="off"
/>
</div>
{error && (
<p className="text-red-600 dark:text-red-400 text-sm">
Error: {error}
<>
<form onSubmit={handleLogin}>
<div className="flex flex-col justify-center items-center relative rounded-2xl md:bg-login-gradient md:shadow-[0_4px_14px_rgba(0,0,0,0.25)] md:px-12 py-12 -mt-36 md:-mt-10">
<div className="flex items-start justify-between pt-11 pb-9 rounded-t">
<div className="flex items-center flex-col gap-y-4">
<div className="flex gap-x-1">
<h3 className="text-md md:text-2xl font-bold text-white text-center white-space-nowrap hidden md:block">
Welcome to
</h3>
<p className="text-4xl md:text-2xl font-bold bg-gradient-to-r from-[#75D6FF] via-[#FFFFFF] to-[#FFFFFF] bg-clip-text text-transparent">
AnythingLLM
</p>
</div>
<p className="text-sm text-white/90 text-center">
Sign in to your AnythingLLM instance.
</p>
)}
</div>
</div>
<div className="w-full px-4 md:px-12">
<div className="w-full flex flex-col gap-y-4">
<div className="w-screen md:w-full md:px-0 px-6">
<input
name="password"
type="password"
placeholder="Password"
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-md p-2.5 w-full h-[48px] md:w-[300px] md:h-[34px]"
required={true}
autoComplete="off"
/>
</div>
{error && <p className="text-red-400 text-sm">Error: {error}</p>}
</div>
</div>
<div className="flex items-center md:p-12 px-10 mt-12 md:mt-0 space-x-2 border-gray-600 w-full flex-col gap-y-8">
<button
disabled={loading}
type="submit"
className="md:text-[#46C8FF] md:bg-transparent text-[#222628] text-sm font-bold focus:ring-4 focus:outline-none rounded-md border-[1.5px] border-[#46C8FF] md:h-[34px] h-[48px] md:hover:text-white md:hover:bg-[#46C8FF] bg-[#46C8FF] focus:z-10 w-full"
>
{loading ? "Validating..." : "Login"}
</button>
</div>
</div>
<div className="flex items-center p-12 space-x-2 border-gray-200 rounded-b dark:border-gray-600 w-full">
<button
disabled={loading}
type="submit"
className="text-gray-500 bg-white hover:bg-gray-100 focus:ring-4 focus:outline-none focus:ring-blue-300 rounded-lg border border-white text-sm font-bold px-5 py-2.5 hover:text-gray-900 focus:z-10 dark:bg-white dark:text-neutral-700 dark:border-white dark:hover:text-white dark:hover:bg-slate-600 dark:focus:ring-gray-600 w-full"
>
{loading ? "Validating..." : "Login"}
</button>
</div>
</div>
</form>
</form>
<ModalWrapper isOpen={isRecoveryCodeModalOpen}>
<RecoveryCodeModal
recoveryCodes={recoveryCodes}
onDownloadComplete={handleDownloadComplete}
onClose={closeRecoveryCodeModal}
/>
</ModalWrapper>
</>
);
}

View File

@ -8,26 +8,40 @@ import {
AUTH_TIMESTAMP,
} from "../../../utils/constants";
import useLogo from "../../../hooks/useLogo";
import illustration from "@/media/illustrations/login-illustration.svg";
import loginLogo from "@/media/illustrations/login-logo.svg";
export default function PasswordModal({ mode = "single" }) {
const { logo: _initLogo } = useLogo();
return (
<div className="fixed top-0 left-0 right-0 z-50 w-full p-4 overflow-x-hidden overflow-y-auto md:inset-0 h-[calc(100%-1rem)] h-full bg-zinc-800 flex items-center justify-center">
<div className="fixed top-0 left-0 right-0 z-50 w-full overflow-x-hidden overflow-y-auto md:inset-0 h-[calc(100%-1rem)] h-full bg-[#25272C] flex flex-col md:flex-row items-center justify-center">
<div
className="fixed top-0 left-0 right-0 bottom-0 z-40 animate-slow-pulse"
style={{
background: `
radial-gradient(circle at center, transparent 40%, black 100%),
linear-gradient(180deg, #FF8585 0%, #D4A447 100%)
`,
radial-gradient(circle at center, transparent 40%, black 100%),
linear-gradient(180deg, #85F8FF 0%, #65A6F2 100%)
`,
width: "575px",
filter: "blur(200px)",
margin: "auto",
filter: "blur(150px)",
opacity: "0.4",
}}
className="absolute left-0 top-0 z-0 h-full w-full"
/>
<div className="flex flex-col items-center justify-center h-full w-full z-50">
<img src={_initLogo} className="mb-20 w-80 opacity-80" alt="logo" />
<div className="hidden md:flex md:w-1/2 md:h-full md:items-center md:justify-center">
<img
className="w-full h-full object-contain z-50"
src={illustration}
alt="login illustration"
/>
</div>
<div className="flex flex-col items-center justify-center h-full w-full md:w-1/2 z-50 relative">
<img
src={loginLogo}
className={`mb-8 w-[84px] h-[84px] absolute ${
mode === "single" ? "md:top-50" : "md:top-36"
} top-44 z-30`}
alt="logo"
/>
{mode === "single" ? <SingleUserAuth /> : <MultiUserAuth />}
</div>
</div>

View File

@ -0,0 +1,50 @@
export default function CustomCell({ ...props }) {
const {
root,
depth,
x,
y,
width,
height,
index,
payload,
colors,
rank,
name,
} = props;
return (
<g>
<rect
x={x}
y={y}
width={width}
height={height}
style={{
fill:
depth < 2
? colors[Math.floor((index / root.children.length) * 6)]
: "#ffffff00",
stroke: "#fff",
strokeWidth: 2 / (depth + 1e-10),
strokeOpacity: 1 / (depth + 1e-10),
}}
/>
{depth === 1 ? (
<text
x={x + width / 2}
y={y + height / 2 + 7}
textAnchor="middle"
fill="#fff"
fontSize={14}
>
{name}
</text>
) : null}
{depth === 1 ? (
<text x={x + 4} y={y + 18} fill="#fff" fontSize={16} fillOpacity={0.9}>
{index + 1}
</text>
) : null}
</g>
);
}
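
The fill expression above spreads depth-1 cells across the first six palette entries by index; worked through with illustrative numbers:

// Chartable passes colors={Object.values(Colors)}; with 12 children under
// root, the depth-1 cell at index 6 resolves to:
Math.floor((6 / 12) * 6); // 3 -> colors[3] === "#14b8a6" (teal)
// Cells at depth >= 2 fall through to the transparent "#ffffff00" fill.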

View File

@ -0,0 +1,89 @@
import { Tooltip as RechartsTooltip } from "recharts";
// Given a hex, convert to the opposite highest-contrast color
// and if `bw` is enabled, force it to be black/white to normalize
// interface.
function invertColor(hex, bw) {
if (hex.indexOf("#") === 0) {
hex = hex.slice(1);
}
// convert 3-digit hex to 6-digits.
if (hex.length === 3) {
hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2];
}
if (hex.length !== 6) {
throw new Error("Invalid HEX color.");
}
var r = parseInt(hex.slice(0, 2), 16),
g = parseInt(hex.slice(2, 4), 16),
b = parseInt(hex.slice(4, 6), 16);
if (bw) {
// https://stackoverflow.com/a/3943023/112731
return r * 0.299 + g * 0.587 + b * 0.114 > 186 ? "#FFFFFF" : "#000000";
// : '#FFFFFF';
}
// invert color components
r = (255 - r).toString(16);
g = (255 - g).toString(16);
b = (255 - b).toString(16);
// pad each with zeros and return
return "#" + padZero(r) + padZero(g) + padZero(b);
}
function padZero(str, len) {
len = len || 2;
var zeros = new Array(len).join("0");
return (zeros + str).slice(-len);
}
export default function Tooltip({ legendColor, ...props }) {
return (
<RechartsTooltip
wrapperStyle={{ outline: "none" }}
isAnimationActive={false}
cursor={{ fill: "#d1d5db", opacity: "0.15" }}
position={{ y: 0 }}
{...props}
content={({ active, payload, label }) => {
return active && payload ? (
<div className="bg-white text-sm rounded-md border shadow-lg">
<div className="border-b py-2 px-4">
<p className="text-elem text-gray-700 font-medium">{label}</p>
</div>
<div className="space-y-1 py-2 px-4">
{payload.map(({ value, name }, idx) => (
<div
key={`id-${idx}`}
className="flex items-center justify-between space-x-8"
>
<div className="flex items-center space-x-2">
<span
className="shrink-0 h-3 w-3 border-white rounded-md rounded-full border-2 shadow-md"
style={{ backgroundColor: legendColor }}
/>
<p
style={{
color: invertColor(legendColor, true),
}}
className="font-medium tabular-nums text-right whitespace-nowrap"
>
{value}
</p>
</div>
<p
style={{
color: invertColor(legendColor, true),
}}
className="whitespace-nowrap font-normal"
>
{name}
</p>
</div>
))}
</div>
</div>
) : null;
}}
/>
);
}
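
Two worked passes through the black/white branch above (colors illustrative): "#46C8FF" has weighted luminance 70*0.299 + 200*0.587 + 255*0.114 ≈ 167.4, while "#d1d5db" comes to ≈ 212.5, so only the second crosses the 186 threshold:

invertColor("#46C8FF", true); // "#000000" (167.4 is not > 186)
invertColor("#d1d5db", true); // "#FFFFFF" (212.5 > 186)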

View File

@ -0,0 +1,98 @@
export const Colors = {
blue: "#3b82f6",
sky: "#0ea5e9",
cyan: "#06b6d4",
teal: "#14b8a6",
emerald: "#10b981",
green: "#22c55e",
lime: "#84cc16",
yellow: "#eab308",
amber: "#f59e0b",
orange: "#f97316",
red: "#ef4444",
rose: "#f43f5e",
pink: "#ec4899",
fuchsia: "#d946ef",
purple: "#a855f7",
violet: "#8b5cf6",
indigo: "#6366f1",
neutral: "#737373",
stone: "#78716c",
gray: "#6b7280",
slate: "#64748b",
zinc: "#71717a",
};
export function getTremorColor(color) {
switch (color) {
case "blue":
return Colors.blue;
case "sky":
return Colors.sky;
case "cyan":
return Colors.cyan;
case "teal":
return Colors.teal;
case "emerald":
return Colors.emerald;
case "green":
return Colors.green;
case "lime":
return Colors.lime;
case "yellow":
return Colors.yellow;
case "amber":
return Colors.amber;
case "orange":
return Colors.orange;
case "red":
return Colors.red;
case "rose":
return Colors.rose;
case "pink":
return Colors.pink;
case "fuchsia":
return Colors.fuchsia;
case "purple":
return Colors.purple;
case "violet":
return Colors.violet;
case "indigo":
return Colors.indigo;
case "neutral":
return Colors.neutral;
case "stone":
return Colors.stone;
case "gray":
return Colors.gray;
case "slate":
return Colors.slate;
case "zinc":
return Colors.zinc;
}
}
export const themeColorRange = [
"slate",
"gray",
"zinc",
"neutral",
"stone",
"red",
"orange",
"amber",
"yellow",
"lime",
"green",
"emerald",
"teal",
"cyan",
"sky",
"blue",
"indigo",
"violet",
"purple",
"fuchsia",
"pink",
"rose",
];
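
Since every case simply returns the key of the same name, the switch in getTremorColor is behaviorally equivalent to a direct map lookup; a compact sketch of the same contract:

const getTremorColorCompact = (color) => Colors[color];
getTremorColorCompact("violet"); // "#8b5cf6"
getTremorColorCompact("magenta"); // undefined, same as the switch's fall-through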

View File

@ -0,0 +1,467 @@
import { v4 } from "uuid";
import {
AreaChart,
BarChart,
DonutChart,
Legend,
LineChart,
} from "@tremor/react";
import {
Bar,
CartesianGrid,
ComposedChart,
Funnel,
FunnelChart,
Line,
PolarAngleAxis,
PolarGrid,
PolarRadiusAxis,
Radar,
RadarChart,
RadialBar,
RadialBarChart,
Scatter,
ScatterChart,
Treemap,
XAxis,
YAxis,
} from "recharts";
import { Colors, getTremorColor } from "./chart-utils.js";
import CustomCell from "./CustomCell.jsx";
import Tooltip from "./CustomTooltip.jsx";
import { safeJsonParse } from "@/utils/request.js";
import renderMarkdown from "@/utils/chat/markdown.js";
import { WorkspaceProfileImage } from "../PromptReply/index.jsx";
import { memo, useCallback, useState } from "react";
import { saveAs } from "file-saver";
import { useGenerateImage } from "recharts-to-png";
import { CircleNotch, DownloadSimple } from "@phosphor-icons/react";
const dataFormatter = (number) => {
return Intl.NumberFormat("us").format(number).toString();
};
export function Chartable({ props, workspace }) {
const [getDivJpeg, { ref }] = useGenerateImage({
quality: 1,
type: "image/jpeg",
options: {
backgroundColor: "#393d43",
padding: 20,
},
});
const handleDownload = useCallback(async () => {
const jpeg = await getDivJpeg();
if (jpeg) saveAs(jpeg, `chart-${v4().split("-")[0]}.jpg`);
}, []);
const color = null;
const showLegend = true;
const content =
typeof props.content === "string"
? safeJsonParse(props.content, null)
: props.content;
if (content === null) return null;
const chartType = content?.type?.toLowerCase();
const data =
typeof content.dataset === "string"
? safeJsonParse(content.dataset, null)
: content.dataset;
const value = data.length > 0 ? Object.keys(data[0])[1] : "value";
const title = content?.title;
const renderChart = () => {
switch (chartType) {
case "area":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
<AreaChart
className="h-[350px]"
data={data}
index="name"
categories={[value]}
colors={[color || "blue", "cyan"]}
showLegend={showLegend}
valueFormatter={dataFormatter}
/>
</div>
);
case "bar":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
<BarChart
className="h-[350px]"
data={data}
index="name"
categories={[value]}
colors={[color || "blue"]}
showLegend={showLegend}
valueFormatter={dataFormatter}
layout={"vertical"}
yAxisWidth={100}
/>
</div>
);
case "line":
return (
<div className="bg-zinc-900 p-8 pb-12 rounded-xl text-white h-[500px]">
<h3 className="text-lg font-medium">{title}</h3>
<LineChart
className="h-[400px]"
data={data}
index="name"
categories={[value]}
colors={[color || "blue"]}
showLegend={showLegend}
valueFormatter={dataFormatter}
/>
</div>
);
case "composed":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5 justify-end"
/>
)}
<ComposedChart width={500} height={260} data={data}>
<CartesianGrid
strokeDasharray="3 3"
horizontal
vertical={false}
/>
<XAxis
dataKey="name"
tickLine={false}
axisLine={false}
interval="preserveStartEnd"
tick={{ transform: "translate(0, 6)", fill: "white" }}
style={{
fontSize: "12px",
fontFamily: "Inter; Helvetica",
}}
padding={{ left: 10, right: 10 }}
/>
<YAxis
tickLine={false}
axisLine={false}
type="number"
tick={{ transform: "translate(-3, 0)", fill: "white" }}
style={{
fontSize: "12px",
fontFamily: "Inter; Helvetica",
}}
/>
<Tooltip legendColor={getTremorColor(color || "blue")} />
<Line
type="linear"
dataKey={value}
stroke={getTremorColor(color || "blue")}
dot={false}
strokeWidth={2}
/>
<Bar
dataKey="value"
name="value"
type="linear"
fill={getTremorColor(color || "blue")}
/>
</ComposedChart>
</div>
);
case "scatter":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<div className="flex justify-end">
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5"
/>
</div>
)}
<ScatterChart width={500} height={260} data={data}>
<CartesianGrid
strokeDasharray="3 3"
horizontal
vertical={false}
/>
<XAxis
dataKey="name"
tickLine={false}
axisLine={false}
interval="preserveStartEnd"
tick={{ transform: "translate(0, 6)", fill: "white" }}
style={{
fontSize: "12px",
fontFamily: "Inter; Helvetica",
}}
padding={{ left: 10, right: 10 }}
/>
<YAxis
tickLine={false}
axisLine={false}
type="number"
tick={{ transform: "translate(-3, 0)", fill: "white" }}
style={{
fontSize: "12px",
fontFamily: "Inter; Helvetica",
}}
/>
<Tooltip legendColor={getTremorColor(color || "blue")} />
<Scatter dataKey={value} fill={getTremorColor(color || "blue")} />
</ScatterChart>
</div>
);
case "pie":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
<DonutChart
data={data}
category={value}
index="name"
colors={[
color || "cyan",
"violet",
"rose",
"amber",
"emerald",
"teal",
"fuchsia",
]}
// No actual legend for pie chart, but this will toggle the central text
showLabel={showLegend}
valueFormatter={dataFormatter}
customTooltip={customTooltip}
/>
</div>
);
case "radar":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<div className="flex justify-end">
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5"
/>
</div>
)}
<RadarChart
cx={300}
cy={250}
outerRadius={150}
width={600}
height={500}
data={data}
>
<PolarGrid />
<PolarAngleAxis dataKey="name" tick={{ fill: "white" }} />
<PolarRadiusAxis tick={{ fill: "white" }} />
<Tooltip legendColor={getTremorColor(color || "blue")} />
<Radar
dataKey="value"
stroke={getTremorColor(color || "blue")}
fill={getTremorColor(color || "blue")}
fillOpacity={0.6}
/>
</RadarChart>
</div>
);
case "radialbar":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<div className="flex justify-end">
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5"
/>
</div>
)}
<RadialBarChart
width={500}
height={300}
cx={150}
cy={150}
innerRadius={20}
outerRadius={140}
barSize={10}
data={data}
>
<RadialBar
angleAxisId={15}
label={{
position: "insideStart",
fill: getTremorColor(color || "blue"),
}}
dataKey="value"
/>
<Tooltip legendColor={getTremorColor(color || "blue")} />
</RadialBarChart>
</div>
);
case "treemap":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<div className="flex justify-end">
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5"
/>
</div>
)}
<Treemap
width={500}
height={260}
data={data}
dataKey="value"
stroke="#fff"
fill={getTremorColor(color || "blue")}
content={<CustomCell colors={Object.values(Colors)} />}
>
<Tooltip legendColor={getTremorColor(color || "blue")} />
</Treemap>
</div>
);
case "funnel":
return (
<div className="bg-zinc-900 p-8 rounded-xl text-white">
<h3 className="text-lg font-medium">{title}</h3>
{showLegend && (
<div className="flex justify-end">
<Legend
categories={[value]}
colors={[color || "blue", color || "blue"]}
className="mb-5"
/>
</div>
)}
<FunnelChart width={500} height={300} data={data}>
<Tooltip legendColor={getTremorColor(color || "blue")} />
<Funnel dataKey="value" color={getTremorColor(color || "blue")} />
</FunnelChart>
</div>
);
default:
return <p>Unsupported chart type.</p>;
}
};
if (!!props.chatId) {
return (
<div className="flex justify-center items-end w-full">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="flex gap-x-5">
<WorkspaceProfileImage workspace={workspace} />
<div className="relative">
<DownloadGraph onClick={handleDownload} />
<div ref={ref}>{renderChart()}</div>
<span
className={`flex flex-col gap-y-1 mt-2`}
dangerouslySetInnerHTML={{
__html: renderMarkdown(content.caption),
}}
/>
</div>
</div>
</div>
</div>
);
}
return (
<div className="flex justify-center items-end w-full">
<div className="py-2 px-4 w-full flex gap-x-5 md:max-w-[800px] flex-col">
<div className="relative">
<DownloadGraph onClick={handleDownload} />
<div ref={ref}>{renderChart()}</div>
</div>
<div className="flex gap-x-5">
<span
className={`flex flex-col gap-y-1 mt-2`}
dangerouslySetInnerHTML={{
__html: renderMarkdown(content.caption),
}}
/>
</div>
</div>
</div>
);
}
const customTooltip = (props) => {
const { payload, active } = props;
if (!active || !payload) return null;
const categoryPayload = payload?.[0];
if (!categoryPayload) return null;
return (
<div className="w-56 bg-zinc-400 rounded-lg border p-2 text-white">
<div className="flex flex-1 space-x-2.5">
<div
className={`flex w-1.5 flex-col bg-${categoryPayload?.color}-500 rounded`}
/>
<div className="w-full">
<div className="flex items-center justify-between space-x-8">
<p className="whitespace-nowrap text-right text-tremor-content">
{categoryPayload.name}
</p>
<p className="whitespace-nowrap text-right font-medium text-tremor-content-emphasis">
{categoryPayload.value}
</p>
</div>
</div>
</div>
</div>
);
};
function DownloadGraph({ onClick }) {
const [loading, setLoading] = useState(false);
const handleClick = async () => {
setLoading(true);
await onClick?.();
setLoading(false);
};
return (
<div className="absolute top-3 right-3 z-50 cursor-pointer">
<div className="flex flex-col items-center">
<div className="p-1 rounded-full border-none">
{loading ? (
<CircleNotch
className="text-white/50 w-5 h-5 animate-spin"
aria-label="Downloading image..."
/>
) : (
<DownloadSimple
weight="bold"
className="text-white/50 w-5 h-5 hover:text-white"
onClick={handleClick}
aria-label="Download graph image"
/>
)}
</div>
</div>
</div>
);
}
export default memo(Chartable);

View File

@ -14,6 +14,7 @@ import {
X,
YoutubeLogo,
} from "@phosphor-icons/react";
import ConfluenceLogo from "@/media/dataConnectors/confluence.png";
import { Tooltip } from "react-tooltip";
import { toPercentString } from "@/utils/numbers";
@ -202,13 +203,6 @@ function CitationDetailModal({ source, onClose }) {
);
}
const ICONS = {
file: FileText,
link: Link,
youtube: YoutubeLogo,
github: GithubLogo,
};
// Show the correct title and/or display text for citations
// which contain valid outbound links that can be clicked by the
// user when viewing a citation. Optionally allows various icons
@ -221,10 +215,17 @@ function parseChunkSource({ title = "", chunks = [] }) {
icon: "file",
};
if (!chunks.length || !chunks[0].chunkSource.startsWith("link://"))
if (
!chunks.length ||
(!chunks[0].chunkSource.startsWith("link://") &&
!chunks[0].chunkSource.startsWith("confluence://"))
)
return nullResponse;
try {
const url = new URL(chunks[0].chunkSource.split("link://")[1]);
const url = new URL(
chunks[0].chunkSource.split("link://")[1] ||
chunks[0].chunkSource.split("confluence://")[1]
);
let text = url.host + url.pathname;
let icon = "link";
@ -238,6 +239,11 @@ function parseChunkSource({ title = "", chunks = [] }) {
icon = "github";
}
if (url.host.includes("atlassian.net")) {
text = title;
icon = "confluence";
}
return {
isUrl: true,
href: url.toString(),
@ -247,3 +253,16 @@ function parseChunkSource({ title = "", chunks = [] }) {
} catch {}
return nullResponse;
}
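// A worked example of the Confluence branch above (values are hypothetical):
// parseChunkSource({
//   title: "Engineering Handbook",
//   chunks: [{ chunkSource: "confluence://https://acme.atlassian.net/wiki/spaces/ENG/pages/1" }],
// });
// // => roughly { isUrl: true, href: "https://acme.atlassian.net/wiki/spaces/ENG/pages/1",
// //              text: "Engineering Handbook", icon: "confluence" }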
// Patch to render the Confluence icon as an element, like we do with Phosphor icons
const ConfluenceIcon = ({ ...props }) => (
<img src={ConfluenceLogo} alt="Confluence" {...props} />
);
const ICONS = {
file: FileText,
link: Link,
youtube: YoutubeLogo,
github: GithubLogo,
confluence: ConfluenceIcon,
};

View File

@ -71,7 +71,7 @@ const PromptReply = ({
);
};
function WorkspaceProfileImage({ workspace }) {
export function WorkspaceProfileImage({ workspace }) {
if (!!workspace.pfpUrl) {
return (
<div className="relative w-[35px] h-[35px] rounded-full flex-shrink-0 overflow-hidden">

View File

@ -6,6 +6,7 @@ import ManageWorkspace from "../../../Modals/MangeWorkspace";
import { ArrowDown } from "@phosphor-icons/react";
import debounce from "lodash.debounce";
import useUser from "@/hooks/useUser";
import Chartable from "./Chartable";
export default function ChatHistory({ history = [], workspace, sendCommand }) {
const { user } = useUser();
@ -133,6 +134,12 @@ export default function ChatHistory({ history = [], workspace, sendCommand }) {
return <StatusResponse key={props.uuid} props={props} />;
}
if (props.type === "rechartVisualize" && !!props.content) {
return (
<Chartable key={props.uuid} workspace={workspace} props={props} />
);
}
if (isLastBotReply && props.animate) {
return (
<PromptReply

View File

@ -89,6 +89,7 @@ export function AvailableAgents({
<AbilityTag text="save-file-to-browser" />
<AbilityTag text="list-documents" />
<AbilityTag text="summarize-document" />
<AbilityTag text="chart-generation" />
</div>
</div>
</button>

View File

@ -20,7 +20,6 @@ const PROVIDER_DEFAULT_MODELS = {
ollama: [],
togetherai: [],
groq: [
"llama2-70b-4096",
"mixtral-8x7b-32768",
"llama3-8b-8192",
"llama3-70b-8192",

View File

@ -679,3 +679,66 @@ does not extend the close button beyond the viewport. */
.white-scrollbar::-webkit-scrollbar-thumb:hover {
background-color: #cccccc;
}
/* Recharts rendering styles */
.recharts-text > * {
fill: #fff;
}
.recharts-legend-wrapper {
margin-bottom: 10px;
}
.text-tremor-content {
padding-bottom: 10px;
}
.file-upload {
-webkit-animation: fadein 0.3s linear forwards;
animation: fadein 0.3s linear forwards;
}
.file-upload-fadeout {
-webkit-animation: fadeout 0.3s linear forwards;
animation: fadeout 0.3s linear forwards;
}
@-webkit-keyframes fadein {
0% {
opacity: 0;
}
100% {
opacity: 1;
}
}
@keyframes fadein {
0% {
opacity: 0;
}
100% {
opacity: 1;
}
}
@-webkit-keyframes fadeout {
0% {
opacity: 1;
}
100% {
opacity: 0;
}
}
@keyframes fadeout {
0% {
opacity: 1;
}
100% {
opacity: 0;
}
}

Binary file not shown (image added, 9.4 KiB)

View File

@ -0,0 +1,174 @@
<svg width="500" height="656" viewBox="0 0 500 656" fill="none" xmlns="http://www.w3.org/2000/svg">
<g filter="url(#filter0_d_1_4)">
<g filter="url(#filter1_ii_1_4)">
<path d="M126.778 581.68V225.373L177.937 256.068V611.774L126.778 581.68Z" fill="url(#paint0_linear_1_4)"/>
</g>
<path d="M127.929 577.98L192.097 616.48L177.693 625.145L112.619 588.534L112.619 220.107L127.817 208.962L127.929 577.98Z" fill="url(#paint1_linear_1_4)"/>
<path d="M176.786 258.588L112.619 220.088L128.154 208.851L192.096 248.034V616.461L177.596 625.326L176.786 258.588Z" fill="url(#paint2_linear_1_4)"/>
<g filter="url(#filter2_ii_1_4)">
<path d="M265.61 514.411V158.104L316.769 188.799V544.505L265.61 514.411Z" fill="url(#paint3_linear_1_4)"/>
</g>
<path d="M266.761 510.711L330.928 549.211L316.525 557.876L251.451 521.266L251.451 152.839L266.648 141.694L266.761 510.711Z" fill="url(#paint4_linear_1_4)"/>
<path d="M315.618 191.32L251.451 152.82L266.986 141.583L330.928 180.765V549.192L316.428 558.057L315.618 191.32Z" fill="url(#paint5_linear_1_4)"/>
<g filter="url(#filter3_ii_1_4)">
<path d="M404.442 418.683V62.3754L455.602 93.071V448.776L404.442 418.683Z" fill="url(#paint6_linear_1_4)"/>
</g>
<path d="M405.594 414.982L469.761 453.483L455.357 462.147L390.283 425.537L390.283 57.11L405.481 45.9652L405.594 414.982Z" fill="url(#paint7_linear_1_4)"/>
<path d="M454.45 95.5913L390.283 57.0911L405.818 45.8542L469.761 85.0366V453.464L455.261 462.328L454.45 95.5913Z" fill="url(#paint8_linear_1_4)"/>
</g>
<rect x="88.956" y="351.304" width="68.0244" height="40.4539" rx="15" fill="url(#paint9_linear_1_4)"/>
<rect x="104.57" y="359.68" width="36.797" height="5.23376" rx="2.61688" fill="white" fill-opacity="0.8"/>
<rect x="104.57" y="378.148" width="36.797" height="5.23376" rx="2.61688" fill="white" fill-opacity="0.8"/>
<rect x="104.57" y="368.914" width="36.797" height="5.23376" rx="2.61688" fill="white" fill-opacity="0.8"/>
<mask id="mask0_1_4" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="211" width="178" height="436">
<rect x="0.787216" y="211.982" width="177.152" height="434.649" fill="#D9D9D9"/>
</mask>
<g mask="url(#mask0_1_4)">
<rect x="51.503" y="479.103" width="183.106" height="78.9537" rx="39.4769" fill="url(#paint10_linear_1_4)"/>
<circle cx="99.9761" cy="509.549" r="13.9262" fill="white"/>
<circle cx="143.056" cy="519.287" r="13.9262" fill="white"/>
<circle cx="186.136" cy="519.287" r="13.9262" fill="white"/>
</g>
<mask id="mask1_1_4" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="148" y="178" width="169" height="340">
<rect x="148.819" y="178.725" width="167.95" height="338.735" fill="#D9D9D9"/>
</mask>
<g mask="url(#mask1_1_4)">
<rect x="187.512" y="233.079" width="183.106" height="78.9537" rx="39.4769" fill="url(#paint11_linear_1_4)"/>
<path d="M310.535 287.977L305.269 284.227L311.812 275.529L301.997 272.178L303.992 266.034L313.886 269.305V258.613H320.35V269.305L330.244 266.034L332.239 272.178L322.424 275.529L328.888 284.227L323.701 287.977L317.078 279.28L310.535 287.977Z" fill="white"/>
<path d="M270.716 287.977L265.449 284.227L271.992 275.529L262.178 272.178L264.173 266.034L274.067 269.305V258.613H280.53V269.305L290.425 266.034L292.42 272.178L282.605 275.529L289.068 284.227L283.882 287.977L277.259 279.28L270.716 287.977Z" fill="white"/>
<path d="M230.897 287.977L225.63 284.227L232.173 275.529L222.359 272.178L224.354 266.034L234.248 269.305V258.613H240.711V269.305L250.606 266.034L252.601 272.178L242.786 275.529L249.249 284.227L244.063 287.977L237.44 279.28L230.897 287.977Z" fill="white"/>
<rect x="252.529" y="387.811" width="100.24" height="43.2226" rx="21.6113" fill="url(#paint12_linear_1_4)"/>
<circle cx="279.065" cy="404.479" r="7.62378" fill="white"/>
<circle cx="302.649" cy="409.81" r="7.62378" fill="white"/>
</g>
<mask id="mask2_1_4" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="199" y="0" width="257" height="309">
<rect x="199.166" y="0.894867" width="256.435" height="307.227" fill="#D9D9D9"/>
</mask>
<g mask="url(#mask2_1_4)">
<rect x="317.531" y="103.658" width="183.106" height="108.893" rx="40" fill="url(#paint13_linear_1_4)"/>
<rect x="343.093" y="123.945" width="131.983" height="18.7724" rx="8" fill="white" fill-opacity="0.8"/>
<rect x="343.093" y="173.49" width="131.983" height="18.7724" rx="8" fill="white" fill-opacity="0.8"/>
<rect x="343.093" y="148.718" width="131.983" height="18.7724" rx="8" fill="white" fill-opacity="0.8"/>
</g>
<defs>
<filter id="filter0_d_1_4" x="102.619" y="35.8542" width="397.142" height="619.471" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="10" dy="10"/>
<feGaussianBlur stdDeviation="10"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1_4"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1_4" result="shape"/>
</filter>
<filter id="filter1_ii_1_4" x="122.778" y="221.373" width="59.1591" height="394.401" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-4" dy="4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.15 0"/>
<feBlend mode="normal" in2="shape" result="effect1_innerShadow_1_4"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="4" dy="-4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.471302 0 0 0 0 0.547141 0 0 0 0 0.651872 0 0 0 0.2 0"/>
<feBlend mode="normal" in2="effect1_innerShadow_1_4" result="effect2_innerShadow_1_4"/>
</filter>
<filter id="filter2_ii_1_4" x="261.61" y="154.104" width="59.159" height="394.401" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-4" dy="4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.15 0"/>
<feBlend mode="normal" in2="shape" result="effect1_innerShadow_1_4"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="4" dy="-4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.471302 0 0 0 0 0.547141 0 0 0 0 0.651872 0 0 0 0.2 0"/>
<feBlend mode="normal" in2="effect1_innerShadow_1_4" result="effect2_innerShadow_1_4"/>
</filter>
<filter id="filter3_ii_1_4" x="400.442" y="58.3754" width="59.159" height="394.401" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-4" dy="4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.15 0"/>
<feBlend mode="normal" in2="shape" result="effect1_innerShadow_1_4"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="4" dy="-4"/>
<feGaussianBlur stdDeviation="3"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.471302 0 0 0 0 0.547141 0 0 0 0 0.651872 0 0 0 0.2 0"/>
<feBlend mode="normal" in2="effect1_innerShadow_1_4" result="effect2_innerShadow_1_4"/>
</filter>
<linearGradient id="paint0_linear_1_4" x1="152.358" y1="225.373" x2="152.358" y2="611.774" gradientUnits="userSpaceOnUse">
<stop stop-color="#41495D"/>
<stop offset="1" stop-color="#293240"/>
</linearGradient>
<linearGradient id="paint1_linear_1_4" x1="152.358" y1="208.962" x2="152.358" y2="625.145" gradientUnits="userSpaceOnUse">
<stop stop-color="#151B23"/>
<stop offset="1" stop-color="#526A89"/>
</linearGradient>
<linearGradient id="paint2_linear_1_4" x1="152.358" y1="211.423" x2="152.358" y2="627.606" gradientUnits="userSpaceOnUse">
<stop stop-color="#697784"/>
<stop offset="1" stop-color="#181B1E"/>
</linearGradient>
<linearGradient id="paint3_linear_1_4" x1="291.189" y1="158.104" x2="291.189" y2="544.505" gradientUnits="userSpaceOnUse">
<stop stop-color="#41495D"/>
<stop offset="1" stop-color="#293240"/>
</linearGradient>
<linearGradient id="paint4_linear_1_4" x1="291.189" y1="141.694" x2="291.189" y2="557.876" gradientUnits="userSpaceOnUse">
<stop stop-color="#151B23"/>
<stop offset="1" stop-color="#526A89"/>
</linearGradient>
<linearGradient id="paint5_linear_1_4" x1="291.19" y1="144.155" x2="291.19" y2="560.337" gradientUnits="userSpaceOnUse">
<stop stop-color="#697784"/>
<stop offset="1" stop-color="#181B1E"/>
</linearGradient>
<linearGradient id="paint6_linear_1_4" x1="430.022" y1="62.3754" x2="430.022" y2="448.776" gradientUnits="userSpaceOnUse">
<stop stop-color="#41495D"/>
<stop offset="1" stop-color="#293240"/>
</linearGradient>
<linearGradient id="paint7_linear_1_4" x1="430.022" y1="45.9652" x2="430.022" y2="462.147" gradientUnits="userSpaceOnUse">
<stop stop-color="#151B23"/>
<stop offset="1" stop-color="#526A89"/>
</linearGradient>
<linearGradient id="paint8_linear_1_4" x1="430.022" y1="48.4262" x2="430.022" y2="464.608" gradientUnits="userSpaceOnUse">
<stop stop-color="#697784"/>
<stop offset="1" stop-color="#181B1E"/>
</linearGradient>
<linearGradient id="paint9_linear_1_4" x1="122.968" y1="351.304" x2="122.968" y2="391.758" gradientUnits="userSpaceOnUse">
<stop stop-color="#46C8FF"/>
<stop offset="0.438941" stop-color="#3AA5D2"/>
<stop offset="1" stop-color="#2A7899"/>
</linearGradient>
<linearGradient id="paint10_linear_1_4" x1="143.056" y1="479.103" x2="143.056" y2="558.057" gradientUnits="userSpaceOnUse">
<stop stop-color="#46C8FF"/>
<stop offset="0.438941" stop-color="#3AA5D2"/>
<stop offset="1" stop-color="#2A7899"/>
</linearGradient>
<linearGradient id="paint11_linear_1_4" x1="279.065" y1="233.079" x2="279.065" y2="312.033" gradientUnits="userSpaceOnUse">
<stop stop-color="#46C8FF"/>
<stop offset="0.438941" stop-color="#3AA5D2"/>
<stop offset="1" stop-color="#2A7899"/>
</linearGradient>
<linearGradient id="paint12_linear_1_4" x1="302.649" y1="387.811" x2="302.649" y2="431.034" gradientUnits="userSpaceOnUse">
<stop stop-color="#46C8FF"/>
<stop offset="0.438941" stop-color="#3AA5D2"/>
<stop offset="1" stop-color="#2A7899"/>
</linearGradient>
<linearGradient id="paint13_linear_1_4" x1="409.084" y1="103.658" x2="409.084" y2="212.55" gradientUnits="userSpaceOnUse">
<stop stop-color="#46C8FF"/>
<stop offset="0.438941" stop-color="#3AA5D2"/>
<stop offset="1" stop-color="#2A7899"/>
</linearGradient>
</defs>
</svg>

File diff suppressed because one or more lines are too long (image added, 9.8 KiB)

View File

@ -60,6 +60,29 @@ const DataConnector = {
});
},
},
confluence: {
collect: async function ({ pageUrl, username, accessToken }) {
return await fetch(`${API_BASE}/ext/confluence`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({
pageUrl,
username,
accessToken,
}),
})
.then((res) => res.json())
.then((res) => {
if (!res.success) throw new Error(res.reason);
return { data: res.data, error: null };
})
.catch((e) => {
console.error(e);
return { data: null, error: e.message };
});
},
},
};
export default DataConnector;
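// A minimal usage sketch of the new connector (values are hypothetical):
// const { data, error } = await DataConnector.confluence.collect({
//   pageUrl: "https://acme.atlassian.net/wiki/spaces/ENG/overview",
//   username: "user@acme.com",
//   accessToken: "atlassian-api-token",
// });
// if (error) console.error(error);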

View File

@ -77,6 +77,43 @@ const System = {
return { valid: false, message: e.message };
});
},
recoverAccount: async function (username, recoveryCodes) {
return await fetch(`${API_BASE}/system/recover-account`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ username, recoveryCodes }),
})
.then(async (res) => {
const data = await res.json();
if (!res.ok) {
throw new Error(data.message || "Error recovering account.");
}
return data;
})
.catch((e) => {
console.error(e);
return { success: false, error: e.message };
});
},
resetPassword: async function (token, newPassword, confirmPassword) {
return await fetch(`${API_BASE}/system/reset-password`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ token, newPassword, confirmPassword }),
})
.then(async (res) => {
const data = await res.json();
if (!res.ok) {
throw new Error(data.message || "Error resetting password.");
}
return data;
})
.catch((e) => {
console.error(e);
return { success: false, error: e.message };
});
},
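// The two methods above are meant to be chained: recoverAccount returns a
// short-lived resetToken that resetPassword consumes. A sketch, with
// hypothetical values:
// const { success, resetToken } = await System.recoverAccount("admin", ["code-1", "code-2"]);
// if (success) await System.resetPassword(resetToken, "newPass123!", "newPass123!");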
checkDocumentProcessorOnline: async () => {
return await fetch(`${API_BASE}/system/document-processing-status`, {
headers: baseHeaders(),

View File

@ -170,7 +170,7 @@ function AvailableAgentSkills({ skills, settings, toggleAgentSkill }) {
disabled={true}
/>
<GenericSkill
title="View and summarize documents"
title="View & summarize documents"
description="Allow the agent to list and summarize the content of workspace files currently embedded."
settings={settings}
enabled={true}
@ -183,6 +183,14 @@ function AvailableAgentSkills({ skills, settings, toggleAgentSkill }) {
enabled={true}
disabled={true}
/>
<GenericSkill
title="Generate charts"
description="Enable the default agent to generate various types of charts from data provided or given in chat."
skill="create-chart"
settings={settings}
toggleSkill={toggleAgentSkill}
enabled={skills.includes("create-chart")}
/>
<GenericSkill
title="Generate & save files to browser"
description="Enable the default agent to generate and write to files that save and can be downloaded in your browser."

View File

@ -0,0 +1,29 @@
import { chatQueryRefusalResponse } from "@/utils/chat";
export default function ChatQueryRefusalResponse({ workspace, setHasChanges }) {
return (
<div>
<div className="flex flex-col">
<label htmlFor="name" className="block input-label">
Query mode refusal response
</label>
<p className="text-white text-opacity-60 text-xs font-medium py-1.5">
When in <code className="bg-zinc-900 p-0.5 rounded-sm">query</code>{" "}
mode, you may want to return a custom refusal response when no context
is found.
</p>
</div>
<textarea
name="queryRefusalResponse"
rows={2}
defaultValue={chatQueryRefusalResponse(workspace)}
className="border-none bg-zinc-900 placeholder:text-white/20 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 mt-2"
placeholder="The text returned in query mode when there is no relevant context found for a response."
required={true}
wrap="soft"
autoComplete="off"
onChange={() => setHasChanges(true)}
/>
</div>
);
}

View File

@ -8,6 +8,7 @@ import ChatPromptSettings from "./ChatPromptSettings";
import ChatTemperatureSettings from "./ChatTemperatureSettings";
import ChatModeSelection from "./ChatModeSelection";
import WorkspaceLLMSelection from "./WorkspaceLLMSelection";
import ChatQueryRefusalResponse from "./ChatQueryRefusalResponse";
export default function ChatSettings({ workspace }) {
const [settings, setSettings] = useState({});
@ -68,6 +69,10 @@ export default function ChatSettings({ workspace }) {
workspace={workspace}
setHasChanges={setHasChanges}
/>
<ChatQueryRefusalResponse
workspace={workspace}
setHasChanges={setHasChanges}
/>
<ChatTemperatureSettings
settings={settings}
workspace={workspace}

View File

@ -11,6 +11,7 @@ const handledEvents = [
"fileDownload",
"awaitingFeedback",
"wssFailure",
"rechartVisualize",
];
export function websocketURI() {
@ -50,6 +51,25 @@ export default function handleSocketResponse(event, setChatHistory) {
return;
}
if (data.type === "rechartVisualize") {
return setChatHistory((prev) => {
return [
...prev.filter((msg) => !!msg.content),
{
type: "rechartVisualize",
uuid: v4(),
content: data.content,
role: "assistant",
sources: [],
closed: true,
error: null,
animate: false,
pending: false,
},
];
});
}
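// A representative event handled by the branch above; `content` carries the
// chart payload produced by the agent (shape illustrative only):
// { "type": "rechartVisualize", "content": "{\"type\":\"bar\",\"title\":\"...\", ...}" }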
if (data.type === "wssFailure") {
return setChatHistory((prev) => {
return [

View File

@ -140,3 +140,10 @@ export function chatPrompt(workspace) {
"Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed."
);
}
export function chatQueryRefusalResponse(workspace) {
return (
workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query."
);
}

View File

@ -1,5 +1,6 @@
/** @type {import('tailwindcss').Config} */
export default {
darkMode: false,
content: {
relative: true,
files: [
@ -9,7 +10,8 @@ export default {
"./src/pages/**/*.{js,jsx}",
"./src/utils/**/*.js",
"./src/*.jsx",
"./index.html"
"./index.html",
'./node_modules/@tremor/**/*.{js,ts,jsx,tsx}'
]
},
theme: {
@ -37,6 +39,7 @@ export default {
"main-gradient": "linear-gradient(180deg, #3D4147 0%, #2C2F35 100%)",
"modal-gradient": "linear-gradient(180deg, #3D4147 0%, #2C2F35 100%)",
"sidebar-gradient": "linear-gradient(90deg, #5B616A 0%, #3F434B 100%)",
"login-gradient": "linear-gradient(180deg, #3D4147 0%, #2C2F35 100%)",
"menu-item-gradient":
"linear-gradient(90deg, #3D4147 0%, #2C2F35 100%)",
"menu-item-selected-gradient":
@ -85,5 +88,35 @@ export default {
}
}
},
// Required for recharts styles to render, since their color classes are composed dynamically at runtime and would otherwise be tree-shaken.
safelist: [
{
pattern:
/^(bg-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
variants: ['hover', 'ui-selected'],
},
{
pattern:
/^(text-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
variants: ['hover', 'ui-selected'],
},
{
pattern:
/^(border-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
variants: ['hover', 'ui-selected'],
},
{
pattern:
/^(ring-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
},
{
pattern:
/^(stroke-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
},
{
pattern:
/^(fill-(?:slate|gray|zinc|neutral|stone|red|orange|amber|yellow|lime|green|emerald|teal|cyan|sky|blue|indigo|violet|purple|fuchsia|pink|rose)-(?:50|100|200|300|400|500|600|700|800|900|950))$/,
},
],
plugins: []
}
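// Why the safelist matters: classes composed at runtime, like the custom
// tooltip's `bg-${categoryPayload?.color}-500`, never appear verbatim in the
// source, so Tailwind's JIT compiler would otherwise purge them. Example of a
// class these patterns keep alive (hypothetical):
// const barClass = `bg-${color}-500`; // matched by the bg-* pattern above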

View File

@ -184,6 +184,13 @@
dependencies:
"@babel/helper-plugin-utils" "^7.22.5"
"@babel/runtime@^7.21.0", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.7":
version "7.24.4"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.4.tgz#de795accd698007a66ba44add6cc86542aff1edd"
integrity sha512-dkxf7+hn8mFBwKjs9bvBlArzLVxVbS8usaPUDd5p2a9JCL9tB8OaOVN1isD4+Xyk4ns89/xeOmbQvgdK7IIVdA==
dependencies:
regenerator-runtime "^0.14.0"
"@babel/template@^7.22.15":
version "7.22.15"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38"
@ -365,6 +372,13 @@
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.53.0.tgz#bea56f2ed2b5baea164348ff4d5a879f6f81f20d"
integrity sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==
"@floating-ui/core@^1.0.0":
version "1.6.0"
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.6.0.tgz#fa41b87812a16bf123122bf945946bae3fdf7fc1"
integrity sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==
dependencies:
"@floating-ui/utils" "^0.2.1"
"@floating-ui/core@^1.5.3":
version "1.5.3"
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.5.3.tgz#b6aa0827708d70971c8679a16cf680a515b8a52a"
@ -380,11 +394,48 @@
"@floating-ui/core" "^1.5.3"
"@floating-ui/utils" "^0.2.0"
"@floating-ui/utils@^0.2.0":
"@floating-ui/dom@^1.2.1":
version "1.6.3"
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.6.3.tgz#954e46c1dd3ad48e49db9ada7218b0985cee75ef"
integrity sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==
dependencies:
"@floating-ui/core" "^1.0.0"
"@floating-ui/utils" "^0.2.0"
"@floating-ui/react-dom@^1.3.0":
version "1.3.0"
resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-1.3.0.tgz#4d35d416eb19811c2b0e9271100a6aa18c1579b3"
integrity sha512-htwHm67Ji5E/pROEAr7f8IKFShuiCKHwUC/UY4vC3I5jiSvGFAYnSYiZO5MlGmads+QqvUkR9ANHEguGrDv72g==
dependencies:
"@floating-ui/dom" "^1.2.1"
"@floating-ui/react@^0.19.2":
version "0.19.2"
resolved "https://registry.yarnpkg.com/@floating-ui/react/-/react-0.19.2.tgz#c6e4d2097ed0dca665a7c042ddf9cdecc95e9412"
integrity sha512-JyNk4A0Ezirq8FlXECvRtQOX/iBe5Ize0W/pLkrZjfHW9GUV7Xnq6zm6fyZuQzaHHqEnVizmvlA96e1/CkZv+w==
dependencies:
"@floating-ui/react-dom" "^1.3.0"
aria-hidden "^1.1.3"
tabbable "^6.0.1"
"@floating-ui/utils@^0.2.0", "@floating-ui/utils@^0.2.1":
version "0.2.1"
resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.1.tgz#16308cea045f0fc777b6ff20a9f25474dd8293d2"
integrity sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==
"@headlessui/react@^1.7.18":
version "1.7.18"
resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-1.7.18.tgz#30af4634d2215b2ca1aa29d07f33d02bea82d9d7"
integrity sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ==
dependencies:
"@tanstack/react-virtual" "^3.0.0-beta.60"
client-only "^0.0.1"
"@headlessui/tailwindcss@^0.2.0":
version "0.2.0"
resolved "https://registry.yarnpkg.com/@headlessui/tailwindcss/-/tailwindcss-0.2.0.tgz#2c55c98fd8eee4b4f21ec6eb35a014b840059eec"
integrity sha512-fpL830Fln1SykOCboExsWr3JIVeQKieLJ3XytLe/tt1A0XzqUthOftDmjcCYLW62w7mQI7wXcoPXr3tZ9QfGxw==
"@humanwhocodes/config-array@^0.11.13":
version "0.11.13"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.13.tgz#075dc9684f40a531d9b26b0822153c1e832ee297"
@ -492,6 +543,32 @@
resolved "https://registry.yarnpkg.com/@remix-run/router/-/router-1.14.1.tgz#6d2dd03d52e604279c38911afc1079d58c50a755"
integrity sha512-Qg4DMQsfPNAs88rb2xkdk03N3bjK4jgX5fR24eHCTR9q6PrhZQZ4UJBPzCHJkIpTRN1UKxx2DzjZmnC+7Lj0Ow==
"@tanstack/react-virtual@^3.0.0-beta.60":
version "3.2.1"
resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.2.1.tgz#58ac9af23ff08b5f05a6dfe6a59deac2f9451508"
integrity sha512-i9Nt0ssIh2bSjomJZlr6Iq5usT/9+ewo2/fKHRNk6kjVKS8jrhXbnO8NEawarCuBx/efv0xpoUUKKGxa0cQb4Q==
dependencies:
"@tanstack/virtual-core" "3.2.1"
"@tanstack/virtual-core@3.2.1":
version "3.2.1"
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.2.1.tgz#b3e4214b8f462054501d80e8777068faa139bd06"
integrity sha512-nO0d4vRzsmpBQCJYyClNHPPoUMI4nXNfrm6IcCRL33ncWMoNVpURh9YebEHPw8KrtsP2VSJIHE4gf4XFGk1OGg==
"@tremor/react@^3.15.1":
version "3.15.1"
resolved "https://registry.yarnpkg.com/@tremor/react/-/react-3.15.1.tgz#a9c10887bd067ffe0e18ca763e425db057f3722f"
integrity sha512-vCUqgYo993VePn6yOs4102ibY2XYcDDp7I1ZV/+i5hdfp+XgsHyQvYeixQcETBMpcajwM8E8NOOO7k9ANLkrrw==
dependencies:
"@floating-ui/react" "^0.19.2"
"@headlessui/react" "^1.7.18"
"@headlessui/tailwindcss" "^0.2.0"
date-fns "^2.30.0"
react-day-picker "^8.9.1"
react-transition-state "^2.1.1"
recharts "^2.10.3"
tailwind-merge "^1.14.0"
"@types/babel__core@^7.20.3":
version "7.20.3"
resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.3.tgz#d5625a50b6f18244425a1359a858c73d70340778"
@ -525,6 +602,57 @@
dependencies:
"@babel/types" "^7.20.7"
"@types/d3-array@^3.0.3":
version "3.2.1"
resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.2.1.tgz#1f6658e3d2006c4fceac53fde464166859f8b8c5"
integrity sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==
"@types/d3-color@*":
version "3.1.3"
resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.3.tgz#368c961a18de721da8200e80bf3943fb53136af2"
integrity sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==
"@types/d3-ease@^3.0.0":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.2.tgz#e28db1bfbfa617076f7770dd1d9a48eaa3b6c51b"
integrity sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==
"@types/d3-interpolate@^3.0.1":
version "3.0.4"
resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz#412b90e84870285f2ff8a846c6eb60344f12a41c"
integrity sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==
dependencies:
"@types/d3-color" "*"
"@types/d3-path@*":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.1.0.tgz#2b907adce762a78e98828f0b438eaca339ae410a"
integrity sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ==
"@types/d3-scale@^4.0.2":
version "4.0.8"
resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.8.tgz#d409b5f9dcf63074464bf8ddfb8ee5a1f95945bb"
integrity sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==
dependencies:
"@types/d3-time" "*"
"@types/d3-shape@^3.1.0":
version "3.1.6"
resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.1.6.tgz#65d40d5a548f0a023821773e39012805e6e31a72"
integrity sha512-5KKk5aKGu2I+O6SONMYSNflgiP0WfZIQvVUMan50wHsLG1G94JlxEVnCpQARfTtzytuY0p/9PXXZb3I7giofIA==
dependencies:
"@types/d3-path" "*"
"@types/d3-time@*", "@types/d3-time@^3.0.0":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.3.tgz#3c186bbd9d12b9d84253b6be6487ca56b54f88be"
integrity sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==
"@types/d3-timer@^3.0.0":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.2.tgz#70bbda77dc23aa727413e22e214afa3f0e852f70"
integrity sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==
"@types/history@^4.7.11":
version "4.7.11"
resolved "https://registry.yarnpkg.com/@types/history/-/history-4.7.11.tgz#56588b17ae8f50c53983a524fc3cc47437969d64"
@ -651,6 +779,13 @@ argparse@^2.0.1:
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
aria-hidden@^1.1.3:
version "1.2.4"
resolved "https://registry.yarnpkg.com/aria-hidden/-/aria-hidden-1.2.4.tgz#b78e383fdbc04d05762c78b4a25a501e736c4522"
integrity sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==
dependencies:
tslib "^2.0.0"
array-buffer-byte-length@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead"
@ -748,6 +883,11 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
base64-arraybuffer@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz#1c37589a7c4b0746e34bd1feb951da2df01c1bdc"
integrity sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==
base64-js@^1.3.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
@ -871,6 +1011,11 @@ classnames@^2.3.0:
resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.5.1.tgz#ba774c614be0f016da105c858e7159eae8e7687b"
integrity sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==
client-only@^0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==
cliui@^8.0.1:
version "8.0.1"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa"
@ -890,6 +1035,11 @@ clsx@^1.1.1:
resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12"
integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==
clsx@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.0.tgz#e851283bcb5c80ee7608db18487433f7b23f77cb"
integrity sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==
color-convert@^1.3.0, color-convert@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
@ -954,6 +1104,13 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3:
shebang-command "^2.0.0"
which "^2.0.1"
css-line-break@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/css-line-break/-/css-line-break-2.1.0.tgz#bfef660dfa6f5397ea54116bb3cb4873edbc4fa0"
integrity sha512-FHcKFCZcAha3LwfVBhCQbW2nCNbkZXn7KVUJcsT5/P8YmfsVja0FMPJr0B903j/E69HUphKiV9iQArX8SDYA4w==
dependencies:
utrie "^1.0.2"
cssesc@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
@ -964,6 +1121,84 @@ csstype@^3.0.2:
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b"
integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==
"d3-array@2 - 3", "d3-array@2.10.0 - 3", d3-array@^3.1.6:
version "3.2.4"
resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5"
integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==
dependencies:
internmap "1 - 2"
"d3-color@1 - 3":
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2"
integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==
d3-ease@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4"
integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==
"d3-format@1 - 3":
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641"
integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==
"d3-interpolate@1.2.0 - 3", d3-interpolate@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d"
integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==
dependencies:
d3-color "1 - 3"
d3-path@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526"
integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==
d3-scale@^4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396"
integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==
dependencies:
d3-array "2.10.0 - 3"
d3-format "1 - 3"
d3-interpolate "1.2.0 - 3"
d3-time "2.1.1 - 3"
d3-time-format "2 - 4"
d3-shape@^3.1.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5"
integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==
dependencies:
d3-path "^3.1.0"
"d3-time-format@2 - 4":
version "4.1.0"
resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a"
integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==
dependencies:
d3-time "1 - 3"
"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7"
integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==
dependencies:
d3-array "2 - 3"
d3-timer@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0"
integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==
date-fns@^2.30.0:
version "2.30.0"
resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0"
integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==
dependencies:
"@babel/runtime" "^7.21.0"
debug@^4.1.0, debug@^4.1.1, debug@^4.3.2:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
@ -971,6 +1206,11 @@ debug@^4.1.0, debug@^4.1.1, debug@^4.3.2:
dependencies:
ms "2.1.2"
decimal.js-light@^2.4.1:
version "2.5.1"
resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934"
integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==
deep-is@^0.1.3:
version "0.1.4"
resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
@ -1046,6 +1286,14 @@ doctrine@^3.0.0:
dependencies:
esutils "^2.0.2"
dom-helpers@^5.0.1:
version "5.2.1"
resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902"
integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==
dependencies:
"@babel/runtime" "^7.8.7"
csstype "^3.0.2"
dompurify@^3.0.8:
version "3.0.8"
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.0.8.tgz#e0021ab1b09184bc8af7e35c7dd9063f43a8a437"
@ -1342,6 +1590,11 @@ esutils@^2.0.2:
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
eventemitter3@^4.0.1:
version "4.0.7"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
execa@^5.0.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
@ -1382,6 +1635,11 @@ fast-diff@^1.1.2:
resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0"
integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==
fast-equals@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/fast-equals/-/fast-equals-5.0.1.tgz#a4eefe3c5d1c0d021aeed0bc10ba5e0c12ee405d"
integrity sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ==
fast-glob@^3.3.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4"
@ -1698,6 +1956,14 @@ highlight.js@^11.9.0:
resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.9.0.tgz#04ab9ee43b52a41a047432c8103e2158a1b8b5b0"
integrity sha512-fJ7cW7fQGCYAkgv4CPfwFHrfd/cLS4Hau96JuJ+ZTOWhjnhoeN1ub1tFmALm/+lW5z4WCAuAV9bm05AP0mS6Gw==
html2canvas@^1.2.0:
version "1.4.1"
resolved "https://registry.yarnpkg.com/html2canvas/-/html2canvas-1.4.1.tgz#7cef1888311b5011d507794a066041b14669a543"
integrity sha512-fPU6BHNpsyIhr8yyMpTLLxAbkaK8ArIBcmZIRiBLiDhjeqvXolaEmDGmELFuX9I4xDcaKKcJl+TKZLqruBbmWA==
dependencies:
css-line-break "^2.1.0"
text-segmentation "^1.0.3"
human-signals@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
@ -1753,6 +2019,11 @@ internal-slot@^1.0.5:
hasown "^2.0.0"
side-channel "^1.0.4"
"internmap@1 - 2":
version "2.0.3"
resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009"
integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==
is-array-buffer@^3.0.1, is-array-buffer@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe"
@ -2483,7 +2754,7 @@ prettier@^3.0.3:
resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643"
integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==
prop-types@^15.8.1:
prop-types@^15.6.2, prop-types@^15.8.1:
version "15.8.1"
resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5"
integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==
@ -2502,6 +2773,11 @@ queue-microtask@^1.2.2:
resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
react-day-picker@^8.9.1:
version "8.10.0"
resolved "https://registry.yarnpkg.com/react-day-picker/-/react-day-picker-8.10.0.tgz#729c5b9564967a924213978fb9c0751884a60595"
integrity sha512-mz+qeyrOM7++1NCb1ARXmkjMkzWVh2GL9YiPbRjKe0zHccvekk4HE+0MPOZOrosn8r8zTHIIeOUXTmXRqmkRmg==
react-device-detect@^2.2.2:
version "2.2.3"
resolved "https://registry.yarnpkg.com/react-device-detect/-/react-device-detect-2.2.3.tgz#97a7ae767cdd004e7c3578260f48cf70c036e7ca"
@ -2526,7 +2802,7 @@ react-dropzone@^14.2.3:
file-selector "^0.6.0"
prop-types "^15.8.1"
react-is@^16.13.1:
react-is@^16.10.2, react-is@^16.13.1:
version "16.13.1"
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
@ -2556,6 +2832,15 @@ react-router@6.21.1:
dependencies:
"@remix-run/router" "1.14.1"
react-smooth@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/react-smooth/-/react-smooth-4.0.1.tgz#6200d8699bfe051ae40ba187988323b1449eab1a"
integrity sha512-OE4hm7XqR0jNOq3Qmk9mFLyd6p2+j6bvbPJ7qlB7+oo0eNcL2l7WQzG6MBnT3EXY6xzkLMUBec3AfewJdA0J8w==
dependencies:
fast-equals "^5.0.1"
prop-types "^15.8.1"
react-transition-group "^4.4.5"
react-tag-input-component@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/react-tag-input-component/-/react-tag-input-component-2.0.2.tgz#f62f013c6a535141dd1c6c3a88858223170150f1"
@ -2576,6 +2861,21 @@ react-tooltip@^5.25.2:
"@floating-ui/dom" "^1.0.0"
classnames "^2.3.0"
react-transition-group@^4.4.5:
version "4.4.5"
resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.5.tgz#e53d4e3f3344da8521489fbef8f2581d42becdd1"
integrity sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==
dependencies:
"@babel/runtime" "^7.5.5"
dom-helpers "^5.0.1"
loose-envify "^1.4.0"
prop-types "^15.6.2"
react-transition-state@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/react-transition-state/-/react-transition-state-2.1.1.tgz#1601a6177926b647041b7d598bf124321ab8d25b"
integrity sha512-kQx5g1FVu9knoz1T1WkapjUgFz08qQ/g1OmuWGi3/AoEFfS0kStxrPlZx81urjCXdz2d+1DqLpU6TyLW/Ro04Q==
react@^18.2.0:
version "18.2.0"
resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5"
@ -2597,6 +2897,34 @@ readdirp@~3.6.0:
dependencies:
picomatch "^2.2.1"
recharts-scale@^0.4.4:
version "0.4.5"
resolved "https://registry.yarnpkg.com/recharts-scale/-/recharts-scale-0.4.5.tgz#0969271f14e732e642fcc5bd4ab270d6e87dd1d9"
integrity sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==
dependencies:
decimal.js-light "^2.4.1"
recharts-to-png@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/recharts-to-png/-/recharts-to-png-2.3.1.tgz#94d4edb8461ba4b16318edea77a34c421c16d7c1"
integrity sha512-a+OaAi03oFJMa+Burf3vyH060iFTrb35W8bBYUatNjZVrrMKUcFM3VOI1ym078WIH7XfgYQb17K9p2spVA2FzQ==
dependencies:
html2canvas "^1.2.0"
recharts@^2.10.3, recharts@^2.12.5:
version "2.12.5"
resolved "https://registry.yarnpkg.com/recharts/-/recharts-2.12.5.tgz#b335eb66173317dccb3e126fce1d7ac5b3cee1e9"
integrity sha512-Cy+BkqrFIYTHJCyKHJEPvbHE2kVQEP6PKbOHJ8ztRGTAhvHuUnCwDaKVb13OwRFZ0QNUk1QvGTDdgWSMbuMtKw==
dependencies:
clsx "^2.0.0"
eventemitter3 "^4.0.1"
lodash "^4.17.21"
react-is "^16.10.2"
react-smooth "^4.0.0"
recharts-scale "^0.4.4"
tiny-invariant "^1.3.1"
victory-vendor "^36.6.8"
reflect.getprototypeof@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz#aaccbf41aca3821b87bb71d9dcbc7ad0ba50a3f3"
@ -2609,6 +2937,11 @@ reflect.getprototypeof@^1.0.4:
globalthis "^1.0.3"
which-builtin-type "^1.1.3"
regenerator-runtime@^0.14.0:
version "0.14.1"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e"
@ -2893,6 +3226,16 @@ synckit@^0.8.5:
"@pkgr/utils" "^2.3.1"
tslib "^2.5.0"
tabbable@^6.0.1:
version "6.2.0"
resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-6.2.0.tgz#732fb62bc0175cfcec257330be187dcfba1f3b97"
integrity sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==
tailwind-merge@^1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-1.14.0.tgz#e677f55d864edc6794562c63f5001f45093cdb8b"
integrity sha512-3mFKyCo/MBcgyOTlrY8T7odzZFx+w+qKSMAmdFzRvqBfLlSigU6TZnlFHK0lkMwj9Bj8OYU+9yW9lmGuS0QEnQ==
tailwindcss@^3.3.1:
version "3.3.5"
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.5.tgz#22a59e2fbe0ecb6660809d9cc5f3976b077be3b8"
@ -3031,6 +3374,13 @@ text-path-case@^1.0.2:
dependencies:
text-dot-case "^1.0.2"
text-segmentation@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/text-segmentation/-/text-segmentation-1.0.3.tgz#52a388159efffe746b24a63ba311b6ac9f2d7943"
integrity sha512-iOiPUo/BGnZ6+54OsWxZidGCsdU8YbE4PSpdPinp7DeMtUJNJBoJ/ouUSTJjHkh1KntHaltHl/gDs2FC4i5+Nw==
dependencies:
utrie "^1.0.2"
text-sentence-case@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/text-sentence-case/-/text-sentence-case-1.0.2.tgz#e692a9aea3c8dcb1fb12242838e0ca3e9a22a90f"
@ -3085,6 +3435,11 @@ thenify-all@^1.0.0:
dependencies:
any-promise "^1.0.0"
tiny-invariant@^1.3.1:
version "1.3.3"
resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.3.tgz#46680b7a873a0d5d10005995eb90a70d74d60127"
integrity sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==
titleize@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/titleize/-/titleize-3.0.0.tgz#71c12eb7fdd2558aa8a44b0be83b8a76694acd53"
@ -3112,7 +3467,7 @@ ts-interface-checker@^0.1.9:
resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
tslib@^2.4.0, tslib@^2.5.0, tslib@^2.6.0:
tslib@^2.0.0, tslib@^2.4.0, tslib@^2.5.0, tslib@^2.6.0:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
@ -3213,11 +3568,38 @@ util-deprecate@^1.0.2:
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
utrie@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/utrie/-/utrie-1.0.2.tgz#d42fe44de9bc0119c25de7f564a6ed1b2c87a645"
integrity sha512-1MLa5ouZiOmQzUbjbu9VmjLzn1QLXBhwpUa7kdLUQK+KQ5KA9I1vk5U4YHe/X2Ch7PYnJfWuWT+VbuxbGwljhw==
dependencies:
base64-arraybuffer "^1.0.2"
uuid@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
victory-vendor@^36.6.8:
version "36.9.2"
resolved "https://registry.yarnpkg.com/victory-vendor/-/victory-vendor-36.9.2.tgz#668b02a448fa4ea0f788dbf4228b7e64669ff801"
integrity sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==
dependencies:
"@types/d3-array" "^3.0.3"
"@types/d3-ease" "^3.0.0"
"@types/d3-interpolate" "^3.0.1"
"@types/d3-scale" "^4.0.2"
"@types/d3-shape" "^3.1.0"
"@types/d3-time" "^3.0.0"
"@types/d3-timer" "^3.0.0"
d3-array "^3.1.6"
d3-ease "^3.0.1"
d3-interpolate "^3.0.1"
d3-scale "^4.0.2"
d3-shape "^3.1.0"
d3-time "^3.0.0"
d3-timer "^3.0.1"
vite@^4.3.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.0.tgz#ec406295b4167ac3bc23e26f9c8ff559287cff26"

View File

@ -61,7 +61,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
# LLM_PROVIDER='groq'
# GROQ_API_KEY=gsk_abcxyz
# GROQ_MODEL_PREF=llama2-70b-4096
# GROQ_MODEL_PREF=llama3-8b-8192
# LLM_PROVIDER='generic-openai'
# GENERIC_OPEN_AI_BASE_PATH='http://proxy.url.openai.com/v1'

View File

@ -71,6 +71,28 @@ function extensionEndpoints(app) {
}
}
);
app.post(
"/ext/confluence",
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
async (request, response) => {
try {
const responseFromProcessor =
await new CollectorApi().forwardExtensionRequest({
endpoint: "/ext/confluence",
method: "POST",
body: request.body,
});
await Telemetry.sendTelemetry("extension_invoked", {
type: "confluence",
});
response.status(200).json(responseFromProcessor);
} catch (e) {
console.error(e);
response.sendStatus(500);
}
}
);
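// The forwarded body mirrors what the frontend DataConnector sends; a sketch
// with hypothetical values:
// POST ${API_BASE}/ext/confluence
// { "pageUrl": "https://acme.atlassian.net/wiki/spaces/ENG/overview",
//   "username": "user@acme.com", "accessToken": "atlassian-api-token" }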
}
module.exports = { extensionEndpoints };

View File

@ -41,6 +41,7 @@ const { WorkspaceChats } = require("../models/workspaceChats");
const {
flexUserRoleValid,
ROLES,
isMultiUserSetup,
} = require("../utils/middleware/multiUserProtected");
const { fetchPfp, determinePfpFilepath } = require("../utils/files/pfp");
const {
@ -49,6 +50,11 @@ const {
} = require("../utils/helpers/chat/convertTo");
const { EventLogs } = require("../models/eventLogs");
const { CollectorApi } = require("../utils/collectorApi");
const {
recoverAccount,
resetPassword,
generateRecoveryCodes,
} = require("../utils/PasswordRecovery");
function systemEndpoints(app) {
if (!app) return;
@ -179,6 +185,24 @@ function systemEndpoints(app) {
existingUser?.id
);
// Check if the user has seen the recovery codes
if (!existingUser.seen_recovery_codes) {
const plainTextCodes = await generateRecoveryCodes(existingUser.id);
// Return recovery codes to frontend
response.status(200).json({
valid: true,
user: existingUser,
token: makeJWT(
{ id: existingUser.id, username: existingUser.username },
"30d"
),
message: null,
recoveryCodes: plainTextCodes,
});
return;
}
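// The first multi-user login therefore returns a one-time payload along these
// lines (token and codes are illustrative):
// { "valid": true, "user": { ... }, "token": "<jwt>", "message": null,
//   "recoveryCodes": ["code-1", "code-2", ...] }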
response.status(200).json({
valid: true,
user: existingUser,
@ -226,6 +250,55 @@ function systemEndpoints(app) {
}
});
app.post(
"/system/recover-account",
[isMultiUserSetup],
async (request, response) => {
try {
const { username, recoveryCodes } = reqBody(request);
const { success, resetToken, error } = await recoverAccount(
username,
recoveryCodes
);
if (success) {
response.status(200).json({ success, resetToken });
} else {
response.status(400).json({ success, message: error });
}
} catch (error) {
console.error("Error recovering account:", error);
response
.status(500)
.json({ success: false, message: "Internal server error" });
}
}
);
app.post(
"/system/reset-password",
[isMultiUserSetup],
async (request, response) => {
try {
const { token, newPassword, confirmPassword } = reqBody(request);
const { success, message, error } = await resetPassword(
token,
newPassword,
confirmPassword
);
if (success) {
response.status(200).json({ success, message });
} else {
response.status(400).json({ success, error });
}
} catch (error) {
console.error("Error resetting password:", error);
response.status(500).json({ success: false, message: error.message });
}
}
);
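// The reset token is a one-time UUID with a short expiry (see
// PasswordResetToken.resetExpiryMs further down), so the reset call should
// follow recovery promptly. Example body, values hypothetical:
// { "token": "6f9a4e6e-...", "newPassword": "newPass123!", "confirmPassword": "newPass123!" }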
app.get(
"/system/system-vectors",
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],

View File

@ -60,7 +60,14 @@ embeddedEndpoints(apiRouter);
if (process.env.NODE_ENV !== "development") {
app.use(
express.static(path.resolve(__dirname, "public"), { extensions: ["js"] })
express.static(path.resolve(__dirname, "public"), {
extensions: ["js"],
setHeaders: (res) => {
// Disallow iframing of the entire site UI
res.removeHeader("X-Powered-By");
res.setHeader("X-Frame-Options", "DENY");
},
})
);
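// A quick way to confirm the header in a production build, assuming the
// default port:
// curl -I http://localhost:3001 | grep -i x-frame-options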
app.use("/", function (_, response) {

View File

@ -0,0 +1,115 @@
const { v4 } = require("uuid");
const prisma = require("../utils/prisma");
const bcrypt = require("bcrypt");
const RecoveryCode = {
tablename: "recovery_codes",
writable: [],
create: async function (userId, code) {
try {
const codeHash = await bcrypt.hash(code, 10);
const recoveryCode = await prisma.recovery_codes.create({
data: { user_id: userId, code_hash: codeHash },
});
return { recoveryCode, error: null };
} catch (error) {
console.error("FAILED TO CREATE RECOVERY CODE.", error.message);
return { recoveryCode: null, error: error.message };
}
},
createMany: async function (data) {
try {
const recoveryCodes = await prisma.$transaction(
data.map((recoveryCode) =>
prisma.recovery_codes.create({ data: recoveryCode })
)
);
return { recoveryCodes, error: null };
} catch (error) {
console.error("FAILED TO CREATE RECOVERY CODES.", error.message);
return { recoveryCodes: null, error: error.message };
}
},
findFirst: async function (clause = {}) {
try {
const recoveryCode = await prisma.recovery_codes.findFirst({
where: clause,
});
return recoveryCode;
} catch (error) {
console.error("FAILED TO FIND RECOVERY CODE.", error.message);
return null;
}
},
findMany: async function (clause = {}) {
try {
const recoveryCodes = await prisma.recovery_codes.findMany({
where: clause,
});
return recoveryCodes;
} catch (error) {
console.error("FAILED TO FIND RECOVERY CODES.", error.message);
return null;
}
},
deleteMany: async function (clause = {}) {
try {
await prisma.recovery_codes.deleteMany({ where: clause });
return true;
} catch (error) {
console.error("FAILED TO DELETE RECOVERY CODES.", error.message);
return false;
}
},
hashesForUser: async function (userId = null) {
if (!userId) return [];
return (await this.findMany({ user_id: userId })).map(
(recovery) => recovery.code_hash
);
},
};
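Because only bcrypt hashes are stored, validating a user-supplied code means comparing it against every stored hash for that user via hashesForUser. A sketch of that check (the real logic lives in utils/PasswordRecovery, which is not part of this diff):

const bcrypt = require("bcrypt");

// Illustrative only; assumes RecoveryCode is in scope as defined above.
async function codeIsValid(userId, suppliedCode) {
  for (const hash of await RecoveryCode.hashesForUser(userId)) {
    if (await bcrypt.compare(suppliedCode, hash)) return true;
  }
  return false;
}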
const PasswordResetToken = {
tablename: "password_reset_tokens",
  resetExpiryMs: 600_000, // 10 minutes in ms
writable: [],
calcExpiry: function () {
return new Date(Date.now() + this.resetExpiryMs);
},
create: async function (userId) {
try {
const passwordResetToken = await prisma.password_reset_tokens.create({
data: { user_id: userId, token: v4(), expiresAt: this.calcExpiry() },
});
return { passwordResetToken, error: null };
} catch (error) {
console.error("FAILED TO CREATE PASSWORD RESET TOKEN.", error.message);
return { passwordResetToken: null, error: error.message };
}
},
findUnique: async function (clause = {}) {
try {
const passwordResetToken = await prisma.password_reset_tokens.findUnique({
where: clause,
});
return passwordResetToken;
} catch (error) {
console.error("FAILED TO FIND PASSWORD RESET TOKEN.", error.message);
return null;
}
},
deleteMany: async function (clause = {}) {
try {
await prisma.password_reset_tokens.deleteMany({ where: clause });
return true;
} catch (error) {
console.error("FAILED TO DELETE PASSWORD RESET TOKEN.", error.message);
return false;
}
},
};
module.exports = {
RecoveryCode,
PasswordResetToken,
};
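The model stores expiresAt but never checks it, so enforcement is left to the caller. A hypothetical redeem step that honors the 10-minute window and one-time use:

// Assumes PasswordResetToken is imported from this module.
async function redeemToken(token) {
  const record = await PasswordResetToken.findUnique({ token });
  if (!record) return { ok: false, reason: "unknown token" };
  if (new Date() > new Date(record.expiresAt))
    return { ok: false, reason: "token expired" };
  // One-time use: clear any outstanding tokens for this user.
  await PasswordResetToken.deleteMany({ user_id: record.user_id });
  return { ok: true, userId: record.user_id };
}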

View File

@ -2,6 +2,23 @@ const prisma = require("../utils/prisma");
const { EventLogs } = require("./eventLogs");
const User = {
writable: [
// Used for generic updates so we can validate keys in request body
"username",
"password",
"pfpFilename",
"role",
"suspended",
],
// validations for the above writable fields.
castColumnValue: function (key, value) {
switch (key) {
case "suspended":
return Number(Boolean(value));
default:
return String(value);
}
},
create: async function ({ username, password, role = "default" }) {
const passwordCheck = this.checkPasswordComplexity(password);
if (!passwordCheck.checkedOK) {
@ -42,13 +59,26 @@ const User = {
update: async function (userId, updates = {}) {
try {
if (!userId) throw new Error("No user id provided for update");
const currentUser = await prisma.users.findUnique({
where: { id: parseInt(userId) },
});
if (!currentUser) {
return { success: false, error: "User not found" };
}
if (!currentUser) return { success: false, error: "User not found" };
// Removes non-writable fields for generic updates
      // and force-casts to the proper type.
Object.entries(updates).forEach(([key, value]) => {
if (this.writable.includes(key)) {
updates[key] = this.castColumnValue(key, value);
return;
}
delete updates[key];
});
if (Object.keys(updates).length === 0)
return { success: false, error: "No valid updates applied." };
// Handle password specific updates
if (updates.hasOwnProperty("password")) {
const passwordCheck = this.checkPasswordComplexity(updates.password);
if (!passwordCheck.checkedOK) {
@ -78,6 +108,24 @@ const User = {
}
},
// Explicit direct update of user object.
// Only use this method when directly setting a key value
// that takes no user input for the keys being modified.
_update: async function (id = null, data = {}) {
if (!id) throw new Error("No user id provided for update");
try {
const user = await prisma.users.update({
where: { id },
data,
});
return { user, message: null };
} catch (error) {
console.error(error.message);
return { user: null, message: error.message };
}
},
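The split between update and _update is the point of this hunk: update sanitizes arbitrary request bodies against writable and castColumnValue, while _update writes exactly what it is given. An illustrative contrast (the user id and values are placeholders):

async function updateExamples() {
  // Generic update: unknown keys are stripped, values cast (suspended -> 0/1).
  await User.update(1, { role: "admin", isAdmin: true }); // "isAdmin" is silently dropped
  // Direct update: no filtering. Only for trusted, hard-coded keys,
  // e.g. the seen_recovery_codes flag this commit adds.
  await User._update(1, { seen_recovery_codes: true });
}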
get: async function (clause = {}) {
try {
const user = await prisma.users.findFirst({ where: clause });

View File

@ -26,6 +26,7 @@ const Workspace = {
"pfpFilename",
"agentProvider",
"agentModel",
"queryRefusalResponse",
],
new: async function (name = null, creatorId = null) {

View File

@ -3,9 +3,9 @@ const { v4: uuidv4 } = require("uuid");
const WorkspaceAgentInvocation = {
// returns array of strings with their @ handle.
// must start with @
// must start with @agent for now.
parseAgents: function (promptString) {
if (!promptString.startsWith("@")) return [];
if (!promptString.startsWith("@agent")) return [];
return promptString.split(/\s+/).filter((v) => v.startsWith("@"));
},
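With the tightened guard, only prompts that begin with @agent are parsed for invocations, though any later @-handles are still collected. Expected behavior on illustrative inputs:

WorkspaceAgentInvocation.parseAgents("@agent summarize @docs for me");
// -> ["@agent", "@docs"]
WorkspaceAgentInvocation.parseAgents("hey @agent help");
// -> [] (prompt must start with @agent)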

View File

@ -25,6 +25,11 @@
"@datastax/astra-db-ts": "^0.1.3",
"@google/generative-ai": "^0.7.1",
"@googleapis/youtube": "^9.0.0",
"@langchain/anthropic": "0.1.16",
"@langchain/community": "0.0.53",
"@langchain/core": "0.1.61",
"@langchain/openai": "0.0.28",
"@langchain/textsplitters": "0.0.0",
"@pinecone-database/pinecone": "^2.0.1",
"@prisma/client": "5.3.1",
"@qdrant/js-client-rest": "^1.4.0",
@ -46,14 +51,13 @@
"joi-password-complexity": "^5.2.0",
"js-tiktoken": "^1.0.7",
"jsonwebtoken": "^8.5.1",
"langchain": "0.0.201",
"langchain": "0.1.36",
"mime": "^3.0.0",
"moment": "^2.29.4",
"multer": "^1.4.5-lts.1",
"node-html-markdown": "^1.3.0",
"node-llama-cpp": "^2.8.0",
"openai": "^3.2.1",
"openai-latest": "npm:openai@latest",
"openai": "4.38.5",
"pinecone-client": "^1.1.0",
"pluralize": "^8.0.0",
"posthog-node": "^3.1.1",
@ -65,7 +69,7 @@
"swagger-ui-express": "^5.0.0",
"uuid": "^9.0.0",
"uuid-apikey": "^1.5.3",
"vectordb": "0.1.19",
"vectordb": "0.4.11",
"weaviate-ts-client": "^1.4.0"
},
"devDependencies": {
@ -84,4 +88,4 @@
"nodemon": "^2.0.22",
"prettier": "^3.0.3"
}
}
}
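The langchain 0.0.201 -> 0.1.36 bump is more than a version change: 0.1.x split the monolith into scoped packages, which is why @langchain/core, @langchain/community, @langchain/anthropic, @langchain/openai, and @langchain/textsplitters appear as new direct dependencies. The import moves later in this diff follow the split:

// Monolithic paths (langchain 0.0.x)            Scoped paths (0.1.x)
// require("langchain/schema")                -> require("@langchain/core/messages")
// require("langchain/schema/output_parser")  -> require("@langchain/core/output_parsers")
// require("langchain/chat_models/ollama")    -> require("@langchain/community/chat_models/ollama")
// require("langchain/chat_models/llama_cpp") -> require("@langchain/community/chat_models/llama_cpp")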

View File

@ -0,0 +1,30 @@
-- AlterTable
ALTER TABLE "users" ADD COLUMN "seen_recovery_codes" BOOLEAN DEFAULT false;
-- CreateTable
CREATE TABLE "recovery_codes" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"user_id" INTEGER NOT NULL,
"code_hash" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "recovery_codes_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "password_reset_tokens" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"user_id" INTEGER NOT NULL,
"token" TEXT NOT NULL,
"expiresAt" DATETIME NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "password_reset_tokens_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateIndex
CREATE INDEX "recovery_codes_user_id_idx" ON "recovery_codes"("user_id");
-- CreateIndex
CREATE UNIQUE INDEX "password_reset_tokens_token_key" ON "password_reset_tokens"("token");
-- CreateIndex
CREATE INDEX "password_reset_tokens_user_id_idx" ON "password_reset_tokens"("user_id");

View File

@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "workspaces" ADD COLUMN "queryRefusalResponse" TEXT;

View File

@ -62,6 +62,7 @@ model users {
pfpFilename String?
role String @default("default")
suspended Int @default(0)
seen_recovery_codes Boolean? @default(false)
createdAt DateTime @default(now())
lastUpdatedAt DateTime @default(now())
workspace_chats workspace_chats[]
@ -69,9 +70,32 @@ model users {
embed_configs embed_configs[]
embed_chats embed_chats[]
threads workspace_threads[]
recovery_codes recovery_codes[]
password_reset_tokens password_reset_tokens[]
workspace_agent_invocations workspace_agent_invocations[]
}
model recovery_codes {
id Int @id @default(autoincrement())
user_id Int
code_hash String
createdAt DateTime @default(now())
user users @relation(fields: [user_id], references: [id], onDelete: Cascade)
@@index([user_id])
}
model password_reset_tokens {
id Int @id @default(autoincrement())
user_id Int
token String @unique
expiresAt DateTime
createdAt DateTime @default(now())
user users @relation(fields: [user_id], references: [id], onDelete: Cascade)
@@index([user_id])
}
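Both new tables relate to users with onDelete: Cascade, so deleting an account also purges its recovery material. Illustrated with the Prisma client (the id is a placeholder):

const prisma = require("../utils/prisma"); // path as used elsewhere in this diff

async function demoCascade(userId) {
  await prisma.users.delete({ where: { id: userId } });
  // Both cascade away with the user:
  console.log(await prisma.recovery_codes.findMany({ where: { user_id: userId } })); // []
  console.log(await prisma.password_reset_tokens.findMany({ where: { user_id: userId } })); // []
}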
model document_vectors {
id Int @id @default(autoincrement())
docId String
@ -106,6 +130,7 @@ model workspaces {
pfpFilename String?
agentProvider String?
agentModel String?
queryRefusalResponse String?
workspace_users workspace_users[]
documents workspace_documents[]
workspace_suggested_messages workspace_suggested_messages[]

View File

@ -1,21 +1,22 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class GenericOpenAiLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
const { OpenAI: OpenAIApi } = require("openai");
if (!process.env.GENERIC_OPEN_AI_BASE_PATH)
throw new Error(
"GenericOpenAI must have a valid base path to use for the api."
);
this.basePath = process.env.GENERIC_OPEN_AI_BASE_PATH;
const config = new Configuration({
basePath: this.basePath,
this.openai = new OpenAIApi({
baseURL: this.basePath,
apiKey: process.env.GENERIC_OPEN_AI_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference ?? process.env.GENERIC_OPEN_AI_MODEL_PREF ?? null;
if (!this.model)
@ -89,8 +90,8 @@ class GenericOpenAiLLM {
}
async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -103,13 +104,12 @@ class GenericOpenAiLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("GenericOpenAI chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("GenericOpenAI chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -121,29 +121,26 @@ class GenericOpenAiLLM {
}
async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
const { data } = await this.openai
.createChatCompletion({
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
@ -152,25 +149,23 @@ class GenericOpenAiLLM {
throw new Error(e.response.data.error.message);
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
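This file sets the openai v3 -> v4 migration pattern that every provider below repeats: Configuration/basePath collapses into a single client with baseURL, createChatCompletion becomes chat.completions.create, the .data wrapper on responses disappears, and streaming drops the { responseType: "stream" } second argument in favor of stream: true returning an async iterable. Condensed into a single sketch (not verbatim from any one file):

const { OpenAI } = require("openai");

async function v4Pattern(baseURL, apiKey, model, messages) {
  // v3: new OpenAIApi(new Configuration({ basePath, apiKey }))
  const client = new OpenAI({ baseURL, apiKey });

  // Non-streaming: the parsed body is returned directly; no `.data` wrapper.
  const result = await client.chat.completions.create({ model, messages });
  console.log(result.choices[0].message.content);

  // Streaming: pass stream: true and consume the async iterable.
  const stream = await client.chat.completions.create({ model, messages, stream: true });
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices?.[0]?.delta?.content ?? "");
  }
}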

View File

@ -1,20 +1,20 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class GroqLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
const { OpenAI: OpenAIApi } = require("openai");
if (!process.env.GROQ_API_KEY) throw new Error("No Groq API key was set.");
const config = new Configuration({
basePath: "https://api.groq.com/openai/v1",
this.openai = new OpenAIApi({
baseURL: "https://api.groq.com/openai/v1",
apiKey: process.env.GROQ_API_KEY,
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference || process.env.GROQ_MODEL_PREF || "llama2-70b-4096";
modelPreference || process.env.GROQ_MODEL_PREF || "llama3-8b-8192";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
@ -40,10 +40,9 @@ class GroqLLM {
streamingEnabled() {
return "streamChat" in this && "streamGetChatCompletion" in this;
}
promptWindowLimit() {
switch (this.model) {
case "llama2-70b-4096":
return 4096;
case "mixtral-8x7b-32768":
return 32_768;
case "llama3-8b-8192":
@ -53,13 +52,12 @@ class GroqLLM {
case "gemma-7b-it":
return 8192;
default:
return 4096;
return 8192;
}
}
async isValidChatCompletionModel(modelName = "") {
const validModels = [
"llama2-70b-4096",
"mixtral-8x7b-32768",
"llama3-8b-8192",
"llama3-70b-8192",
@ -68,9 +66,9 @@ class GroqLLM {
const isPreset = validModels.some((model) => modelName === model);
if (isPreset) return true;
const model = await this.openai
.retrieveModel(modelName)
.then((res) => res.data)
const model = await this.openai.models
.retrieve(modelName)
.then((modelObj) => modelObj)
.catch(() => null);
return !!model;
}
@ -99,8 +97,8 @@ class GroqLLM {
`Groq chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -113,13 +111,12 @@ class GroqLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("GroqAI chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("GroqAI chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -136,23 +133,20 @@ class GroqLLM {
`GroqAI:streamChat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -162,8 +156,8 @@ class GroqLLM {
`GroqAI:chatCompletion: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai
.createChatCompletion({
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
@ -172,8 +166,9 @@ class GroqLLM {
throw new Error(e.response.data.error.message);
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -182,20 +177,17 @@ class GroqLLM {
`GroqAI:streamChatCompletion: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -2,23 +2,21 @@ const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const { OpenAiEmbedder } = require("../../EmbeddingEngines/openAi");
const { chatPrompt } = require("../../chats");
const {
writeResponseChunk,
clientAbortedHandler,
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class HuggingFaceLLM {
constructor(embedder = null, _modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.HUGGING_FACE_LLM_ENDPOINT)
throw new Error("No HuggingFace Inference Endpoint was set.");
if (!process.env.HUGGING_FACE_LLM_API_KEY)
throw new Error("No HuggingFace Access Token was set.");
const { OpenAI: OpenAIApi } = require("openai");
const config = new Configuration({
basePath: `${process.env.HUGGING_FACE_LLM_ENDPOINT}/v1`,
this.openai = new OpenAIApi({
baseURL: `${process.env.HUGGING_FACE_LLM_ENDPOINT}/v1`,
apiKey: process.env.HUGGING_FACE_LLM_API_KEY,
});
this.openai = new OpenAIApi(config);
// When using HF inference server - the model param is not required so
// we can stub it here. HF Endpoints can only run one model at a time.
    // We set it to 'tgi' so that the HF endpoint can accept the message format
@ -93,8 +91,8 @@ class HuggingFaceLLM {
}
async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -107,13 +105,12 @@ class HuggingFaceLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("HuggingFace chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("HuggingFace chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -125,167 +122,47 @@ class HuggingFaceLLM {
}
async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
const { data } = await this.openai.createChatCompletion({
    const result = await this.openai.chat.completions.create({
model: this.model,
messages,
temperature,
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise((resolve) => {
let fullText = "";
let chunk = "";
// Establish listener to early-abort a streaming response
// in case things go sideways or the user does not like the response.
// We preserve the generated text but continue as if chat was completed
// to preserve previously generated content.
const handleAbort = () => clientAbortedHandler(resolve, fullText);
response.on("close", handleAbort);
stream.data.on("data", (data) => {
const lines = data
?.toString()
?.split("\n")
.filter((line) => line.trim() !== "");
for (const line of lines) {
let validJSON = false;
const message = chunk + line.replace(/^data:/, "");
if (message !== "[DONE]") {
// JSON chunk is incomplete and has not ended yet
// so we need to stitch it together. You would think JSON
// chunks would only come complete - but they don't!
try {
JSON.parse(message);
validJSON = true;
} catch {
console.log("Failed to parse message", message);
}
if (!validJSON) {
// It can be possible that the chunk decoding is running away
// and the message chunk fails to append due to string length.
// In this case abort the chunk and reset so we can continue.
// ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
try {
chunk += message;
} catch (e) {
console.error(`Chunk appending error`, e);
chunk = "";
}
continue;
} else {
chunk = "";
}
}
if (message == "[DONE]") {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
} else {
let error = null;
let finishReason = null;
let token = "";
try {
const json = JSON.parse(message);
error = json?.error || null;
token = json?.choices?.[0]?.delta?.content;
finishReason = json?.choices?.[0]?.finish_reason || null;
} catch {
continue;
}
if (!!error) {
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: null,
close: true,
error,
});
response.removeListener("close", handleAbort);
resolve("");
return;
}
if (token) {
fullText += token;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
if (finishReason !== null) {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
}
});
});
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
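The ~120 lines of hand-rolled SSE parsing deleted above are exactly what handleDefaultStreamResponseV2 makes redundant. That helper is not part of this diff, but with v4's async-iterable streams the core loop plausibly reduces to something like:

// A sketch under that assumption; the real helper lives in helpers/chat/responses.
async function collectStream(stream, onToken) {
  let fullText = "";
  for await (const chunk of stream) {
    // Chunks arrive pre-parsed; no "data:" line stitching required.
    const token = chunk?.choices?.[0]?.delta?.content;
    if (token) {
      fullText += token;
      onToken(token);
    }
  }
  return fullText;
}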

View File

@ -1,5 +1,7 @@
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
// hybrid of openAi LLM chat completion for LMStudio
class LMStudioLLM {
@ -7,11 +9,11 @@ class LMStudioLLM {
if (!process.env.LMSTUDIO_BASE_PATH)
throw new Error("No LMStudio API Base Path was set.");
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
const { OpenAI: OpenAIApi } = require("openai");
this.lmstudio = new OpenAIApi({
baseURL: process.env.LMSTUDIO_BASE_PATH?.replace(/\/+$/, ""), // here is the URL to your LMStudio instance
apiKey: null,
});
this.lmstudio = new OpenAIApi(config);
// Prior to LMStudio 0.2.17 the `model` param was not required and you could pass anything
// into that field and it would work. On 0.2.17 LMStudio introduced multi-model chat
@ -89,8 +91,8 @@ class LMStudioLLM {
`LMStudio chat: ${this.model} is not valid or defined for chat completion!`
);
const textResponse = await this.lmstudio
.createChatCompletion({
const textResponse = await this.lmstudio.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -103,13 +105,12 @@ class LMStudioLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("LMStudio chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("LMStudio chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -126,23 +127,20 @@ class LMStudioLLM {
`LMStudio chat: ${this.model} is not valid or defined for chat completion!`
);
const streamRequest = await this.lmstudio.createChatCompletion(
{
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
stream: true,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.lmstudio.chat.completions.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
stream: true,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -152,14 +150,15 @@ class LMStudioLLM {
`LMStudio chat: ${this.model} is not valid or defined model for chat completion!`
);
const { data } = await this.lmstudio.createChatCompletion({
const result = await this.lmstudio.chat.completions.create({
model: this.model,
messages,
temperature,
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -168,20 +167,17 @@ class LMStudioLLM {
`LMStudio chat: ${this.model} is not valid or defined model for chat completion!`
);
const streamRequest = await this.lmstudio.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.lmstudio.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -1,21 +1,18 @@
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class LocalAiLLM {
constructor(embedder = null, modelPreference = null) {
if (!process.env.LOCAL_AI_BASE_PATH)
throw new Error("No LocalAI Base Path was set.");
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: process.env.LOCAL_AI_BASE_PATH,
...(!!process.env.LOCAL_AI_API_KEY
? {
apiKey: process.env.LOCAL_AI_API_KEY,
}
: {}),
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
baseURL: process.env.LOCAL_AI_BASE_PATH,
apiKey: process.env.LOCAL_AI_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
this.model = modelPreference || process.env.LOCAL_AI_MODEL_PREF;
this.limits = {
history: this.promptWindowLimit() * 0.15,
@ -84,8 +81,8 @@ class LocalAiLLM {
`LocalAI chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -98,13 +95,12 @@ class LocalAiLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("LocalAI chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("LocalAI chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -121,23 +117,20 @@ class LocalAiLLM {
`LocalAI chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -147,14 +140,15 @@ class LocalAiLLM {
`LocalAI chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai.createChatCompletion({
const result = await this.openai.chat.completions.create({
model: this.model,
messages,
temperature,
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -163,20 +157,17 @@ class LocalAiLLM {
`LocalAi chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -1,17 +1,18 @@
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class MistralLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.MISTRAL_API_KEY)
throw new Error("No Mistral API key was set.");
const config = new Configuration({
basePath: "https://api.mistral.ai/v1",
apiKey: process.env.MISTRAL_API_KEY,
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
baseURL: "https://api.mistral.ai/v1",
apiKey: process.env.MISTRAL_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference || process.env.MISTRAL_MODEL_PREF || "mistral-tiny";
this.limits = {
@ -75,8 +76,8 @@ class MistralLLM {
`Mistral chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
messages: await this.compressMessages(
@ -88,13 +89,12 @@ class MistralLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("Mistral chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("Mistral chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -111,22 +111,19 @@ class MistralLLM {
`Mistral chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -137,14 +134,15 @@ class MistralLLM {
`Mistral chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai.createChatCompletion({
const result = await this.openai.chat.completions.create({
model: this.model,
messages,
temperature,
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -153,20 +151,17 @@ class MistralLLM {
`Mistral chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -7,9 +7,9 @@ const {
clientAbortedHandler,
} = require("../../helpers/chat/responses");
// Docs: https://api.js.langchain.com/classes/chat_models_llama_cpp.ChatLlamaCpp.html
// Docs: https://js.langchain.com/docs/integrations/chat/llama_cpp
const ChatLlamaCpp = (...args) =>
import("langchain/chat_models/llama_cpp").then(
import("@langchain/community/chat_models/llama_cpp").then(
({ ChatLlamaCpp }) => new ChatLlamaCpp(...args)
);
@ -64,7 +64,7 @@ class NativeLLM {
HumanMessage,
SystemMessage,
AIMessage,
} = require("langchain/schema");
} = require("@langchain/core/messages");
const langchainChats = [];
const roleToMessageMap = {
system: SystemMessage,

View File

@ -1,5 +1,5 @@
const { chatPrompt } = require("../../chats");
const { StringOutputParser } = require("langchain/schema/output_parser");
const { StringOutputParser } = require("@langchain/core/output_parsers");
const {
writeResponseChunk,
clientAbortedHandler,
@ -28,7 +28,7 @@ class OllamaAILLM {
}
#ollamaClient({ temperature = 0.07 }) {
const { ChatOllama } = require("langchain/chat_models/ollama");
const { ChatOllama } = require("@langchain/community/chat_models/ollama");
return new ChatOllama({
baseUrl: this.basePath,
model: this.model,
@ -44,7 +44,7 @@ class OllamaAILLM {
HumanMessage,
SystemMessage,
AIMessage,
} = require("langchain/schema");
} = require("@langchain/core/messages");
const langchainChats = [];
const roleToMessageMap = {
system: SystemMessage,

View File

@ -1,16 +1,17 @@
const { OpenAiEmbedder } = require("../../EmbeddingEngines/openAi");
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
class OpenAiLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.OPEN_AI_KEY) throw new Error("No OpenAI API key was set.");
const { OpenAI: OpenAIApi } = require("openai");
const config = new Configuration({
this.openai = new OpenAIApi({
apiKey: process.env.OPEN_AI_KEY,
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference || process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
this.limits = {
@ -70,9 +71,9 @@ class OpenAiLLM {
const isPreset = modelName.toLowerCase().includes("gpt");
if (isPreset) return true;
const model = await this.openai
.retrieveModel(modelName)
.then((res) => res.data)
const model = await this.openai.models
.retrieve(modelName)
.then((modelObj) => modelObj)
.catch(() => null);
return !!model;
}
@ -91,10 +92,9 @@ class OpenAiLLM {
}
async isSafe(input = "") {
const { flagged = false, categories = {} } = await this.openai
.createModeration({ input })
.then((json) => {
const res = json.data;
const { flagged = false, categories = {} } = await this.openai.moderations
.create({ input })
.then((res) => {
if (!res.hasOwnProperty("results"))
throw new Error("OpenAI moderation: No results!");
if (res.results.length === 0)
@ -128,8 +128,8 @@ class OpenAiLLM {
`OpenAI chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -142,13 +142,12 @@ class OpenAiLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("OpenAI chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("OpenAI chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -165,23 +164,20 @@ class OpenAiLLM {
`OpenAI chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
    const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -191,8 +187,8 @@ class OpenAiLLM {
`OpenAI chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai
.createChatCompletion({
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
@ -201,8 +197,9 @@ class OpenAiLLM {
throw new Error(e.response.data.error.message);
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -211,20 +208,17 @@ class OpenAiLLM {
`OpenAI chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -4,29 +4,32 @@ const { v4: uuidv4 } = require("uuid");
const {
writeResponseChunk,
clientAbortedHandler,
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
const fs = require("fs");
const path = require("path");
const { safeJsonParse } = require("../../http");
const cacheFolder = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "openrouter")
: path.resolve(__dirname, `../../../storage/models/openrouter`)
);
class OpenRouterLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.OPENROUTER_API_KEY)
throw new Error("No OpenRouter API key was set.");
const { OpenAI: OpenAIApi } = require("openai");
this.basePath = "https://openrouter.ai/api/v1";
const config = new Configuration({
basePath: this.basePath,
apiKey: process.env.OPENROUTER_API_KEY,
baseOptions: {
headers: {
"HTTP-Referer": "https://useanything.com",
"X-Title": "AnythingLLM",
},
this.openai = new OpenAIApi({
baseURL: this.basePath,
apiKey: process.env.OPENROUTER_API_KEY ?? null,
defaultHeaders: {
"HTTP-Referer": "https://useanything.com",
"X-Title": "AnythingLLM",
},
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference || process.env.OPENROUTER_MODEL_PREF || "openrouter/auto";
this.limits = {
@ -38,12 +41,8 @@ class OpenRouterLLM {
this.embedder = !embedder ? new NativeEmbedder() : embedder;
this.defaultTemp = 0.7;
const cacheFolder = path.resolve(
process.env.STORAGE_DIR
? path.resolve(process.env.STORAGE_DIR, "models", "openrouter")
: path.resolve(__dirname, `../../../storage/models/openrouter`)
);
fs.mkdirSync(cacheFolder, { recursive: true });
if (!fs.existsSync(cacheFolder))
fs.mkdirSync(cacheFolder, { recursive: true });
this.cacheModelPath = path.resolve(cacheFolder, "models.json");
this.cacheAtPath = path.resolve(cacheFolder, ".cached_at");
}
@ -52,11 +51,6 @@ class OpenRouterLLM {
console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
}
async init() {
await this.#syncModels();
return this;
}
  // This checks if the .cached_at file has a timestamp that is more than 1 week (in millis)
// from the current date. If it is, then we will refetch the API so that all the models are up
// to date.
@ -80,37 +74,7 @@ class OpenRouterLLM {
this.log(
"Model cache is not present or stale. Fetching from OpenRouter API."
);
await fetch(`${this.basePath}/models`, {
method: "GET",
headers: {
"Content-Type": "application/json",
},
})
.then((res) => res.json())
.then(({ data = [] }) => {
const models = {};
data.forEach((model) => {
models[model.id] = {
id: model.id,
name: model.name,
organization:
model.id.split("/")[0].charAt(0).toUpperCase() +
model.id.split("/")[0].slice(1),
maxLength: model.context_length,
};
});
fs.writeFileSync(this.cacheModelPath, JSON.stringify(models), {
encoding: "utf-8",
});
fs.writeFileSync(this.cacheAtPath, String(Number(new Date())), {
encoding: "utf-8",
});
return models;
})
.catch((e) => {
console.error(e);
return {};
});
await fetchOpenRouterModels();
return;
}
@ -173,8 +137,8 @@ class OpenRouterLLM {
`OpenRouter chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -187,13 +151,12 @@ class OpenRouterLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("OpenRouter chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("OpenRouter chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -210,23 +173,20 @@ class OpenRouterLLM {
`OpenRouter chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -236,8 +196,8 @@ class OpenRouterLLM {
`OpenRouter chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai
.createChatCompletion({
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
@ -246,8 +206,9 @@ class OpenRouterLLM {
throw new Error(e.response.data.error.message);
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -256,15 +217,12 @@ class OpenRouterLLM {
`OpenRouter chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
@ -272,9 +230,8 @@ class OpenRouterLLM {
const timeoutThresholdMs = 500;
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise((resolve) => {
return new Promise(async (resolve) => {
let fullText = "";
let chunk = "";
let lastChunkTime = null; // null when first token is still not received.
// Establish listener to early-abort a streaming response
@ -314,97 +271,176 @@ class OpenRouterLLM {
}
}, 500);
stream.data.on("data", (data) => {
const lines = data
?.toString()
?.split("\n")
.filter((line) => line.trim() !== "");
for await (const chunk of stream) {
const message = chunk?.choices?.[0];
const token = message?.delta?.content;
lastChunkTime = Number(new Date());
for (const line of lines) {
let validJSON = false;
const message = chunk + line.replace(/^data: /, "");
// JSON chunk is incomplete and has not ended yet
// so we need to stitch it together. You would think JSON
// chunks would only come complete - but they don't!
try {
JSON.parse(message);
validJSON = true;
} catch {}
if (!validJSON) {
// It can be possible that the chunk decoding is running away
// and the message chunk fails to append due to string length.
// In this case abort the chunk and reset so we can continue.
// ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
try {
chunk += message;
} catch (e) {
console.error(`Chunk appending error`, e);
chunk = "";
}
continue;
} else {
chunk = "";
}
if (message == "[DONE]") {
lastChunkTime = Number(new Date());
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
clearInterval(timeoutCheck);
response.removeListener("close", handleAbort);
resolve(fullText);
} else {
let finishReason = null;
let token = "";
try {
const json = JSON.parse(message);
token = json?.choices?.[0]?.delta?.content;
finishReason = json?.choices?.[0]?.finish_reason || null;
} catch {
continue;
}
if (token) {
fullText += token;
lastChunkTime = Number(new Date());
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
if (finishReason !== null) {
lastChunkTime = Number(new Date());
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
clearInterval(timeoutCheck);
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
if (token) {
fullText += token;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
});
if (message.finish_reason !== null) {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
});
}
// handleStream(response, stream, responseProps) {
// const timeoutThresholdMs = 500;
// const { uuid = uuidv4(), sources = [] } = responseProps;
// return new Promise((resolve) => {
// let fullText = "";
// let chunk = "";
// let lastChunkTime = null; // null when first token is still not received.
// // Establish listener to early-abort a streaming response
// // in case things go sideways or the user does not like the response.
// // We preserve the generated text but continue as if chat was completed
// // to preserve previously generated content.
// const handleAbort = () => clientAbortedHandler(resolve, fullText);
// response.on("close", handleAbort);
// // NOTICE: Not all OpenRouter models will return a stop reason
// // which keeps the connection open and so the model never finalizes the stream
// // like the traditional OpenAI response schema does. So in the case the response stream
// // never reaches a formal close state we maintain an interval timer that if we go >=timeoutThresholdMs with
// // no new chunks then we kill the stream and assume it to be complete. OpenRouter is quite fast
// // so this threshold should permit most responses, but we can adjust `timeoutThresholdMs` if
// // we find it is too aggressive.
// const timeoutCheck = setInterval(() => {
// if (lastChunkTime === null) return;
// const now = Number(new Date());
// const diffMs = now - lastChunkTime;
// if (diffMs >= timeoutThresholdMs) {
// console.log(
// `OpenRouter stream did not self-close and has been stale for >${timeoutThresholdMs}ms. Closing response stream.`
// );
// writeResponseChunk(response, {
// uuid,
// sources,
// type: "textResponseChunk",
// textResponse: "",
// close: true,
// error: false,
// });
// clearInterval(timeoutCheck);
// response.removeListener("close", handleAbort);
// resolve(fullText);
// }
// }, 500);
// stream.data.on("data", (data) => {
// const lines = data
// ?.toString()
// ?.split("\n")
// .filter((line) => line.trim() !== "");
// for (const line of lines) {
// let validJSON = false;
// const message = chunk + line.replace(/^data: /, "");
// // JSON chunk is incomplete and has not ended yet
// // so we need to stitch it together. You would think JSON
// // chunks would only come complete - but they don't!
// try {
// JSON.parse(message);
// validJSON = true;
// } catch { }
// if (!validJSON) {
// // It can be possible that the chunk decoding is running away
// // and the message chunk fails to append due to string length.
// // In this case abort the chunk and reset so we can continue.
// // ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
// try {
// chunk += message;
// } catch (e) {
// console.error(`Chunk appending error`, e);
// chunk = "";
// }
// continue;
// } else {
// chunk = "";
// }
// if (message == "[DONE]") {
// lastChunkTime = Number(new Date());
// writeResponseChunk(response, {
// uuid,
// sources,
// type: "textResponseChunk",
// textResponse: "",
// close: true,
// error: false,
// });
// clearInterval(timeoutCheck);
// response.removeListener("close", handleAbort);
// resolve(fullText);
// } else {
// let finishReason = null;
// let token = "";
// try {
// const json = JSON.parse(message);
// token = json?.choices?.[0]?.delta?.content;
// finishReason = json?.choices?.[0]?.finish_reason || null;
// } catch {
// continue;
// }
// if (token) {
// fullText += token;
// lastChunkTime = Number(new Date());
// writeResponseChunk(response, {
// uuid,
// sources: [],
// type: "textResponseChunk",
// textResponse: token,
// close: false,
// error: false,
// });
// }
// if (finishReason !== null) {
// lastChunkTime = Number(new Date());
// writeResponseChunk(response, {
// uuid,
// sources,
// type: "textResponseChunk",
// textResponse: "",
// close: true,
// error: false,
// });
// clearInterval(timeoutCheck);
// response.removeListener("close", handleAbort);
// resolve(fullText);
// }
// }
// }
// });
// });
// }
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
async embedTextInput(textInput) {
return await this.embedder.embedTextInput(textInput);
@ -420,6 +456,54 @@ class OpenRouterLLM {
}
}
async function fetchOpenRouterModels() {
return await fetch(`https://openrouter.ai/api/v1/models`, {
method: "GET",
headers: {
"Content-Type": "application/json",
},
})
.then((res) => res.json())
.then(({ data = [] }) => {
const models = {};
data.forEach((model) => {
models[model.id] = {
id: model.id,
name: model.name,
organization:
model.id.split("/")[0].charAt(0).toUpperCase() +
model.id.split("/")[0].slice(1),
maxLength: model.context_length,
};
});
// Cache all response information
if (!fs.existsSync(cacheFolder))
fs.mkdirSync(cacheFolder, { recursive: true });
fs.writeFileSync(
path.resolve(cacheFolder, "models.json"),
JSON.stringify(models),
{
encoding: "utf-8",
}
);
fs.writeFileSync(
path.resolve(cacheFolder, ".cached_at"),
String(Number(new Date())),
{
encoding: "utf-8",
}
);
return models;
})
.catch((e) => {
console.error(e);
return {};
});
}
module.exports = {
OpenRouterLLM,
fetchOpenRouterModels,
};
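Hoisting the model fetch into an exported fetchOpenRouterModels (and the cache folder to module scope) lets the cache be primed outside the constructor, replacing the old init()/#syncModels dance. Illustrative standalone usage (require path assumed):

const { fetchOpenRouterModels } = require("./utils/AiProviders/openRouter"); // path assumed

fetchOpenRouterModels().then((models) => {
  // models is keyed by id: { id, name, organization, maxLength }
  console.log(`Cached ${Object.keys(models).length} OpenRouter models`);
});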

View File

@ -1,6 +1,8 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const { chatPrompt } = require("../../chats");
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
function perplexityModels() {
const { MODELS } = require("./models.js");
@ -9,17 +11,18 @@ function perplexityModels() {
class PerplexityLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.PERPLEXITY_API_KEY)
throw new Error("No Perplexity API key was set.");
const config = new Configuration({
basePath: "https://api.perplexity.ai",
apiKey: process.env.PERPLEXITY_API_KEY,
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
baseURL: "https://api.perplexity.ai",
apiKey: process.env.PERPLEXITY_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
this.model =
modelPreference || process.env.PERPLEXITY_MODEL_PREF || "pplx-7b-online"; // Give at least a unique model to the provider as last fallback.
modelPreference ||
process.env.PERPLEXITY_MODEL_PREF ||
"sonar-small-online"; // Give at least a unique model to the provider as last fallback.
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
@ -84,8 +87,8 @@ class PerplexityLLM {
`Perplexity chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -98,13 +101,12 @@ class PerplexityLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("Perplexity chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("Perplexity chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -121,23 +123,20 @@ class PerplexityLLM {
`Perplexity chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -147,8 +146,8 @@ class PerplexityLLM {
`Perplexity chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai
.createChatCompletion({
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
@ -157,8 +156,9 @@ class PerplexityLLM {
throw new Error(e.response.data.error.message);
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -167,20 +167,17 @@ class PerplexityLLM {
`Perplexity chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponse(response, stream, responseProps);
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
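// The constructor change above follows the openai v3 -> v4 migration pattern
// that repeats across this diff. A condensed before/after sketch: the base URL
// and env var are placeholders, and the two halves are not meant to coexist in
// one runtime since they need different major versions of the "openai" package.
async function chatV3(model, messages) {
  // v3: Configuration wrapper client; payload nested under response.data
  const { Configuration, OpenAIApi } = require("openai");
  const config = new Configuration({
    basePath: "https://api.example.ai",
    apiKey: process.env.EXAMPLE_API_KEY,
  });
  const client = new OpenAIApi(config);
  const res = await client.createChatCompletion({ model, messages });
  return res.data.choices[0].message.content;
}
async function chatV4(model, messages) {
  // v4: single client with namespaced resources; payload on the result itself
  const { OpenAI } = require("openai");
  const client = new OpenAI({
    baseURL: "https://api.example.ai",
    apiKey: process.env.EXAMPLE_API_KEY ?? null,
  });
  const result = await client.chat.completions.create({ model, messages });
  return result.choices[0].message.content;
}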

View File

@ -1,7 +1,6 @@
const { chatPrompt } = require("../../chats");
const {
writeResponseChunk,
clientAbortedHandler,
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
function togetherAiModels() {
@ -11,15 +10,13 @@ function togetherAiModels() {
class TogetherAiLLM {
constructor(embedder = null, modelPreference = null) {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.TOGETHER_AI_API_KEY)
throw new Error("No TogetherAI API key was set.");
const config = new Configuration({
basePath: "https://api.together.xyz/v1",
apiKey: process.env.TOGETHER_AI_API_KEY,
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
baseURL: "https://api.together.xyz/v1",
apiKey: process.env.TOGETHER_AI_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
this.model = modelPreference || process.env.TOGETHER_AI_MODEL_PREF;
this.limits = {
history: this.promptWindowLimit() * 0.15,
@ -91,8 +88,8 @@ class TogetherAiLLM {
`Together AI chat: ${this.model} is not valid for chat completion!`
);
const textResponse = await this.openai
.createChatCompletion({
const textResponse = await this.openai.chat.completions
.create({
model: this.model,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
@ -105,13 +102,12 @@ class TogetherAiLLM {
rawHistory
),
})
.then((json) => {
const res = json.data;
if (!res.hasOwnProperty("choices"))
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("Together AI chat: No results!");
if (res.choices.length === 0)
if (result.choices.length === 0)
throw new Error("Together AI chat: No results length!");
return res.choices[0].message.content;
return result.choices[0].message.content;
})
.catch((error) => {
throw new Error(
@ -128,23 +124,20 @@ class TogetherAiLLM {
`TogetherAI chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
n: 1,
messages: await this.compressMessages(
{
systemPrompt: chatPrompt(workspace),
userPrompt: prompt,
chatHistory,
},
rawHistory
),
});
return streamRequest;
}
@ -154,14 +147,15 @@ class TogetherAiLLM {
`TogetherAI chat: ${this.model} is not valid for chat completion!`
);
const { data } = await this.openai.createChatCompletion({
const result = await this.openai.chat.completions.create({
model: this.model,
messages,
temperature,
});
if (!data.hasOwnProperty("choices")) return null;
return data.choices[0].message.content;
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
@ -170,118 +164,17 @@ class TogetherAiLLM {
`TogetherAI chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.createChatCompletion(
{
model: this.model,
stream: true,
messages,
temperature,
},
{ responseType: "stream" }
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise((resolve) => {
let fullText = "";
let chunk = "";
// Establish listener to early-abort a streaming response
// in case things go sideways or the user does not like the response.
// We preserve the generated text but continue as if chat was completed
// to preserve previously generated content.
const handleAbort = () => clientAbortedHandler(resolve, fullText);
response.on("close", handleAbort);
stream.data.on("data", (data) => {
const lines = data
?.toString()
?.split("\n")
.filter((line) => line.trim() !== "");
for (const line of lines) {
let validJSON = false;
const message = chunk + line.replace(/^data: /, "");
if (message !== "[DONE]") {
// JSON chunk is incomplete and has not ended yet
// so we need to stitch it together. You would think JSON
// chunks would only come complete - but they don't!
try {
JSON.parse(message);
validJSON = true;
} catch {}
if (!validJSON) {
// It can be possible that the chunk decoding is running away
// and the message chunk fails to append due to string length.
// In this case abort the chunk and reset so we can continue.
// ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
try {
chunk += message;
} catch (e) {
console.error(`Chunk appending error`, e);
chunk = "";
}
continue;
} else {
chunk = "";
}
}
if (message == "[DONE]") {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
} else {
let finishReason = null;
let token = "";
try {
const json = JSON.parse(message);
token = json?.choices?.[0]?.delta?.content;
finishReason = json?.choices?.[0]?.finish_reason || null;
} catch {
continue;
}
if (token) {
fullText += token;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
if (finishReason !== null) {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
}
});
});
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations

View File

@ -23,7 +23,9 @@ class AzureOpenAiEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks(textInput);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}
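// Why the wrap above matters: embedChunks expects an array of strings, while
// callers may pass a single string. A usage sketch, assuming a valid Azure
// environment configuration:
// const embedder = new AzureOpenAiEmbedder();
// await embedder.embedTextInput("hello world"); // normalized to ["hello world"]
// await embedder.embedTextInput(["a", "b"]);    // passed through unchanged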

View File

@ -31,7 +31,9 @@ class LMStudioEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks(textInput);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}

View File

@ -2,20 +2,16 @@ const { toChunks, maximumChunkLength } = require("../../helpers");
class LocalAiEmbedder {
constructor() {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.EMBEDDING_BASE_PATH)
throw new Error("No embedding base path was set.");
if (!process.env.EMBEDDING_MODEL_PREF)
throw new Error("No embedding model was set.");
const config = new Configuration({
basePath: process.env.EMBEDDING_BASE_PATH,
...(!!process.env.LOCAL_AI_API_KEY
? {
apiKey: process.env.LOCAL_AI_API_KEY,
}
: {}),
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
baseURL: process.env.EMBEDDING_BASE_PATH,
apiKey: process.env.LOCAL_AI_API_KEY ?? null,
});
this.openai = new OpenAIApi(config);
// Limit of how many strings we can process in a single pass to stay within resource or network limits
this.maxConcurrentChunks = 50;
@ -23,7 +19,9 @@ class LocalAiEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks(textInput);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}
@ -32,13 +30,13 @@ class LocalAiEmbedder {
for (const chunk of toChunks(textChunks, this.maxConcurrentChunks)) {
embeddingRequests.push(
new Promise((resolve) => {
this.openai
.createEmbedding({
this.openai.embeddings
.create({
model: process.env.EMBEDDING_MODEL_PREF,
input: chunk,
})
.then((res) => {
resolve({ data: res.data?.data, error: null });
.then((result) => {
resolve({ data: result?.data, error: null });
})
.catch((e) => {
e.type =

View File

@ -119,7 +119,9 @@ class NativeEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks(textInput);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}

View File

@ -30,7 +30,9 @@ class OllamaEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks([textInput]);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}

View File

@ -2,13 +2,11 @@ const { toChunks } = require("../../helpers");
class OpenAiEmbedder {
constructor() {
const { Configuration, OpenAIApi } = require("openai");
if (!process.env.OPEN_AI_KEY) throw new Error("No OpenAI API key was set.");
const config = new Configuration({
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
apiKey: process.env.OPEN_AI_KEY,
});
const openai = new OpenAIApi(config);
this.openai = openai;
this.model = process.env.EMBEDDING_MODEL_PREF || "text-embedding-ada-002";
// Limit of how many strings we can process in a single pass to stay within resource or network limits
@ -19,7 +17,9 @@ class OpenAiEmbedder {
}
async embedTextInput(textInput) {
const result = await this.embedChunks(textInput);
const result = await this.embedChunks(
Array.isArray(textInput) ? textInput : [textInput]
);
return result?.[0] || [];
}
@ -31,13 +31,13 @@ class OpenAiEmbedder {
for (const chunk of toChunks(textChunks, this.maxConcurrentChunks)) {
embeddingRequests.push(
new Promise((resolve) => {
this.openai
.createEmbedding({
this.openai.embeddings
.create({
model: this.model,
input: chunk,
})
.then((res) => {
resolve({ data: res.data?.data, error: null });
.then((result) => {
resolve({ data: result?.data, error: null });
})
.catch((e) => {
e.type =

View File

@ -0,0 +1,103 @@
const bcrypt = require("bcrypt");
const { v4, validate } = require("uuid");
const { User } = require("../../models/user");
const {
RecoveryCode,
PasswordResetToken,
} = require("../../models/passwordRecovery");
async function generateRecoveryCodes(userId) {
const newRecoveryCodes = [];
const plainTextCodes = [];
for (let i = 0; i < 4; i++) {
const code = v4();
const hashedCode = bcrypt.hashSync(code, 10);
newRecoveryCodes.push({
user_id: userId,
code_hash: hashedCode,
});
plainTextCodes.push(code);
}
const { error } = await RecoveryCode.createMany(newRecoveryCodes);
if (!!error) throw new Error(error);
const { user: success } = await User._update(userId, {
seen_recovery_codes: true,
});
if (!success) throw new Error("Failed to generate user recovery codes!");
return plainTextCodes;
}
async function recoverAccount(username = "", recoveryCodes = []) {
const user = await User.get({ username: String(username) });
if (!user) return { success: false, error: "Invalid recovery codes." };
// If recovery-code hashes do not exist for this user, it is because they have
// not logged out and back in since the upgrade.
const allUserHashes = await RecoveryCode.hashesForUser(user.id);
if (allUserHashes.length < 4)
return { success: false, error: "Invalid recovery codes." };
// If they tried to send more than two unique codes, we only take the first two
const uniqueRecoveryCodes = [...new Set(recoveryCodes)]
.map((code) => code.trim())
.filter((code) => validate(code)) // we know that any provided code must be a uuid v4.
.slice(0, 2);
if (uniqueRecoveryCodes.length !== 2)
return { success: false, error: "Invalid recovery codes." };
const validCodes = uniqueRecoveryCodes.every((code) => {
let valid = false;
allUserHashes.forEach((hash) => {
if (bcrypt.compareSync(code, hash)) valid = true;
});
return valid;
});
if (!validCodes) return { success: false, error: "Invalid recovery codes." };
const { passwordResetToken, error } = await PasswordResetToken.create(
user.id
);
if (!!error) return { success: false, error };
return { success: true, resetToken: passwordResetToken.token };
}
async function resetPassword(token, _newPassword = "", confirmPassword = "") {
const newPassword = String(_newPassword).trim(); // No spaces in passwords
if (!newPassword) throw new Error("Invalid password.");
if (newPassword !== String(confirmPassword))
throw new Error("Passwords do not match");
const resetToken = await PasswordResetToken.findUnique({
token: String(token),
});
if (!resetToken || resetToken.expiresAt < new Date()) {
return { success: false, message: "Invalid reset token" };
}
// JOI password rules will be enforced inside .update.
const { error } = await User.update(resetToken.user_id, {
password: newPassword,
});
// seen_recovery_codes is not publicly writable
// so we have to do direct update here
await User._update(resetToken.user_id, {
seen_recovery_codes: false,
});
if (error) return { success: false, message: error };
await PasswordResetToken.deleteMany({ user_id: resetToken.user_id });
await RecoveryCode.deleteMany({ user_id: resetToken.user_id });
// New codes are provided on first new login.
return { success: true, message: "Password reset successful" };
}
module.exports = {
recoverAccount,
resetPassword,
generateRecoveryCodes,
};
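// Hypothetical Express wiring for the helpers above. The route paths are
// assumptions, and JSON body parsing is assumed to be enabled on `app`.
app.post("/system/recover-account", async (request, response) => {
  const { username, recoveryCodes } = request.body;
  const { success, resetToken, error } = await recoverAccount(
    username,
    recoveryCodes
  );
  response.status(success ? 200 : 400).json({ success, resetToken, error });
});
app.post("/system/reset-password", async (request, response) => {
  try {
    const { token, newPassword, confirmPassword } = request.body;
    const { success, message } = await resetPassword(
      token,
      newPassword,
      confirmPassword
    );
    response.status(success ? 200 : 400).json({ success, message });
  } catch (e) {
    // resetPassword throws on an empty or mismatched password
    response.status(400).json({ success: false, message: e.message });
  }
});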

View File

@ -64,7 +64,7 @@ class RecursiveSplitter {
constructor({ chunkSize, chunkOverlap }) {
const {
RecursiveCharacterTextSplitter,
} = require("langchain/text_splitter");
} = require("@langchain/textsplitters");
this.log(`Will split with`, { chunkSize, chunkOverlap });
this.engine = new RecursiveCharacterTextSplitter({
chunkSize,

View File

@ -21,6 +21,19 @@ const chatHistory = {
// We need a full conversation turn to reply to: prev must be from
// the USER and last must be from anyone other than the user.
if (prev.from !== "USER" || last.from === "USER") return;
// If we have a post-reply flow, save the chat using this special flow
// so that post-save cleanup and other unique properties can run, as opposed to a regular chat save.
if (aibitat.hasOwnProperty("_replySpecialAttributes")) {
await this._storeSpecial(aibitat, {
prompt: prev.content,
response: last.content,
options: aibitat._replySpecialAttributes,
});
delete aibitat._replySpecialAttributes;
return;
}
await this._store(aibitat, {
prompt: prev.content,
response: last.content,
@ -42,6 +55,28 @@ const chatHistory = {
threadId: invocation?.thread_id || null,
});
},
_storeSpecial: async function (
aibitat,
{ prompt, response, options = {} } = {}
) {
const invocation = aibitat.handlerProps.invocation;
await WorkspaceChats.new({
workspaceId: Number(invocation.workspace_id),
prompt,
response: {
sources: options?.sources ?? [],
// When _storeSpecial is called, the options param can include a storedResponse() function
// that overrides the stored text property with extra information, depending on the special chat type.
text: options.hasOwnProperty("storedResponse")
? options.storedResponse(response)
: response,
type: options?.saveAsType ?? "chat",
},
user: { id: invocation?.user_id || null },
threadId: invocation?.thread_id || null,
});
options?.postSave?.();
},
};
},
};
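// Sketch of the contract a plugin sets to trigger _storeSpecial, inferred from
// the code above (every field has a fallback or is optional):
aibitat._replySpecialAttributes = {
  sources: [],                    // optional, defaults to []
  saveAsType: "rechartVisualize", // optional, defaults to "chat"
  storedResponse: (text) => text, // optional, overrides the stored text
  postSave: () => {},             // called after the chat row is written
};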

View File

@ -5,6 +5,7 @@ const { docSummarizer } = require("./summarize.js");
const { saveFileInBrowser } = require("./save-file-browser.js");
const { chatHistory } = require("./chat-history.js");
const { memory } = require("./memory.js");
const { rechart } = require("./rechart.js");
module.exports = {
webScraping,
@ -14,6 +15,7 @@ module.exports = {
saveFileInBrowser,
chatHistory,
memory,
rechart,
// Plugin name aliases so they can be pulled by slug as well.
[webScraping.name]: webScraping,
@ -23,4 +25,5 @@ module.exports = {
[saveFileInBrowser.name]: saveFileInBrowser,
[chatHistory.name]: chatHistory,
[memory.name]: memory,
[rechart.name]: rechart,
};

View File

@ -0,0 +1,109 @@
const { safeJsonParse } = require("../../../http");
const { Deduplicator } = require("../utils/dedupe");
const rechart = {
name: "create-chart",
startupConfig: {
params: {},
},
plugin: function () {
return {
name: this.name,
setup(aibitat) {
// Generate the Recharts JSON payload for a chart based on the user's prompt and available data.
aibitat.function({
super: aibitat,
name: this.name,
tracker: new Deduplicator(),
description:
"Generates the JSON data required to generate a RechartJS chart to the user based on their prompt and available data.",
parameters: {
$schema: "http://json-schema.org/draft-07/schema#",
type: "object",
properties: {
type: {
type: "string",
enum: [
"area",
"bar",
"line",
"composed",
"scatter",
"pie",
"radar",
"radialBar",
"treemap",
"funnel",
],
description: "The type of chart to be generated.",
},
title: {
type: "string",
description:
"Title of the chart. There MUST always be a title. Do not leave it blank.",
},
dataset: {
type: "string",
description: `Valid JSON in which each element is an object for the Recharts API for the defined 'type' of chart, WITHOUT newline characters. Strictly use this FORMAT and naming:
[{ "name": "a", "value": 12 }]
Make sure the field "name" always stays named "name". Instead of naming the value field "value", name it after the user's metric and keep that name identical across every item.
Make sure the format uses double quotes and property names are string literals. Provide JSON data only.`,
},
},
additionalProperties: false,
},
required: ["type", "title", "dataset"],
handler: async function ({ type, dataset, title }) {
try {
if (!this.tracker.isUnique(this.name)) {
this.super.handlerProps.log(
`${this.name} has been run for this chat response already. It can only be called once per chat.`
);
return "The chart was generated and returned to the user. This function completed successfully. Do not call this function again.";
}
const data = safeJsonParse(dataset, null);
if (data === null) {
this.super.introspect(
`${this.caller}: ${this.name} provided invalid JSON data - so we can't make a ${type} chart.`
);
return "Invalid JSON provided. Please only provide valid RechartJS JSON to generate a chart.";
}
this.super.introspect(`${this.caller}: Rendering ${type} chart.`);
this.super.socket.send("rechartVisualize", {
type,
dataset,
title,
});
this.super._replySpecialAttributes = {
saveAsType: "rechartVisualize",
storedResponse: (additionalText = "") =>
JSON.stringify({
type,
dataset,
title,
caption: additionalText,
}),
postSave: () => this.tracker.removeUniqueConstraint(this.name),
};
this.tracker.markUnique(this.name);
return "The chart was generated and returned to the user. This function completed successfully. Do not make another chart.";
} catch (error) {
this.super.handlerProps.log(
`create-chart raised an error. ${error.message}`
);
return `Let the user know this action was not successful. An error was raised while generating the chart. ${error.message}`;
}
},
});
},
};
},
};
module.exports = {
rechart,
};
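// Illustrative-only example of the arguments the model is expected to emit for
// this function (the values are invented):
const exampleArgs = {
  type: "bar",
  title: "Monthly Sales",
  dataset:
    '[{ "name": "Jan", "sales": 12 }, { "name": "Feb", "sales": 19 }, { "name": "Mar", "sales": 7 }]',
};
// On success the frontend receives a "rechartVisualize" socket event carrying
// { type, dataset, title }, and the chat row is saved via _storeSpecial with the
// stringified { type, dataset, title, caption } payload as its stored text.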

View File

@ -2,8 +2,8 @@
* A service that provides an AI client to create a completion.
*/
const { ChatOpenAI } = require("langchain/chat_models/openai");
const { ChatAnthropic } = require("langchain/chat_models/anthropic");
const { ChatOpenAI } = require("@langchain/openai");
const { ChatAnthropic } = require("@langchain/anthropic");
class Provider {
_client;
@ -22,17 +22,17 @@ class Provider {
switch (provider) {
case "openai":
return new ChatOpenAI({
openAIApiKey: process.env.OPEN_AI_KEY,
apiKey: process.env.OPEN_AI_KEY,
...config,
});
case "anthropic":
return new ChatAnthropic({
anthropicApiKey: process.env.ANTHROPIC_API_KEY,
apiKey: process.env.ANTHROPIC_API_KEY,
...config,
});
default:
return new ChatOpenAI({
openAIApiKey: process.env.OPEN_AI_KEY,
apiKey: process.env.OPEN_AI_KEY,
...config,
});
}

View File

@ -1,4 +1,4 @@
const OpenAI = require("openai-latest");
const OpenAI = require("openai");
const Provider = require("./ai-provider.js");
const { RetryError } = require("../error.js");

View File

@ -9,10 +9,17 @@
// ... do random # of times.
// We want to block reruns of a plugin with the exact same inputs, so we add this to prevent
// that behavior from spamming the user (or re-running some other costly function).
// trackRun/isDuplicate prevents _exact_ data re-runs based on a SHA of their inputs.
// startCooldown/isOnCooldown prevents _near-duplicate_ runs based only on the name of the running function.
// isUnique/markUnique/removeUniqueConstraint prevents one-time functions from re-running, e.g. charting.
const crypto = require("crypto");
const DEFAULT_COOLDOWN_MS = 5 * 1000;
class Deduplicator {
#hashes = {};
#cooldowns = {};
#uniques = {};
constructor() {}
trackRun(key, params = {}) {
@ -30,6 +37,32 @@ class Deduplicator {
.digest("hex");
return this.#hashes.hasOwnProperty(newSig);
}
startCooldown(
key,
parameters = {
cooldownInMs: DEFAULT_COOLDOWN_MS,
}
) {
this.#cooldowns[key] = Number(new Date()) + Number(parameters.cooldownInMs);
}
isOnCooldown(key) {
if (!this.#cooldowns.hasOwnProperty(key)) return false;
return Number(new Date()) <= this.#cooldowns[key];
}
isUnique(key) {
return !this.#uniques.hasOwnProperty(key);
}
removeUniqueConstraint(key) {
delete this.#uniques[key];
}
markUnique(key) {
this.#uniques[key] = Number(new Date());
}
}
module.exports.Deduplicator = Deduplicator;
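// Minimal usage sketch of the three mechanisms above (keys and params are
// illustrative):
const dedupe = new Deduplicator();
// 1. Exact-input dedupe: skip a run whose inputs hash to one already seen.
const params = { url: "https://example.com" };
if (!dedupe.isDuplicate("web-scrape", params)) dedupe.trackRun("web-scrape", params);
// 2. Name-based cooldown: block near-duplicate runs for a window of time.
if (!dedupe.isOnCooldown("summarize"))
  dedupe.startCooldown("summarize", { cooldownInMs: 10_000 });
// 3. One-time functions (e.g. charting): run once, then reset when allowed.
if (dedupe.isUnique("create-chart")) dedupe.markUnique("create-chart");
dedupe.removeUniqueConstraint("create-chart"); // permit another run later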

View File

@ -1,6 +1,6 @@
const { loadSummarizationChain } = require("langchain/chains");
const { PromptTemplate } = require("langchain/prompts");
const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
const { PromptTemplate } = require("@langchain/core/prompts");
const { RecursiveCharacterTextSplitter } = require("@langchain/textsplitters");
const Provider = require("../providers/ai-provider");
/**
* Summarize content using OpenAI's GPT-3.5 model.

View File

@ -77,11 +77,10 @@ async function streamChatWithForEmbed(
chatMode
);
// Look for pinned documents and see if the user decided to use this feature. We will also do a vector search
// as pinning is a supplemental tool but it should be used with caution since it can easily blow up a context window.
// See stream.js comment for more information on this implementation.
await new DocumentManager({
workspace: embed.workspace,
maxTokens: LLMConnector.limits.system,
maxTokens: LLMConnector.promptWindowLimit(),
})
.pinnedDocs()
.then((pinnedDocs) => {
@ -137,6 +136,7 @@ async function streamChatWithForEmbed(
id: uuid,
type: "textResponse",
textResponse:
embed.workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query.",
sources: [],
close: true,

View File

@ -70,6 +70,7 @@ async function chatWithWorkspace(
close: true,
error: null,
textResponse:
workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query.",
};
}
@ -88,11 +89,10 @@ async function chatWithWorkspace(
chatMode,
});
// Look for pinned documents and see if the user decided to use this feature. We will also do a vector search
// as pinning is a supplemental tool but it should be used with caution since it can easily blow up a context window.
// See stream.js comment for more information on this implementation.
await new DocumentManager({
workspace,
maxTokens: LLMConnector.limits.system,
maxTokens: LLMConnector.promptWindowLimit(),
})
.pinnedDocs()
.then((pinnedDocs) => {
@ -150,6 +150,7 @@ async function chatWithWorkspace(
close: true,
error: null,
textResponse:
workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query.",
};
}

View File

@ -79,6 +79,7 @@ async function streamChatWithWorkspace(
id: uuid,
type: "textResponse",
textResponse:
workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query.",
sources: [],
close: true,
@ -104,9 +105,13 @@ async function streamChatWithWorkspace(
// Look for pinned documents and see if the user decided to use this feature. We will also do a vector search
// as pinning is a supplemental tool but it should be used with caution since it can easily blow up a context window.
// However, we limit the maximum appended context to 80% of its overall size, mostly because anything expanding
// beyond that would undergo prompt compression anyway to make it work. If so much is pinned that the context here
// is bigger than what the model can support, it would get compressed as well, and that is not the point of pinning.
// Pinning is really best suited for high-context models.
await new DocumentManager({
workspace,
maxTokens: LLMConnector.limits.system,
maxTokens: LLMConnector.promptWindowLimit(),
})
.pinnedDocs()
.then((pinnedDocs) => {
@ -162,6 +167,7 @@ async function streamChatWithWorkspace(
id: uuid,
type: "textResponse",
textResponse:
workspace?.queryRefusalResponse ??
"There is no relevant information in this workspace to answer your query.",
sources: [],
close: true,

View File

@ -9,6 +9,53 @@ function clientAbortedHandler(resolve, fullText) {
return;
}
function handleDefaultStreamResponseV2(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise(async (resolve) => {
let fullText = "";
// Establish listener to early-abort a streaming response
// in case things go sideways or the user does not like the response.
// We preserve the generated text but continue as if chat was completed
// to preserve previously generated content.
const handleAbort = () => clientAbortedHandler(resolve, fullText);
response.on("close", handleAbort);
for await (const chunk of stream) {
const message = chunk?.choices?.[0];
const token = message?.delta?.content;
if (token) {
fullText += token;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: token,
close: false,
error: false,
});
}
// LocalAI returns '' and others return null.
if (message && message.finish_reason !== "" && message.finish_reason !== null) {
writeResponseChunk(response, {
uuid,
sources,
type: "textResponseChunk",
textResponse: "",
close: true,
error: false,
});
response.removeListener("close", handleAbort);
resolve(fullText);
}
}
});
}
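// Providers in this diff consume this helper by passing the async iterable the
// v4 SDK returns straight through (condensed from the Perplexity and TogetherAI
// classes above):
// const stream = await this.openai.chat.completions.create({
//   model: this.model,
//   stream: true,
//   messages,
// });
// ...
// handleStream(response, stream, responseProps) {
//   return handleDefaultStreamResponseV2(response, stream, responseProps);
// }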
// TODO: Fully remove - deprecated.
// The default way to handle a stream response. Functions best with OpenAI.
// Currently used for LMStudio, LocalAI, Mistral API, and OpenAI
function handleDefaultStreamResponse(response, stream, responseProps) {
@ -123,6 +170,7 @@ function convertToChatHistory(history = []) {
sentAt: moment(createdAt).unix(),
},
{
type: data?.type || "chart",
role: "assistant",
content: data.text,
sources: data.sources || [],
@ -155,6 +203,7 @@ function writeResponseChunk(response, data) {
}
module.exports = {
handleDefaultStreamResponseV2,
handleDefaultStreamResponse,
convertToChatHistory,
convertToPromptHistory,

View File

@ -1,4 +1,7 @@
const { OpenRouterLLM } = require("../AiProviders/openRouter");
const {
OpenRouterLLM,
fetchOpenRouterModels,
} = require("../AiProviders/openRouter");
const { perplexityModels } = require("../AiProviders/perplexity");
const { togetherAiModels } = require("../AiProviders/togetherAi");
const SUPPORT_CUSTOM_MODELS = [
@ -42,14 +45,13 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
}
async function openAiModels(apiKey = null) {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
apiKey: apiKey || process.env.OPEN_AI_KEY,
});
const openai = new OpenAIApi(config);
const allModels = await openai
.listModels()
.then((res) => res.data.data)
const allModels = await openai.models
.list()
.then((results) => results.data)
.catch((e) => {
console.error(`OpenAI:listModels`, e.message);
return [
@ -129,15 +131,14 @@ async function openAiModels(apiKey = null) {
}
async function localAIModels(basePath = null, apiKey = null) {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: basePath || process.env.LOCAL_AI_BASE_PATH,
apiKey: apiKey || process.env.LOCAL_AI_API_KEY,
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
baseURL: basePath || process.env.LOCAL_AI_BASE_PATH,
apiKey: apiKey || process.env.LOCAL_AI_API_KEY || null,
});
const openai = new OpenAIApi(config);
const models = await openai
.listModels()
.then((res) => res.data.data)
const models = await openai.models
.list()
.then((results) => results.data)
.catch((e) => {
console.error(`LocalAI:listModels`, e.message);
return [];
@ -150,14 +151,14 @@ async function localAIModels(basePath = null, apiKey = null) {
async function getLMStudioModels(basePath = null) {
try {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
basePath: basePath || process.env.LMSTUDIO_BASE_PATH,
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
baseURL: basePath || process.env.LMSTUDIO_BASE_PATH,
apiKey: null,
});
const openai = new OpenAIApi(config);
const models = await openai
.listModels()
.then((res) => res.data.data)
const models = await openai.models
.list()
.then((results) => results.data)
.catch((e) => {
console.error(`LMStudio:listModels`, e.message);
return [];
@ -232,8 +233,7 @@ async function getPerplexityModels() {
}
async function getOpenRouterModels() {
const openrouter = await new OpenRouterLLM().init();
const knownModels = openrouter.models();
const knownModels = await fetchOpenRouterModels();
if (Object.keys(knownModels).length === 0)
return { models: [], error: null };
@ -248,15 +248,16 @@ async function getOpenRouterModels() {
}
async function getMistralModels(apiKey = null) {
const { Configuration, OpenAIApi } = require("openai");
const config = new Configuration({
apiKey: apiKey || process.env.MISTRAL_API_KEY,
basePath: "https://api.mistral.ai/v1",
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
apiKey: apiKey || process.env.MISTRAL_API_KEY || null,
baseURL: "https://api.mistral.ai/v1",
});
const openai = new OpenAIApi(config);
const models = await openai
.listModels()
.then((res) => res.data.data.filter((model) => !model.id.includes("embed")))
const models = await openai.models
.list()
.then((results) =>
results.data.filter((model) => !model.id.includes("embed"))
)
.catch((e) => {
console.error(`Mistral:listModels`, e.message);
return [];

View File

@ -64,8 +64,24 @@ function flexUserRoleValid(allowedRoles = DEFAULT_ROLES) {
};
}
// Middleware check for a public route that should only respond when the
// instance is in a valid multi-user setup.
async function isMultiUserSetup(_request, response, next) {
const multiUserMode = await SystemSettings.isMultiUserMode();
if (!multiUserMode) {
response.status(403).json({
error: "Invalid request",
});
return;
}
next();
return;
}
module.exports = {
ROLES,
strictMultiUserRoleValid,
flexUserRoleValid,
isMultiUserSetup,
};
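// Hypothetical usage (the route path is an assumption): gate a public endpoint
// so it only responds when the instance is actually in multi-user mode, e.g.
// the account recovery flow.
app.post(
  "/system/recover-account",
  [isMultiUserSetup],
  async (request, response) => {
    // handler body runs only when multi-user mode is enabled
  }
);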

View File

@ -4,7 +4,6 @@ const {
getLLMProvider,
getEmbeddingEngineSelection,
} = require("../../helpers");
const { OpenAIEmbeddings } = require("langchain/embeddings/openai");
const { TextSplitter } = require("../../TextSplitter");
const { SystemSettings } = require("../../../models/systemSettings");
const { storeVectorResult, cachedVectorInformation } = require("../../files");
@ -57,9 +56,6 @@ const LanceDb = {
const table = await client.openTable(_namespace);
return (await table.countRows()) || 0;
},
embedder: function () {
return new OpenAIEmbeddings({ openAIApiKey: process.env.OPEN_AI_KEY });
},
similarityResponse: async function (
client,
namespace,
@ -82,7 +78,8 @@ const LanceDb = {
.execute();
response.forEach((item) => {
if (this.distanceToSimilarity(item.score) < similarityThreshold) return;
if (this.distanceToSimilarity(item._distance) < similarityThreshold)
return;
const { vector: _, ...rest } = item;
if (filterIdentifiers.includes(sourceIdentifier(rest))) {
console.log(
@ -92,8 +89,11 @@ const LanceDb = {
}
result.contextTexts.push(rest.text);
result.sourceDocuments.push(rest);
result.scores.push(this.distanceToSimilarity(item.score));
result.sourceDocuments.push({
...rest,
score: this.distanceToSimilarity(item._distance),
});
result.scores.push(this.distanceToSimilarity(item._distance));
});
return result;
@ -323,7 +323,7 @@ const LanceDb = {
curateSources: function (sources = []) {
const documents = [];
for (const source of sources) {
const { text, vector: _v, score: _s, ...rest } = source;
const { text, vector: _v, _distance: _d, ...rest } = source;
const metadata = rest.hasOwnProperty("metadata") ? rest.metadata : rest;
if (Object.keys(metadata).length > 0) {
documents.push({

File diff suppressed because it is too large