Mirror of https://github.com/Mintplex-Labs/anything-llm.git, synced 2024-11-19 20:50:09 +01:00

Commit aa3141aa4b: Merge branch 'master' of github.com:Mintplex-Labs/anything-llm into render

.github/workflows/dev-build.yaml (vendored, 2 changes)
@@ -6,7 +6,7 @@ concurrency:
 on:
   push:
-    branches: ['vex'] # put your current branch to create a build. Core team only.
+    branches: ['558-multi-modal-support'] # put your current branch to create a build. Core team only.
     paths-ignore:
       - '**.md'
       - 'cloud-deployments/*'
.vscode/settings.json (vendored, 3 changes)
@@ -29,6 +29,7 @@
     "mbox",
     "Milvus",
     "Mintplex",
     "mixtral",
     "moderations",
+    "numpages",
     "Ollama",
@@ -38,11 +39,13 @@
     "openrouter",
+    "pagerender",
     "Qdrant",
     "royalblue",
     "searxng",
     "Serper",
     "Serply",
     "textgenwebui",
     "togetherai",
+    "Unembed",
     "vectordbs",
     "Weaviate",
     "Zilliz"
README.md (20 changes)
@@ -53,19 +53,19 @@ AnythingLLM is a full-stack application where you can use commercial off-the-she
 AnythingLLM divides your documents into objects called `workspaces`. A Workspace functions a lot like a thread, but with the addition of containerization of your documents. Workspaces can share documents, but they do not talk to each other so you can keep your context for each workspace clean.

-Some cool features of AnythingLLM
+## Cool features of AnythingLLM

-- **Multi-user instance support and permissioning**
-- Agents inside your workspace (browse the web, run code, etc)
-- [Custom Embeddable Chat widget for your website](./embed/README.md)
-- Multiple document type support (PDF, TXT, DOCX, etc)
-- Manage documents in your vector database from a simple UI
-- Two chat modes `conversation` and `query`. Conversation retains previous questions and amendments. Query is simple QA against your documents
-- In-chat citations
+- 🆕 **Multi-modal support (both closed and open-source LLMs!)**
+- 👤 Multi-user instance support and permissioning _Docker version only_
+- 🦾 Agents inside your workspace (browse the web, run code, etc)
+- 💬 [Custom Embeddable Chat widget for your website](./embed/README.md) _Docker version only_
+- 📖 Multiple document type support (PDF, TXT, DOCX, etc)
+- Simple chat UI with Drag-n-Drop functionality and clear citations.
 - 100% Cloud deployment ready.
 - "Bring your own LLM" model.
-- Extremely efficient cost-saving measures for managing very large documents. You'll never pay to embed a massive document or transcript more than once. 90% more cost effective than other document chatbot solutions.
+- Works with all popular [closed and open-source LLM providers](#supported-llms-embedder-models-speech-models-and-vector-databases).
+- Built-in cost & time-saving measures for managing very large documents compared to any other chat UI.
 - Full Developer API for custom integrations!
+- Much more...install and find out!

 ### Supported LLMs, Embedder Models, Speech models, and Vector Databases
@@ -89,7 +89,7 @@
           "mkdir -p /home/ec2-user/anythingllm\n",
           "touch /home/ec2-user/anythingllm/.env\n",
           "sudo chown ec2-user:ec2-user -R /home/ec2-user/anythingllm\n",
-          "docker pull mintplexlabs/anythingllm:master\n",
+          "docker pull mintplexlabs/anythingllm\n",
           "docker run -d -p 3001:3001 --cap-add SYS_ADMIN -v /home/ec2-user/anythingllm:/app/server/storage -v /home/ec2-user/anythingllm/.env:/app/server/.env -e STORAGE_DIR=\"/app/server/storage\" mintplexlabs/anythingllm\n",
           "echo \"Container ID: $(sudo docker ps --latest --quiet)\"\n",
           "export ONLINE=$(curl -Is http://localhost:3001/api/ping | head -n 1|cut -d$' ' -f2)\n",
@@ -10,9 +10,8 @@ sudo systemctl start docker

 mkdir -p /home/anythingllm
 touch /home/anythingllm/.env
 sudo chown -R ubuntu:ubuntu /home/anythingllm

-sudo docker pull mintplexlabs/anythingllm:master
+sudo docker pull mintplexlabs/anythingllm
 sudo docker run -d -p 3001:3001 --cap-add SYS_ADMIN -v /home/anythingllm:/app/server/storage -v /home/anythingllm/.env:/app/server/.env -e STORAGE_DIR="/app/server/storage" mintplexlabs/anythingllm
 echo "Container ID: $(sudo docker ps --latest --quiet)"
@@ -34,7 +34,7 @@ resources:
       touch /home/anythingllm/.env
       sudo chown -R ubuntu:ubuntu /home/anythingllm

-      sudo docker pull mintplexlabs/anythingllm:master
+      sudo docker pull mintplexlabs/anythingllm
       sudo docker run -d -p 3001:3001 --cap-add SYS_ADMIN -v /home/anythingllm:/app/server/storage -v /home/anythingllm/.env:/app/server/.env -e STORAGE_DIR="/app/server/storage" mintplexlabs/anythingllm
       echo "Container ID: $(sudo docker ps --latest --quiet)"
@@ -89,13 +89,16 @@ function GroqAIModelSelection({ apiKey, settings }) {
       name="GroqModelPref"
       required={true}
       className="border-none bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
-      defaultValue={settings?.GroqModelPref}
     >
       {customModels.length > 0 && (
         <optgroup label="Available models">
           {customModels.map((model) => {
             return (
-              <option key={model.id} value={model.id}>
+              <option
+                key={model.id}
+                value={model.id}
+                selected={settings?.GroqModelPref === model.id}
+              >
                 {model.id}
               </option>
             );
@@ -1,9 +1,11 @@
 import System from "@/models/system";
+import { CaretDown, CaretUp } from "@phosphor-icons/react";
 import { useState, useEffect } from "react";

 export default function OpenRouterOptions({ settings }) {
   return (
-    <div className="flex gap-[36px] mt-1.5">
+    <div className="flex flex-col gap-y-4 mt-1.5">
+      <div className="flex gap-[36px]">
       <div className="flex flex-col w-60">
         <label className="text-white text-sm font-semibold block mb-3">
           OpenRouter API Key
@@ -23,6 +25,47 @@ export default function OpenRouterOptions({ settings }) {
         <OpenRouterModelSelection settings={settings} />
       )}
       </div>
+      <AdvancedControls settings={settings} />
+    </div>
   );
 }

+function AdvancedControls({ settings }) {
+  const [showAdvancedControls, setShowAdvancedControls] = useState(false);
+
+  return (
+    <div className="flex flex-col gap-y-4">
+      <button
+        type="button"
+        onClick={() => setShowAdvancedControls(!showAdvancedControls)}
+        className="text-white hover:text-white/70 flex items-center text-sm"
+      >
+        {showAdvancedControls ? "Hide" : "Show"} advanced controls
+        {showAdvancedControls ? (
+          <CaretUp size={14} className="ml-1" />
+        ) : (
+          <CaretDown size={14} className="ml-1" />
+        )}
+      </button>
+      <div hidden={!showAdvancedControls}>
+        <div className="flex flex-col w-60">
+          <label className="text-white text-sm font-semibold block mb-3">
+            Stream Timeout (ms)
+          </label>
+          <input
+            type="number"
+            name="OpenRouterTimeout"
+            className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
+            placeholder="Timeout value between token responses to auto-timeout the stream"
+            defaultValue={settings?.OpenRouterTimeout ?? 500}
+            autoComplete="off"
+            onScroll={(e) => e.target.blur()}
+            min={500}
+            step={1}
+          />
+        </div>
+      </div>
+    </div>
+  );
+}
@@ -69,6 +69,7 @@ export function EditMessageForm({
   role,
   chatId,
   message,
+  attachments = [],
   adjustTextArea,
   saveChanges,
 }) {
@@ -77,15 +78,15 @@ export function EditMessageForm({
     e.preventDefault();
     const form = new FormData(e.target);
     const editedMessage = form.get("editedMessage");
-    saveChanges({ editedMessage, chatId, role });
+    saveChanges({ editedMessage, chatId, role, attachments });
     window.dispatchEvent(
-      new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+      new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
     );
   }

   function cancelEdits() {
     window.dispatchEvent(
-      new CustomEvent(EDIT_EVENT, { detail: { chatId, role } })
+      new CustomEvent(EDIT_EVENT, { detail: { chatId, role, attachments } })
     );
     return false;
   }
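The EDIT_EVENT payload now carries attachments alongside chatId and role. A minimal sketch of a consumer of that event; the hook placement and the setEditing handler are illustrative assumptions, only the event name and detail shape come from the diff above:

useEffect(() => {
  // Toggle edit mode when an EDIT_EVENT targets this specific message.
  function handleEditEvent({ detail }) {
    const { chatId: eventChatId, role: eventRole, attachments = [] } = detail;
    if (eventChatId !== chatId || eventRole !== role) return;
    setEditing((prev) => !prev); // hypothetical local state setter
  }
  window.addEventListener(EDIT_EVENT, handleEditEvent);
  return () => window.removeEventListener(EDIT_EVENT, handleEditEvent);
}, [chatId, role]);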
@@ -19,6 +19,7 @@ const HistoricalMessage = ({
   role,
   workspace,
   sources = [],
+  attachments = [],
   error = false,
   feedbackScore = null,
   chatId = null,
@@ -92,16 +93,20 @@ const HistoricalMessage = ({
           role={role}
           chatId={chatId}
           message={message}
+          attachments={attachments}
           adjustTextArea={adjustTextArea}
           saveChanges={saveEditedMessage}
         />
       ) : (
+        <div>
           <span
             className={`flex flex-col gap-y-1`}
             dangerouslySetInnerHTML={{
               __html: DOMPurify.sanitize(renderMarkdown(message)),
             }}
           />
+          <ChatAttachments attachments={attachments} />
+        </div>
       )}
     </div>
     <div className="flex gap-x-5 ml-14">
@@ -160,3 +165,18 @@ export default memo(
   );
 }
 );
+
+function ChatAttachments({ attachments = [] }) {
+  if (!attachments.length) return null;
+  return (
+    <div className="flex flex-wrap gap-2">
+      {attachments.map((item) => (
+        <img
+          key={item.name}
+          src={item.contentString}
+          className="max-w-[300px] rounded-md"
+        />
+      ))}
+    </div>
+  );
+}
@@ -17,6 +17,7 @@ export default function ChatHistory({
   sendCommand,
   updateHistory,
   regenerateAssistantMessage,
+  hasAttachments = false,
 }) {
   const { user } = useUser();
   const { threadSlug = null } = useParams();
@@ -92,7 +93,12 @@ export default function ChatHistory({
     sendCommand(`${heading} ${message}`, true);
   };

-  const saveEditedMessage = async ({ editedMessage, chatId, role }) => {
+  const saveEditedMessage = async ({
+    editedMessage,
+    chatId,
+    role,
+    attachments = [],
+  }) => {
     if (!editedMessage) return; // Don't save empty edits.

     // if the edit was a user message, we will auto-regenerate the response and delete all
@@ -109,7 +115,7 @@ export default function ChatHistory({
       updatedHistory[updatedHistory.length - 1].content = editedMessage;
       // remove all edited messages after the edited message in backend
       await Workspace.deleteEditedChats(workspace.slug, threadSlug, chatId);
-      sendCommand(editedMessage, true, updatedHistory);
+      sendCommand(editedMessage, true, updatedHistory, attachments);
       return;
     }

@@ -144,7 +150,7 @@ export default function ChatHistory({
     );
   };

-  if (history.length === 0) {
+  if (history.length === 0 && !hasAttachments) {
     return (
       <div className="flex flex-col h-full md:mt-0 pb-44 md:pb-40 w-full justify-end items-center">
         <div className="flex flex-col items-center md:items-start md:max-w-[600px] w-full px-4">
@@ -227,6 +233,7 @@ export default function ChatHistory({
           feedbackScore={props.feedbackScore}
           chatId={props.chatId}
           error={props.error}
+          attachments={props.attachments}
           regenerateMessage={regenerateAssistantMessage}
           isLastMessage={isLastBotReply}
           saveEditedMessage={saveEditedMessage}
Binary file not shown (new image, 2.6 KiB).
@@ -0,0 +1,215 @@
+import { useState, useEffect, createContext, useContext } from "react";
+import { v4 } from "uuid";
+import System from "@/models/system";
+import { useDropzone } from "react-dropzone";
+import DndIcon from "./dnd-icon.png";
+import Workspace from "@/models/workspace";
+import useUser from "@/hooks/useUser";
+
+export const DndUploaderContext = createContext();
+export const REMOVE_ATTACHMENT_EVENT = "ATTACHMENT_REMOVE";
+export const CLEAR_ATTACHMENTS_EVENT = "ATTACHMENT_CLEAR";
+
+/**
+ * File Attachment for automatic upload on the chat container page.
+ * @typedef Attachment
+ * @property {string} uid - unique file id.
+ * @property {File} file - native File object
+ * @property {string|null} contentString - base64 encoded string of file
+ * @property {('in_progress'|'failed'|'success')} status - the automatic upload status.
+ * @property {string|null} error - Error message
+ * @property {{id:string, location:string}|null} document - uploaded document details
+ * @property {('attachment'|'upload')} type - The type of upload. Attachments are chat-specific, uploads go to the workspace.
+ */
+
+export function DnDFileUploaderProvider({ workspace, children }) {
+  const [files, setFiles] = useState([]);
+  const [ready, setReady] = useState(false);
+  const [dragging, setDragging] = useState(false);
+  const { user } = useUser();
+
+  useEffect(() => {
+    if (!!user && user.role === "default") return false;
+    System.checkDocumentProcessorOnline().then((status) => setReady(status));
+  }, [user]);
+
+  useEffect(() => {
+    window.addEventListener(REMOVE_ATTACHMENT_EVENT, handleRemove);
+    window.addEventListener(CLEAR_ATTACHMENTS_EVENT, resetAttachments);
+
+    return () => {
+      window.removeEventListener(REMOVE_ATTACHMENT_EVENT, handleRemove);
+      window.removeEventListener(CLEAR_ATTACHMENTS_EVENT, resetAttachments);
+    };
+  }, []);
+
+  /**
+   * Remove file from uploader queue.
+   * @param {CustomEvent<{uid: string}>} event
+   */
+  async function handleRemove(event) {
+    /** @type {{uid: Attachment['uid'], document: Attachment['document']}} */
+    const { uid, document } = event.detail;
+    setFiles((prev) => prev.filter((prevFile) => prevFile.uid !== uid));
+    if (!document?.location) return;
+    await Workspace.deleteAndUnembedFile(workspace.slug, document.location);
+  }
+
+  /**
+   * Clear queue of attached files currently in prompt box
+   */
+  function resetAttachments() {
+    setFiles([]);
+  }
+
+  /**
+   * Turns files into attachments we can send as body request to backend
+   * for a chat.
+   * @returns {{name:string,mime:string,contentString:string}[]}
+   */
+  function parseAttachments() {
+    return (
+      files
+        ?.filter((file) => file.type === "attachment")
+        ?.map(
+          (
+            /** @type {Attachment} */
+            attachment
+          ) => {
+            return {
+              name: attachment.file.name,
+              mime: attachment.file.type,
+              contentString: attachment.contentString,
+            };
+          }
+        ) || []
+    );
+  }
+
+  /**
+   * Handle dropped files.
+   * @param {Attachment[]} acceptedFiles
+   * @param {any[]} _rejections
+   */
+  async function onDrop(acceptedFiles, _rejections) {
+    setDragging(false);
+
+    /** @type {Attachment[]} */
+    const newAccepted = [];
+    for (const file of acceptedFiles) {
+      if (file.type.startsWith("image/")) {
+        newAccepted.push({
+          uid: v4(),
+          file,
+          contentString: await toBase64(file),
+          status: "success",
+          error: null,
+          type: "attachment",
+        });
+      } else {
+        newAccepted.push({
+          uid: v4(),
+          file,
+          contentString: null,
+          status: "in_progress",
+          error: null,
+          type: "upload",
+        });
+      }
+    }
+
+    setFiles((prev) => [...prev, ...newAccepted]);
+
+    for (const attachment of newAccepted) {
+      // Images/attachments are chat specific.
+      if (attachment.type === "attachment") continue;
+
+      const formData = new FormData();
+      formData.append("file", attachment.file, attachment.file.name);
+      Workspace.uploadAndEmbedFile(workspace.slug, formData).then(
+        ({ response, data }) => {
+          const updates = {
+            status: response.ok ? "success" : "failed",
+            error: data?.error ?? null,
+            document: data?.document,
+          };
+
+          setFiles((prev) => {
+            return prev.map(
+              (
+                /** @type {Attachment} */
+                prevFile
+              ) => {
+                if (prevFile.uid !== attachment.uid) return prevFile;
+                return { ...prevFile, ...updates };
+              }
+            );
+          });
+        }
+      );
+    }
+  }
+
+  return (
+    <DndUploaderContext.Provider
+      value={{ files, ready, dragging, setDragging, onDrop, parseAttachments }}
+    >
+      {children}
+    </DndUploaderContext.Provider>
+  );
+}
+
+export default function DnDFileUploaderWrapper({ children }) {
+  const { onDrop, ready, dragging, setDragging } =
+    useContext(DndUploaderContext);
+  const { getRootProps, getInputProps } = useDropzone({
+    onDrop,
+    disabled: !ready,
+    noClick: true,
+    noKeyboard: true,
+    onDragEnter: () => setDragging(true),
+    onDragLeave: () => setDragging(false),
+  });
+
+  return (
+    <div
+      className={`relative flex flex-col h-full w-full md:mt-0 mt-[40px] p-[1px]`}
+      {...getRootProps()}
+    >
+      <div
+        hidden={!dragging}
+        className="absolute top-0 w-full h-full bg-dark-text/90 rounded-2xl border-[4px] border-white z-[9999]"
+      >
+        <div className="w-full h-full flex justify-center items-center rounded-xl">
+          <div className="flex flex-col gap-y-[14px] justify-center items-center">
+            <img src={DndIcon} width={69} height={69} />
+            <p className="text-white text-[24px] font-semibold">Add anything</p>
+            <p className="text-white text-[16px] text-center">
+              Drop your file here to embed it into your <br />
+              workspace auto-magically.
+            </p>
+          </div>
+        </div>
+      </div>
+      <input id="dnd-chat-file-uploader" {...getInputProps()} />
+      {children}
+    </div>
+  );
+}
+
+/**
+ * Convert image types into Base64 strings for requests.
+ * @param {File} file
+ * @returns {string}
+ */
+async function toBase64(file) {
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader();
+    reader.onload = () => {
+      const base64String = reader.result.split(",")[1];
+      resolve(`data:${file.type};base64,${base64String}`);
+    };
+    reader.onerror = (error) => reject(error);
+    reader.readAsDataURL(file);
+  });
+}
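Taken together, toBase64() and parseAttachments() above define the exact payload a chat request carries for an image. A minimal sketch of that flow for one dropped file; the file contents and variable names are illustrative, only the { name, mime, contentString } shape and the data-URL format come from the code above:

// Hypothetical image dropped into the chat container.
const file = new File([bytes], "diagram.png", { type: "image/png" });

const attachment = {
  uid: v4(),
  file,
  contentString: await toBase64(file), // "data:image/png;base64,iVBOR..."
  status: "success", // images skip the upload round-trip entirely
  error: null,
  type: "attachment", // chat-specific; never embedded into the workspace
};

// parseAttachments() then reduces it to what the backend expects in the body:
// { name: "diagram.png", mime: "image/png", contentString: "data:image/png;base64,..." }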
@@ -0,0 +1,34 @@
+import { PaperclipHorizontal } from "@phosphor-icons/react";
+import { Tooltip } from "react-tooltip";
+
+/**
+ * This is a simple proxy component that clicks on the DnD file uploader for the user.
+ * @returns
+ */
+export default function AttachItem() {
+  return (
+    <>
+      <button
+        id="attach-item-btn"
+        data-tooltip-id="attach-item-btn"
+        data-tooltip-content="Attach a file to this chat"
+        aria-label="Attach a file to this chat"
+        type="button"
+        onClick={(e) => {
+          e?.target?.blur();
+          document?.getElementById("dnd-chat-file-uploader")?.click();
+          return;
+        }}
+        className={`border-none relative flex justify-center items-center opacity-60 hover:opacity-100 cursor-pointer`}
+      >
+        <PaperclipHorizontal className="w-6 h-6 pointer-events-none text-white rotate-90 -scale-y-100" />
+        <Tooltip
+          id="attach-item-btn"
+          place="top"
+          delayShow={300}
+          className="tooltip !text-xs z-99"
+        />
+      </button>
+    </>
+  );
+}
@@ -0,0 +1,222 @@
+import {
+  CircleNotch,
+  FileCode,
+  FileCsv,
+  FileDoc,
+  FileHtml,
+  FileText,
+  FileImage,
+  FilePdf,
+  WarningOctagon,
+  X,
+} from "@phosphor-icons/react";
+import { humanFileSize } from "@/utils/numbers";
+import { REMOVE_ATTACHMENT_EVENT } from "../../DnDWrapper";
+import { Tooltip } from "react-tooltip";
+
+/**
+ * @param {{attachments: import("../../DnDWrapper").Attachment[]}}
+ * @returns
+ */
+export default function AttachmentManager({ attachments }) {
+  if (attachments.length === 0) return null;
+  return (
+    <div className="flex flex-wrap mt-4 mb-2">
+      {attachments.map((attachment) => (
+        <AttachmentItem key={attachment.uid} attachment={attachment} />
+      ))}
+    </div>
+  );
+}
+
+/**
+ * @param {{attachment: import("../../DnDWrapper").Attachment}}
+ */
+function AttachmentItem({ attachment }) {
+  const { uid, file, status, error, document, type } = attachment;
+  const { iconBgColor, Icon } = displayFromFile(file);
+
+  function removeFileFromQueue() {
+    window.dispatchEvent(
+      new CustomEvent(REMOVE_ATTACHMENT_EVENT, { detail: { uid, document } })
+    );
+  }
+
+  if (status === "in_progress") {
+    return (
+      <div
+        className={`h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-zinc-800 border border-white/20 w-[200px]`}
+      >
+        <div
+          className={`${iconBgColor} rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
+        >
+          <CircleNotch size={30} className="text-white animate-spin" />
+        </div>
+        <div className="flex flex-col w-[130px]">
+          <p className="text-white text-xs font-medium truncate">{file.name}</p>
+          <p className="text-white/60 text-xs font-medium">
+            {humanFileSize(file.size)}
+          </p>
+        </div>
+      </div>
+    );
+  }
+
+  if (status === "failed") {
+    return (
+      <>
+        <div
+          data-tooltip-id={`attachment-uid-${uid}-error`}
+          data-tooltip-content={error}
+          className={`relative h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-[#4E140B] border border-transparent w-[200px] group`}
+        >
+          <div className="invisible group-hover:visible absolute -top-[5px] -right-[5px] w-fit h-fit z-[10]">
+            <button
+              onClick={removeFileFromQueue}
+              type="button"
+              className="bg-zinc-700 hover:bg-red-400 rounded-full p-1 flex items-center justify-center hover:border-transparent border border-white/40"
+            >
+              <X
+                size={10}
+                className="flex-shrink-0 text-zinc-200 group-hover:text-white"
+              />
+            </button>
+          </div>
+          <div
+            className={`bg-error rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
+          >
+            <WarningOctagon size={30} className="text-white" />
+          </div>
+          <div className="flex flex-col w-[130px]">
+            <p className="text-white text-xs font-medium truncate">
+              {file.name}
+            </p>
+            <p className="text-red-100 text-xs truncate">
+              {error ?? "this file failed to upload"}. It will not be available
+              in the workspace.
+            </p>
+          </div>
+        </div>
+        <Tooltip
+          id={`attachment-uid-${uid}-error`}
+          place="top"
+          delayShow={300}
+          className="allm-tooltip !allm-text-xs"
+        />
+      </>
+    );
+  }
+
+  if (type === "attachment") {
+    return (
+      <>
+        <div
+          data-tooltip-id={`attachment-uid-${uid}-success`}
+          data-tooltip-content={`${file.name} will be attached to this prompt. It will not be embedded into the workspace permanently.`}
+          className={`relative h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-zinc-800 border border-white/20 w-[200px] group`}
+        >
+          <div className="invisible group-hover:visible absolute -top-[5px] -right-[5px] w-fit h-fit z-[10]">
+            <button
+              onClick={removeFileFromQueue}
+              type="button"
+              className="bg-zinc-700 hover:bg-red-400 rounded-full p-1 flex items-center justify-center hover:border-transparent border border-white/40"
+            >
+              <X
+                size={10}
+                className="flex-shrink-0 text-zinc-200 group-hover:text-white"
+              />
+            </button>
+          </div>
+          <div
+            className={`${iconBgColor} rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
+          >
+            <Icon size={30} className="text-white" />
+          </div>
+          <div className="flex flex-col w-[130px]">
+            <p className="text-white text-xs font-medium truncate">
+              {file.name}
+            </p>
+            <p className="text-white/80 text-xs font-medium">Image attached!</p>
+          </div>
+        </div>
+        <Tooltip
+          id={`attachment-uid-${uid}-success`}
+          place="top"
+          delayShow={300}
+          className="allm-tooltip !allm-text-xs"
+        />
+      </>
+    );
+  }
+
+  return (
+    <>
+      <div
+        data-tooltip-id={`attachment-uid-${uid}-success`}
+        data-tooltip-content={`${file.name} was uploaded and embedded into this workspace. It will be available for RAG chat now.`}
+        className={`relative h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-zinc-800 border border-white/20 w-[200px] group`}
+      >
+        <div className="invisible group-hover:visible absolute -top-[5px] -right-[5px] w-fit h-fit z-[10]">
+          <button
+            onClick={removeFileFromQueue}
+            type="button"
+            className="bg-zinc-700 hover:bg-red-400 rounded-full p-1 flex items-center justify-center hover:border-transparent border border-white/40"
+          >
+            <X
+              size={10}
+              className="flex-shrink-0 text-zinc-200 group-hover:text-white"
+            />
+          </button>
+        </div>
+        <div
+          className={`${iconBgColor} rounded-lg flex items-center justify-center flex-shrink-0 p-1`}
+        >
+          <Icon size={30} className="text-white" />
+        </div>
+        <div className="flex flex-col w-[130px]">
+          <p className="text-white text-xs font-medium truncate">{file.name}</p>
+          <p className="text-white/80 text-xs font-medium">File embedded!</p>
+        </div>
+      </div>
+      <Tooltip
+        id={`attachment-uid-${uid}-success`}
+        place="top"
+        delayShow={300}
+        className="allm-tooltip !allm-text-xs"
+      />
+    </>
+  );
+}
+
+/**
+ * @param {File} file
+ * @returns {{iconBgColor:string, Icon: React.Component}}
+ */
+function displayFromFile(file) {
+  const extension = file?.name?.split(".")?.pop()?.toLowerCase() ?? "txt";
+  switch (extension) {
+    case "pdf":
+      return { iconBgColor: "bg-magenta", Icon: FilePdf };
+    case "doc":
+    case "docx":
+      return { iconBgColor: "bg-royalblue", Icon: FileDoc };
+    case "html":
+      return { iconBgColor: "bg-purple", Icon: FileHtml };
+    case "csv":
+    case "xlsx":
+      return { iconBgColor: "bg-success", Icon: FileCsv };
+    case "json":
+    case "sql":
+    case "js":
+    case "jsx":
+    case "cpp":
+    case "c":
+      return { iconBgColor: "bg-warn", Icon: FileCode };
+    case "png":
+    case "jpg":
+    case "jpeg":
+      return { iconBgColor: "bg-royalblue", Icon: FileImage };
+    default:
+      return { iconBgColor: "bg-royalblue", Icon: FileText };
+  }
+}
@@ -13,6 +13,8 @@ import AvailableAgentsButton, {
 import TextSizeButton from "./TextSizeMenu";
 import SpeechToText from "./SpeechToText";
 import { Tooltip } from "react-tooltip";
+import AttachmentManager from "./Attachments";
+import AttachItem from "./AttachItem";

 export const PROMPT_INPUT_EVENT = "set_prompt_input";
 export default function PromptInput({
@@ -21,6 +23,7 @@ export default function PromptInput({
   inputDisabled,
   buttonDisabled,
   sendCommand,
+  attachments = [],
 }) {
   const [promptInput, setPromptInput] = useState("");
   const { showAgents, setShowAgents } = useAvailableAgents();
@@ -106,10 +109,11 @@ export default function PromptInput({
       />
       <form
         onSubmit={handleSubmit}
-        className="flex flex-col gap-y-1 rounded-t-lg md:w-3/4 w-full mx-auto max-w-xl"
+        className="flex flex-col gap-y-1 rounded-t-lg md:w-3/4 w-full mx-auto max-w-xl items-center"
       >
         <div className="flex items-center rounded-lg md:mb-4">
-          <div className="w-[600px] bg-main-gradient shadow-2xl border border-white/50 rounded-2xl flex flex-col px-4 overflow-hidden">
+          <div className="w-[635px] bg-main-gradient shadow-2xl border border-white/50 rounded-2xl flex flex-col px-4 overflow-hidden">
+            <AttachmentManager attachments={attachments} />
             <div className="flex items-center w-full border-b-2 border-gray-500/50">
               <textarea
                 ref={textareaRef}
@@ -158,6 +162,7 @@ export default function PromptInput({
             </div>
             <div className="flex justify-between py-3.5">
               <div className="flex gap-x-2">
+                <AttachItem />
                 <SlashCommandsButton
                   showing={showSlashCommand}
                   setShowSlashCommand={setShowSlashCommand}
@@ -1,5 +1,6 @@
-import { useState, useEffect } from "react";
+import { useState, useEffect, useContext } from "react";
 import ChatHistory from "./ChatHistory";
+import { CLEAR_ATTACHMENTS_EVENT, DndUploaderContext } from "./DnDWrapper";
 import PromptInput, { PROMPT_INPUT_EVENT } from "./PromptInput";
 import Workspace from "@/models/workspace";
 import handleChat, { ABORT_STREAM_EVENT } from "@/utils/chat";
@@ -12,6 +13,7 @@ import handleSocketResponse, {
   AGENT_SESSION_END,
   AGENT_SESSION_START,
 } from "@/utils/chat/agent";
+import DnDFileUploaderWrapper from "./DnDWrapper";

 export default function ChatContainer({ workspace, knownHistory = [] }) {
   const { threadSlug = null } = useParams();
@@ -20,6 +22,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
   const [chatHistory, setChatHistory] = useState(knownHistory);
   const [socketId, setSocketId] = useState(null);
   const [websocket, setWebsocket] = useState(null);
+  const { files, parseAttachments } = useContext(DndUploaderContext);

   // Maintain state of message from whatever is in PromptInput
   const handleMessageChange = (event) => {
@@ -40,7 +43,11 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
     if (!message || message === "") return false;
     const prevChatHistory = [
       ...chatHistory,
-      { content: message, role: "user" },
+      {
+        content: message,
+        role: "user",
+        attachments: parseAttachments(),
+      },
       {
         content: "",
         role: "assistant",
@@ -59,11 +66,23 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
     const updatedHistory = chatHistory.slice(0, -1);
     const lastUserMessage = updatedHistory.slice(-1)[0];
     Workspace.deleteChats(workspace.slug, [chatId])
-      .then(() => sendCommand(lastUserMessage.content, true, updatedHistory))
+      .then(() =>
+        sendCommand(
+          lastUserMessage.content,
+          true,
+          updatedHistory,
+          lastUserMessage?.attachments
+        )
+      )
       .catch((e) => console.error(e));
   };

-  const sendCommand = async (command, submit = false, history = []) => {
+  const sendCommand = async (
+    command,
+    submit = false,
+    history = [],
+    attachments = []
+  ) => {
     if (!command || command === "") return false;
     if (!submit) {
       setMessageEmit(command);
@@ -80,13 +99,18 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
           role: "assistant",
           pending: true,
           userMessage: command,
+          attachments,
           animate: true,
         },
       ];
     } else {
       prevChatHistory = [
         ...chatHistory,
-        { content: command, role: "user" },
+        {
+          content: command,
+          role: "user",
+          attachments,
+        },
         {
           content: "",
           role: "assistant",
@@ -121,13 +145,18 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
       return;
     }

+    // TODO: Simplify this
     if (!promptMessage || !promptMessage?.userMessage) return false;
-    if (!!threadSlug) {
-      await Workspace.threads.streamChat(
-        { workspaceSlug: workspace.slug, threadSlug },
-        promptMessage.userMessage,
-        (chatResult) =>
+
+    // If running and edit or regeneration, this history will already have attachments
+    // so no need to parse the current state.
+    const attachments = promptMessage?.attachments ?? parseAttachments();
+    window.dispatchEvent(new CustomEvent(CLEAR_ATTACHMENTS_EVENT));
+
+    await Workspace.multiplexStream({
+      workspaceSlug: workspace.slug,
+      threadSlug,
+      prompt: promptMessage.userMessage,
+      chatHandler: (chatResult) =>
         handleChat(
           chatResult,
           setLoadingResponse,
@@ -135,23 +164,9 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
           remHistory,
           _chatHistory,
           setSocketId
-        )
-      );
-    } else {
-      await Workspace.streamChat(
-        workspace,
-        promptMessage.userMessage,
-        (chatResult) =>
-          handleChat(
-            chatResult,
-            setLoadingResponse,
-            setChatHistory,
-            remHistory,
-            _chatHistory,
-            setSocketId
-          )
-      );
-    }
+        ),
+      attachments,
+    });
     return;
   }
   loadingResponse === true && fetchReply();
@@ -205,6 +220,7 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
       });
       setWebsocket(socket);
       window.dispatchEvent(new CustomEvent(AGENT_SESSION_START));
+      window.dispatchEvent(new CustomEvent(CLEAR_ATTACHMENTS_EVENT));
     } catch (e) {
       setChatHistory((prev) => [
         ...prev.filter((msg) => !!msg.content),
@@ -231,16 +247,17 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
   return (
     <div
       style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
-      className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline"
+      className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline no-scroll"
     >
       {isMobile && <SidebarMobileHeader />}
-      <div className="flex flex-col h-full w-full md:mt-0 mt-[40px]">
+      <DnDFileUploaderWrapper>
         <ChatHistory
           history={chatHistory}
           workspace={workspace}
           sendCommand={sendCommand}
           updateHistory={setChatHistory}
           regenerateAssistantMessage={regenerateAssistantMessage}
+          hasAttachments={files.length > 0}
         />
         <PromptInput
           submit={handleSubmit}
@@ -248,8 +265,9 @@ export default function ChatContainer({ workspace, knownHistory = [] }) {
           inputDisabled={loadingResponse}
           buttonDisabled={loadingResponse}
           sendCommand={sendCommand}
+          attachments={files}
         />
-      </div>
+      </DnDFileUploaderWrapper>
     </div>
   );
 }
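Every send path above now funnels through the four-argument sendCommand. A hedged sketch of the two call patterns this enables; the prompt text is illustrative:

// Fresh prompt: attachments come from the DnD uploader queue at send time.
sendCommand("Describe the attached screenshot", true);

// Edit or regeneration: reuse the attachments already stored on the message,
// so nothing is re-parsed from the uploader state.
sendCommand(lastUserMessage.content, true, updatedHistory, lastUserMessage?.attachments);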
@@ -5,6 +5,9 @@ import ChatContainer from "./ChatContainer";
 import paths from "@/utils/paths";
 import ModalWrapper from "../ModalWrapper";
 import { useParams } from "react-router-dom";
+import DnDFileUploaderWrapper, {
+  DnDFileUploaderProvider,
+} from "./ChatContainer/DnDWrapper";

 export default function WorkspaceChat({ loading, workspace }) {
   const { threadSlug = null } = useParams();
@@ -62,7 +65,11 @@ export default function WorkspaceChat({ loading, workspace }) {
   }

   setEventDelegatorForCodeSnippets();
-  return <ChatContainer workspace={workspace} knownHistory={history} />;
+  return (
+    <DnDFileUploaderProvider workspace={workspace}>
+      <ChatContainer workspace={workspace} knownHistory={history} />
+    </DnDFileUploaderProvider>
+  );
 }

 // Enables us to safely markdown and sanitize all responses without risk of injection
@@ -4,7 +4,6 @@ import { useEffect, useState } from "react";

 // Providers which cannot use this feature for workspace<>model selection
 export const DISABLED_PROVIDERS = [
   "azure",
-  "lmstudio",
   "native",
   "textgenwebui",
   "generic-openai",
@@ -110,7 +110,28 @@ const Workspace = {
     );
     return this._updateChatResponse(slug, chatId, newText);
   },
-  streamChat: async function ({ slug }, message, handleChat) {
+  multiplexStream: async function ({
+    workspaceSlug,
+    threadSlug = null,
+    prompt,
+    chatHandler,
+    attachments = [],
+  }) {
+    if (!!threadSlug)
+      return this.threads.streamChat(
+        { workspaceSlug, threadSlug },
+        prompt,
+        chatHandler,
+        attachments
+      );
+    return this.streamChat(
+      { slug: workspaceSlug },
+      prompt,
+      chatHandler,
+      attachments
+    );
+  },
+  streamChat: async function ({ slug }, message, handleChat, attachments = []) {
     const ctrl = new AbortController();

     // Listen for the ABORT_STREAM_EVENT key to be emitted by the client
@@ -124,7 +145,7 @@ const Workspace = {

     await fetchEventSource(`${API_BASE}/workspace/${slug}/stream-chat`, {
       method: "POST",
-      body: JSON.stringify({ message }),
+      body: JSON.stringify({ message, attachments }),
       headers: baseHeaders(),
       signal: ctrl.signal,
       openWhenHidden: true,
@@ -411,6 +432,43 @@ const Workspace = {
       return null;
     });
   },
+  /**
+   * Uploads and embeds a single file in a single call into a workspace
+   * @param {string} slug - workspace slug
+   * @param {FormData} formData
+   * @returns {Promise<{response: {ok: boolean}, data: {success: boolean, error: string|null, document: {id: string, location:string}|null}}>}
+   */
+  uploadAndEmbedFile: async function (slug, formData) {
+    const response = await fetch(
+      `${API_BASE}/workspace/${slug}/upload-and-embed`,
+      {
+        method: "POST",
+        body: formData,
+        headers: baseHeaders(),
+      }
+    );
+
+    const data = await response.json();
+    return { response, data };
+  },
+
+  /**
+   * Deletes and un-embeds a single file in a single call from a workspace
+   * @param {string} slug - workspace slug
+   * @param {string} documentLocation - location of file eg: custom-documents/my-file-uuid.json
+   * @returns {Promise<boolean>}
+   */
+  deleteAndUnembedFile: async function (slug, documentLocation) {
+    const response = await fetch(
+      `${API_BASE}/workspace/${slug}/remove-and-unembed`,
+      {
+        method: "DELETE",
+        body: JSON.stringify({ documentLocation }),
+        headers: baseHeaders(),
+      }
+    );
+    return response.ok;
+  },
   threads: WorkspaceThread,
 };
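multiplexStream only decides between thread and workspace streaming; both branches forward the same attachments array. A minimal usage sketch; the slug, prompt, and handler are illustrative:

await Workspace.multiplexStream({
  workspaceSlug: "my-workspace", // hypothetical slug
  threadSlug: null,              // null falls through to Workspace.streamChat
  prompt: "What does this chart show?",
  chatHandler: (chatResult) => console.log(chatResult),
  attachments: [
    { name: "chart.png", mime: "image/png", contentString: "data:image/png;base64,..." },
  ],
});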
@@ -90,7 +90,8 @@ const WorkspaceThread = {
   streamChat: async function (
     { workspaceSlug, threadSlug },
     message,
-    handleChat
+    handleChat,
+    attachments = []
   ) {
     const ctrl = new AbortController();

@@ -107,7 +108,7 @@ const WorkspaceThread = {
       `${API_BASE}/workspace/${workspaceSlug}/thread/${threadSlug}/stream-chat`,
       {
         method: "POST",
-        body: JSON.stringify({ message }),
+        body: JSON.stringify({ message, attachments }),
         headers: baseHeaders(),
         signal: ctrl.signal,
         openWhenHidden: true,
@@ -8,7 +8,7 @@ export default function OrderDetails({ setSettings, setStep }) {
   useEffect(() => {
     FineTuning.info()
       .then((res) => {
-        setInfo(res);
+        setInfo(res ?? {});
         setSettings((prev) => {
           return { ...prev, tuningInfo: res };
         });
@@ -3,7 +3,7 @@ import FineTuningSteps, { FineTuningCreationLayout } from "./Steps";
 import { CheckCircle, Circle, Sparkle } from "@phosphor-icons/react";
 import { isMobile } from "react-device-detect";

-function SideBarSelection({ currentStep }) {
+function SideBarSelection({ setStep, currentStep }) {
   const currentIndex = Object.keys(FineTuningSteps).indexOf(currentStep);
   return (
     <div
@@ -26,7 +26,16 @@ function SideBarSelection({ currentStep }) {
           isLast ? "" : "border-b border-white/10",
         ].join(" ")}
       >
-        <div className="text-sm font-light">{props.name}</div>
+        {isDone ? (
+          <button
+            onClick={() => setStep(stepKey)}
+            className="border-none hover:underline text-white/40 text-sm font-light"
+          >
+            {props.name}
+          </button>
+        ) : (
+          <div className="text-sm font-light">{props.name}</div>
+        )}
         <div className="flex items-center gap-x-2">
           {isDone ? (
             <CheckCircle className={`text-green-300`} />
@@ -60,7 +69,7 @@ export default function FineTuningFlow() {
         <Sparkle size={24} />
         <p className="text-lg font-medium">Custom Fine-Tuned Model</p>
       </div>
-      <SideBarSelection currentStep={step} />
+      <SideBarSelection setStep={setStep} currentStep={step} />
     </div>
     {StepPage.component({ settings, setSettings, setStep })}
   </div>
@@ -4,6 +4,22 @@ import System from "@/models/system";
 import ModalWrapper from "@/components/ModalWrapper";
 import { useModal } from "@/hooks/useModal";

+// Some LLMs may return a "valid" response that truncation fails to truncate because
+// it stored an Object as opposed to a string for the `text` field.
+function parseText(jsonResponse = "") {
+  try {
+    const json = JSON.parse(jsonResponse);
+    if (!json.hasOwnProperty("text"))
+      throw new Error('JSON response has no property "text".');
+    return typeof json.text !== "string"
+      ? JSON.stringify(json.text)
+      : json.text;
+  } catch (e) {
+    console.error(e);
+    return "--failed to parse--";
+  }
+}
+
 export default function ChatRow({ chat, onDelete }) {
   const {
     isOpen: isPromptOpen,
@@ -47,7 +63,7 @@ export default function ChatRow({ chat, onDelete }) {
         onClick={openResponseModal}
         className="px-6 py-4 cursor-pointer transform transition-transform duration-200 hover:scale-105 hover:shadow-lg"
       >
-        {truncate(JSON.parse(chat.response)?.text, 40)}
+        {truncate(parseText(chat.response), 40)}
       </td>
       <td className="px-6 py-4">{chat.createdAt}</td>
       <td className="px-6 py-4 flex items-center gap-x-6">
@@ -64,7 +80,7 @@ export default function ChatRow({ chat, onDelete }) {
       </ModalWrapper>
       <ModalWrapper isOpen={isResponseOpen}>
         <TextPreview
-          text={JSON.parse(chat.response)?.text}
+          text={parseText(chat.response)}
           closeModal={closeResponseModal}
         />
       </ModalWrapper>
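A short illustration of the failure mode parseText() guards against; the response strings below are made up:

parseText('{"text":"All good"}');              // "All good"
parseText('{"text":{"parts":["All good"]}}');  // '{"parts":["All good"]}' (stringified so truncate() still works)
parseText("not json at all");                  // "--failed to parse--"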
@@ -16,7 +16,7 @@ const NO_MODEL_SELECTION = [
   "generic-openai",
   "bedrock",
 ];
-const DISABLED_PROVIDERS = ["azure", "lmstudio", "native"];
+const DISABLED_PROVIDERS = ["azure", "native"];
 const LLM_DEFAULT = {
   name: "System default",
   value: "default",
@@ -36,6 +36,13 @@ export default {
         "dark-text": "#222628",
         description: "#D2D5DB",
         "x-button": "#9CA3AF",
+        royalblue: "#065986",
+        purple: "#4A1FB8",
+        magenta: "#9E165F",
+        danger: "#F04438",
+        error: "#B42318",
+        warn: "#854708",
+        success: "#05603A",
         darker: "#F4F4F4"
       },
       backgroundImage: {
@@ -103,7 +103,8 @@ function apiWorkspaceEndpoints(app) {
             "openAiTemp": null,
             "lastUpdatedAt": "2023-08-17 00:45:03",
             "openAiHistory": 20,
-            "openAiPrompt": null
+            "openAiPrompt": null,
+            "threads": []
           }
         ],
       }
@@ -118,7 +119,17 @@ function apiWorkspaceEndpoints(app) {
       }
     */
     try {
-      const workspaces = await Workspace.where();
+      const workspaces = await Workspace._findMany({
+        where: {},
+        include: {
+          threads: {
+            select: {
+              user_id: true,
+              slug: true,
+            },
+          },
+        },
+      });
       response.status(200).json({ workspaces });
     } catch (e) {
       console.error(e.message, e);
@@ -152,7 +163,8 @@ function apiWorkspaceEndpoints(app) {
             "lastUpdatedAt": "2023-08-17 00:45:03",
             "openAiHistory": 20,
             "openAiPrompt": null,
-            "documents": []
+            "documents": [],
+            "threads": []
           }
       }
     }
@@ -167,7 +179,21 @@ function apiWorkspaceEndpoints(app) {
     */
     try {
       const { slug } = request.params;
-      const workspace = await Workspace.get({ slug });
+      const workspace = await Workspace._findMany({
+        where: {
+          slug: String(slug),
+        },
+        include: {
+          documents: true,
+          threads: {
+            select: {
+              user_id: true,
+              slug: true,
+            },
+          },
+        },
+      });

       response.status(200).json({ workspace });
     } catch (e) {
       console.error(e.message, e);
@@ -27,7 +27,7 @@ function chatEndpoints(app) {
     async (request, response) => {
       try {
         const user = await userFromSession(request, response);
-        const { message } = reqBody(request);
+        const { message, attachments = [] } = reqBody(request);
         const workspace = response.locals.workspace;

         if (!message?.length) {
@@ -88,13 +88,16 @@ function chatEndpoints(app) {
           workspace,
           message,
           workspace?.chatMode,
-          user
+          user,
+          null,
+          attachments
         );
         await Telemetry.sendTelemetry("sent_chat", {
           multiUserMode: multiUserMode(response),
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
         });

         await EventLogs.logEvent(
@@ -131,7 +134,7 @@ function chatEndpoints(app) {
     async (request, response) => {
       try {
         const user = await userFromSession(request, response);
-        const { message } = reqBody(request);
+        const { message, attachments = [] } = reqBody(request);
         const workspace = response.locals.workspace;
         const thread = response.locals.thread;

@@ -196,7 +199,8 @@ function chatEndpoints(app) {
           message,
           workspace?.chatMode,
           user,
-          thread
+          thread,
+          attachments
         );

         // If thread was renamed emit event to frontend via special `action` response.
@@ -221,6 +225,7 @@ function chatEndpoints(app) {
           LLMSelection: process.env.LLM_PROVIDER || "openai",
           Embedder: process.env.EMBEDDING_ENGINE || "inherit",
           VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
         });

         await EventLogs.logEvent(
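Both stream-chat routes now read an optional attachments array from the request body. A hedged sketch of the JSON a client sends; the values are illustrative and the shape mirrors parseAttachments() on the frontend:

// POST body for /workspace/:slug/stream-chat (or the thread variant):
const body = {
  message: "What is in this screenshot?",
  attachments: [
    {
      name: "screenshot.png",
      mime: "image/png",
      contentString: "data:image/png;base64,iVBORw0KGgo...",
    },
  ],
};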
@@ -33,6 +33,7 @@ const {
 const { getTTSProvider } = require("../utils/TextToSpeech");
 const { WorkspaceThread } = require("../models/workspaceThread");
 const truncate = require("truncate");
+const { purgeDocument } = require("../utils/files/purgeDocument");

 function workspaceEndpoints(app) {
   if (!app) return;
@@ -863,6 +864,114 @@ function workspaceEndpoints(app) {
       }
     }
   );
+
+  /** Handles uploading and embedding in one call when a file is dropped into the chat container. */
+  app.post(
+    "/workspace/:slug/upload-and-embed",
+    [
+      validatedRequest,
+      flexUserRoleValid([ROLES.admin, ROLES.manager]),
+      handleFileUpload,
+    ],
+    async function (request, response) {
+      try {
+        const { slug = null } = request.params;
+        const user = await userFromSession(request, response);
+        const currWorkspace = multiUserMode(response)
+          ? await Workspace.getWithUser(user, { slug })
+          : await Workspace.get({ slug });
+
+        if (!currWorkspace) {
+          response.sendStatus(400).end();
+          return;
+        }
+
+        const Collector = new CollectorApi();
+        const { originalname } = request.file;
+        const processingOnline = await Collector.online();
+
+        if (!processingOnline) {
+          response
+            .status(500)
+            .json({
+              success: false,
+              error: `Document processing API is not online. Document ${originalname} will not be processed automatically.`,
+            })
+            .end();
+          return;
+        }
+
+        const { success, reason, documents } =
+          await Collector.processDocument(originalname);
+        if (!success || documents?.length === 0) {
+          response.status(500).json({ success: false, error: reason }).end();
+          return;
+        }
+
+        Collector.log(
+          `Document ${originalname} was uploaded and processed successfully. It is now available in documents.`
+        );
+        await Telemetry.sendTelemetry("document_uploaded");
+        await EventLogs.logEvent(
+          "document_uploaded",
+          {
+            documentName: originalname,
+          },
+          response.locals?.user?.id
+        );
+
+        const document = documents[0];
+        const { failedToEmbed = [], errors = [] } = await Document.addDocuments(
+          currWorkspace,
+          [document.location],
+          response.locals?.user?.id
+        );
+
+        if (failedToEmbed.length > 0)
+          return response
+            .status(200)
+            .json({ success: false, error: errors?.[0], document: null });
+
+        response.status(200).json({
+          success: true,
+          error: null,
+          document: { id: document.id, location: document.location },
+        });
+      } catch (e) {
+        console.error(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
+
+  app.delete(
+    "/workspace/:slug/remove-and-unembed",
+    [
+      validatedRequest,
+      flexUserRoleValid([ROLES.admin, ROLES.manager]),
+      handleFileUpload,
+    ],
+    async function (request, response) {
+      try {
+        const { slug = null } = request.params;
+        const body = reqBody(request);
+        const user = await userFromSession(request, response);
+        const currWorkspace = multiUserMode(response)
+          ? await Workspace.getWithUser(user, { slug })
+          : await Workspace.get({ slug });
+
+        if (!currWorkspace || !body.documentLocation)
+          return response.sendStatus(400).end();
+
+        // Will delete the document from the entire system and will unembed it.
+        await purgeDocument(body.documentLocation);
+        response.status(200).end();
+      } catch (e) {
+        console.error(e.message, e);
+        response.sendStatus(500).end();
+      }
+    }
+  );
 }

 module.exports = { workspaceEndpoints };
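A minimal sketch of driving the new one-shot endpoint from a client; the slug and file are illustrative, and the "file" field name matches the formData.append("file", ...) call in the frontend uploader:

const formData = new FormData();
formData.append("file", someFile, someFile.name); // someFile: a File you already hold
const res = await fetch(`${API_BASE}/workspace/my-workspace/upload-and-embed`, {
  method: "POST",
  headers: baseHeaders(), // auth headers, as elsewhere
  body: formData,
});
const { success, error, document } = await res.json();
// On success, document is { id, location }; pass document.location to
// /workspace/:slug/remove-and-unembed to undo the embed later.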
@@ -72,7 +72,7 @@ const FineTuning = {
     workspaceId: {
       in: workspaceIds,
     },
-    ...(feedback === true ? { feedback: 1 } : {}),
+    ...(feedback === true ? { feedbackScore: true } : {}),
   });
   return count;
 },
@@ -141,7 +141,7 @@ const FineTuning = {
     workspaceId: {
       in: workspaces.map((ws) => ws.id),
     },
-    ...(feedback === true ? { feedback: 1 } : {}),
+    ...(feedback === true ? { feedbackScore: true } : {}),
   });
   const preparedData = chats.map((chat) => {
     const responseJson = safeJsonParse(chat.response);
@@ -429,6 +429,7 @@ const SystemSettings = {
       // OpenRouter Keys
       OpenRouterApiKey: !!process.env.OPENROUTER_API_KEY,
       OpenRouterModelPref: process.env.OPENROUTER_MODEL_PREF,
+      OpenRouterTimeout: process.env.OPENROUTER_TIMEOUT_MS,

       // Mistral AI (API) Keys
       MistralApiKey: !!process.env.MISTRAL_API_KEY,
@@ -36,7 +36,20 @@ const Workspace = {
   * @returns {string}
   */
  slugify: function (...args) {
-    slugifyModule.extend({ "+": " plus ", "!": " bang " });
+    slugifyModule.extend({
+      "+": " plus ",
+      "!": " bang ",
+      "@": " at ",
+      "*": " splat ",
+      ".": " dot ",
+      ":": "",
+      "~": "",
+      "(": "",
+      ")": "",
+      "'": "",
+      '"': "",
+      "|": "",
+    });
     return slugifyModule(...args);
   },

@@ -302,6 +315,37 @@ const Workspace = {
     );
     return;
   },
+
+  // Direct DB queries for API use only.
+  /**
+   * Generic prisma FindMany query for workspaces collections
+   * @param {import("../node_modules/.prisma/client/index.d.ts").Prisma.TypeMap['model']['workspaces']['operations']['findMany']['args']} prismaQuery
+   * @returns
+   */
+  _findMany: async function (prismaQuery = {}) {
+    try {
+      const results = await prisma.workspaces.findMany(prismaQuery);
+      return results;
+    } catch (error) {
+      console.error(error.message);
+      return null;
+    }
+  },
+
+  /**
+   * Generic prisma query for .get of workspaces collections
+   * @param {import("../node_modules/.prisma/client/index.d.ts").Prisma.TypeMap['model']['workspaces']['operations']['findFirst']['args']} prismaQuery
+   * @returns
+   */
+  _findFirst: async function (prismaQuery = {}) {
+    try {
+      const results = await prisma.workspaces.findFirst(prismaQuery);
+      return results;
+    } catch (error) {
+      console.error(error.message);
+      return null;
+    }
+  },
 };

 module.exports = { Workspace };
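A hedged example of what the extended charmap changes; the exact output depends on the options callers pass through to slugify, so treat the result as approximate:

// With the charmap above, "+" maps to " plus ", "!" to " bang ",
// and quote/pipe characters are simply dropped.
Workspace.slugify("C++ workspace!", { lower: true }); // roughly "c-plus-plus-workspace-bang"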
@@ -1476,7 +1476,8 @@
             "openAiTemp": null,
             "lastUpdatedAt": "2023-08-17 00:45:03",
             "openAiHistory": 20,
-            "openAiPrompt": null
+            "openAiPrompt": null,
+            "threads": []
           }
         ]
       }
@@ -1539,7 +1540,8 @@
             "lastUpdatedAt": "2023-08-17 00:45:03",
             "openAiHistory": 20,
             "openAiPrompt": null,
-            "documents": []
+            "documents": [],
+            "threads": []
           }
       }
     }
@@ -66,18 +66,50 @@ class AnthropicLLM {
     return validModels.includes(modelName);
   }

+  /**
+   * Generates appropriate content array for a message + attachments.
+   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
+   * @returns {string|object[]}
+   */
+  #generateContent({ userPrompt, attachments = [] }) {
+    if (!attachments.length) {
+      return userPrompt;
+    }
+
+    const content = [{ type: "text", text: userPrompt }];
+    for (let attachment of attachments) {
+      content.push({
+        type: "image",
+        source: {
+          type: "base64",
+          media_type: attachment.mime,
+          data: attachment.contentString.split("base64,")[1],
+        },
+      });
+    }
+    return content.flat();
+  }
+
   constructPrompt({
     systemPrompt = "",
     contextTexts = [],
     chatHistory = [],
     userPrompt = "",
+    attachments = [], // This is the specific attachment for only this prompt
   }) {
     const prompt = {
       role: "system",
       content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
     };

-    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
+    return [
+      prompt,
+      ...chatHistory,
+      {
+        role: "user",
+        content: this.#generateContent({ userPrompt, attachments }),
+      },
+    ];
   }

   async getChatCompletion(messages = null, { temperature = 0.7 }) {
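When an image is attached, #generateContent() swaps the plain string for Anthropic's multi-part content format. A sketch of the final user message constructPrompt() returns; the text and base64 data are illustrative:

// Illustrative last element of the returned messages array:
{
  role: "user",
  content: [
    { type: "text", text: "What is in this image?" },
    {
      type: "image",
      source: {
        type: "base64",
        media_type: "image/png",
        data: "iVBORw0KGgo...", // data-URL prefix removed by split("base64,")[1]
      },
    },
  ],
}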
@ -95,21 +95,60 @@ class AWSBedrockLLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return { content: userPrompt };
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: attachment.contentString,
      });
    }
    return { content: content.flat() };
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    // AWS Mistral models do not support system prompts
    if (this.model.startsWith("mistral"))
      return [...chatHistory, { role: "user", content: userPrompt }];
      return [
        ...chatHistory,
        {
          role: "user",
          ...this.#generateContent({ userPrompt, attachments }),
        },
      ];

    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        ...this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
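Note: unlike the adapters whose `#generateContent` returns a bare string or array, the Bedrock (and Ollama) variant returns a `{ content }` object, which is why the call sites spread it instead of assigning a `content:` key — a sketch of the resulting message (attachment value hypothetical):

const message = {
  role: "user",
  ...this.#generateContent({ userPrompt, attachments }),
};
// => { role: "user", content: [{ type: "text", text: "..." },
//      { type: "image_url", image_url: "data:image/png;base64,..." }] }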
@ -112,11 +112,34 @@ class GeminiLLM {
    return validModels.includes(modelName);
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        inlineData: {
          data: attachment.contentString.split("base64,")[1],
          mimeType: attachment.mime,
        },
      });
    }
    return content.flat();
  }

  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
@ -126,7 +149,10 @@ class GeminiLLM {
      prompt,
      { role: "assistant", content: "Okay." },
      ...chatHistory,
      { role: "USER_PROMPT", content: userPrompt },
      {
        role: "USER_PROMPT",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }
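Note: Gemini takes image parts as `inlineData` with a camelCase `mimeType`, again trimmed down to raw base64. One attachment would yield parts roughly like (values hypothetical):

[
  { text: "Describe this chart" },
  { inlineData: { data: "iVBORw0KGgo...", mimeType: "image/png" } },
]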
@ -48,7 +48,6 @@ class GroqLLM {
      case "llama3-8b-8192":
        return 8192;
      case "llama-3.1-70b-versatile":
      case "llama-3.1-8b-instant":
      case "llama-3.1-8b-instant":
        return 131072;
      case "mixtral-8x7b-32768":
@ -66,17 +66,52 @@ class KoboldCPPLLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -65,17 +65,52 @@ class LiteLLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -63,17 +63,53 @@ class LMStudioLLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
          detail: "auto",
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -53,17 +53,52 @@ class LocalAiLLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -90,17 +90,50 @@ class OllamaAILLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return { content: userPrompt };
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: attachment.contentString,
      });
    }
    return { content: content.flat() };
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        ...this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -73,17 +73,53 @@ class OpenAiLLM {
    return !!model;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
          detail: "high",
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [], // This is the specific attachment for only this prompt
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
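Note: the OpenAI-compatible adapters in this commit (KoboldCPP, LiteLLM, LM Studio, LocalAI, TextGenWebUI, OpenRouter) all build this same vision payload; only the `detail` hint differs — OpenAI sends "high", LM Studio and OpenRouter send "auto", and the rest omit it. The resulting user message looks roughly like (image value hypothetical):

{
  role: "user",
  content: [
    { type: "text", text: "What is in this image?" },
    { type: "image_url", image_url: { url: "data:image/jpeg;base64,...", detail: "high" } },
  ],
}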
@ -38,6 +38,7 @@ class OpenRouterLLM {

    this.embedder = embedder ?? new NativeEmbedder();
    this.defaultTemp = 0.7;
    this.timeout = this.#parseTimeout();

    if (!fs.existsSync(cacheFolder))
      fs.mkdirSync(cacheFolder, { recursive: true });
@ -49,6 +50,22 @@ class OpenRouterLLM {
    console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
  }

  /**
   * OpenRouter has various models that never return `finish_reasons` and thus leave the stream open
   * which causes issues in subsequent messages. This timeout value forces us to close the stream after
   * x milliseconds. This is a configurable value via the OPENROUTER_TIMEOUT_MS value
   * @returns {number} The timeout value in milliseconds (default: 500)
   */
  #parseTimeout() {
    this.log(
      `OpenRouter timeout is set to ${process.env.OPENROUTER_TIMEOUT_MS ?? 500}ms`
    );
    if (isNaN(Number(process.env.OPENROUTER_TIMEOUT_MS))) return 500;
    const setValue = Number(process.env.OPENROUTER_TIMEOUT_MS);
    if (setValue < 500) return 500;
    return setValue;
  }

  // This checks if the .cached_at file has a timestamp that is more than 1Week (in millis)
  // from the current date. If it is, then we will refetch the API so that all the models are up
  // to date.
@ -111,17 +128,49 @@ class OpenRouterLLM {
    return availableModels.hasOwnProperty(model);
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
          detail: "auto",
        },
      });
    }
    console.log(content.flat());
    return content.flat();
  }

  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -161,7 +210,7 @@ class OpenRouterLLM {
  }

  handleStream(response, stream, responseProps) {
    const timeoutThresholdMs = 500;
    const timeoutThresholdMs = this.timeout;
    const { uuid = uuidv4(), sources = [] } = responseProps;

    return new Promise(async (resolve) => {
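Note: per `#parseTimeout` above, the stream-close timeout comes from `OPENROUTER_TIMEOUT_MS`, defaults to 500ms when unset or non-numeric, and is clamped to a 500ms floor. A sketch of raising it in the server `.env` (the 2000 is an arbitrary example value):

# .env — wait up to 2s for OpenRouter models that never emit a finish_reason
OPENROUTER_TIMEOUT_MS=2000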
@ -21,7 +21,7 @@ class PerplexityLLM {
    this.model =
      modelPreference ||
      process.env.PERPLEXITY_MODEL_PREF ||
      "sonar-small-online"; // Give at least a unique model to the provider as last fallback.
      "llama-3-sonar-large-32k-online"; // Give at least a unique model to the provider as last fallback.
    this.limits = {
      history: this.promptWindowLimit() * 0.15,
      system: this.promptWindowLimit() * 0.15,
@ -1,7 +1,7 @@
const MODELS = {
  "llama-3-sonar-small-32k-online\\*": {
    id: "llama-3-sonar-small-32k-online\\*",
    name: "llama-3-sonar-small-32k-online\\*",
  "llama-3-sonar-small-32k-online": {
    id: "llama-3-sonar-small-32k-online",
    name: "llama-3-sonar-small-32k-online",
    maxLength: 28000,
  },
  "llama-3-sonar-small-32k-chat": {
@ -9,9 +9,9 @@ const MODELS = {
    name: "llama-3-sonar-small-32k-chat",
    maxLength: 32768,
  },
  "llama-3-sonar-large-32k-online\\*": {
    id: "llama-3-sonar-large-32k-online\\*",
    name: "llama-3-sonar-large-32k-online\\*",
  "llama-3-sonar-large-32k-online": {
    id: "llama-3-sonar-large-32k-online",
    name: "llama-3-sonar-large-32k-online",
    maxLength: 28000,
  },
  "llama-3-sonar-large-32k-chat": {
@ -19,6 +19,26 @@ const MODELS = {
    name: "llama-3-sonar-large-32k-chat",
    maxLength: 32768,
  },
  "llama-3.1-sonar-small-128k-online": {
    id: "llama-3.1-sonar-small-128k-online",
    name: "llama-3.1-sonar-small-128k-online",
    maxLength: 127072,
  },
  "llama-3.1-sonar-small-128k-chat": {
    id: "llama-3.1-sonar-small-128k-chat",
    name: "llama-3.1-sonar-small-128k-chat",
    maxLength: 131072,
  },
  "llama-3.1-sonar-large-128k-online": {
    id: "llama-3.1-sonar-large-128k-online",
    name: "llama-3.1-sonar-large-128k-online",
    maxLength: 127072,
  },
  "llama-3.1-sonar-large-128k-chat": {
    id: "llama-3.1-sonar-large-128k-chat",
    name: "llama-3.1-sonar-large-128k-chat",
    maxLength: 131072,
  },
  "llama-3-8b-instruct": {
    id: "llama-3-8b-instruct",
    name: "llama-3-8b-instruct",
@ -34,6 +54,16 @@ const MODELS = {
    name: "mixtral-8x7b-instruct",
    maxLength: 16384,
  },
  "llama-3.1-8b-instruct": {
    id: "llama-3.1-8b-instruct",
    name: "llama-3.1-8b-instruct",
    maxLength: 131072,
  },
  "llama-3.1-70b-instruct": {
    id: "llama-3.1-70b-instruct",
    name: "llama-3.1-70b-instruct",
    maxLength: 131072,
  },
};

module.exports.MODELS = MODELS;
@ -1,9 +1,15 @@
| Model | Parameter Count | Context Length | Model Type |
| :--------------------------------- | :-------------- | :------------- | :-------------- |
| `llama-3-sonar-small-32k-online`\* | 8B | 28,000 | Chat Completion |
| :---------------------------------- | :-------------- | :------------- | :-------------- |
| `llama-3-sonar-small-32k-online` | 8B | 28,000 | Chat Completion |
| `llama-3-sonar-small-32k-chat` | 8B | 32,768 | Chat Completion |
| `llama-3-sonar-large-32k-online`\* | 70B | 28,000 | Chat Completion |
| `llama-3-sonar-large-32k-online` | 70B | 28,000 | Chat Completion |
| `llama-3-sonar-large-32k-chat` | 70B | 32,768 | Chat Completion |
| `llama-3.1-sonar-small-128k-online` | 8B | 127,072 | Chat Completion |
| `llama-3.1-sonar-small-128k-chat` | 8B | 131,072 | Chat Completion |
| `llama-3.1-sonar-large-128k-online` | 70B | 127,072 | Chat Completion |
| `llama-3.1-sonar-large-128k-chat` | 70B | 131,072 | Chat Completion |
| `llama-3-8b-instruct` | 8B | 8,192 | Chat Completion |
| `llama-3-70b-instruct` | 70B | 8,192 | Chat Completion |
| `mixtral-8x7b-instruct` | 8x7B | 16,384 | Chat Completion |
| `llama-3.1-8b-instruct` | 8B | 131,072 | Chat Completion |
| `llama-3.1-70b-instruct` | 70B | 131,072 | Chat Completion |
@ -8,7 +8,7 @@
// copy outputs into the export in ../models.js

// Update the date below if you run this again because Perplexity added new models.
// Last Collected: Jul 19, 2024
// Last Collected: Jul 31, 2024

import fs from "fs";
@ -63,17 +63,52 @@ class TextGenWebUILLM {
    return true;
  }

  /**
   * Generates appropriate content array for a message + attachments.
   * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
   * @returns {string|object[]}
   */
  #generateContent({ userPrompt, attachments = [] }) {
    if (!attachments.length) {
      return userPrompt;
    }

    const content = [{ type: "text", text: userPrompt }];
    for (let attachment of attachments) {
      content.push({
        type: "image_url",
        image_url: {
          url: attachment.contentString,
        },
      });
    }
    return content.flat();
  }

  /**
   * Construct the user prompt for this model.
   * @param {{attachments: import("../../helpers").Attachment[]}} param0
   * @returns
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
    attachments = [],
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
    return [
      prompt,
      ...chatHistory,
      {
        role: "user",
        content: this.#generateContent({ userPrompt, attachments }),
      },
    ];
  }

  async getChatCompletion(messages = null, { temperature = 0.7 }) {
@ -1,16 +1,70 @@
const MODELS = {
  "zero-one-ai/Yi-34B-Chat": {
    id: "zero-one-ai/Yi-34B-Chat",
    organization: "01.AI",
    name: "01-ai Yi Chat (34B)",
  "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
    id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
    organization: "Meta",
    name: "Llama 3.1 8B Instruct Turbo",
    maxLength: 128000,
  },
  "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
    id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    organization: "Meta",
    name: "Llama 3.1 70B Instruct Turbo",
    maxLength: 128000,
  },
  "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
    id: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
    organization: "Meta",
    name: "Llama 3.1 405B Instruct Turbo",
    maxLength: 4096,
  },
  "meta-llama/Meta-Llama-3-8B-Instruct-Turbo": {
    id: "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
    organization: "Meta",
    name: "Llama 3 8B Instruct Turbo",
    maxLength: 8192,
  },
  "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": {
    id: "meta-llama/Meta-Llama-3-70B-Instruct-Turbo",
    organization: "Meta",
    name: "Llama 3 70B Instruct Turbo",
    maxLength: 8192,
  },
  "meta-llama/Meta-Llama-3-8B-Instruct-Lite": {
    id: "meta-llama/Meta-Llama-3-8B-Instruct-Lite",
    organization: "Meta",
    name: "Llama 3 8B Instruct Lite",
    maxLength: 8192,
  },
  "meta-llama/Meta-Llama-3-70B-Instruct-Lite": {
    id: "meta-llama/Meta-Llama-3-70B-Instruct-Lite",
    organization: "Meta",
    name: "Llama 3 70B Instruct Lite",
    maxLength: 8192,
  },
  "google/gemma-2-27b-it": {
    id: "google/gemma-2-27b-it",
    organization: "Google",
    name: "Gemma 2 27B",
    maxLength: 8192,
  },
  "google/gemma-2-9b-it": {
    id: "google/gemma-2-9b-it",
    organization: "Google",
    name: "Gemma 2 9B",
    maxLength: 8192,
  },
  "allenai/OLMo-7B-Instruct": {
    id: "allenai/OLMo-7B-Instruct",
    organization: "Allen AI",
    name: "OLMo Instruct (7B)",
    maxLength: 2048,
  },
  "zero-one-ai/Yi-34B-Chat": {
    id: "zero-one-ai/Yi-34B-Chat",
    organization: "01.AI",
    name: "01-ai Yi Chat (34B)",
    maxLength: 4096,
  },
  "allenai/OLMo-7B-Twin-2T": {
    id: "allenai/OLMo-7B-Twin-2T",
    organization: "Allen AI",
@ -135,19 +189,13 @@ const MODELS = {
    id: "meta-llama/Llama-3-8b-chat-hf",
    organization: "Meta",
    name: "LLaMA-3 Chat (8B)",
    maxLength: 8000,
    maxLength: 8192,
  },
  "meta-llama/Llama-3-70b-chat-hf": {
    id: "meta-llama/Llama-3-70b-chat-hf",
    organization: "Meta",
    name: "LLaMA-3 Chat (70B)",
    maxLength: 8000,
  },
  "microsoft/WizardLM-2-8x22B": {
    id: "microsoft/WizardLM-2-8x22B",
    organization: "Microsoft",
    name: "WizardLM-2 (8x22B)",
    maxLength: 65536,
    maxLength: 8192,
  },
  "mistralai/Mistral-7B-Instruct-v0.1": {
    id: "mistralai/Mistral-7B-Instruct-v0.1",
@ -161,6 +209,12 @@ const MODELS = {
    name: "Mistral (7B) Instruct v0.2",
    maxLength: 32768,
  },
  "mistralai/Mistral-7B-Instruct-v0.3": {
    id: "mistralai/Mistral-7B-Instruct-v0.3",
    organization: "mistralai",
    name: "Mistral (7B) Instruct v0.3",
    maxLength: 32768,
  },
  "mistralai/Mixtral-8x7B-Instruct-v0.1": {
    id: "mistralai/Mixtral-8x7B-Instruct-v0.1",
    organization: "mistralai",
@ -269,12 +323,30 @@ const MODELS = {
    name: "Qwen 1.5 Chat (72B)",
    maxLength: 32768,
  },
  "Qwen/Qwen1.5-110B-Chat": {
    id: "Qwen/Qwen1.5-110B-Chat",
    organization: "Qwen",
    name: "Qwen 1.5 Chat (110B)",
    maxLength: 32768,
  },
  "Qwen/Qwen2-72B-Instruct": {
    id: "Qwen/Qwen2-72B-Instruct",
    organization: "Qwen",
    name: "Qwen 2 Instruct (72B)",
    maxLength: 32768,
  },
  "snorkelai/Snorkel-Mistral-PairRM-DPO": {
    id: "snorkelai/Snorkel-Mistral-PairRM-DPO",
    organization: "Snorkel AI",
    name: "Snorkel Mistral PairRM DPO (7B)",
    maxLength: 32768,
  },
  "Snowflake/snowflake-arctic-instruct": {
    id: "Snowflake/snowflake-arctic-instruct",
    organization: "Snowflake",
    name: "Snowflake Arctic Instruct",
    maxLength: 4096,
  },
  "togethercomputer/alpaca-7b": {
    id: "togethercomputer/alpaca-7b",
    organization: "Stanford",
@ -1,7 +1,16 @@
| Organization | Model Name | Model String for API | Context length |
| --- | --- | --- | --- |
| 01.AI | 01-ai Yi Chat (34B) | zero-one-ai/Yi-34B-Chat | 4096 |
|--------------|------------|----------------------|----------------|
| Meta | Llama 3.1 8B Instruct Turbo | meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo | 128000 |
| Meta | Llama 3.1 70B Instruct Turbo | meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo | 128000 |
| Meta | Llama 3.1 405B Instruct Turbo | meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo | 4096 |
| Meta | Llama 3 8B Instruct Turbo | meta-llama/Meta-Llama-3-8B-Instruct-Turbo | 8192 |
| Meta | Llama 3 70B Instruct Turbo | meta-llama/Meta-Llama-3-70B-Instruct-Turbo | 8192 |
| Meta | Llama 3 8B Instruct Lite | meta-llama/Meta-Llama-3-8B-Instruct-Lite | 8192 |
| Meta | Llama 3 70B Instruct Lite | meta-llama/Meta-Llama-3-70B-Instruct-Lite | 8192 |
| Google | Gemma 2 27B | google/gemma-2-27b-it | 8192 |
| Google | Gemma 2 9B | google/gemma-2-9b-it | 8192 |
| Allen AI | OLMo Instruct (7B) | allenai/OLMo-7B-Instruct | 2048 |
| 01.AI | 01-ai Yi Chat (34B) | zero-one-ai/Yi-34B-Chat | 4096 |
| Allen AI | OLMo Twin-2T (7B) | allenai/OLMo-7B-Twin-2T | 2048 |
| Allen AI | OLMo (7B) | allenai/OLMo-7B | 2048 |
| Austism | Chronos Hermes (13B) | Austism/chronos-hermes-13b | 2048 |
@ -22,11 +31,11 @@
| Meta | LLaMA-2 Chat (70B) | meta-llama/Llama-2-70b-chat-hf | 4096 |
| Meta | LLaMA-2 Chat (13B) | meta-llama/Llama-2-13b-chat-hf | 4096 |
| Meta | LLaMA-2 Chat (7B) | meta-llama/Llama-2-7b-chat-hf | 4096 |
| Meta | LLaMA-3 Chat (8B) | meta-llama/Llama-3-8b-chat-hf | 8000 |
| Meta | LLaMA-3 Chat (70B) | meta-llama/Llama-3-70b-chat-hf | 8000 |
| Microsoft | WizardLM-2 (8x22B) | microsoft/WizardLM-2-8x22B | 65536 |
| Meta | LLaMA-3 Chat (8B) | meta-llama/Llama-3-8b-chat-hf | 8192 |
| Meta | LLaMA-3 Chat (70B) | meta-llama/Llama-3-70b-chat-hf | 8192 |
| mistralai | Mistral (7B) Instruct | mistralai/Mistral-7B-Instruct-v0.1 | 8192 |
| mistralai | Mistral (7B) Instruct v0.2 | mistralai/Mistral-7B-Instruct-v0.2 | 32768 |
| mistralai | Mistral (7B) Instruct v0.3 | mistralai/Mistral-7B-Instruct-v0.3 | 32768 |
| mistralai | Mixtral-8x7B Instruct (46.7B) | mistralai/Mixtral-8x7B-Instruct-v0.1 | 32768 |
| mistralai | Mixtral-8x22B Instruct (141B) | mistralai/Mixtral-8x22B-Instruct-v0.1 | 65536 |
| NousResearch | Nous Capybara v1.9 (7B) | NousResearch/Nous-Capybara-7B-V1p9 | 8192 |
@ -45,7 +54,10 @@
| Qwen | Qwen 1.5 Chat (14B) | Qwen/Qwen1.5-14B-Chat | 32768 |
| Qwen | Qwen 1.5 Chat (32B) | Qwen/Qwen1.5-32B-Chat | 32768 |
| Qwen | Qwen 1.5 Chat (72B) | Qwen/Qwen1.5-72B-Chat | 32768 |
| Qwen | Qwen 1.5 Chat (110B) | Qwen/Qwen1.5-110B-Chat | 32768 |
| Qwen | Qwen 2 Instruct (72B) | Qwen/Qwen2-72B-Instruct | 32768 |
| Snorkel AI | Snorkel Mistral PairRM DPO (7B) | snorkelai/Snorkel-Mistral-PairRM-DPO | 32768 |
| Snowflake | Snowflake Arctic Instruct | Snowflake/snowflake-arctic-instruct | 4096 |
| Stanford | Alpaca (7B) | togethercomputer/alpaca-7b | 2048 |
| Teknium | OpenHermes-2-Mistral (7B) | teknium/OpenHermes-2-Mistral-7B | 8192 |
| Teknium | OpenHermes-2.5-Mistral (7B) | teknium/OpenHermes-2p5-Mistral-7B | 8192 |
@ -8,7 +8,7 @@
// copy outputs into the export in ../models.js

// Update the date below if you run this again because TogetherAI added new models.
// Last Collected: Apr 18, 2024
// Last Collected: Jul 31, 2024
// Since last collection Together's docs are broken. I just copied the HTML table
// and had claude3 convert to markdown and it works well enough.
@ -20,7 +20,8 @@ async function streamChatWithWorkspace(
  message,
  chatMode = "chat",
  user = null,
  thread = null
  thread = null,
  attachments = []
) {
  const uuid = uuidv4();
  const updatedMessage = await grepCommand(message, user);
@ -69,6 +70,7 @@ async function streamChatWithWorkspace(
      type: "textResponse",
      textResponse,
      sources: [],
      attachments,
      close: true,
      error: null,
    });
@ -79,6 +81,7 @@ async function streamChatWithWorkspace(
        text: textResponse,
        sources: [],
        type: chatMode,
        attachments,
      },
      threadId: thread?.id || null,
      include: false,
@ -195,6 +198,7 @@ async function streamChatWithWorkspace(
        text: textResponse,
        sources: [],
        type: chatMode,
        attachments,
      },
      threadId: thread?.id || null,
      include: false,
@ -211,6 +215,7 @@ async function streamChatWithWorkspace(
      userPrompt: updatedMessage,
      contextTexts,
      chatHistory,
      attachments,
    },
    rawHistory
  );
@ -246,7 +251,7 @@ async function streamChatWithWorkspace(
  const { chat } = await WorkspaceChats.new({
    workspaceId: workspace.id,
    prompt: message,
    response: { text: completeText, sources, type: chatMode },
    response: { text: completeText, sources, type: chatMode, attachments },
    threadId: thread?.id || null,
    user,
  });
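Note: `attachments` is now threaded through every exit path of `streamChatWithWorkspace` — the early command/error responses, the LLM prompt via `constructPrompt`, and the persisted `WorkspaceChats` row — so reloading a thread can re-render the images. The stored response field would look roughly like (values hypothetical):

response: {
  text: "The chart shows Q3 revenue...",
  sources: [],
  type: "chat",
  attachments: [
    { name: "chart.png", mime: "image/png", contentString: "data:image/png;base64,..." },
  ],
}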
@ -71,6 +71,7 @@ function convertToChatHistory(history = []) {
        role: "user",
        content: prompt,
        sentAt: moment(createdAt).unix(),
        attachments: data?.attachments ?? [],
        chatId: id,
      },
      {
@ -1,3 +1,11 @@
/**
 * File Attachment for automatic upload on the chat container page.
 * @typedef Attachment
 * @property {string} name - the given file name
 * @property {string} mime - the given file mime
 * @property {string} contentString - full base64 encoded string of file
 */

/**
 * @typedef {Object} BaseLLMProvider - A basic llm provider object
 * @property {Function} streamingEnabled - Checks if streaming is enabled for chat completions.
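Note: every provider's `#generateContent` above consumes this `Attachment` shape; `contentString` is a full data URL, which is why the Anthropic and Gemini adapters split on "base64," to recover the raw payload. A hypothetical instance:

/** @type {Attachment} */
const attachment = {
  name: "chart.png",
  mime: "image/png",
  contentString: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg...",
};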
@ -365,6 +365,10 @@ const KEY_MAPPING = {
    envKey: "OPENROUTER_MODEL_PREF",
    checks: [isNotEmpty],
  },
  OpenRouterTimeout: {
    envKey: "OPENROUTER_TIMEOUT_MS",
    checks: [],
  },

  // Groq Options
  GroqApiKey: {