{
);
});
+const WatchForChanges = memo(({ workspace, docPath, item }) => {
+ const [watched, setWatched] = useState(item?.watched || false);
+ const [hover, setHover] = useState(false);
+ const watchEvent = new CustomEvent("watch_document_for_changes");
+
+ const updateWatchStatus = async () => {
+ try {
+ if (!watched) window.dispatchEvent(watchEvent);
+ const success =
+ await System.experimentalFeatures.liveSync.setWatchStatusForDocument(
+ workspace.slug,
+ docPath,
+ !watched
+ );
+
+ if (!success) {
+ showToast(
+ `Failed to ${!watched ? "watch" : "unwatch"} document.`,
+ "error",
+ {
+ clear: true,
+ }
+ );
+ return;
+ }
+
+ showToast(
+ `Document ${
+ !watched
+ ? "will be watched for changes"
+ : "will no longer be watched for changes"
+ }.`,
+ "success",
+ { clear: true }
+ );
+ setWatched(!watched);
+ } catch (error) {
+ showToast(`Failed to watch document. ${error.message}`, "error", {
+ clear: true,
+ });
+ return;
+ }
+ };
+
+  if (!item || !item.canWatch) return;
+
+ return (
+
setHover(true)}
+ onMouseLeave={() => setHover(false)}
+ className="flex gap-x-2 items-center hover:bg-main-gradient p-[2px] rounded ml-2"
+ >
+
+
+
+ );
+});
+
const RemoveItemFromWorkspace = ({ item, onClick }) => {
return (
diff --git a/frontend/src/components/Modals/ManageWorkspace/Documents/WorkspaceDirectory/index.jsx b/frontend/src/components/Modals/ManageWorkspace/Documents/WorkspaceDirectory/index.jsx
index fa32bf9f..4ec3a86e 100644
--- a/frontend/src/components/Modals/ManageWorkspace/Documents/WorkspaceDirectory/index.jsx
+++ b/frontend/src/components/Modals/ManageWorkspace/Documents/WorkspaceDirectory/index.jsx
@@ -3,8 +3,10 @@ import { dollarFormat } from "@/utils/numbers";
import WorkspaceFileRow from "./WorkspaceFileRow";
import { memo, useEffect, useState } from "react";
import ModalWrapper from "@/components/ModalWrapper";
-import { PushPin } from "@phosphor-icons/react";
-import { SEEN_DOC_PIN_ALERT } from "@/utils/constants";
+import { Eye, PushPin } from "@phosphor-icons/react";
+import { SEEN_DOC_PIN_ALERT, SEEN_WATCH_ALERT } from "@/utils/constants";
+import paths from "@/utils/paths";
+import { Link } from "react-router-dom";
function WorkspaceDirectory({
workspace,
@@ -118,6 +120,7 @@ function WorkspaceDirectory({
)}
+
>
);
}
@@ -184,4 +187,75 @@ const PinAlert = memo(() => {
);
});
+const DocumentWatchAlert = memo(() => {
+ const [showAlert, setShowAlert] = useState(false);
+ function dismissAlert() {
+ setShowAlert(false);
+ window.localStorage.setItem(SEEN_WATCH_ALERT, "1");
+    window.removeEventListener("watch_document_for_changes", handlePinEvent);
+ }
+
+ function handlePinEvent() {
+ if (!!window?.localStorage?.getItem(SEEN_WATCH_ALERT)) return;
+ setShowAlert(true);
+ }
+
+ useEffect(() => {
+ if (!window || !!window?.localStorage?.getItem(SEEN_WATCH_ALERT)) return;
+ window?.addEventListener("watch_document_for_changes", handlePinEvent);
+ }, []);
+
+ return (
+
+
+
+
+
+
+
+ What does watching a document do?
+
+
+
+
+
+ When you watch a document in AnythingLLM we will{" "}
+              automatically sync your document content from its original
+ source on regular intervals. This will automatically update the
+ content in every workspace where this file is managed.
+
+
+ This feature currently supports online-based content and will not
+ be available for manually uploaded documents.
+
+
+ You can manage what documents are watched from the{" "}
+
+ File manager
+ {" "}
+ admin view.
+
+
+
+
+
+
+
+
+
+
+ );
+});
+
export default memo(WorkspaceDirectory);
diff --git a/frontend/src/components/SettingsSidebar/index.jsx b/frontend/src/components/SettingsSidebar/index.jsx
index 9dada953..112be312 100644
--- a/frontend/src/components/SettingsSidebar/index.jsx
+++ b/frontend/src/components/SettingsSidebar/index.jsx
@@ -23,6 +23,7 @@ import {
SplitVertical,
Microphone,
Robot,
+ Flask,
} from "@phosphor-icons/react";
import useUser from "@/hooks/useUser";
import { USER_BACKGROUND_COLOR } from "@/utils/constants";
@@ -30,6 +31,7 @@ import { isMobile } from "react-device-detect";
import Footer from "../Footer";
import { Link } from "react-router-dom";
import { useTranslation } from "react-i18next";
+import showToast from "@/utils/toast";
export default function SettingsSidebar() {
const { t } = useTranslation();
@@ -385,5 +387,63 @@ const SidebarOptions = ({ user = null, t }) => (
flex={true}
allowedRole={["admin"]}
/>
+
+ }
+ user={user}
+ flex={true}
+ allowedRole={["admin"]}
+ />
+
>
);
+
+function HoldToReveal({ children, holdForMs = 3_000 }) {
+ let timeout;
+ const [showing, setShowing] = useState(
+ window.localStorage.getItem(
+ "anythingllm_experimental_feature_preview_unlocked"
+ )
+ );
+
+ useEffect(() => {
+ const onPress = (e) => {
+ if (!["Control", "Meta"].includes(e.key)) return;
+ timeout = setTimeout(() => {
+ setShowing(true);
+ showToast("Experimental feature previews unlocked!");
+ window.localStorage.setItem(
+ "anythingllm_experimental_feature_preview_unlocked",
+ "enabled"
+ );
+        window.removeEventListener("keydown", onPress);
+ window.removeEventListener("keyup", onRelease);
+ clearTimeout(timeout);
+ }, holdForMs);
+ };
+ const onRelease = (e) => {
+ if (!["Control", "Meta"].includes(e.key)) return;
+ if (showing) {
+        window.removeEventListener("keydown", onPress);
+ window.removeEventListener("keyup", onRelease);
+ clearTimeout(timeout);
+ return;
+ }
+ clearTimeout(timeout);
+ };
+
+ if (!showing) {
+ window.addEventListener("keydown", onPress);
+ window.addEventListener("keyup", onRelease);
+ }
+ return () => {
+ window.removeEventListener("keydown", onPress);
+ window.removeEventListener("keyup", onRelease);
+ };
+ }, []);
+
+ if (!showing) return null;
+ return children;
+}
diff --git a/frontend/src/hooks/useGetProvidersModels.js b/frontend/src/hooks/useGetProvidersModels.js
index e0a27699..5c8171b3 100644
--- a/frontend/src/hooks/useGetProvidersModels.js
+++ b/frontend/src/hooks/useGetProvidersModels.js
@@ -23,6 +23,7 @@ const PROVIDER_DEFAULT_MODELS = {
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
+ "claude-3-5-sonnet-20240620",
],
azure: [],
lmstudio: [],
diff --git a/frontend/src/i18n.js b/frontend/src/i18n.js
index 2f5ca580..5344e1f9 100644
--- a/frontend/src/i18n.js
+++ b/frontend/src/i18n.js
@@ -9,7 +9,7 @@ i18next
.use(LanguageDetector)
.init({
fallbackLng: "en",
- debug: true,
+ debug: import.meta.env.DEV,
defaultNS,
resources,
lowerCaseLng: true,
diff --git a/frontend/src/locales/resources.js b/frontend/src/locales/resources.js
index d2072d34..04c06502 100644
--- a/frontend/src/locales/resources.js
+++ b/frontend/src/locales/resources.js
@@ -10,10 +10,15 @@
// to a specific language file as this will break the other languages. Any new keys should be added to english
// and the language file you are working on.
+// Contributor Notice: If you are adding a translation you MUST locally run `yarn verify:translations` from the root prior to PR.
+// Please do not submit PRs without first verifying this test passes, as it will tell you about missing keys or values
+// from the primary dictionary.
+
import English from "./en/common.js";
import Spanish from "./es/common.js";
import French from "./fr/common.js";
import Mandarin from "./zh/common.js";
+import Russian from "./ru/common.js";
export const defaultNS = "common";
export const resources = {
@@ -29,4 +34,7 @@ export const resources = {
fr: {
common: French,
},
+ ru: {
+ common: Russian,
+ },
};
diff --git a/frontend/src/locales/ru/common.js b/frontend/src/locales/ru/common.js
new file mode 100644
index 00000000..34f9591c
--- /dev/null
+++ b/frontend/src/locales/ru/common.js
@@ -0,0 +1,415 @@
+const TRANSLATIONS = {
+ common: {
+ "workspaces-name": "Имя рабочих пространств",
+ error: "ошибка",
+ success: "успех",
+ user: "Пользователь",
+ selection: "Выбор модели",
+ saving: "Сохранение...",
+ save: "Сохранить изменения",
+ previous: "Предыдущая страница",
+ next: "Следующая страница",
+ },
+ settings: {
+ title: "Настройки экземпляра",
+ system: "Системные настройки",
+ invites: "Приглашение",
+ users: "Пользователи",
+ workspaces: "Рабочие пространства",
+ "workspace-chats": "Чат рабочего пространства",
+ appearance: "Внешний вид",
+ "api-keys": "API ключи",
+ llm: "Предпочтение LLM",
+ transcription: "Модель транскрипции",
+ embedder: "Настройки встраивания",
+ "text-splitting": "Разделение и сегментация текста",
+ "vector-database": "Векторная база данных",
+ embeds: "Виджеты встраивания чата",
+ "embed-chats": "История встраивания чатов",
+ security: "Безопасность",
+ "event-logs": "Журналы событий",
+ privacy: "Конфиденциальность и данные",
+ },
+ login: {
+ "multi-user": {
+ welcome: "Добро пожаловать в",
+ "placeholder-username": "Имя пользователя",
+ "placeholder-password": "Пароль",
+ login: "Войти",
+ validating: "Проверка...",
+ "forgot-pass": "Забыли пароль",
+ reset: "Сбросить",
+ },
+ "sign-in": {
+ start: "Войти в ваш",
+ end: "аккаунт.",
+ },
+ },
+ "workspaces—settings": {
+ general: "Общие настройки",
+ chat: "Настройки чата",
+ vector: "Векторная база данных",
+ members: "Участники",
+ agent: "Конфигурация агента",
+ },
+ general: {
+ vector: {
+ title: "Количество векторов",
+ description: "Общее количество векторов в вашей векторной базе данных.",
+ },
+ names: {
+ description:
+ "Это изменит только отображаемое имя вашего рабочего пространства.",
+ },
+ message: {
+ title: "Предлагаемые сообщения чата",
+ description:
+ "Настройте сообщения, которые будут предложены пользователям вашего рабочего пространства.",
+ add: "Добавить новое сообщение",
+ save: "Сохранить сообщения",
+ heading: "Объясните мне",
+ body: "преимущества AnythingLLM",
+ },
+ pfp: {
+ title: "Изображение профиля помощника",
+ description:
+ "Настройте изображение профиля помощника для этого рабочего пространства.",
+ image: "Изображение рабочего пространства",
+ remove: "Удалить изображение рабочего пространства",
+ },
+ delete: {
+ delete: "Удалить рабочее пространство",
+ deleting: "Удаление рабочего пространства...",
+ "confirm-start": "Вы собираетесь удалить весь ваш",
+ "confirm-end":
+ "рабочее пространство. Это удалит все векторные встраивания в вашей векторной базе данных.\n\nОригинальные исходные файлы останутся нетронутыми. Это действие необратимо.",
+ },
+ },
+ chat: {
+ llm: {
+ title: "Поставщик LLM рабочего пространства",
+ description:
+ "Конкретный поставщик и модель LLM, которые будут использоваться для этого рабочего пространства. По умолчанию используется системный поставщик и настройки LLM.",
+ search: "Искать всех поставщиков LLM",
+ },
+ model: {
+ title: "Модель чата рабочего пространства",
+ description:
+ "Конкретная модель чата, которая будет использоваться для этого рабочего пространства. Если пусто, будет использоваться системное предпочтение LLM.",
+ wait: "-- ожидание моделей --",
+ },
+ mode: {
+ title: "Режим чата",
+ chat: {
+ title: "Чат",
+ "desc-start": "будет предоставлять ответы с общей информацией LLM",
+ and: "и",
+ "desc-end": "найденный контекст документов.",
+ },
+ query: {
+ title: "Запрос",
+ "desc-start": "будет предоставлять ответы",
+ only: "только",
+ "desc-end": "если найден контекст документов.",
+ },
+ },
+ history: {
+ title: "История чата",
+ "desc-start":
+ "Количество предыдущих чатов, которые будут включены в краткосрочную память ответа.",
+ recommend: "Рекомендуем 20.",
+ "desc-end":
+ "Любое количество более 45 может привести к непрерывным сбоям чата в зависимости от размера сообщений.",
+ },
+ prompt: {
+ title: "Подсказка",
+ description:
+ "Подсказка, которая будет использоваться в этом рабочем пространстве. Определите контекст и инструкции для AI для создания ответа. Вы должны предоставить тщательно разработанную подсказку, чтобы AI мог генерировать релевантный и точный ответ.",
+ },
+ refusal: {
+ title: "Ответ об отказе в режиме запроса",
+ "desc-start": "В режиме",
+ query: "запроса",
+ "desc-end":
+ "вы можете вернуть пользовательский ответ об отказе, если контекст не найден.",
+ },
+ temperature: {
+ title: "Температура LLM",
+ "desc-start":
+ "Этот параметр контролирует, насколько 'креативными' будут ответы вашего LLM.",
+ "desc-end":
+ "Чем выше число, тем более креативные ответы. Для некоторых моделей это может привести к несвязным ответам при слишком высоких настройках.",
+ hint: "Большинство LLM имеют различные допустимые диапазоны значений. Проконсультируйтесь с вашим поставщиком LLM для получения этой информации.",
+ },
+ },
+ "vector-workspace": {
+ identifier: "Идентификатор векторной базы данных",
+ snippets: {
+ title: "Максимальное количество контекстных фрагментов",
+ description:
+ "Этот параметр контролирует максимальное количество контекстных фрагментов, которые будут отправлены LLM для каждого чата или запроса.",
+ recommend: "Рекомендуемое количество: 4",
+ },
+ doc: {
+ title: "Порог сходства документов",
+ description:
+ "Минимальная оценка сходства, необходимая для того, чтобы источник считался связанным с чатом. Чем выше число, тем более схожим должен быть источник с чатом.",
+ zero: "Без ограничений",
+ low: "Низкий (оценка сходства ≥ .25)",
+ medium: "Средний (оценка сходства ≥ .50)",
+ high: "Высокий (оценка сходства ≥ .75)",
+ },
+ reset: {
+ reset: "Сброс векторной базы данных",
+ resetting: "Очистка векторов...",
+ confirm:
+ "Вы собираетесь сбросить векторную базу данных этого рабочего пространства. Это удалит все текущие векторные встраивания.\n\nОригинальные исходные файлы останутся нетронутыми. Это действие необратимо.",
+ error: "Не удалось сбросить векторную базу данных рабочего пространства!",
+ success: "Векторная база данных рабочего пространства была сброшена!",
+ },
+ },
+ agent: {
+ "performance-warning":
+ "Производительность LLM, не поддерживающих вызовы инструментов, сильно зависит от возможностей и точности модели. Некоторые способности могут быть ограничены или не функционировать.",
+ provider: {
+ title: "Поставщик LLM агента рабочего пространства",
+ description:
+ "Конкретный поставщик и модель LLM, которые будут использоваться для агента @agent этого рабочего пространства.",
+ },
+ mode: {
+ chat: {
+ title: "Модель чата агента рабочего пространства",
+ description:
+ "Конкретная модель чата, которая будет использоваться для агента @agent этого рабочего пространства.",
+ },
+ title: "Модель агента рабочего пространства",
+ description:
+ "Конкретная модель LLM, которая будет использоваться для агента @agent этого рабочего пространства.",
+ wait: "-- ожидание моделей --",
+ },
+ skill: {
+ title: "Навыки агента по умолчанию",
+ description:
+ "Улучшите естественные способности агента по умолчанию с помощью этих предустановленных навыков. Эта настройка применяется ко всем рабочим пространствам.",
+ rag: {
+ title: "RAG и долговременная память",
+ description:
+ "Позвольте агенту использовать ваши локальные документы для ответа на запрос или попросите агента 'запомнить' части контента для долгосрочного извлечения из памяти.",
+ },
+ view: {
+ title: "Просмотр и резюмирование документов",
+ description:
+ "Позвольте агенту перечислять и резюмировать содержание файлов рабочего пространства, которые в данный момент встроены.",
+ },
+ scrape: {
+ title: "Сбор данных с веб-сайтов",
+ description:
+ "Позвольте агенту посещать и собирать содержимое веб-сайтов.",
+ },
+ generate: {
+ title: "Создание диаграмм",
+ description:
+ "Включите возможность создания различных типов диаграмм из предоставленных данных или данных, указанных в чате.",
+ },
+ save: {
+ title: "Создание и сохранение файлов в браузер",
+ description:
+ "Включите возможность создания и записи файлов, которые можно сохранить и загрузить в вашем браузере.",
+ },
+ web: {
+ title: "Поиск в Интернете и просмотр в реальном времени",
+ "desc-start":
+ "Позвольте вашему агенту искать в Интернете для ответа на ваши вопросы, подключаясь к поставщику поиска (SERP).",
+ "desc-end":
+ "Поиск в Интернете во время сессий агента не будет работать, пока это не настроено.",
+ },
+ },
+ },
+ recorded: {
+ title: "Чаты рабочего пространства",
+ description:
+ "Это все записанные чаты и сообщения, отправленные пользователями, упорядоченные по дате создания.",
+ export: "Экспорт",
+ table: {
+ id: "Идентификатор",
+ by: "Отправлено",
+ workspace: "Рабочее пространство",
+ prompt: "Подсказка",
+ response: "Ответ",
+ at: "Отправлено в",
+ },
+ },
+ appearance: {
+ title: "Внешний вид",
+ description: "Настройте параметры внешнего вида вашей платформы.",
+ logo: {
+ title: "Настроить логотип",
+ description:
+ "Загрузите свой логотип, чтобы персонализировать ваш чат-бот.",
+ add: "Добавить пользовательский логотип",
+ recommended: "Рекомендуемый размер: 800 x 200",
+ remove: "Удалить",
+ replace: "Заменить",
+ },
+ message: {
+ title: "Настроить сообщения",
+ description:
+ "Настройте автоматические сообщения, отображаемые вашим пользователям.",
+ new: "Новое",
+ system: "система",
+ user: "пользователь",
+ message: "сообщение",
+ assistant: "Чат-ассистент AnythingLLM",
+ "double-click": "Дважды щелкните, чтобы редактировать...",
+ save: "Сохранить сообщения",
+ },
+ icons: {
+ title: "Пользовательские иконки в подвале",
+ description:
+ "Настройте иконки в подвале, отображаемые внизу боковой панели.",
+ icon: "Иконка",
+ link: "Ссылка",
+ },
+ },
+ api: {
+ title: "API ключи",
+ description:
+ "API ключи позволяют владельцу программно получать доступ к этому экземпляру AnythingLLM и управлять им.",
+ link: "Прочитать документацию по API",
+ generate: "Создать новый API ключ",
+ table: {
+ key: "API ключ",
+ by: "Создано",
+ created: "Создано",
+ },
+ },
+ llm: {
+ title: "Предпочтение LLM",
+ description:
+ "Это учетные данные и настройки для вашего предпочтительного поставщика чата и встраивания LLM. Важно, чтобы эти ключи были актуальными и правильными, иначе AnythingLLM не будет работать должным образом.",
+ provider: "Поставщик LLM",
+ },
+ transcription: {
+ title: "Предпочтение модели транскрипции",
+ description:
+ "Это учетные данные и настройки для вашего предпочтительного поставщика моделей транскрипции. Важно, чтобы эти ключи были актуальными и правильными, иначе медиафайлы и аудио не будут транскрибироваться.",
+ provider: "Поставщик транскрипции",
+ "warn-start":
+ "Использование локальной модели whisper на машинах с ограниченной оперативной памятью или процессором может привести к зависанию AnythingLLM при обработке медиафайлов.",
+ "warn-recommend":
+ "Мы рекомендуем минимум 2ГБ оперативной памяти и загружать файлы <10МБ.",
+ "warn-end":
+ "Встроенная модель будет автоматически загружена при первом использовании.",
+ },
+ embedding: {
+ title: "Настройки встраивания",
+ "desc-start":
+ "При использовании LLM, который не поддерживает встроенный механизм встраивания - возможно, потребуется дополнительно указать учетные данные для встраивания текста.",
+ "desc-end":
+ "Встраивание - это процесс превращения текста в векторы. Эти учетные данные необходимы для превращения ваших файлов и подсказок в формат, который AnythingLLM может использовать для обработки.",
+ provider: {
+ title: "Поставщик встраивания",
+ description:
+ "Нет необходимости в настройке при использовании встроенного механизма встраивания AnythingLLM.",
+ },
+ },
+ text: {
+ title: "Настройки разделения и сегментации текста",
+ "desc-start":
+ "Иногда может понадобиться изменить стандартный способ разделения и сегментации новых документов перед их вставкой в векторную базу данных.",
+ "desc-end":
+ "Следует изменять этот параметр только при полном понимании работы разделения текста и его побочных эффектов.",
+ "warn-start": "Изменения здесь будут применяться только к",
+ "warn-center": "новым встроенным документам",
+ "warn-end": ", а не к существующим документам.",
+ size: {
+ title: "Размер сегмента текста",
+ description:
+ "Это максимальная длина символов, которые могут присутствовать в одном векторе.",
+ recommend: "Максимальная длина модели встраивания составляет",
+ },
+ overlap: {
+ title: "Перекрытие сегментов текста",
+ description:
+ "Это максимальное перекрытие символов, которое происходит при сегментации между двумя смежными сегментами текста.",
+ },
+ },
+ vector: {
+ title: "Векторная база данных",
+ description:
+ "Это учетные данные и настройки для того, как будет функционировать ваш экземпляр AnythingLLM. Важно, чтобы эти ключи были актуальными и правильными.",
+ provider: {
+ title: "Поставщик векторной базы данных",
+ description: "Настройка для LanceDB не требуется.",
+ },
+ },
+ embeddable: {
+ title: "Встраиваемые виджеты чата",
+ description:
+ "Встраиваемые виджеты чата - это интерфейсы чата, ориентированные на публичное использование и привязанные к одному рабочему пространству. Они позволяют создавать рабочие пространства, которые затем можно публиковать в Интернете.",
+ create: "Создать встраивание",
+ table: {
+ workspace: "Рабочее пространство",
+ chats: "Отправленные чаты",
+ Active: "Активные домены",
+ },
+ },
+ "embed-chats": {
+ title: "Встраивание чатов",
+ description:
+ "Это все записанные чаты и сообщения от любого встраивания, которое вы опубликовали.",
+ table: {
+ embed: "Встраивание",
+ sender: "Отправитель",
+ message: "Сообщение",
+ response: "Ответ",
+ at: "Отправлено в",
+ },
+ },
+ multi: {
+ title: "Многопользовательский режим",
+ description:
+ "Настройте ваш экземпляр для поддержки вашей команды, активировав многопользовательский режим.",
+ enable: {
+ "is-enable": "Многопользовательский режим включен",
+ enable: "Включить многопользовательский режим",
+ description:
+ "По умолчанию, вы будете единственным администратором. Как администратор, вы должны будете создавать учетные записи для всех новых пользователей или администраторов. Не теряйте ваш пароль, так как только администратор может сбросить пароли.",
+ username: "Имя пользователя учетной записи администратора",
+ password: "Пароль учетной записи администратора",
+ },
+ password: {
+ title: "Защита паролем",
+ description:
+ "Защитите ваш экземпляр AnythingLLM паролем. Если вы забудете его, метода восстановления не существует, поэтому убедитесь, что вы сохранили этот пароль.",
+ },
+ instance: {
+ title: "Защитить экземпляр паролем",
+ description:
+ "По умолчанию, вы будете единственным администратором. Как администратор, вы должны будете создавать учетные записи для всех новых пользователей или администраторов. Не теряйте ваш пароль, так как только администратор может сбросить пароли.",
+ password: "Пароль экземпляра",
+ },
+ },
+ event: {
+ title: "Журналы событий",
+ description:
+ "Просматривайте все действия и события, происходящие в этом экземпляре для мониторинга.",
+ clear: "Очистить журналы событий",
+ table: {
+ type: "Тип события",
+ user: "Пользователь",
+ occurred: "Произошло в",
+ },
+ },
+ privacy: {
+ title: "Конфиденциальность и обработка данных",
+ description:
+ "Это ваша конфигурация для того, как подключенные сторонние поставщики и AnythingLLM обрабатывают ваши данные.",
+ llm: "Выбор LLM",
+ embedding: "Предпочтение встраивания",
+ vector: "Векторная база данных",
+ anonymous: "Анонимная телеметрия включена",
+ },
+};
+
+export default TRANSLATIONS;
diff --git a/frontend/src/locales/verifyTranslations.mjs b/frontend/src/locales/verifyTranslations.mjs
index dccec76d..93f7eaa6 100644
--- a/frontend/src/locales/verifyTranslations.mjs
+++ b/frontend/src/locales/verifyTranslations.mjs
@@ -9,12 +9,16 @@ function langDisplayName(lang) {
function compareStructures(lang, a, b, subdir = null) {
//if a and b aren't the same type, they can't be equal
- if (typeof a !== typeof b) {
+ if (typeof a !== typeof b && a !== null && b !== null) {
console.log("Invalid type comparison", [
{
lang,
a: typeof a,
b: typeof b,
+ values: {
+ a,
+ b,
+ },
...(!!subdir ? { subdir } : {}),
},
]);
diff --git a/frontend/src/models/experimental/liveSync.js b/frontend/src/models/experimental/liveSync.js
new file mode 100644
index 00000000..54091418
--- /dev/null
+++ b/frontend/src/models/experimental/liveSync.js
@@ -0,0 +1,59 @@
+import { API_BASE } from "@/utils/constants";
+import { baseHeaders } from "@/utils/request";
+
+const LiveDocumentSync = {
+ featureFlag: "experimental_live_file_sync",
+ toggleFeature: async function (updatedStatus = false) {
+ return await fetch(`${API_BASE}/experimental/toggle-live-sync`, {
+ method: "POST",
+ headers: baseHeaders(),
+ body: JSON.stringify({ updatedStatus }),
+ })
+ .then((res) => {
+ if (!res.ok) throw new Error("Could not update status.");
+ return true;
+ })
+ .then((res) => res)
+ .catch((e) => {
+ console.error(e);
+ return false;
+ });
+ },
+ queues: async function () {
+ return await fetch(`${API_BASE}/experimental/live-sync/queues`, {
+ headers: baseHeaders(),
+ })
+ .then((res) => {
+        if (!res.ok) throw new Error("Could not fetch watched document queues.");
+ return res.json();
+ })
+ .then((res) => res?.queues || [])
+ .catch((e) => {
+ console.error(e);
+ return [];
+ });
+ },
+
+ // Should be in Workspaces but is here for now while in preview
+ setWatchStatusForDocument: async function (slug, docPath, watchStatus) {
+ return fetch(`${API_BASE}/workspace/${slug}/update-watch-status`, {
+ method: "POST",
+ headers: baseHeaders(),
+ body: JSON.stringify({ docPath, watchStatus }),
+ })
+ .then((res) => {
+ if (!res.ok) {
+ throw new Error(
+ res.statusText || "Error setting watch status for document."
+ );
+ }
+ return true;
+ })
+ .catch((e) => {
+ console.error(e);
+ return false;
+ });
+ },
+};
+
+export default LiveDocumentSync;
diff --git a/frontend/src/models/system.js b/frontend/src/models/system.js
index d2252be1..b922457b 100644
--- a/frontend/src/models/system.js
+++ b/frontend/src/models/system.js
@@ -1,6 +1,7 @@
import { API_BASE, AUTH_TIMESTAMP, fullApiUrl } from "@/utils/constants";
import { baseHeaders, safeJsonParse } from "@/utils/request";
import DataConnector from "./dataConnector";
+import LiveDocumentSync from "./experimental/liveSync";
const System = {
cacheKeys: {
@@ -675,6 +676,9 @@ const System = {
return false;
});
},
+ experimentalFeatures: {
+ liveSync: LiveDocumentSync,
+ },
};
export default System;
diff --git a/frontend/src/pages/Admin/Agents/WebSearchSelection/SearchProviderOptions/index.jsx b/frontend/src/pages/Admin/Agents/WebSearchSelection/SearchProviderOptions/index.jsx
index 58ceb844..c5ccd260 100644
--- a/frontend/src/pages/Admin/Agents/WebSearchSelection/SearchProviderOptions/index.jsx
+++ b/frontend/src/pages/Admin/Agents/WebSearchSelection/SearchProviderOptions/index.jsx
@@ -182,3 +182,25 @@ export function SerplySearchOptions({ settings }) {
>
);
}
+
+export function SearXNGOptions({ settings }) {
+ return (
+
+
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/Admin/Agents/WebSearchSelection/icons/searxng.png b/frontend/src/pages/Admin/Agents/WebSearchSelection/icons/searxng.png
new file mode 100644
index 00000000..434e570f
Binary files /dev/null and b/frontend/src/pages/Admin/Agents/WebSearchSelection/icons/searxng.png differ
diff --git a/frontend/src/pages/Admin/Agents/WebSearchSelection/index.jsx b/frontend/src/pages/Admin/Agents/WebSearchSelection/index.jsx
index 9650c38f..438be111 100644
--- a/frontend/src/pages/Admin/Agents/WebSearchSelection/index.jsx
+++ b/frontend/src/pages/Admin/Agents/WebSearchSelection/index.jsx
@@ -4,6 +4,7 @@ import GoogleSearchIcon from "./icons/google.png";
import SerperDotDevIcon from "./icons/serper.png";
import BingSearchIcon from "./icons/bing.png";
import SerplySearchIcon from "./icons/serply.png";
+import SearXNGSearchIcon from "./icons/searxng.png";
import {
CaretUpDown,
MagnifyingGlass,
@@ -17,6 +18,7 @@ import {
GoogleSearchOptions,
BingSearchOptions,
SerplySearchOptions,
+ SearXNGOptions,
} from "./SearchProviderOptions";
const SEARCH_PROVIDERS = [
@@ -60,6 +62,14 @@ const SEARCH_PROVIDERS = [
description:
"Serply.io web-search. Free account with a 100 calls/month forever.",
},
+ {
+ name: "SearXNG",
+ value: "searxng-engine",
+ logo: SearXNGSearchIcon,
+ options: (settings) =>
,
+ description:
+ "Free, open-source, internet meta-search engine with no tracking.",
+ },
];
export default function AgentWebSearchSelection({
diff --git a/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/DocumentSyncQueueRow/index.jsx b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/DocumentSyncQueueRow/index.jsx
new file mode 100644
index 00000000..30795628
--- /dev/null
+++ b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/DocumentSyncQueueRow/index.jsx
@@ -0,0 +1,44 @@
+import { useRef } from "react";
+import { Trash } from "@phosphor-icons/react";
+import { stripUuidAndJsonFromString } from "@/components/Modals/ManageWorkspace/Documents/Directory/utils";
+import moment from "moment";
+import System from "@/models/system";
+
+export default function DocumentSyncQueueRow({ queue }) {
+ const rowRef = useRef(null);
+ const handleDelete = async () => {
+ rowRef?.current?.remove();
+ await System.experimentalFeatures.liveSync.setWatchStatusForDocument(
+ queue.workspaceDoc.workspace.slug,
+ queue.workspaceDoc.docpath,
+ false
+ );
+ };
+
+ return (
+ <>
+
+
+ {stripUuidAndJsonFromString(queue.workspaceDoc.filename)}
+ |
+ {moment(queue.lastSyncedAt).fromNow()} |
+
+ {moment(queue.nextSyncAt).format("lll")}
+ ({moment(queue.nextSyncAt).fromNow()})
+ |
+ {moment(queue.createdAt).format("lll")} |
+
+
+ |
+
+ >
+ );
+}
diff --git a/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/index.jsx b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/index.jsx
new file mode 100644
index 00000000..027ebb7e
--- /dev/null
+++ b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage/index.jsx
@@ -0,0 +1,92 @@
+import { useEffect, useState } from "react";
+import Sidebar from "@/components/Sidebar";
+import { isMobile } from "react-device-detect";
+import * as Skeleton from "react-loading-skeleton";
+import "react-loading-skeleton/dist/skeleton.css";
+import System from "@/models/system";
+import DocumentSyncQueueRow from "./DocumentSyncQueueRow";
+
+export default function LiveDocumentSyncManager() {
+ return (
+
+
+
+
+
+
+
+ Watched documents
+
+
+
+ These are all the documents that are currently being watched in
+ your instance. The content of these documents will be periodically
+ synced.
+
+
+
+
+
+
+ );
+}
+
+function WatchedDocumentsContainer() {
+ const [loading, setLoading] = useState(true);
+ const [queues, setQueues] = useState([]);
+
+ useEffect(() => {
+ async function fetchData() {
+ const _queues = await System.experimentalFeatures.liveSync.queues();
+ setQueues(_queues);
+ setLoading(false);
+ }
+ fetchData();
+ }, []);
+
+ if (loading) {
+ return (
+
+ );
+ }
+
+ return (
+
+
+
+
+ Document Name
+ |
+
+ Last Synced
+ |
+
+ Time until next refresh
+ |
+
+ Created On
+ |
+
+ {" "}
+ |
+
+
+
+ {queues.map((queue) => (
+
+ ))}
+
+
+ );
+}
diff --git a/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/toggle.jsx b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/toggle.jsx
new file mode 100644
index 00000000..69a758d5
--- /dev/null
+++ b/frontend/src/pages/Admin/ExperimentalFeatures/Features/LiveSync/toggle.jsx
@@ -0,0 +1,90 @@
+import System from "@/models/system";
+import paths from "@/utils/paths";
+import showToast from "@/utils/toast";
+import { ArrowSquareOut } from "@phosphor-icons/react";
+import { useState } from "react";
+import { Link } from "react-router-dom";
+
+export default function LiveSyncToggle({ enabled = false, onToggle }) {
+ const [status, setStatus] = useState(enabled);
+
+ async function toggleFeatureFlag() {
+ const updated = await System.experimentalFeatures.liveSync.toggleFeature(
+ !status
+ );
+ if (!updated) {
+ showToast("Failed to update status of feature.", "error", {
+ clear: true,
+ });
+ return false;
+ }
+
+ setStatus(!status);
+ showToast(
+ `Live document content sync has been ${
+ !status ? "enabled" : "disabled"
+ }.`,
+ "success",
+ { clear: true }
+ );
+ onToggle();
+ }
+
+ return (
+
+
+
+
+ Automatic Document Content Sync
+
+
+
+
+
+ Enable the ability to specify a document to be "watched". Watched
+ document's content will be regularly fetched and updated in
+ AnythingLLM.
+
+
+ Watched documents will automatically update in all workspaces they
+ are referenced in at the same time of update.
+
+
+ This feature only applies to web-based content, such as websites,
+ Confluence, YouTube, and GitHub files.
+
+
+
+
+
+ );
+}
diff --git a/frontend/src/pages/Admin/ExperimentalFeatures/features.js b/frontend/src/pages/Admin/ExperimentalFeatures/features.js
new file mode 100644
index 00000000..7dc8251e
--- /dev/null
+++ b/frontend/src/pages/Admin/ExperimentalFeatures/features.js
@@ -0,0 +1,9 @@
+import LiveSyncToggle from "./Features/LiveSync/toggle";
+
+export const configurableFeatures = {
+ experimental_live_file_sync: {
+ title: "Live Document Sync",
+ component: LiveSyncToggle,
+ key: "experimental_live_file_sync",
+ },
+};
diff --git a/frontend/src/pages/Admin/ExperimentalFeatures/index.jsx b/frontend/src/pages/Admin/ExperimentalFeatures/index.jsx
new file mode 100644
index 00000000..0652f26d
--- /dev/null
+++ b/frontend/src/pages/Admin/ExperimentalFeatures/index.jsx
@@ -0,0 +1,280 @@
+import { useEffect, useState } from "react";
+import Sidebar from "@/components/SettingsSidebar";
+import { isMobile } from "react-device-detect";
+import Admin from "@/models/admin";
+import { FullScreenLoader } from "@/components/Preloader";
+import { CaretRight, Flask } from "@phosphor-icons/react";
+import { configurableFeatures } from "./features";
+import ModalWrapper from "@/components/ModalWrapper";
+import paths from "@/utils/paths";
+import showToast from "@/utils/toast";
+
+export default function ExperimentalFeatures() {
+ const [featureFlags, setFeatureFlags] = useState({});
+ const [loading, setLoading] = useState(true);
+ const [selectedFeature, setSelectedFeature] = useState(
+ "experimental_live_file_sync"
+ );
+
+ useEffect(() => {
+ async function fetchSettings() {
+ setLoading(true);
+ const { settings } = await Admin.systemPreferences();
+ setFeatureFlags(settings?.feature_flags ?? {});
+ setLoading(false);
+ }
+ fetchSettings();
+ }, []);
+
+ const refresh = async () => {
+ const { settings } = await Admin.systemPreferences();
+ setFeatureFlags(settings?.feature_flags ?? {});
+ };
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ return (
+
+
+ {/* Feature settings nav */}
+
+
+
+
Experimental Features
+
+ {/* Feature list */}
+
featureFlags[flag]
+ )}
+ />
+
+
+ {/* Selected feature setting panel */}
+
+
+
+ {selectedFeature ? (
+
+ ) : (
+
+
+
Select an experimental feature
+
+ )}
+
+
+
+
+
+ );
+}
+
+function FeatureLayout({ children }) {
+ return (
+
+ );
+}
+
+function FeatureList({
+ features = [],
+ selectedFeature = null,
+ handleClick = null,
+ activeFeatures = [],
+}) {
+ if (Object.keys(features).length === 0) return null;
+
+ return (
+
+ {Object.entries(features).map(([feature, settings], index) => (
+
handleClick?.(feature)}
+ >
+
{settings.title}
+
+
+ {activeFeatures.includes(settings.key) ? "On" : "Off"}
+
+
+
+
+ ))}
+
+ );
+}
+
+function SelectedFeatureComponent({ feature, settings, refresh }) {
+ const Component = feature?.component;
+ return Component ? (
+
+ ) : null;
+}
+
+function FeatureVerification({ children }) {
+ if (
+ !window.localStorage.getItem("anythingllm_tos_experimental_feature_set")
+ ) {
+ function acceptTos(e) {
+ e.preventDefault();
+
+ window.localStorage.setItem(
+ "anythingllm_tos_experimental_feature_set",
+ "accepted"
+ );
+ showToast(
+ "Experimental Feature set enabled. Reloading the page.",
+ "success"
+ );
+ setTimeout(() => {
+ window.location.reload();
+ }, 2_500);
+ return;
+ }
+
+ return (
+ <>
+
+
+
+ {children}
+ >
+ );
+ }
+ return <>{children}>;
+}
diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js
index 6fd29534..3f637617 100644
--- a/frontend/src/utils/constants.js
+++ b/frontend/src/utils/constants.js
@@ -5,6 +5,7 @@ export const AUTH_TOKEN = "anythingllm_authToken";
export const AUTH_TIMESTAMP = "anythingllm_authTimestamp";
export const COMPLETE_QUESTIONNAIRE = "anythingllm_completed_questionnaire";
export const SEEN_DOC_PIN_ALERT = "anythingllm_pinned_document_alert";
+export const SEEN_WATCH_ALERT = "anythingllm_watched_document_alert";
export const USER_BACKGROUND_COLOR = "bg-historical-msg-user";
export const AI_BACKGROUND_COLOR = "bg-historical-msg-system";
diff --git a/frontend/src/utils/paths.js b/frontend/src/utils/paths.js
index b1ffcb2b..8ee924fc 100644
--- a/frontend/src/utils/paths.js
+++ b/frontend/src/utils/paths.js
@@ -135,5 +135,13 @@ export default {
embedChats: () => {
return `/settings/embed-chats`;
},
+ experimental: () => {
+ return `/settings/beta-features`;
+ },
+ },
+ experimental: {
+ liveDocumentSync: {
+ manage: () => `/settings/beta-features/live-document-sync/manage`,
+ },
},
};
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index 82d68d02..348541b8 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -2464,6 +2464,11 @@ minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2:
dependencies:
brace-expansion "^1.1.7"
+moment@^2.30.1:
+ version "2.30.1"
+ resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae"
+ integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==
+
ms@2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
diff --git a/server/.env.example b/server/.env.example
index a88a8a03..145e00da 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -1,5 +1,7 @@
SERVER_PORT=3001
JWT_SECRET="my-random-string-for-seeding" # Please generate random string at least 12 chars long.
+SIG_KEY='passphrase' # Please generate random string at least 32 chars long.
+SIG_SALT='salt' # Please generate random string at least 32 chars long.
###########################################
######## LLM API SElECTION ################
@@ -241,3 +243,6 @@ TTS_PROVIDER="native"
#------ Serply.io ----------- https://serply.io/
# AGENT_SERPLY_API_KEY=
+
+#------ SearXNG ----------- https://github.com/searxng/searxng
+# AGENT_SEARXNG_API_URL=
\ No newline at end of file
diff --git a/server/endpoints/admin.js b/server/endpoints/admin.js
index 1bdfd8b1..67d7210f 100644
--- a/server/endpoints/admin.js
+++ b/server/endpoints/admin.js
@@ -358,6 +358,7 @@ function adminEndpoints(app) {
custom_app_name:
(await SystemSettings.get({ label: "custom_app_name" }))?.value ||
null,
+ feature_flags: (await SystemSettings.getFeatureFlags()) || {},
};
response.status(200).json({ settings });
} catch (e) {
diff --git a/server/endpoints/api/index.js b/server/endpoints/api/index.js
index c5a2b8a8..fdf225b8 100644
--- a/server/endpoints/api/index.js
+++ b/server/endpoints/api/index.js
@@ -4,6 +4,7 @@ const { apiAuthEndpoints } = require("./auth");
const { apiDocumentEndpoints } = require("./document");
const { apiSystemEndpoints } = require("./system");
const { apiWorkspaceEndpoints } = require("./workspace");
+const { apiWorkspaceThreadEndpoints } = require("./workspaceThread");
const { apiUserManagementEndpoints } = require("./userManagement");
// All endpoints must be documented and pass through the validApiKey Middleware.
@@ -17,6 +18,7 @@ function developerEndpoints(app, router) {
apiSystemEndpoints(router);
apiWorkspaceEndpoints(router);
apiDocumentEndpoints(router);
+ apiWorkspaceThreadEndpoints(router);
apiUserManagementEndpoints(router);
}
diff --git a/server/endpoints/api/system/index.js b/server/endpoints/api/system/index.js
index d1294162..029fca89 100644
--- a/server/endpoints/api/system/index.js
+++ b/server/endpoints/api/system/index.js
@@ -26,7 +26,7 @@ function apiSystemEndpoints(app) {
try {
if (process.env.NODE_ENV !== "production")
return response.sendStatus(200).end();
- await dumpENV();
+ dumpENV();
response.sendStatus(200).end();
} catch (e) {
console.log(e.message, e);
@@ -145,7 +145,6 @@ function apiSystemEndpoints(app) {
try {
const body = reqBody(request);
const { newValues, error } = await updateENV(body);
- if (process.env.NODE_ENV === "production") await dumpENV();
response.status(200).json({ newValues, error });
} catch (e) {
console.log(e.message, e);
diff --git a/server/endpoints/api/workspaceThread/index.js b/server/endpoints/api/workspaceThread/index.js
new file mode 100644
index 00000000..a8c859a8
--- /dev/null
+++ b/server/endpoints/api/workspaceThread/index.js
@@ -0,0 +1,594 @@
+const { v4: uuidv4 } = require("uuid");
+const { WorkspaceThread } = require("../../../models/workspaceThread");
+const { Workspace } = require("../../../models/workspace");
+const { validApiKey } = require("../../../utils/middleware/validApiKey");
+const { reqBody, multiUserMode } = require("../../../utils/http");
+const { chatWithWorkspace } = require("../../../utils/chats");
+const {
+ streamChatWithWorkspace,
+ VALID_CHAT_MODE,
+} = require("../../../utils/chats/stream");
+const { Telemetry } = require("../../../models/telemetry");
+const { EventLogs } = require("../../../models/eventLogs");
+const {
+ writeResponseChunk,
+ convertToChatHistory,
+} = require("../../../utils/helpers/chat/responses");
+const { WorkspaceChats } = require("../../../models/workspaceChats");
+const { User } = require("../../../models/user");
+
+function apiWorkspaceThreadEndpoints(app) {
+ if (!app) return;
+
+ app.post(
+ "/v1/workspace/:slug/thread/new",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Create a new workspace thread'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.requestBody = {
+ description: 'Optional userId associated with the thread',
+ required: false,
+ type: 'object',
+ content: {
+ "application/json": {
+ example: {
+ userId: 1
+ }
+ }
+ }
+ }
+ #swagger.responses[200] = {
+ content: {
+ "application/json": {
+ schema: {
+ type: 'object',
+ example: {
+ thread: {
+ "id": 1,
+ "name": "Thread",
+ "slug": "thread-uuid",
+ "user_id": 1,
+ "workspace_id": 1
+ },
+ message: null
+ }
+ }
+ }
+ }
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug } = request.params;
+ const { userId } = reqBody(request);
+ const workspace = await Workspace.get({ slug });
+
+ if (!workspace) {
+ response.sendStatus(400).end();
+ return;
+ }
+
+ const { thread, message } = await WorkspaceThread.new(
+ workspace,
+ userId ? Number(userId) : null
+ );
+
+ await Telemetry.sendTelemetry("workspace_thread_created", {
+ multiUserMode: multiUserMode(response),
+ LLMSelection: process.env.LLM_PROVIDER || "openai",
+ Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+ VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+ });
+ await EventLogs.logEvent("api_workspace_thread_created", {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ });
+ response.status(200).json({ thread, message });
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.post(
+ "/v1/workspace/:slug/thread/:threadSlug/update",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Update thread name by its unique slug.'
+ #swagger.path = '/v1/workspace/{slug}/thread/{threadSlug}/update'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.parameters['threadSlug'] = {
+ in: 'path',
+ description: 'Unique slug of thread',
+ required: true,
+ type: 'string'
+ }
+ #swagger.requestBody = {
+ description: 'JSON object containing new name to update the thread.',
+ required: true,
+ type: 'object',
+ content: {
+ "application/json": {
+ example: {
+ "name": 'Updated Thread Name'
+ }
+ }
+ }
+ }
+ #swagger.responses[200] = {
+ content: {
+ "application/json": {
+ schema: {
+ type: 'object',
+ example: {
+ thread: {
+ "id": 1,
+ "name": "Updated Thread Name",
+ "slug": "thread-uuid",
+ "user_id": 1,
+ "workspace_id": 1
+ },
+ message: null,
+ }
+ }
+ }
+ }
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug, threadSlug } = request.params;
+ const { name } = reqBody(request);
+ const workspace = await Workspace.get({ slug });
+ const thread = await WorkspaceThread.get({
+ slug: threadSlug,
+ workspace_id: workspace.id,
+ });
+
+ if (!workspace || !thread) {
+ response.sendStatus(400).end();
+ return;
+ }
+
+ const { thread: updatedThread, message } = await WorkspaceThread.update(
+ thread,
+ { name }
+ );
+ response.status(200).json({ thread: updatedThread, message });
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.delete(
+ "/v1/workspace/:slug/thread/:threadSlug",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Delete a workspace thread'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.parameters['threadSlug'] = {
+ in: 'path',
+ description: 'Unique slug of thread',
+ required: true,
+ type: 'string'
+ }
+ #swagger.responses[200] = {
+ description: 'Thread deleted successfully'
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug, threadSlug } = request.params;
+ const workspace = await Workspace.get({ slug });
+
+ if (!workspace) {
+ response.sendStatus(400).end();
+ return;
+ }
+
+ await WorkspaceThread.delete({
+ slug: threadSlug,
+ workspace_id: workspace.id,
+ });
+ response.sendStatus(200).end();
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.get(
+ "/v1/workspace/:slug/thread/:threadSlug/chats",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Get chats for a workspace thread'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.parameters['threadSlug'] = {
+ in: 'path',
+ description: 'Unique slug of thread',
+ required: true,
+ type: 'string'
+ }
+ #swagger.responses[200] = {
+ content: {
+ "application/json": {
+ schema: {
+ type: 'object',
+ example: {
+ history: [
+ {
+ "role": "user",
+ "content": "What is AnythingLLM?",
+ "sentAt": 1692851630
+ },
+ {
+ "role": "assistant",
+ "content": "AnythingLLM is a platform that allows you to convert notes, PDFs, and other source materials into a chatbot. It ensures privacy, cites its answers, and allows multiple people to interact with the same documents simultaneously. It is particularly useful for businesses to enhance the visibility and readability of various written communications such as SOPs, contracts, and sales calls. You can try it out with a free trial to see if it meets your business needs.",
+ "sources": [{"source": "object about source document and snippets used"}]
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug, threadSlug } = request.params;
+ const workspace = await Workspace.get({ slug });
+ const thread = await WorkspaceThread.get({
+ slug: threadSlug,
+ workspace_id: workspace.id,
+ });
+
+ if (!workspace || !thread) {
+ response.sendStatus(400).end();
+ return;
+ }
+
+ const history = await WorkspaceChats.where(
+ {
+ workspaceId: workspace.id,
+ thread_id: thread.id,
+ include: true,
+ },
+ null,
+ { id: "asc" }
+ );
+
+ response.status(200).json({ history: convertToChatHistory(history) });
+ } catch (e) {
+ console.log(e.message, e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.post(
+ "/v1/workspace/:slug/thread/:threadSlug/chat",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Chat with a workspace thread'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.parameters['threadSlug'] = {
+ in: 'path',
+ description: 'Unique slug of thread',
+ required: true,
+ type: 'string'
+ }
+ #swagger.requestBody = {
+ description: 'Send a prompt to the workspace thread and the type of conversation (query or chat).',
+ required: true,
+ type: 'object',
+ content: {
+ "application/json": {
+ example: {
+ message: "What is AnythingLLM?",
+ mode: "query | chat",
+ userId: 1
+ }
+ }
+ }
+ }
+ #swagger.responses[200] = {
+ content: {
+ "application/json": {
+ schema: {
+ type: 'object',
+ example: {
+ id: 'chat-uuid',
+ type: "abort | textResponse",
+ textResponse: "Response to your query",
+ sources: [{title: "anythingllm.txt", chunk: "This is a context chunk used in the answer of the prompt by the LLM."}],
+ close: true,
+ error: "null | text string of the failure mode."
+ }
+ }
+ }
+ }
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug, threadSlug } = request.params;
+ const { message, mode = "query", userId } = reqBody(request);
+ const workspace = await Workspace.get({ slug });
+ const thread = await WorkspaceThread.get({
+ slug: threadSlug,
+ workspace_id: workspace.id,
+ });
+
+ if (!workspace || !thread) {
+ response.status(400).json({
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: `Workspace ${slug} or thread ${threadSlug} is not valid.`,
+ });
+ return;
+ }
+
+ if (!message?.length || !VALID_CHAT_MODE.includes(mode)) {
+ response.status(400).json({
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: !message?.length
+ ? "message parameter cannot be empty."
+ : `${mode} is not a valid mode.`,
+ });
+ return;
+ }
+
+ const user = userId ? await User.get({ id: Number(userId) }) : null;
+ const result = await chatWithWorkspace(
+ workspace,
+ message,
+ mode,
+ user,
+ thread
+ );
+ await Telemetry.sendTelemetry("sent_chat", {
+ LLMSelection: process.env.LLM_PROVIDER || "openai",
+ Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+ VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+ });
+ await EventLogs.logEvent("api_sent_chat", {
+ workspaceName: workspace?.name,
+ chatModel: workspace?.chatModel || "System Default",
+ threadName: thread?.name,
+ userId: user?.id,
+ });
+ response.status(200).json({ ...result });
+ } catch (e) {
+ console.log(e.message, e);
+ response.status(500).json({
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: e.message,
+ });
+ }
+ }
+ );
+
+ app.post(
+ "/v1/workspace/:slug/thread/:threadSlug/stream-chat",
+ [validApiKey],
+ async (request, response) => {
+ /*
+ #swagger.tags = ['Workspace Threads']
+ #swagger.description = 'Stream chat with a workspace thread'
+ #swagger.parameters['slug'] = {
+ in: 'path',
+ description: 'Unique slug of workspace',
+ required: true,
+ type: 'string'
+ }
+ #swagger.parameters['threadSlug'] = {
+ in: 'path',
+ description: 'Unique slug of thread',
+ required: true,
+ type: 'string'
+ }
+ #swagger.requestBody = {
+ description: 'Send a prompt to the workspace thread and the type of conversation (query or chat).',
+ required: true,
+ type: 'object',
+ content: {
+ "application/json": {
+ example: {
+ message: "What is AnythingLLM?",
+ mode: "query | chat",
+ userId: 1
+ }
+ }
+ }
+ }
+ #swagger.responses[200] = {
+ content: {
+ "text/event-stream": {
+ schema: {
+ type: 'array',
+ example: [
+ {
+ id: 'uuid-123',
+ type: "abort | textResponseChunk",
+ textResponse: "First chunk",
+ sources: [],
+ close: false,
+ error: "null | text string of the failure mode."
+ },
+ {
+ id: 'uuid-123',
+ type: "abort | textResponseChunk",
+ textResponse: "chunk two",
+ sources: [],
+ close: false,
+ error: "null | text string of the failure mode."
+ },
+ {
+ id: 'uuid-123',
+ type: "abort | textResponseChunk",
+ textResponse: "final chunk of LLM output!",
+ sources: [{title: "anythingllm.txt", chunk: "This is a context chunk used in the answer of the prompt by the LLM. This will only return in the final chunk."}],
+ close: true,
+ error: "null | text string of the failure mode."
+ }
+ ]
+ }
+ }
+ }
+ }
+ #swagger.responses[403] = {
+ schema: {
+ "$ref": "#/definitions/InvalidAPIKey"
+ }
+ }
+ */
+ try {
+ const { slug, threadSlug } = request.params;
+ const { message, mode = "query", userId } = reqBody(request);
+ const workspace = await Workspace.get({ slug });
+ const thread = await WorkspaceThread.get({
+ slug: threadSlug,
+ workspace_id: workspace.id,
+ });
+
+ if (!workspace || !thread) {
+ response.status(400).json({
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: `Workspace ${slug} or thread ${threadSlug} is not valid.`,
+ });
+ return;
+ }
+
+ if (!message?.length || !VALID_CHAT_MODE.includes(mode)) {
+ response.status(400).json({
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: !message?.length
+ ? "Message is empty"
+ : `${mode} is not a valid mode.`,
+ });
+ return;
+ }
+
+ const user = userId ? await User.get({ id: Number(userId) }) : null;
+
+ response.setHeader("Cache-Control", "no-cache");
+ response.setHeader("Content-Type", "text/event-stream");
+ response.setHeader("Access-Control-Allow-Origin", "*");
+ response.setHeader("Connection", "keep-alive");
+ response.flushHeaders();
+
+ await streamChatWithWorkspace(
+ response,
+ workspace,
+ message,
+ mode,
+ user,
+ thread
+ );
+ await Telemetry.sendTelemetry("sent_chat", {
+ LLMSelection: process.env.LLM_PROVIDER || "openai",
+ Embedder: process.env.EMBEDDING_ENGINE || "inherit",
+ VectorDbSelection: process.env.VECTOR_DB || "lancedb",
+ });
+ await EventLogs.logEvent("api_sent_chat", {
+ workspaceName: workspace?.name,
+ chatModel: workspace?.chatModel || "System Default",
+ threadName: thread?.name,
+ userId: user?.id,
+ });
+ response.end();
+ } catch (e) {
+ console.log(e.message, e);
+ writeResponseChunk(response, {
+ id: uuidv4(),
+ type: "abort",
+ textResponse: null,
+ sources: [],
+ close: true,
+ error: e.message,
+ });
+ response.end();
+ }
+ }
+ );
+}
+
+module.exports = { apiWorkspaceThreadEndpoints };
diff --git a/server/endpoints/experimental/index.js b/server/endpoints/experimental/index.js
new file mode 100644
index 00000000..e452aff3
--- /dev/null
+++ b/server/endpoints/experimental/index.js
@@ -0,0 +1,10 @@
+const { liveSyncEndpoints } = require("./liveSync");
+
+// All endpoints here are not stable: they may move, introduce breaking changes,
+// or be opt-in features that are not fully released.
+// When a feature is promoted it should be removed from here and added to the appropriate scope.
+function experimentalEndpoints(router) {
+ liveSyncEndpoints(router);
+}
+
+module.exports = { experimentalEndpoints };
diff --git a/server/endpoints/experimental/liveSync.js b/server/endpoints/experimental/liveSync.js
new file mode 100644
index 00000000..2a22d9a9
--- /dev/null
+++ b/server/endpoints/experimental/liveSync.js
@@ -0,0 +1,114 @@
+const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
+const { Document } = require("../../models/documents");
+const { EventLogs } = require("../../models/eventLogs");
+const { SystemSettings } = require("../../models/systemSettings");
+const { Telemetry } = require("../../models/telemetry");
+const { reqBody } = require("../../utils/http");
+const {
+ featureFlagEnabled,
+} = require("../../utils/middleware/featureFlagEnabled");
+const {
+ flexUserRoleValid,
+ ROLES,
+} = require("../../utils/middleware/multiUserProtected");
+const { validWorkspaceSlug } = require("../../utils/middleware/validWorkspace");
+const { validatedRequest } = require("../../utils/middleware/validatedRequest");
+
+function liveSyncEndpoints(app) {
+ if (!app) return;
+
+ app.post(
+ "/experimental/toggle-live-sync",
+ [validatedRequest, flexUserRoleValid([ROLES.admin])],
+ async (request, response) => {
+ try {
+ const { updatedStatus = false } = reqBody(request);
+ const newStatus =
+ SystemSettings.validations.experimental_live_file_sync(updatedStatus);
+ const currentStatus =
+ (await SystemSettings.get({ label: "experimental_live_file_sync" }))
+ ?.value || "disabled";
+ if (currentStatus === newStatus)
+ return response
+ .status(200)
+ .json({ liveSyncEnabled: newStatus === "enabled" });
+
+ // Already validated earlier - so can hot update.
+ await SystemSettings._updateSettings({
+ experimental_live_file_sync: newStatus,
+ });
+ if (newStatus === "enabled") {
+ await Telemetry.sendTelemetry("experimental_feature_enabled", {
+ feature: "live_file_sync",
+ });
+ await EventLogs.logEvent("experimental_feature_enabled", {
+ feature: "live_file_sync",
+ });
+ DocumentSyncQueue.bootWorkers();
+ } else {
+ DocumentSyncQueue.killWorkers();
+ }
+
+ response.status(200).json({ liveSyncEnabled: newStatus === "enabled" });
+ } catch (e) {
+ console.error(e);
+ response.status(500).end();
+ }
+ }
+ );
+
+ app.get(
+ "/experimental/live-sync/queues",
+ [
+ validatedRequest,
+ flexUserRoleValid([ROLES.admin]),
+ featureFlagEnabled(DocumentSyncQueue.featureKey),
+ ],
+ async (_, response) => {
+ const queues = await DocumentSyncQueue.where(
+ {},
+ null,
+ { createdAt: "asc" },
+ {
+ workspaceDoc: {
+ include: {
+ workspace: true,
+ },
+ },
+ }
+ );
+ response.status(200).json({ queues });
+ }
+ );
+
+ // Should be in workspace routes, but is here for now.
+ app.post(
+ "/workspace/:slug/update-watch-status",
+ [
+ validatedRequest,
+ flexUserRoleValid([ROLES.admin, ROLES.manager]),
+ validWorkspaceSlug,
+ featureFlagEnabled(DocumentSyncQueue.featureKey),
+ ],
+ async (request, response) => {
+ try {
+ const { docPath, watchStatus = false } = reqBody(request);
+ const workspace = response.locals.workspace;
+
+ const document = await Document.get({
+ workspaceId: workspace.id,
+ docpath: docPath,
+ });
+ if (!document) return response.sendStatus(404).end();
+
+ await DocumentSyncQueue.toggleWatchStatus(document, watchStatus);
+ return response.status(200).end();
+ } catch (error) {
+ console.error("Error processing the watch status update:", error);
+ return response.status(500).end();
+ }
+ }
+ );
+}
+
+module.exports = { liveSyncEndpoints };
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index 6ab30c5c..1849a2fc 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -66,7 +66,7 @@ function systemEndpoints(app) {
app.get("/env-dump", async (_, response) => {
if (process.env.NODE_ENV !== "production")
return response.sendStatus(200).end();
- await dumpENV();
+ dumpENV();
response.sendStatus(200).end();
});
@@ -416,7 +416,6 @@ function systemEndpoints(app) {
false,
response?.locals?.user?.id
);
- if (process.env.NODE_ENV === "production") await dumpENV();
response.status(200).json({ newValues, error });
} catch (e) {
console.log(e.message, e);
@@ -451,8 +450,6 @@ function systemEndpoints(app) {
true
)?.error;
}
-
- if (process.env.NODE_ENV === "production") await dumpENV();
response.status(200).json({ success: !error, error });
} catch (e) {
console.log(e.message, e);
@@ -493,7 +490,6 @@ function systemEndpoints(app) {
},
true
);
- if (process.env.NODE_ENV === "production") await dumpENV();
await Telemetry.sendTelemetry("enabled_multi_user_mode", {
multiUserMode: true,
});
diff --git a/server/index.js b/server/index.js
index 59d8fec6..987a0cab 100644
--- a/server/index.js
+++ b/server/index.js
@@ -22,6 +22,7 @@ const { bootHTTP, bootSSL } = require("./utils/boot");
const { workspaceThreadEndpoints } = require("./endpoints/workspaceThreads");
const { documentEndpoints } = require("./endpoints/document");
const { agentWebsocket } = require("./endpoints/agentWebsocket");
+const { experimentalEndpoints } = require("./endpoints/experimental");
const app = express();
const apiRouter = express.Router();
const FILE_LIMIT = "3GB";
@@ -54,6 +55,7 @@ embedManagementEndpoints(apiRouter);
utilEndpoints(apiRouter);
documentEndpoints(apiRouter);
agentWebsocket(apiRouter);
+experimentalEndpoints(apiRouter);
developerEndpoints(app, apiRouter);
// Externally facing embedder endpoints
diff --git a/server/jobs/helpers/index.js b/server/jobs/helpers/index.js
new file mode 100644
index 00000000..c2dae213
--- /dev/null
+++ b/server/jobs/helpers/index.js
@@ -0,0 +1,30 @@
+const path = require('node:path');
+const fs = require('node:fs');
+const { parentPort } = require('node:worker_threads');
+const documentsPath =
+ process.env.NODE_ENV === "development"
+ ? path.resolve(__dirname, `../../storage/documents`)
+ : path.resolve(process.env.STORAGE_DIR, `documents`);
+
+function log(stringContent = '') {
+ if (parentPort) parentPort.postMessage(stringContent);
+ else console.log(`parentPort is undefined. Debug: ${stringContent}`)
+}
+
+function conclude() {
+ if (parentPort) parentPort.postMessage('done');
+ else process.exit(0);
+}
+
+function updateSourceDocument(docPath = null, jsonContent = {}) {
+ const destinationFilePath = path.resolve(documentsPath, docPath);
+ fs.writeFileSync(destinationFilePath, JSON.stringify(jsonContent, null, 4), {
+ encoding: "utf-8",
+ });
+}
+
+module.exports = {
+ log,
+ conclude,
+ updateSourceDocument,
+}
\ No newline at end of file
diff --git a/server/jobs/sync-watched-documents.js b/server/jobs/sync-watched-documents.js
new file mode 100644
index 00000000..c4f235a0
--- /dev/null
+++ b/server/jobs/sync-watched-documents.js
@@ -0,0 +1,153 @@
+const { Document } = require('../models/documents.js');
+const { DocumentSyncQueue } = require('../models/documentSyncQueue.js');
+const { CollectorApi } = require('../utils/collectorApi');
+const { fileData } = require("../utils/files");
+const { log, conclude, updateSourceDocument } = require('./helpers/index.js');
+const { getVectorDbClass } = require('../utils/helpers/index.js');
+const { DocumentSyncRun } = require('../models/documentSyncRun.js');
+
+(async () => {
+ try {
+ const queuesToProcess = await DocumentSyncQueue.staleDocumentQueues();
+ if (queuesToProcess.length === 0) {
+ log('No outstanding documents to sync. Exiting.');
+ return;
+ }
+
+ const collector = new CollectorApi();
+ if (!(await collector.online())) {
+ log('Could not reach collector API. Exiting.');
+ return;
+ }
+
+ log(`${queuesToProcess.length} watched documents have been found to be stale and will be updated now.`)
+ for (const queue of queuesToProcess) {
+ let newContent = null;
+ const document = queue.workspaceDoc;
+ const workspace = document.workspace;
+ const { metadata, type, source } = Document.parseDocumentTypeAndSource(document);
+
+ if (!metadata || !DocumentSyncQueue.validFileTypes.includes(type)) {
+ // Document is either broken, invalid, or not supported so drop it from future queues.
+ log(`Document ${document.filename} has no metadata, is broken, or invalid and has been removed from all future runs.`)
+ await DocumentSyncQueue.unwatch(document);
+ continue;
+ }
+
+ if (type === 'link' || type === 'youtube') {
+ const response = await collector.forwardExtensionRequest({
+ endpoint: "/ext/resync-source-document",
+ method: "POST",
+ body: JSON.stringify({
+ type,
+ options: { link: source }
+ })
+ });
+ newContent = response?.content;
+ }
+
+ if (type === 'confluence' || type === 'github') {
+ const response = await collector.forwardExtensionRequest({
+ endpoint: "/ext/resync-source-document",
+ method: "POST",
+ body: JSON.stringify({
+ type,
+ options: { chunkSource: metadata.chunkSource }
+ })
+ });
+ newContent = response?.content;
+ }
+
+ if (!newContent) {
+ // Check if the last "x" runs were all failures (not exits!). If so - remove the job entirely since it is broken.
+ const failedRunCount = (await DocumentSyncRun.where({ queueId: queue.id }, DocumentSyncQueue.maxRepeatFailures, { createdAt: 'desc' })).filter((run) => run.status === DocumentSyncRun.statuses.failed).length;
+ if (failedRunCount >= DocumentSyncQueue.maxRepeatFailures) {
+ log(`Document ${document.filename} has failed to refresh ${failedRunCount} times continuously and will now be removed from the watched document set.`)
+ await DocumentSyncQueue.unwatch(document);
+ continue;
+ }
+
+ log(`Failed to get a new content response from collector for source ${source}. Skipping, but will retry next worker interval. Attempt ${failedRunCount === 0 ? 1 : failedRunCount}/${DocumentSyncQueue.maxRepeatFailures}`);
+ await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.failed, { filename: document.filename, workspacesModified: [], reason: 'No content found.' })
+ continue;
+ }
+
+ const currentDocumentData = await fileData(document.docpath)
+ if (currentDocumentData.pageContent === newContent) {
+ const nextSync = DocumentSyncQueue.calcNextSync(queue)
+ log(`Source ${source} is unchanged and will be skipped. Next sync will be ${nextSync.toLocaleString()}.`);
+ await DocumentSyncQueue._update(
+ queue.id,
+ {
+ lastSyncedAt: new Date().toISOString(),
+ nextSyncAt: nextSync.toISOString(),
+ }
+ );
+ await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.exited, { filename: document.filename, workspacesModified: [], reason: 'Content unchanged.' })
+ continue;
+ }
+
+ // update the defined document and workspace vectorDB with the latest information
+ // it will skip cache and create a new vectorCache file.
+ const vectorDatabase = getVectorDbClass();
+ await vectorDatabase.deleteDocumentFromNamespace(workspace.slug, document.docId);
+ await vectorDatabase.addDocumentToNamespace(
+ workspace.slug,
+ { ...currentDocumentData, pageContent: newContent, docId: document.docId },
+ document.docpath,
+ true
+ );
+ updateSourceDocument(
+ document.docpath,
+ {
+ ...currentDocumentData,
+ pageContent: newContent,
+ docId: document.docId,
+ published: (new Date).toLocaleString(),
+ // Todo: Update word count and token_estimate?
+ }
+ )
+ log(`Workspace "${workspace.name}" vectors of ${source} updated. Document and vector cache updated.`)
+
+
+ // Now we can bloom the results to all matching documents in all other workspaces
+ const workspacesModified = [workspace.slug];
+ const moreReferences = await Document.where({
+ id: { not: document.id },
+ filename: document.filename
+ }, null, null, { workspace: true });
+
+ if (moreReferences.length !== 0) {
+ log(`${source} is referenced in ${moreReferences.length} other workspaces. Updating those workspaces as well...`)
+ for (const additionalDocumentRef of moreReferences) {
+ const additionalWorkspace = additionalDocumentRef.workspace;
+ workspacesModified.push(additionalWorkspace.slug);
+
+ await vectorDatabase.deleteDocumentFromNamespace(additionalWorkspace.slug, additionalDocumentRef.docId);
+ await vectorDatabase.addDocumentToNamespace(
+ additionalWorkspace.slug,
+ { ...currentDocumentData, pageContent: newContent, docId: additionalDocumentRef.docId },
+ additionalDocumentRef.docpath,
+ );
+ log(`Workspace "${additionalWorkspace.name}" vectors for ${source} was also updated with the new content from cache.`)
+ }
+ }
+
+ const nextRefresh = DocumentSyncQueue.calcNextSync(queue);
+ log(`${source} has been refreshed in all workspaces it is currently referenced in. Next refresh will be ${nextRefresh.toLocaleString()}.`)
+ await DocumentSyncQueue._update(
+ queue.id,
+ {
+ lastSyncedAt: new Date().toISOString(),
+ nextSyncAt: nextRefresh.toISOString(),
+ }
+ );
+ await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.success, { filename: document.filename, workspacesModified })
+ }
+ } catch (e) {
+ console.error(e)
+ log(`errored with ${e.message}`)
+ } finally {
+ conclude();
+ }
+})();
diff --git a/server/models/documentSyncQueue.js b/server/models/documentSyncQueue.js
new file mode 100644
index 00000000..b034643c
--- /dev/null
+++ b/server/models/documentSyncQueue.js
@@ -0,0 +1,237 @@
+const { BackgroundService } = require("../utils/BackgroundWorkers");
+const prisma = require("../utils/prisma");
+const { SystemSettings } = require("./systemSettings");
+const { Telemetry } = require("./telemetry");
+
+/**
+ * @typedef {('link'|'youtube'|'confluence'|'github')} validFileType
+ */
+
+const DocumentSyncQueue = {
+ featureKey: "experimental_live_file_sync",
+ // update the validFileTypes and .canWatch properties when adding elements here.
+ validFileTypes: ["link", "youtube", "confluence", "github"],
+ defaultStaleAfter: 604800000,
+ maxRepeatFailures: 5, // How many times a run can fail in a row before pruning.
+ writable: [],
+
+ bootWorkers: function () {
+ new BackgroundService().boot();
+ },
+
+ killWorkers: function () {
+ new BackgroundService().stop();
+ },
+
+ /** Check if the Document Sync/Watch feature is enabled and can be used. */
+ enabled: async function () {
+ return (
+ (await SystemSettings.get({ label: this.featureKey }))?.value ===
+ "enabled"
+ );
+ },
+
+ /**
+ * @param {import("@prisma/client").document_sync_queues} queueRecord - queue record to calculate for
+ */
+ calcNextSync: function (queueRecord) {
+ return new Date(Number(new Date()) + queueRecord.staleAfterMs);
+ },
+
+ canWatch: function ({ title, chunkSource = null } = {}) {
+ if (chunkSource.startsWith("link://") && title.endsWith(".html"))
+ return true; // If is web-link material (prior to feature most chunkSources were link://)
+ if (chunkSource.startsWith("youtube://")) return true; // If is a youtube link
+ if (chunkSource.startsWith("confluence://")) return true; // If is a confluence document link
+ if (chunkSource.startsWith("github://")) return true; // If is a Github file reference
+ return false;
+ },
+
+ /**
+ * Creates Queue record and updates document watch status to true on Document record
+ * @param {import("@prisma/client").workspace_documents} document - document record to watch, must have `id`
+ */
+ watch: async function (document = null) {
+ if (!document) return false;
+ try {
+ const { Document } = require("./documents");
+
+ // Get all documents that are watched and share the same unique filename. If this value is
+ // non-zero then we exit early so that we do not have duplicated watch queues for the same file
+ // across many workspaces.
+ const workspaceDocIds = (
+ await Document.where({ filename: document.filename, watched: true })
+ ).map((rec) => rec.id);
+ const hasRecords =
+ (await this.count({ workspaceDocId: { in: workspaceDocIds } })) > 0;
+ if (hasRecords)
+ throw new Error(
+ `Cannot watch this document again - it already has a queue set.`
+ );
+
+ const queue = await prisma.document_sync_queues.create({
+ data: {
+ workspaceDocId: document.id,
+ nextSyncAt: new Date(Number(new Date()) + this.defaultStaleAfter),
+ },
+ });
+ await Document._updateAll(
+ { filename: document.filename },
+ { watched: true }
+ );
+ return queue || null;
+ } catch (error) {
+ console.error(error.message);
+ return null;
+ }
+ },
+
+ /**
+ * Deletes Queue record and updates document watch status to false on Document record
+ * @param {import("@prisma/client").workspace_documents} document - document record to unwatch, must have `id`
+ */
+ unwatch: async function (document = null) {
+ if (!document) return false;
+ try {
+ const { Document } = require("./documents");
+
+ // We could have been given a document to unwatch which is a clone of one that is already being watched but by another workspaceDocument id.
+ // so in this instance we need to delete any queues related to this document by any WorkspaceDocumentId it is referenced by.
+ const workspaceDocIds = (
+ await Document.where({ filename: document.filename, watched: true })
+ ).map((rec) => rec.id);
+ await this.delete({ workspaceDocId: { in: workspaceDocIds } });
+ await Document._updateAll(
+ { filename: document.filename },
+ { watched: false }
+ );
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
+
+ _update: async function (id = null, data = {}) {
+ if (!id) throw new Error("No id provided for update");
+
+ try {
+ await prisma.document_sync_queues.update({
+ where: { id },
+ data,
+ });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
+
+ get: async function (clause = {}) {
+ try {
+ const queue = await prisma.document_sync_queues.findFirst({
+ where: clause,
+ });
+ return queue || null;
+ } catch (error) {
+ console.error(error.message);
+ return null;
+ }
+ },
+
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ include = {}
+ ) {
+ try {
+ const results = await prisma.document_sync_queues.findMany({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null ? { orderBy } : {}),
+ ...(include !== null ? { include } : {}),
+ });
+ return results;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ count: async function (clause = {}, limit = null) {
+ try {
+ const count = await prisma.document_sync_queues.count({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ });
+ return count;
+ } catch (error) {
+ console.error("FAILED TO COUNT DOCUMENTS.", error.message);
+ return 0;
+ }
+ },
+
+ delete: async function (clause = {}) {
+ try {
+ await prisma.document_sync_queues.deleteMany({ where: clause });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
+
+ /**
+ * Gets the "stale" queues where the queue's nextSyncAt is less than the current time
+ * @returns {Promise<(
+ * import("@prisma/client").document_sync_queues &
+ * { workspaceDoc: import("@prisma/client").workspace_documents &
+ * { workspace: import("@prisma/client").workspaces }
+ * })[]>}
+ */
+ staleDocumentQueues: async function () {
+ const queues = await this.where(
+ {
+ nextSyncAt: {
+ lte: new Date().toISOString(),
+ },
+ },
+ null,
+ null,
+ {
+ workspaceDoc: {
+ include: {
+ workspace: true,
+ },
+ },
+ }
+ );
+ return queues;
+ },
+
+ saveRun: async function (queueId = null, status = null, result = {}) {
+ const { DocumentSyncRun } = require("./documentSyncRun");
+ return DocumentSyncRun.save(queueId, status, result);
+ },
+
+ /**
+ * Updates document to be watched/unwatched & creates or deletes any queue records and updates Document record `watched` status
+ * @param {import("@prisma/client").workspace_documents} documentRecord
+ * @param {boolean} watchStatus - indicate if queue record should be created or not.
+ * @returns
+ */
+ toggleWatchStatus: async function (documentRecord, watchStatus = false) {
+ if (!watchStatus) {
+ await Telemetry.sendTelemetry("document_unwatched");
+ await this.unwatch(documentRecord);
+ return;
+ }
+
+ await this.watch(documentRecord);
+ await Telemetry.sendTelemetry("document_watched");
+ return;
+ },
+};
+
+module.exports = { DocumentSyncQueue };
diff --git a/server/models/documentSyncRun.js b/server/models/documentSyncRun.js
new file mode 100644
index 00000000..94fcf3ff
--- /dev/null
+++ b/server/models/documentSyncRun.js
@@ -0,0 +1,88 @@
+const prisma = require("../utils/prisma");
+const DocumentSyncRun = {
+ statuses: {
+ unknown: "unknown",
+ exited: "exited",
+ failed: "failed",
+ success: "success",
+ },
+
+ save: async function (queueId = null, status = null, result = {}) {
+ try {
+ if (!this.statuses.hasOwnProperty(status))
+ throw new Error(
+ `DocumentSyncRun status ${status} is not a valid status.`
+ );
+
+ const run = await prisma.document_sync_executions.create({
+ data: {
+ queueId: Number(queueId),
+ status: String(status),
+ result: JSON.stringify(result),
+ },
+ });
+ return run || null;
+ } catch (error) {
+ console.error(error.message);
+ return null;
+ }
+ },
+
+ get: async function (clause = {}) {
+ try {
+ const queue = await prisma.document_sync_executions.findFirst({
+ where: clause,
+ });
+ return queue || null;
+ } catch (error) {
+ console.error(error.message);
+ return null;
+ }
+ },
+
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ include = {}
+ ) {
+ try {
+ const results = await prisma.document_sync_executions.findMany({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null ? { orderBy } : {}),
+ ...(include !== null ? { include } : {}),
+ });
+ return results;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ count: async function (clause = {}, limit = null, orderBy = {}) {
+ try {
+ const count = await prisma.document_sync_executions.count({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null ? { orderBy } : {}),
+ });
+ return count;
+ } catch (error) {
+ console.error("FAILED TO COUNT DOCUMENTS.", error.message);
+ return 0;
+ }
+ },
+
+ delete: async function (clause = {}) {
+ try {
+ await prisma.document_sync_executions.deleteMany({ where: clause });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
+};
+
+module.exports = { DocumentSyncRun };
diff --git a/server/models/documents.js b/server/models/documents.js
index 6c09651c..80d4fd85 100644
--- a/server/models/documents.js
+++ b/server/models/documents.js
@@ -3,9 +3,30 @@ const { getVectorDbClass } = require("../utils/helpers");
const prisma = require("../utils/prisma");
const { Telemetry } = require("./telemetry");
const { EventLogs } = require("./eventLogs");
+const { safeJsonParse } = require("../utils/http");
const Document = {
- writable: ["pinned"],
+ writable: ["pinned", "watched", "lastUpdatedAt"],
+ /**
+ * @param {import("@prisma/client").workspace_documents} document - Document PrismaRecord
+ * @returns {{
+ * metadata: (null|object),
+ * type: import("./documentSyncQueue.js").validFileType,
+ * source: string
+ * }}
+ */
+ parseDocumentTypeAndSource: function (document) {
+ const metadata = safeJsonParse(document.metadata, null);
+ if (!metadata) return { metadata: null, type: null, source: null };
+
+ // Parse the correct type of source and its original source path.
+ const idx = metadata.chunkSource.indexOf("://");
+ const [type, source] = [
+ metadata.chunkSource.slice(0, idx),
+ metadata.chunkSource.slice(idx + 3),
+ ];
+ return { metadata, type, source: this._stripSource(source, type) };
+ },
forWorkspace: async function (workspaceId = null) {
if (!workspaceId) return [];
@@ -36,7 +57,7 @@ const Document = {
}
},
- getPins: async function (clause = {}) {
+ getOnlyWorkspaceIds: async function (clause = {}) {
try {
const workspaceIds = await prisma.workspace_documents.findMany({
where: clause,
@@ -44,19 +65,25 @@ const Document = {
workspaceId: true,
},
});
- return workspaceIds.map((pin) => pin.workspaceId) || [];
+ return workspaceIds.map((record) => record.workspaceId) || [];
} catch (error) {
console.error(error.message);
return [];
}
},
- where: async function (clause = {}, limit = null, orderBy = null) {
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ include = null
+ ) {
try {
const results = await prisma.workspace_documents.findMany({
where: clause,
...(limit !== null ? { take: limit } : {}),
...(orderBy !== null ? { orderBy } : {}),
+ ...(include !== null ? { include } : {}),
});
return results;
} catch (error) {
@@ -202,6 +229,18 @@ const Document = {
return { document: null, message: error.message };
}
},
+ _updateAll: async function (clause = {}, data = {}) {
+ try {
+ await prisma.workspace_documents.updateMany({
+ where: clause,
+ data,
+ });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
content: async function (docId) {
if (!docId) throw new Error("No workspace docId provided!");
const document = await this.get({ docId: String(docId) });
@@ -211,6 +250,22 @@ const Document = {
const data = await fileData(document.docpath);
return { title: data.title, content: data.pageContent };
},
+ contentByDocPath: async function (docPath) {
+ const { fileData } = require("../utils/files");
+ const data = await fileData(docPath);
+ return { title: data.title, content: data.pageContent };
+ },
+
+ // Some data sources have encoded params in them we don't want to log - so strip those details.
+ _stripSource: function (sourceString, type) {
+ if (["confluence", "github"].includes(type)) {
+ const _src = new URL(sourceString);
+ _src.search = ""; // remove all search params that are encoded for resync.
+ return _src.toString();
+ }
+
+ return sourceString;
+ },
};
module.exports = { Document };
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 8d548c7b..eae75d9c 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -28,6 +28,9 @@ const SystemSettings = {
"default_agent_skills",
"agent_sql_connections",
"custom_app_name",
+
+ // beta feature flags
+ "experimental_live_file_sync",
],
validations: {
footer_data: (updates) => {
@@ -76,6 +79,7 @@ const SystemSettings = {
"serper-dot-dev",
"bing-search",
"serply-engine",
+ "searxng-engine",
].includes(update)
)
throw new Error("Invalid SERP provider.");
@@ -113,6 +117,12 @@ const SystemSettings = {
return JSON.stringify(existingConnections ?? []);
}
},
+ experimental_live_file_sync: (update) => {
+ if (typeof update === "boolean")
+ return update === true ? "enabled" : "disabled";
+ if (!["enabled", "disabled"].includes(update)) return "disabled";
+ return String(update);
+ },
},
currentSettings: async function () {
const { hasVectorCachedFiles } = require("../utils/files");
@@ -176,10 +186,11 @@ const SystemSettings = {
// Agent Settings & Configs
// --------------------------------------------------------
AgentGoogleSearchEngineId: process.env.AGENT_GSE_CTX || null,
- AgentGoogleSearchEngineKey: process.env.AGENT_GSE_KEY || null,
- AgentSerperApiKey: process.env.AGENT_SERPER_DEV_KEY || null,
- AgentBingSearchApiKey: process.env.AGENT_BING_SEARCH_API_KEY || null,
- AgentSerplyApiKey: process.env.AGENT_SERPLY_API_KEY || null,
+ AgentGoogleSearchEngineKey: !!process.env.AGENT_GSE_KEY || null,
+ AgentSerperApiKey: !!process.env.AGENT_SERPER_DEV_KEY || null,
+ AgentBingSearchApiKey: !!process.env.AGENT_BING_SEARCH_API_KEY || null,
+ AgentSerplyApiKey: !!process.env.AGENT_SERPLY_API_KEY || null,
+ AgentSearXNGApiUrl: process.env.AGENT_SEARXNG_API_URL || null,
};
},
@@ -457,6 +468,13 @@ const SystemSettings = {
});
},
},
+ getFeatureFlags: async function () {
+ return {
+ experimental_live_file_sync:
+ (await SystemSettings.get({ label: "experimental_live_file_sync" }))
+ ?.value === "enabled",
+ };
+ },
};
function mergeConnections(existingConnections = [], updates = []) {
diff --git a/server/models/workspaceChats.js b/server/models/workspaceChats.js
index bda40064..95124520 100644
--- a/server/models/workspaceChats.js
+++ b/server/models/workspaceChats.js
@@ -7,6 +7,7 @@ const WorkspaceChats = {
response = {},
user = null,
threadId = null,
+ include = true,
}) {
try {
const chat = await prisma.workspace_chats.create({
@@ -16,6 +17,7 @@ const WorkspaceChats = {
response: JSON.stringify(response),
user_id: user?.id || null,
thread_id: threadId,
+ include,
},
});
return { chat, message: null };
diff --git a/server/package.json b/server/package.json
index 9cc27c8b..77120297 100644
--- a/server/package.json
+++ b/server/package.json
@@ -25,6 +25,7 @@
"@datastax/astra-db-ts": "^0.1.3",
"@google/generative-ai": "^0.7.1",
"@googleapis/youtube": "^9.0.0",
+ "@ladjs/graceful": "^3.2.2",
"@langchain/anthropic": "0.1.16",
"@langchain/community": "0.0.53",
"@langchain/core": "0.1.61",
@@ -38,6 +39,7 @@
"archiver": "^5.3.1",
"bcrypt": "^5.1.0",
"body-parser": "^1.20.2",
+ "bree": "^9.2.3",
"chalk": "^4",
"check-disk-space": "^3.4.0",
"chromadb": "^1.5.2",
@@ -80,7 +82,8 @@
"uuid": "^9.0.0",
"uuid-apikey": "^1.5.3",
"vectordb": "0.4.11",
- "weaviate-ts-client": "^1.4.0"
+ "weaviate-ts-client": "^1.4.0",
+ "winston": "^3.13.0"
},
"devDependencies": {
"@inquirer/prompts": "^4.3.1",
diff --git a/server/prisma/migrations/20240618224346_init/migration.sql b/server/prisma/migrations/20240618224346_init/migration.sql
new file mode 100644
index 00000000..cce17134
--- /dev/null
+++ b/server/prisma/migrations/20240618224346_init/migration.sql
@@ -0,0 +1,26 @@
+-- AlterTable
+ALTER TABLE "workspace_documents" ADD COLUMN "watched" BOOLEAN DEFAULT false;
+
+-- CreateTable
+CREATE TABLE "document_sync_queues" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "staleAfterMs" INTEGER NOT NULL DEFAULT 604800000,
+ "nextSyncAt" DATETIME NOT NULL,
+ "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "lastSyncedAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "workspaceDocId" INTEGER NOT NULL,
+ CONSTRAINT "document_sync_queues_workspaceDocId_fkey" FOREIGN KEY ("workspaceDocId") REFERENCES "workspace_documents" ("id") ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+-- CreateTable
+CREATE TABLE "document_sync_executions" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "queueId" INTEGER NOT NULL,
+ "status" TEXT NOT NULL DEFAULT 'unknown',
+ "result" TEXT,
+ "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ CONSTRAINT "document_sync_executions_queueId_fkey" FOREIGN KEY ("queueId") REFERENCES "document_sync_queues" ("id") ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "document_sync_queues_workspaceDocId_key" ON "document_sync_queues"("workspaceDocId");
diff --git a/server/prisma/schema.prisma b/server/prisma/schema.prisma
index 0ded65be..f385e66f 100644
--- a/server/prisma/schema.prisma
+++ b/server/prisma/schema.prisma
@@ -24,16 +24,18 @@ model api_keys {
}
model workspace_documents {
- id Int @id @default(autoincrement())
- docId String @unique
- filename String
- docpath String
- workspaceId Int
- metadata String?
- pinned Boolean? @default(false)
- createdAt DateTime @default(now())
- lastUpdatedAt DateTime @default(now())
- workspace workspaces @relation(fields: [workspaceId], references: [id])
+ id Int @id @default(autoincrement())
+ docId String @unique
+ filename String
+ docpath String
+ workspaceId Int
+ metadata String?
+ pinned Boolean? @default(false)
+ watched Boolean? @default(false)
+ createdAt DateTime @default(now())
+ lastUpdatedAt DateTime @default(now())
+ workspace workspaces @relation(fields: [workspaceId], references: [id])
+ document_sync_queues document_sync_queues?
}
model invites {
@@ -275,3 +277,23 @@ model slash_command_presets {
@@unique([uid, command])
}
+
+model document_sync_queues {
+ id Int @id @default(autoincrement())
+ staleAfterMs Int @default(604800000) // 7 days
+ nextSyncAt DateTime
+ createdAt DateTime @default(now())
+ lastSyncedAt DateTime @default(now())
+ workspaceDocId Int @unique
+ workspaceDoc workspace_documents? @relation(fields: [workspaceDocId], references: [id], onDelete: Cascade)
+ runs document_sync_executions[]
+}
+
+model document_sync_executions {
+ id Int @id @default(autoincrement())
+ queueId Int
+ status String @default("unknown")
+ result String?
+ createdAt DateTime @default(now())
+ queue document_sync_queues @relation(fields: [queueId], references: [id], onDelete: Cascade)
+}
diff --git a/server/swagger/init.js b/server/swagger/init.js
index 06481456..31edcf1c 100644
--- a/server/swagger/init.js
+++ b/server/swagger/init.js
@@ -35,6 +35,7 @@ const endpointsFiles = [
"../endpoints/api/document/index.js",
"../endpoints/api/workspace/index.js",
"../endpoints/api/system/index.js",
+ "../endpoints/api/workspaceThread/index.js",
"../endpoints/api/userManagement/index.js",
];
diff --git a/server/swagger/openapi.json b/server/swagger/openapi.json
index 2a1b5543..d27504aa 100644
--- a/server/swagger/openapi.json
+++ b/server/swagger/openapi.json
@@ -2371,6 +2371,487 @@
}
}
},
+ "/v1/workspace/{slug}/thread/new": {
+ "post": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Create a new workspace thread",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "example": {
+ "thread": {
+ "id": 1,
+ "name": "Thread",
+ "slug": "thread-uuid",
+ "user_id": 1,
+ "workspace_id": 1
+ },
+ "message": null
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ },
+ "500": {
+ "description": "Internal Server Error"
+ }
+ },
+ "requestBody": {
+ "description": "Optional userId associated with the thread",
+ "required": false,
+ "type": "object",
+ "content": {
+ "application/json": {
+ "example": {
+ "userId": 1
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/workspace/{slug}/thread/{threadSlug}/update": {
+ "post": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Update thread name by its unique slug.",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ },
+ {
+ "name": "threadSlug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of thread"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "example": {
+ "thread": {
+ "id": 1,
+ "name": "Updated Thread Name",
+ "slug": "thread-uuid",
+ "user_id": 1,
+ "workspace_id": 1
+ },
+ "message": null
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ },
+ "500": {
+ "description": "Internal Server Error"
+ }
+ },
+ "requestBody": {
+ "description": "JSON object containing new name to update the thread.",
+ "required": true,
+ "type": "object",
+ "content": {
+ "application/json": {
+ "example": {
+ "name": "Updated Thread Name"
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/workspace/{slug}/thread/{threadSlug}": {
+ "delete": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Delete a workspace thread",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ },
+ {
+ "name": "threadSlug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of thread"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Thread deleted successfully"
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ },
+ "500": {
+ "description": "Internal Server Error"
+ }
+ }
+ }
+ },
+ "/v1/workspace/{slug}/thread/{threadSlug}/chats": {
+ "get": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Get chats for a workspace thread",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ },
+ {
+ "name": "threadSlug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of thread"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "example": {
+ "history": [
+ {
+ "role": "user",
+ "content": "What is AnythingLLM?",
+ "sentAt": 1692851630
+ },
+ {
+ "role": "assistant",
+ "content": "AnythingLLM is a platform that allows you to convert notes, PDFs, and other source materials into a chatbot. It ensures privacy, cites its answers, and allows multiple people to interact with the same documents simultaneously. It is particularly useful for businesses to enhance the visibility and readability of various written communications such as SOPs, contracts, and sales calls. You can try it out with a free trial to see if it meets your business needs.",
+ "sources": [
+ {
+ "source": "object about source document and snippets used"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ },
+ "500": {
+ "description": "Internal Server Error"
+ }
+ }
+ }
+ },
+ "/v1/workspace/{slug}/thread/{threadSlug}/chat": {
+ "post": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Chat with a workspace thread",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ },
+ {
+ "name": "threadSlug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of thread"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "example": {
+ "id": "chat-uuid",
+ "type": "abort | textResponse",
+ "textResponse": "Response to your query",
+ "sources": [
+ {
+ "title": "anythingllm.txt",
+ "chunk": "This is a context chunk used in the answer of the prompt by the LLM."
+ }
+ ],
+ "close": true,
+ "error": "null | text string of the failure mode."
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ },
+ "500": {
+ "description": "Internal Server Error"
+ }
+ },
+ "requestBody": {
+ "description": "Send a prompt to the workspace thread and the type of conversation (query or chat).",
+ "required": true,
+ "type": "object",
+ "content": {
+ "application/json": {
+ "example": {
+ "message": "What is AnythingLLM?",
+ "mode": "query | chat",
+ "userId": 1
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/workspace/{slug}/thread/{threadSlug}/stream-chat": {
+ "post": {
+ "tags": [
+ "Workspace Threads"
+ ],
+ "description": "Stream chat with a workspace thread",
+ "parameters": [
+ {
+ "name": "slug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of workspace"
+ },
+ {
+ "name": "threadSlug",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ },
+ "description": "Unique slug of thread"
+ }
+ ],
+ "responses": {
+ "200": {
+ "content": {
+ "text/event-stream": {
+ "schema": {
+ "type": "array",
+ "example": [
+ {
+ "id": "uuid-123",
+ "type": "abort | textResponseChunk",
+ "textResponse": "First chunk",
+ "sources": [],
+ "close": false,
+ "error": "null | text string of the failure mode."
+ },
+ {
+ "id": "uuid-123",
+ "type": "abort | textResponseChunk",
+ "textResponse": "chunk two",
+ "sources": [],
+ "close": false,
+ "error": "null | text string of the failure mode."
+ },
+ {
+ "id": "uuid-123",
+ "type": "abort | textResponseChunk",
+ "textResponse": "final chunk of LLM output!",
+ "sources": [
+ {
+ "title": "anythingllm.txt",
+ "chunk": "This is a context chunk used in the answer of the prompt by the LLM. This will only return in the final chunk."
+ }
+ ],
+ "close": true,
+ "error": "null | text string of the failure mode."
+ }
+ ]
+ }
+ }
+ },
+ "description": "OK"
+ },
+ "400": {
+ "description": "Bad Request"
+ },
+ "403": {
+ "description": "Forbidden",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ },
+ "application/xml": {
+ "schema": {
+ "$ref": "#/components/schemas/InvalidAPIKey"
+ }
+ }
+ }
+ }
+ },
+ "requestBody": {
+ "description": "Send a prompt to the workspace thread and the type of conversation (query or chat).",
+ "required": true,
+ "type": "object",
+ "content": {
+ "application/json": {
+ "example": {
+ "message": "What is AnythingLLM?",
+ "mode": "query | chat",
+ "userId": 1
+ }
+ }
+ }
+ }
+ }
+ },
"/v1/users": {
"get": {
"tags": [
diff --git a/server/utils/AiProviders/anthropic/index.js b/server/utils/AiProviders/anthropic/index.js
index 215fa5fb..4da56bf9 100644
--- a/server/utils/AiProviders/anthropic/index.js
+++ b/server/utils/AiProviders/anthropic/index.js
@@ -46,6 +46,8 @@ class AnthropicLLM {
return 200_000;
case "claude-3-haiku-20240307":
return 200_000;
+ case "claude-3-5-sonnet-20240620":
+ return 200_000;
default:
return 100_000; // assume a claude-instant-1.2 model
}
@@ -59,6 +61,7 @@ class AnthropicLLM {
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
+ "claude-3-5-sonnet-20240620",
];
return validModels.includes(modelName);
}
diff --git a/server/utils/BackgroundWorkers/index.js b/server/utils/BackgroundWorkers/index.js
new file mode 100644
index 00000000..97445bd5
--- /dev/null
+++ b/server/utils/BackgroundWorkers/index.js
@@ -0,0 +1,88 @@
+const path = require("path");
+const Graceful = require("@ladjs/graceful");
+const Bree = require("bree");
+
+class BackgroundService {
+ name = "BackgroundWorkerService";
+ static _instance = null;
+ #root = path.resolve(__dirname, "../../jobs");
+
+ constructor() {
+ if (BackgroundService._instance) {
+ this.#log("SINGLETON LOCK: Using existing BackgroundService.");
+ return BackgroundService._instance;
+ }
+
+ this.logger = this.getLogger();
+ BackgroundService._instance = this;
+ }
+
+ #log(text, ...args) {
+ console.log(`\x1b[36m[${this.name}]\x1b[0m ${text}`, ...args);
+ }
+
+ async boot() {
+ const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
+ if (!(await DocumentSyncQueue.enabled())) {
+ this.#log("Feature is not enabled and will not be started.");
+ return;
+ }
+
+ this.#log("Starting...");
+ this.bree = new Bree({
+ logger: this.logger,
+ root: this.#root,
+ jobs: this.jobs(),
+ errorHandler: this.onError,
+ workerMessageHandler: this.onWorkerMessageHandler,
+ });
+ this.graceful = new Graceful({ brees: [this.bree], logger: this.logger });
+ this.graceful.listen();
+ this.bree.start();
+ this.#log("Service started");
+ }
+
+ async stop() {
+ this.#log("Stopping...");
+ if (!!this.graceful && !!this.bree) this.graceful.stopBree(this.bree, 0);
+ this.bree = null;
+ this.graceful = null;
+ this.#log("Service stopped");
+ }
+
+ jobs() {
+ return [
+ // Job for auto-sync of documents
+ // https://github.com/breejs/bree
+ {
+ name: "sync-watched-documents",
+ interval: "1hr",
+ },
+ ];
+ }
+
+ getLogger() {
+ const { format, createLogger, transports } = require("winston");
+ return createLogger({
+ level: "info",
+ format: format.combine(
+ format.colorize(),
+ format.printf(({ level, message, service }) => {
+ return `\x1b[36m[${service}]\x1b[0m ${level}: ${message}`;
+ })
+ ),
+ defaultMeta: { service: this.name },
+ transports: [new transports.Console()],
+ });
+ }
+
+ onError(error, _workerMetadata) {
+ this.logger.error(`[${error.name}]: ${error.message}`);
+ }
+
+ onWorkerMessageHandler(message, _workerMetadata) {
+ this.logger.info(`[${message.name}]: ${message.message}`);
+ }
+}
+
+module.exports.BackgroundService = BackgroundService;
diff --git a/server/utils/EncryptionManager/index.js b/server/utils/EncryptionManager/index.js
new file mode 100644
index 00000000..8ef5619e
--- /dev/null
+++ b/server/utils/EncryptionManager/index.js
@@ -0,0 +1,85 @@
+const crypto = require("crypto");
+const { dumpENV } = require("../helpers/updateENV");
+
+// Class that is used to arbitrarily encrypt/decrypt string data via a persistent passphrase/salt that
+// is either user defined or is created and saved to the ENV on creation.
+class EncryptionManager {
+ #keyENV = "SIG_KEY";
+ #saltENV = "SIG_SALT";
+ #encryptionKey;
+ #encryptionSalt;
+
+ constructor({ key = null, salt = null } = {}) {
+ this.#loadOrCreateKeySalt(key, salt);
+ this.key = crypto.scryptSync(this.#encryptionKey, this.#encryptionSalt, 32);
+ this.algorithm = "aes-256-cbc";
+ this.separator = ":";
+
+ // Used to send key to collector process to be able to decrypt data since they do not share ENVs
+ // this value should use the CommunicationKey.encrypt process before sending anywhere outside the
+ // server process so it is never sent in its raw format.
+ this.xPayload = this.key.toString("base64");
+ }
+
+ log(text, ...args) {
+ console.log(`\x1b[36m[EncryptionManager]\x1b[0m ${text}`, ...args);
+ }
+
+ #loadOrCreateKeySalt(_key = null, _salt = null) {
+ if (!!_key && !!_salt) {
+ this.log(
+ "Pre-assigned key & salt for encrypting arbitrary data was used."
+ );
+ this.#encryptionKey = _key;
+ this.#encryptionSalt = _salt;
+ return;
+ }
+
+ if (!process.env[this.#keyENV] || !process.env[this.#saltENV]) {
+ this.log("Self-assigning key & salt for encrypting arbitrary data.");
+ process.env[this.#keyENV] = crypto.randomBytes(32).toString("hex");
+ process.env[this.#saltENV] = crypto.randomBytes(32).toString("hex");
+ if (process.env.NODE_ENV === "production") dumpENV();
+ } else
+ this.log("Loaded existing key & salt for encrypting arbitrary data.");
+
+ this.#encryptionKey = process.env[this.#keyENV];
+ this.#encryptionSalt = process.env[this.#saltENV];
+ return;
+ }
+
+ encrypt(plainTextString = null) {
+ try {
+ if (!plainTextString)
+ throw new Error("Empty string is not valid for this method.");
+ const iv = crypto.randomBytes(16);
+ const cipher = crypto.createCipheriv(this.algorithm, this.key, iv);
+ const encrypted = cipher.update(plainTextString, "utf8", "hex");
+ return [
+ encrypted + cipher.final("hex"),
+ Buffer.from(iv).toString("hex"),
+ ].join(this.separator);
+ } catch (e) {
+ this.log(e);
+ return null;
+ }
+ }
+
+ decrypt(encryptedString) {
+ try {
+ const [encrypted, iv] = encryptedString.split(this.separator);
+ if (!iv) throw new Error("IV not found");
+ const decipher = crypto.createDecipheriv(
+ this.algorithm,
+ this.key,
+ Buffer.from(iv, "hex")
+ );
+ return decipher.update(encrypted, "hex", "utf8") + decipher.final("utf8");
+ } catch (e) {
+ this.log(e);
+ return null;
+ }
+ }
+}
+
+module.exports = { EncryptionManager };
diff --git a/server/utils/agents/aibitat/plugins/web-browsing.js b/server/utils/agents/aibitat/plugins/web-browsing.js
index 81314f17..f4269fe1 100644
--- a/server/utils/agents/aibitat/plugins/web-browsing.js
+++ b/server/utils/agents/aibitat/plugins/web-browsing.js
@@ -71,6 +71,9 @@ const webBrowsing = {
case "serply-engine":
engine = "_serplyEngine";
break;
+ case "searxng-engine":
+ engine = "_searXNGEngine";
+ break;
default:
engine = "_googleSearchEngine";
}
@@ -102,7 +105,7 @@ const webBrowsing = {
query.length > 100 ? `${query.slice(0, 100)}...` : query
}"`
);
- const searchResponse = await fetch(searchURL)
+ const data = await fetch(searchURL)
.then((res) => res.json())
.then((searchResult) => searchResult?.items || [])
.then((items) => {
@@ -116,10 +119,15 @@ const webBrowsing = {
})
.catch((e) => {
console.log(e);
- return {};
+ return [];
});
- return JSON.stringify(searchResponse);
+ if (data.length === 0)
+ return `No information was found online for the search query.`;
+ this.super.introspect(
+ `${this.caller}: I found ${data.length} results - looking over them now.`
+ );
+ return JSON.stringify(data);
},
/**
@@ -176,6 +184,9 @@ const webBrowsing = {
if (data.length === 0)
return `No information was found online for the search query.`;
+ this.super.introspect(
+ `${this.caller}: I found ${data.length} results - looking over them now.`
+ );
return JSON.stringify(data);
},
_bingWebSearch: async function (query) {
@@ -219,6 +230,9 @@ const webBrowsing = {
if (searchResponse.length === 0)
return `No information was found online for the search query.`;
+ this.super.introspect(
+ `${this.caller}: I found ${searchResponse.length} results - looking over them now.`
+ );
return JSON.stringify(searchResponse);
},
_serplyEngine: async function (
@@ -293,6 +307,71 @@ const webBrowsing = {
if (data.length === 0)
return `No information was found online for the search query.`;
+ this.super.introspect(
+ `${this.caller}: I found ${data.length} results - looking over them now.`
+ );
+ return JSON.stringify(data);
+ },
+ _searXNGEngine: async function (query) {
+ let searchURL;
+ if (!process.env.AGENT_SEARXNG_API_URL) {
+ this.super.introspect(
+ `${this.caller}: I can't use SearXNG searching because the user has not defined the required base URL.\nPlease set this value in the agent skill settings.`
+ );
+ return `Search is disabled and no content was found. This functionality is disabled because the user has not set it up yet.`;
+ }
+
+ try {
+ searchURL = new URL(process.env.AGENT_SEARXNG_API_URL);
+ searchURL.searchParams.append("q", query);
+ searchURL.searchParams.append("format", "json");
+ } catch (e) {
+ this.super.handlerProps.log(`SearXNG Search: ${e.message}`);
+ this.super.introspect(
+ `${this.caller}: I can't use SearXNG searching because the url provided is not a valid URL.`
+ );
+ return `Search is disabled and no content was found. This functionality is disabled because the user has not set it up yet.`;
+ }
+
+ this.super.introspect(
+ `${this.caller}: Using SearXNG to search for "${
+ query.length > 100 ? `${query.slice(0, 100)}...` : query
+ }"`
+ );
+
+ const { response, error } = await fetch(searchURL.toString(), {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ "User-Agent": "anything-llm",
+ },
+ })
+ .then((res) => res.json())
+ .then((data) => {
+ return { response: data, error: null };
+ })
+ .catch((e) => {
+ return { response: null, error: e.message };
+ });
+ if (error)
+ return `There was an error searching for content. ${error}`;
+
+ const data = [];
+ response.results?.forEach((searchResult) => {
+ const { url, title, content, publishedDate } = searchResult;
+ data.push({
+ title,
+ link: url,
+ snippet: content,
+ publishedDate,
+ });
+ });
+
+ if (data.length === 0)
+ return `No information was found online for the search query.`;
+ this.super.introspect(
+ `${this.caller}: I found ${data.length} results - looking over them now.`
+ );
return JSON.stringify(data);
},
});
diff --git a/server/utils/boot/index.js b/server/utils/boot/index.js
index 2022f66e..8a3dcbd2 100644
--- a/server/utils/boot/index.js
+++ b/server/utils/boot/index.js
@@ -1,4 +1,6 @@
const { Telemetry } = require("../../models/telemetry");
+const { BackgroundService } = require("../BackgroundWorkers");
+const { EncryptionManager } = require("../EncryptionManager");
const { CommunicationKey } = require("../comKey");
const setupTelemetry = require("../telemetry");
@@ -18,6 +20,8 @@ function bootSSL(app, port = 3001) {
.listen(port, async () => {
await setupTelemetry();
new CommunicationKey(true);
+ new EncryptionManager();
+ new BackgroundService().boot();
console.log(`Primary server in HTTPS mode listening on port ${port}`);
})
.on("error", catchSigTerms);
@@ -45,6 +49,8 @@ function bootHTTP(app, port = 3001) {
.listen(port, async () => {
await setupTelemetry();
new CommunicationKey(true);
+ new EncryptionManager();
+ new BackgroundService().boot();
console.log(`Primary server in HTTP mode listening on port ${port}`);
})
.on("error", catchSigTerms);
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index b6258c2e..f3e0baae 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -77,15 +77,30 @@ async function chatWithWorkspace(
// User is trying to query-mode chat a workspace that has no data in it - so
// we should exit early as no information can be found under these conditions.
if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+ const textResponse =
+ workspace?.queryRefusalResponse ??
+ "There is no relevant information in this workspace to answer your query.";
+
+ await WorkspaceChats.new({
+ workspaceId: workspace.id,
+ prompt: message,
+ response: {
+ text: textResponse,
+ sources: [],
+ type: chatMode,
+ },
+ threadId: thread?.id || null,
+ include: false,
+ user,
+ });
+
return {
id: uuid,
type: "textResponse",
sources: [],
close: true,
error: null,
- textResponse:
- workspace?.queryRefusalResponse ??
- "There is no relevant information in this workspace to answer your query.",
+ textResponse,
};
}
@@ -172,15 +187,30 @@ async function chatWithWorkspace(
// If in query mode and no context chunks are found from search, backfill, or pins - do not
// let the LLM try to hallucinate a response or use general knowledge and exit early
if (chatMode === "query" && contextTexts.length === 0) {
+ const textResponse =
+ workspace?.queryRefusalResponse ??
+ "There is no relevant information in this workspace to answer your query.";
+
+ await WorkspaceChats.new({
+ workspaceId: workspace.id,
+ prompt: message,
+ response: {
+ text: textResponse,
+ sources: [],
+ type: chatMode,
+ },
+ threadId: thread?.id || null,
+ include: false,
+ user,
+ });
+
return {
id: uuid,
type: "textResponse",
sources: [],
close: true,
error: null,
- textResponse:
- workspace?.queryRefusalResponse ??
- "There is no relevant information in this workspace to answer your query.",
+ textResponse,
};
}
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index ced9a971..770e6cb6 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -75,16 +75,29 @@ async function streamChatWithWorkspace(
// User is trying to query-mode chat a workspace that has no data in it - so
// we should exit early as no information can be found under these conditions.
if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+ const textResponse =
+ workspace?.queryRefusalResponse ??
+ "There is no relevant information in this workspace to answer your query.";
writeResponseChunk(response, {
id: uuid,
type: "textResponse",
- textResponse:
- workspace?.queryRefusalResponse ??
- "There is no relevant information in this workspace to answer your query.",
+ textResponse,
sources: [],
close: true,
error: null,
});
+ await WorkspaceChats.new({
+ workspaceId: workspace.id,
+ prompt: message,
+ response: {
+ text: textResponse,
+ sources: [],
+ type: chatMode,
+ },
+ threadId: thread?.id || null,
+ include: false,
+ user,
+ });
return;
}
@@ -177,16 +190,30 @@ async function streamChatWithWorkspace(
// If in query mode and no context chunks are found from search, backfill, or pins - do not
// let the LLM try to hallucinate a response or use general knowledge and exit early
if (chatMode === "query" && contextTexts.length === 0) {
+ const textResponse =
+ workspace?.queryRefusalResponse ??
+ "There is no relevant information in this workspace to answer your query.";
writeResponseChunk(response, {
id: uuid,
type: "textResponse",
- textResponse:
- workspace?.queryRefusalResponse ??
- "There is no relevant information in this workspace to answer your query.",
+ textResponse,
sources: [],
close: true,
error: null,
});
+
+ await WorkspaceChats.new({
+ workspaceId: workspace.id,
+ prompt: message,
+ response: {
+ text: textResponse,
+ sources: [],
+ type: chatMode,
+ },
+ threadId: thread?.id || null,
+ include: false,
+ user,
+ });
return;
}
diff --git a/server/utils/collectorApi/index.js b/server/utils/collectorApi/index.js
index 6971f640..7f578191 100644
--- a/server/utils/collectorApi/index.js
+++ b/server/utils/collectorApi/index.js
@@ -1,8 +1,9 @@
+const { EncryptionManager } = require("../EncryptionManager");
+
// When running locally will occupy the 0.0.0.0 hostname space but when deployed inside
// of docker this endpoint is not exposed so it is only on the Docker instances internal network
// so no additional security is needed on the endpoint directly. Auth is done however by the express
// middleware prior to leaving the node-side of the application so that is good enough >:)
-
class CollectorApi {
constructor() {
const { CommunicationKey } = require("../comKey");
@@ -54,6 +55,9 @@ class CollectorApi {
headers: {
"Content-Type": "application/json",
"X-Integrity": this.comkey.sign(data),
+ "X-Payload-Signer": this.comkey.encrypt(
+ new EncryptionManager().xPayload
+ ),
},
body: data,
})
@@ -77,6 +81,9 @@ class CollectorApi {
headers: {
"Content-Type": "application/json",
"X-Integrity": this.comkey.sign(data),
+ "X-Payload-Signer": this.comkey.encrypt(
+ new EncryptionManager().xPayload
+ ),
},
body: data,
})
@@ -98,6 +105,9 @@ class CollectorApi {
headers: {
"Content-Type": "application/json",
"X-Integrity": this.comkey.sign(data),
+ "X-Payload-Signer": this.comkey.encrypt(
+ new EncryptionManager().xPayload
+ ),
},
body: data,
})
@@ -122,6 +132,9 @@ class CollectorApi {
headers: {
"Content-Type": "application/json",
"X-Integrity": this.comkey.sign(body),
+ "X-Payload-Signer": this.comkey.encrypt(
+ new EncryptionManager().xPayload
+ ),
},
})
.then((res) => {
@@ -144,6 +157,9 @@ class CollectorApi {
headers: {
"Content-Type": "application/json",
"X-Integrity": this.comkey.sign(data),
+ "X-Payload-Signer": this.comkey.encrypt(
+ new EncryptionManager().xPayload
+ ),
},
body: data,
})
diff --git a/server/utils/comKey/index.js b/server/utils/comKey/index.js
index aec436bb..5cc6b0c0 100644
--- a/server/utils/comKey/index.js
+++ b/server/utils/comKey/index.js
@@ -73,6 +73,14 @@ class CommunicationKey {
.sign("RSA-SHA256", Buffer.from(textData), this.#readPrivateKey())
.toString("hex");
}
+
+ // Use the rolling priv-key to encrypt arbitrary data that is text
+ // returns the encrypted content as a base64 string.
+ encrypt(textData = "") {
+ return crypto
+ .privateEncrypt(this.#readPrivateKey(), Buffer.from(textData, "utf-8"))
+ .toString("base64");
+ }
}
module.exports = { CommunicationKey };
diff --git a/server/utils/files/index.js b/server/utils/files/index.js
index fea6f7f7..58bdf807 100644
--- a/server/utils/files/index.js
+++ b/server/utils/files/index.js
@@ -2,6 +2,7 @@ const fs = require("fs");
const path = require("path");
const { v5: uuidv5 } = require("uuid");
const { Document } = require("../../models/documents");
+const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
const documentsPath =
process.env.NODE_ENV === "development"
? path.resolve(__dirname, `../../storage/documents`)
@@ -25,7 +26,7 @@ async function fileData(filePath = null) {
async function viewLocalFiles() {
if (!fs.existsSync(documentsPath)) fs.mkdirSync(documentsPath);
-
+ const liveSyncAvailable = await DocumentSyncQueue.enabled();
const directory = {
name: "documents",
type: "folder",
@@ -50,16 +51,28 @@ async function viewLocalFiles() {
const rawData = fs.readFileSync(filePath, "utf8");
const cachefilename = `${file}/${subfile}`;
const { pageContent, ...metadata } = JSON.parse(rawData);
+ const pinnedInWorkspaces = await Document.getOnlyWorkspaceIds({
+ docpath: cachefilename,
+ pinned: true,
+ });
+ const watchedInWorkspaces = liveSyncAvailable
+ ? await Document.getOnlyWorkspaceIds({
+ docpath: cachefilename,
+ watched: true,
+ })
+ : [];
subdocs.items.push({
name: subfile,
type: "file",
...metadata,
cached: await cachedVectorInformation(cachefilename, true),
- pinnedWorkspaces: await Document.getPins({
- docpath: cachefilename,
- pinned: true,
- }),
+ pinnedWorkspaces: pinnedInWorkspaces,
+ canWatch: liveSyncAvailable
+ ? DocumentSyncQueue.canWatch(metadata)
+ : false,
+ // Is file watched in any workspace since sync updates all workspaces where file is referenced
+ watched: watchedInWorkspaces.length !== 0,
});
}
directory.items.push(subdocs);
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index 72d7a8c4..6abd6408 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -407,6 +407,10 @@ const KEY_MAPPING = {
envKey: "AGENT_SERPLY_API_KEY",
checks: [],
},
+ AgentSearXNGApiUrl: {
+ envKey: "AGENT_SEARXNG_API_URL",
+ checks: [],
+ },
// TTS/STT Integration ENVS
TextToSpeechProvider: {
@@ -565,6 +569,7 @@ function validAnthropicModel(input = "") {
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
+ "claude-3-5-sonnet-20240620",
];
return validModels.includes(input)
? null
@@ -718,6 +723,7 @@ async function updateENV(newENVs = {}, force = false, userId = null) {
}
await logChangesToEventLog(newValues, userId);
+ if (process.env.NODE_ENV === "production") dumpENV();
return { newValues, error: error?.length > 0 ? error : false };
}
@@ -743,15 +749,20 @@ async function logChangesToEventLog(newValues = {}, userId = null) {
return;
}
-async function dumpENV() {
+function dumpENV() {
const fs = require("fs");
const path = require("path");
const frozenEnvs = {};
const protectedKeys = [
...Object.values(KEY_MAPPING).map((values) => values.envKey),
+ // Manually Add Keys here which are not already defined in KEY_MAPPING
+ // and are either managed or manually set ENV key:values.
"STORAGE_DIR",
"SERVER_PORT",
+ // For persistent data encryption
+ "SIG_KEY",
+ "SIG_SALT",
// Password Schema Keys if present.
"PASSWORDMINCHAR",
"PASSWORDMAXCHAR",
@@ -764,16 +775,6 @@ async function dumpENV() {
"ENABLE_HTTPS",
"HTTPS_CERT_PATH",
"HTTPS_KEY_PATH",
- // DISABLED TELEMETRY
- "DISABLE_TELEMETRY",
-
- // Agent Integrations
- // Search engine integrations
- "AGENT_GSE_CTX",
- "AGENT_GSE_KEY",
- "AGENT_SERPER_DEV_KEY",
- "AGENT_BING_SEARCH_API_KEY",
- "AGENT_SERPLY_API_KEY",
];
// Simple sanitization of each value to prevent ENV injection via newline or quote escaping.
diff --git a/server/utils/middleware/featureFlagEnabled.js b/server/utils/middleware/featureFlagEnabled.js
new file mode 100644
index 00000000..f13a8886
--- /dev/null
+++ b/server/utils/middleware/featureFlagEnabled.js
@@ -0,0 +1,24 @@
+const { SystemSettings } = require("../../models/systemSettings");
+
+// Explicitly check that a specific feature flag is enabled.
+// This should match the key in the SystemSetting label.
+function featureFlagEnabled(featureFlagKey = null) {
+ return async (_, response, next) => {
+ if (!featureFlagKey) return response.sendStatus(401).end();
+
+ const flagValue = (
+ await SystemSettings.get({ label: String(featureFlagKey) })
+ )?.value;
+ if (!flagValue) return response.sendStatus(401).end();
+
+ if (flagValue === "enabled") {
+ next();
+ return;
+ }
+
+ return response.sendStatus(401).end();
+ };
+}
+module.exports = {
+ featureFlagEnabled,
+};
diff --git a/server/utils/vectorDbProviders/astra/index.js b/server/utils/vectorDbProviders/astra/index.js
index 30ff2bbf..efaaa135 100644
--- a/server/utils/vectorDbProviders/astra/index.js
+++ b/server/utils/vectorDbProviders/astra/index.js
@@ -100,7 +100,8 @@ const AstraDB = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -109,40 +110,42 @@ const AstraDB = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const { chunks } = cacheResult;
- const documentVectors = [];
- vectorDimension = chunks[0][0].values.length || null;
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ vectorDimension = chunks[0][0].values.length || null;
- const collection = await this.getOrCreateCollection(
- client,
- namespace,
- vectorDimension
- );
- if (!(await this.isRealCollection(collection)))
- throw new Error("Failed to create new AstraDB collection!", {
+ const collection = await this.getOrCreateCollection(
+ client,
namespace,
- });
+ vectorDimension
+ );
+ if (!(await this.isRealCollection(collection)))
+ throw new Error("Failed to create new AstraDB collection!", {
+ namespace,
+ });
- for (const chunk of chunks) {
- // Before sending to Astra and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- const newChunks = chunk.map((chunk) => {
- const _id = uuidv4();
- documentVectors.push({ docId, vectorId: _id });
- return {
- _id: _id,
- $vector: chunk.values,
- metadata: chunk.metadata || {},
- };
- });
+ for (const chunk of chunks) {
+ // Before sending to Astra and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ const newChunks = chunk.map((chunk) => {
+ const _id = uuidv4();
+ documentVectors.push({ docId, vectorId: _id });
+ return {
+ _id: _id,
+ $vector: chunk.values,
+ metadata: chunk.metadata || {},
+ };
+ });
- await collection.insertMany(newChunks);
+ await collection.insertMany(newChunks);
+ }
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
}
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
}
const EmbedderEngine = getEmbeddingEngineSelection();
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index 5bea32bf..a79d4fc4 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -185,7 +185,8 @@ const Chroma = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -193,43 +194,45 @@ const Chroma = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const collection = await client.getOrCreateCollection({
- name: this.normalize(namespace),
- metadata: { "hnsw:space": "cosine" },
- });
- const { chunks } = cacheResult;
- const documentVectors = [];
-
- for (const chunk of chunks) {
- const submission = {
- ids: [],
- embeddings: [],
- metadatas: [],
- documents: [],
- };
-
- // Before sending to Chroma and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- chunk.forEach((chunk) => {
- const id = uuidv4();
- const { id: _id, ...metadata } = chunk.metadata;
- documentVectors.push({ docId, vectorId: id });
- submission.ids.push(id);
- submission.embeddings.push(chunk.values);
- submission.metadatas.push(metadata);
- submission.documents.push(metadata.text);
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const collection = await client.getOrCreateCollection({
+ name: this.normalize(namespace),
+ metadata: { "hnsw:space": "cosine" },
});
+ const { chunks } = cacheResult;
+ const documentVectors = [];
- const additionResult = await collection.add(submission);
- if (!additionResult)
- throw new Error("Error embedding into ChromaDB", additionResult);
+ for (const chunk of chunks) {
+ const submission = {
+ ids: [],
+ embeddings: [],
+ metadatas: [],
+ documents: [],
+ };
+
+ // Before sending to Chroma and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ chunk.forEach((chunk) => {
+ const id = uuidv4();
+ const { id: _id, ...metadata } = chunk.metadata;
+ documentVectors.push({ docId, vectorId: id });
+ submission.ids.push(id);
+ submission.embeddings.push(chunk.values);
+ submission.metadatas.push(metadata);
+ submission.documents.push(metadata.text);
+ });
+
+ const additionResult = await collection.add(submission);
+ if (!additionResult)
+ throw new Error("Error embedding into ChromaDB", additionResult);
+ }
+
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
}
-
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
}
// If we are here then we are going to embed and store a novel document.
diff --git a/server/utils/vectorDbProviders/lance/index.js b/server/utils/vectorDbProviders/lance/index.js
index 54c12c04..e1e6a5e6 100644
--- a/server/utils/vectorDbProviders/lance/index.js
+++ b/server/utils/vectorDbProviders/lance/index.js
@@ -153,7 +153,8 @@ const LanceDb = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -161,25 +162,27 @@ const LanceDb = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const { chunks } = cacheResult;
- const documentVectors = [];
- const submissions = [];
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ const submissions = [];
- for (const chunk of chunks) {
- chunk.forEach((chunk) => {
- const id = uuidv4();
- const { id: _id, ...metadata } = chunk.metadata;
- documentVectors.push({ docId, vectorId: id });
- submissions.push({ id: id, vector: chunk.values, ...metadata });
- });
+ for (const chunk of chunks) {
+ chunk.forEach((chunk) => {
+ const id = uuidv4();
+ const { id: _id, ...metadata } = chunk.metadata;
+ documentVectors.push({ docId, vectorId: id });
+ submissions.push({ id: id, vector: chunk.values, ...metadata });
+ });
+ }
+
+ await this.updateOrCreateCollection(client, submissions, namespace);
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
}
-
- await this.updateOrCreateCollection(client, submissions, namespace);
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
}
// If we are here then we are going to embed and store a novel document.
diff --git a/server/utils/vectorDbProviders/milvus/index.js b/server/utils/vectorDbProviders/milvus/index.js
index d720c265..14d54d6e 100644
--- a/server/utils/vectorDbProviders/milvus/index.js
+++ b/server/utils/vectorDbProviders/milvus/index.js
@@ -137,7 +137,8 @@ const Milvus = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -146,38 +147,40 @@ const Milvus = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const { chunks } = cacheResult;
- const documentVectors = [];
- vectorDimension = chunks[0][0].values.length || null;
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ vectorDimension = chunks[0][0].values.length || null;
- await this.getOrCreateCollection(client, namespace, vectorDimension);
- for (const chunk of chunks) {
- // Before sending to Pinecone and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- const newChunks = chunk.map((chunk) => {
- const id = uuidv4();
- documentVectors.push({ docId, vectorId: id });
- return { id, vector: chunk.values, metadata: chunk.metadata };
- });
- const insertResult = await client.insert({
- collection_name: this.normalize(namespace),
- data: newChunks,
- });
+ await this.getOrCreateCollection(client, namespace, vectorDimension);
+ for (const chunk of chunks) {
+ // Before sending to Pinecone and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ const newChunks = chunk.map((chunk) => {
+ const id = uuidv4();
+ documentVectors.push({ docId, vectorId: id });
+ return { id, vector: chunk.values, metadata: chunk.metadata };
+ });
+ const insertResult = await client.insert({
+ collection_name: this.normalize(namespace),
+ data: newChunks,
+ });
- if (insertResult?.status.error_code !== "Success") {
- throw new Error(
- `Error embedding into Milvus! Reason:${insertResult?.status.reason}`
- );
+ if (insertResult?.status.error_code !== "Success") {
+ throw new Error(
+ `Error embedding into Milvus! Reason:${insertResult?.status.reason}`
+ );
+ }
}
+ await DocumentVectors.bulkInsert(documentVectors);
+ await client.flushSync({
+ collection_names: [this.normalize(namespace)],
+ });
+ return { vectorized: true, error: null };
}
- await DocumentVectors.bulkInsert(documentVectors);
- await client.flushSync({
- collection_names: [this.normalize(namespace)],
- });
- return { vectorized: true, error: null };
}
const EmbedderEngine = getEmbeddingEngineSelection();
diff --git a/server/utils/vectorDbProviders/pinecone/index.js b/server/utils/vectorDbProviders/pinecone/index.js
index d1aeb2f6..040f41d3 100644
--- a/server/utils/vectorDbProviders/pinecone/index.js
+++ b/server/utils/vectorDbProviders/pinecone/index.js
@@ -96,7 +96,8 @@ const PineconeDB = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -104,26 +105,28 @@ const PineconeDB = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { pineconeIndex } = await this.connect();
- const pineconeNamespace = pineconeIndex.namespace(namespace);
- const { chunks } = cacheResult;
- const documentVectors = [];
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { pineconeIndex } = await this.connect();
+ const pineconeNamespace = pineconeIndex.namespace(namespace);
+ const { chunks } = cacheResult;
+ const documentVectors = [];
- for (const chunk of chunks) {
- // Before sending to Pinecone and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- const newChunks = chunk.map((chunk) => {
- const id = uuidv4();
- documentVectors.push({ docId, vectorId: id });
- return { ...chunk, id };
- });
- await pineconeNamespace.upsert([...newChunks]);
+ for (const chunk of chunks) {
+ // Before sending to Pinecone and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ const newChunks = chunk.map((chunk) => {
+ const id = uuidv4();
+ documentVectors.push({ docId, vectorId: id });
+ return { ...chunk, id };
+ });
+ await pineconeNamespace.upsert([...newChunks]);
+ }
+
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
}
-
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
}
// If we are here then we are going to embed and store a novel document.
diff --git a/server/utils/vectorDbProviders/qdrant/index.js b/server/utils/vectorDbProviders/qdrant/index.js
index 77945915..36550f09 100644
--- a/server/utils/vectorDbProviders/qdrant/index.js
+++ b/server/utils/vectorDbProviders/qdrant/index.js
@@ -137,7 +137,8 @@ const QDrant = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -146,59 +147,63 @@ const QDrant = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const { chunks } = cacheResult;
- const documentVectors = [];
- vectorDimension =
- chunks[0][0]?.vector?.length ?? chunks[0][0]?.values?.length ?? null;
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ vectorDimension =
+ chunks[0][0]?.vector?.length ??
+ chunks[0][0]?.values?.length ??
+ null;
- const collection = await this.getOrCreateCollection(
- client,
- namespace,
- vectorDimension
- );
- if (!collection)
- throw new Error("Failed to create new QDrant collection!", {
+ const collection = await this.getOrCreateCollection(
+ client,
namespace,
- });
+ vectorDimension
+ );
+ if (!collection)
+ throw new Error("Failed to create new QDrant collection!", {
+ namespace,
+ });
- for (const chunk of chunks) {
- const submission = {
- ids: [],
- vectors: [],
- payloads: [],
- };
+ for (const chunk of chunks) {
+ const submission = {
+ ids: [],
+ vectors: [],
+ payloads: [],
+ };
- // Before sending to Qdrant and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- // The id property must be defined or else it will be unable to be managed by ALLM.
- chunk.forEach((chunk) => {
- const id = uuidv4();
- if (chunk?.payload?.hasOwnProperty("id")) {
- const { id: _id, ...payload } = chunk.payload;
- documentVectors.push({ docId, vectorId: id });
- submission.ids.push(id);
- submission.vectors.push(chunk.vector);
- submission.payloads.push(payload);
- } else {
- console.error(
- "The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection."
- );
- }
- });
+ // Before sending to Qdrant and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ // The id property must be defined or else it will be unable to be managed by ALLM.
+ chunk.forEach((chunk) => {
+ const id = uuidv4();
+ if (chunk?.payload?.hasOwnProperty("id")) {
+ const { id: _id, ...payload } = chunk.payload;
+ documentVectors.push({ docId, vectorId: id });
+ submission.ids.push(id);
+ submission.vectors.push(chunk.vector);
+ submission.payloads.push(payload);
+ } else {
+ console.error(
+ "The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection."
+ );
+ }
+ });
- const additionResult = await client.upsert(namespace, {
- wait: true,
- batch: { ...submission },
- });
- if (additionResult?.status !== "completed")
- throw new Error("Error embedding into QDrant", additionResult);
+ const additionResult = await client.upsert(namespace, {
+ wait: true,
+ batch: { ...submission },
+ });
+ if (additionResult?.status !== "completed")
+ throw new Error("Error embedding into QDrant", additionResult);
+ }
+
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
}
-
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
}
// If we are here then we are going to embed and store a novel document.
diff --git a/server/utils/vectorDbProviders/weaviate/index.js b/server/utils/vectorDbProviders/weaviate/index.js
index 978e2557..176c56d6 100644
--- a/server/utils/vectorDbProviders/weaviate/index.js
+++ b/server/utils/vectorDbProviders/weaviate/index.js
@@ -179,7 +179,8 @@ const Weaviate = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -192,55 +193,57 @@ const Weaviate = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const weaviateClassExits = await this.hasNamespace(namespace);
- if (!weaviateClassExits) {
- await client.schema
- .classCreator()
- .withClass({
- class: camelCase(namespace),
- description: `Class created by AnythingLLM named ${camelCase(
- namespace
- )}`,
- vectorizer: "none",
- })
- .do();
- }
-
- const { chunks } = cacheResult;
- const documentVectors = [];
- const vectors = [];
-
- for (const chunk of chunks) {
- // Before sending to Weaviate and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- chunk.forEach((chunk) => {
- const id = uuidv4();
- const flattenedMetadata = this.flattenObjectForWeaviate(
- chunk.properties ?? chunk.metadata
- );
- documentVectors.push({ docId, vectorId: id });
- const vectorRecord = {
- id,
- class: camelCase(namespace),
- vector: chunk.vector || chunk.values || [],
- properties: { ...flattenedMetadata },
- };
- vectors.push(vectorRecord);
- });
-
- const { success: additionResult, errors = [] } =
- await this.addVectors(client, vectors);
- if (!additionResult) {
- console.error("Weaviate::addVectors failed to insert", errors);
- throw new Error("Error embedding into Weaviate");
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const weaviateClassExits = await this.hasNamespace(namespace);
+ if (!weaviateClassExits) {
+ await client.schema
+ .classCreator()
+ .withClass({
+ class: camelCase(namespace),
+ description: `Class created by AnythingLLM named ${camelCase(
+ namespace
+ )}`,
+ vectorizer: "none",
+ })
+ .do();
}
- }
- await DocumentVectors.bulkInsert(documentVectors);
- return { vectorized: true, error: null };
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ const vectors = [];
+
+ for (const chunk of chunks) {
+ // Before sending to Weaviate and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ chunk.forEach((chunk) => {
+ const id = uuidv4();
+ const flattenedMetadata = this.flattenObjectForWeaviate(
+ chunk.properties ?? chunk.metadata
+ );
+ documentVectors.push({ docId, vectorId: id });
+ const vectorRecord = {
+ id,
+ class: camelCase(namespace),
+ vector: chunk.vector || chunk.values || [],
+ properties: { ...flattenedMetadata },
+ };
+ vectors.push(vectorRecord);
+ });
+
+ const { success: additionResult, errors = [] } =
+ await this.addVectors(client, vectors);
+ if (!additionResult) {
+ console.error("Weaviate::addVectors failed to insert", errors);
+ throw new Error("Error embedding into Weaviate");
+ }
+ }
+
+ await DocumentVectors.bulkInsert(documentVectors);
+ return { vectorized: true, error: null };
+ }
}
// If we are here then we are going to embed and store a novel document.
diff --git a/server/utils/vectorDbProviders/zilliz/index.js b/server/utils/vectorDbProviders/zilliz/index.js
index ebb59157..cb60d2e3 100644
--- a/server/utils/vectorDbProviders/zilliz/index.js
+++ b/server/utils/vectorDbProviders/zilliz/index.js
@@ -138,7 +138,8 @@ const Zilliz = {
addDocumentToNamespace: async function (
namespace,
documentData = {},
- fullFilePath = null
+ fullFilePath = null,
+ skipCache = false
) {
const { DocumentVectors } = require("../../../models/vectors");
try {
@@ -147,38 +148,40 @@ const Zilliz = {
if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace);
- const cacheResult = await cachedVectorInformation(fullFilePath);
- if (cacheResult.exists) {
- const { client } = await this.connect();
- const { chunks } = cacheResult;
- const documentVectors = [];
- vectorDimension = chunks[0][0].values.length || null;
+ if (!skipCache) {
+ const cacheResult = await cachedVectorInformation(fullFilePath);
+ if (cacheResult.exists) {
+ const { client } = await this.connect();
+ const { chunks } = cacheResult;
+ const documentVectors = [];
+ vectorDimension = chunks[0][0].values.length || null;
- await this.getOrCreateCollection(client, namespace, vectorDimension);
- for (const chunk of chunks) {
- // Before sending to Pinecone and saving the records to our db
- // we need to assign the id of each chunk that is stored in the cached file.
- const newChunks = chunk.map((chunk) => {
- const id = uuidv4();
- documentVectors.push({ docId, vectorId: id });
- return { id, vector: chunk.values, metadata: chunk.metadata };
- });
- const insertResult = await client.insert({
- collection_name: this.normalize(namespace),
- data: newChunks,
- });
+ await this.getOrCreateCollection(client, namespace, vectorDimension);
+ for (const chunk of chunks) {
+ // Before sending to Zilliz and saving the records to our db
+ // we need to assign the id of each chunk that is stored in the cached file.
+ const newChunks = chunk.map((chunk) => {
+ const id = uuidv4();
+ documentVectors.push({ docId, vectorId: id });
+ return { id, vector: chunk.values, metadata: chunk.metadata };
+ });
+ const insertResult = await client.insert({
+ collection_name: this.normalize(namespace),
+ data: newChunks,
+ });
- if (insertResult?.status.error_code !== "Success") {
- throw new Error(
- `Error embedding into Zilliz! Reason:${insertResult?.status.reason}`
- );
+ if (insertResult?.status.error_code !== "Success") {
+ throw new Error(
+ `Error embedding into Zilliz! Reason:${insertResult?.status.reason}`
+ );
+ }
}
+ await DocumentVectors.bulkInsert(documentVectors);
+ await client.flushSync({
+ collection_names: [this.normalize(namespace)],
+ });
+ return { vectorized: true, error: null };
}
- await DocumentVectors.bulkInsert(documentVectors);
- await client.flushSync({
- collection_names: [this.normalize(namespace)],
- });
- return { vectorized: true, error: null };
}
const EmbedderEngine = getEmbeddingEngineSelection();
diff --git a/server/yarn.lock b/server/yarn.lock
index a05c62fc..6c9de0e2 100644
--- a/server/yarn.lock
+++ b/server/yarn.lock
@@ -272,6 +272,18 @@
"@azure/logger" "^1.0.3"
tslib "^2.4.0"
+"@babel/runtime@^7.10.5":
+ version "7.24.7"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.7.tgz#f4f0d5530e8dbdf59b3451b9b3e594b6ba082e12"
+ integrity sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==
+ dependencies:
+ regenerator-runtime "^0.14.0"
+
+"@breejs/later@^4.2.0":
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/@breejs/later/-/later-4.2.0.tgz#669661f3a02535ef900f360c74e48c3f5483c786"
+ integrity sha512-EVMD0SgJtOuFeg0lAVbCwa+qeTKILb87jqvLyUtQswGD9+ce2nB52Y5zbTF1Hc0MDFfbydcMcxb47jSdhikVHA==
+
"@colors/colors@1.6.0", "@colors/colors@^1.6.0":
version "1.6.0"
resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
@@ -567,6 +579,14 @@
resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919"
integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==
+"@ladjs/graceful@^3.2.2":
+ version "3.2.2"
+ resolved "https://registry.yarnpkg.com/@ladjs/graceful/-/graceful-3.2.2.tgz#1b141a9dc2604df99177d6714dbe4a0bff5e2ddf"
+ integrity sha512-GyL5Cpgh2RlndFW2e4AUHrEDe0tzyXKpAs92wrAQhNKcY0y++qfK8PC+6TOHzN9zvxPY9j1KAU29Gfa9vxWzDg==
+ dependencies:
+ lil-http-terminator "^1.2.2"
+ p-is-promise "3"
+
"@lancedb/vectordb-darwin-arm64@0.4.11":
version "0.4.11"
resolved "https://registry.yarnpkg.com/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.4.11.tgz#390549891e03f28ba0c1b741f30730b2d09227da"
@@ -1114,6 +1134,11 @@
dependencies:
"@types/node" "*"
+"@types/lodash@^4.14.165":
+ version "4.17.5"
+ resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.5.tgz#e6c29b58e66995d57cd170ce3e2a61926d55ee04"
+ integrity sha512-MBIOHVZqVqgfro1euRDWX7OO0fBVUUMrN6Pwm8LQsz8cWhEpihlvR70ENj3f40j58TNxZaWv2ndSkInykNBBJw==
+
"@types/long@^4.0.1":
version "4.0.2"
resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.2.tgz#b74129719fc8d11c01868010082d483b7545591a"
@@ -1708,6 +1733,11 @@ boolbase@^1.0.0:
resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==
+boolean@^3.2.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/boolean/-/boolean-3.2.0.tgz#9e5294af4e98314494cbb17979fa54ca159f116b"
+ integrity sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==
+
bottleneck@^2.15.3:
version "2.19.5"
resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91"
@@ -1735,6 +1765,22 @@ braces@~3.0.2:
dependencies:
fill-range "^7.0.1"
+bree@^9.2.3:
+ version "9.2.3"
+ resolved "https://registry.yarnpkg.com/bree/-/bree-9.2.3.tgz#8c47402efcc79ed6da31637f84092ef59743d395"
+ integrity sha512-iCVyLVcqql8rFogVX5gzkofdo6OZu8mxe5dUSkAZyaR43UdNfP0DOj3jJk31yogy6lfnRMhGvO5Gj1ypLeInuA==
+ dependencies:
+ "@breejs/later" "^4.2.0"
+ boolean "^3.2.0"
+ combine-errors "^3.0.3"
+ cron-validate "^1.4.5"
+ human-interval "^2.0.1"
+ is-string-and-not-blank "^0.0.2"
+ is-valid-path "^0.1.1"
+ ms "^2.1.3"
+ p-wait-for "3"
+ safe-timers "^1.1.0"
+
bson@^6.2.0:
version "6.6.0"
resolved "https://registry.yarnpkg.com/bson/-/bson-6.6.0.tgz#f225137eb49fe19bee4d87949a0515c05176e2ad"
@@ -2040,6 +2086,14 @@ colorspace@1.1.x:
color "^3.1.3"
text-hex "1.0.x"
+combine-errors@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/combine-errors/-/combine-errors-3.0.3.tgz#f4df6740083e5703a3181110c2b10551f003da86"
+ integrity sha512-C8ikRNRMygCwaTx+Ek3Yr+OuZzgZjduCOfSQBjbM8V3MfgcjSTeto/GXP6PAwKvJz/v15b7GHZvx5rOlczFw/Q==
+ dependencies:
+ custom-error-instance "2.1.1"
+ lodash.uniqby "4.5.0"
+
combined-stream@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
@@ -2165,6 +2219,13 @@ crc32-stream@^4.0.2:
crc-32 "^1.2.0"
readable-stream "^3.4.0"
+cron-validate@^1.4.5:
+ version "1.4.5"
+ resolved "https://registry.yarnpkg.com/cron-validate/-/cron-validate-1.4.5.tgz#eceb221f7558e6302e5f84c7b3a454fdf4d064c3"
+ integrity sha512-nKlOJEnYKudMn/aNyNH8xxWczlfpaazfWV32Pcx/2St51r2bxWbGhZD7uwzMcRhunA/ZNL+Htm/i0792Z59UMQ==
+ dependencies:
+ yup "0.32.9"
+
cross-env@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
@@ -2209,6 +2270,11 @@ css-what@^6.1.0:
resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==
+custom-error-instance@2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/custom-error-instance/-/custom-error-instance-2.1.1.tgz#3cf6391487a6629a6247eb0ca0ce00081b7e361a"
+ integrity sha512-p6JFxJc3M4OTD2li2qaHkDCw9SfMw82Ldr6OC9Je1aXiGfhx2W8p3GaoeaGrPJTUN9NirTM/KTxHWMUdR1rsUg==
+
data-view-buffer@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/data-view-buffer/-/data-view-buffer-1.0.1.tgz#8ea6326efec17a2e42620696e671d7d5a8bc66b2"
@@ -3521,6 +3587,13 @@ https-proxy-agent@^7.0.0:
agent-base "^7.0.2"
debug "4"
+human-interval@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/human-interval/-/human-interval-2.0.1.tgz#655baf606c7067bb26042dcae14ec777b099af15"
+ integrity sha512-r4Aotzf+OtKIGQCB3odUowy4GfUDTy3aTWTfLd7ZF2gBCy3XW3v/dJLRefZnOFFnjqs5B1TypvS8WarpBkYUNQ==
+ dependencies:
+ numbered "^1.1.0"
+
human-signals@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
@@ -3708,6 +3781,11 @@ is-docker@^2.0.0, is-docker@^2.1.1:
resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa"
integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==
+is-extglob@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0"
+ integrity sha512-7Q+VbVafe6x2T+Tu6NcOf6sRklazEPmBoB3IWk3WdGZM2iGUwU/Oe3Wtq5lSEkDTTlpp8yx+5t4pzO/i9Ty1ww==
+
is-extglob@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
@@ -3732,6 +3810,13 @@ is-generator-function@^1.0.10:
dependencies:
has-tostringtag "^1.0.0"
+is-glob@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863"
+ integrity sha512-a1dBeB19NXsf/E0+FHqkagizel/LQw2DjSQpvQrj3zT+jYPpaUCryPnrQajXKFLCMuf4I6FhRpaGtw4lPrG6Eg==
+ dependencies:
+ is-extglob "^1.0.0"
+
is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
version "4.0.3"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
@@ -3744,6 +3829,13 @@ is-interactive@^2.0.0:
resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-2.0.0.tgz#40c57614593826da1100ade6059778d597f16e90"
integrity sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==
+is-invalid-path@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/is-invalid-path/-/is-invalid-path-0.1.0.tgz#307a855b3cf1a938b44ea70d2c61106053714f34"
+ integrity sha512-aZMG0T3F34mTg4eTdszcGXx54oiZ4NtHSft3hWNJMGJXUUqdIj3cOZuHcU0nCWWcY3jd7yRe/3AEm3vSNTpBGQ==
+ dependencies:
+ is-glob "^2.0.0"
+
is-lambda@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5"
@@ -3806,6 +3898,18 @@ is-stream@^2.0.0:
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
+is-string-and-not-blank@^0.0.2:
+ version "0.0.2"
+ resolved "https://registry.yarnpkg.com/is-string-and-not-blank/-/is-string-and-not-blank-0.0.2.tgz#cd19eded2ca4a514f79ca528915f1fb28e5dd38a"
+ integrity sha512-FyPGAbNVyZpTeDCTXnzuwbu9/WpNXbCfbHXLpCRpN4GANhS00eEIP5Ef+k5HYSNIzIhdN9zRDoBj6unscECvtQ==
+ dependencies:
+ is-string-blank "^1.0.1"
+
+is-string-blank@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-string-blank/-/is-string-blank-1.0.1.tgz#866dca066d41d2894ebdfd2d8fe93e586e583a03"
+ integrity sha512-9H+ZBCVs3L9OYqv8nuUAzpcT9OTgMD1yAWrG7ihlnibdkbtB850heAmYWxHuXc4CHy4lKeK69tN+ny1K7gBIrw==
+
is-string@^1.0.5, is-string@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd"
@@ -3832,6 +3936,13 @@ is-unicode-supported@^1.1.0, is-unicode-supported@^1.3.0:
resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz#d824984b616c292a2e198207d4a609983842f714"
integrity sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==
+is-valid-path@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/is-valid-path/-/is-valid-path-0.1.1.tgz#110f9ff74c37f663e1ec7915eb451f2db93ac9df"
+ integrity sha512-+kwPrVDu9Ms03L90Qaml+79+6DZHqHyRoANI6IsZJ/g8frhnfchDOBCa0RbQ6/kdHt5CS5OeIEyrYznNuVN+8A==
+ dependencies:
+ is-invalid-path "^0.1.0"
+
is-weakmap@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.2.tgz#bf72615d649dfe5f699079c54b83e47d1ae19cfd"
@@ -4157,6 +4268,11 @@ levn@^0.4.1:
prelude-ls "^1.2.1"
type-check "~0.4.0"
+lil-http-terminator@^1.2.2:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/lil-http-terminator/-/lil-http-terminator-1.2.3.tgz#594ef0f3c2b2f7d43a8f2989b2b3de611bf507eb"
+ integrity sha512-vQcHSwAFq/kTR2cG6peOVS7SjgksGgSPeH0G2lkw+buue33thE/FCHdn10wJXXshc5RswFy0Iaz48qA2Busw5Q==
+
locate-path@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
@@ -4164,6 +4280,48 @@ locate-path@^6.0.0:
dependencies:
p-locate "^5.0.0"
+lodash-es@^4.17.15:
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee"
+ integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==
+
+lodash._baseiteratee@~4.7.0:
+ version "4.7.0"
+ resolved "https://registry.yarnpkg.com/lodash._baseiteratee/-/lodash._baseiteratee-4.7.0.tgz#34a9b5543572727c3db2e78edae3c0e9e66bd102"
+ integrity sha512-nqB9M+wITz0BX/Q2xg6fQ8mLkyfF7MU7eE+MNBNjTHFKeKaZAPEzEg+E8LWxKWf1DQVflNEn9N49yAuqKh2mWQ==
+ dependencies:
+ lodash._stringtopath "~4.8.0"
+
+lodash._basetostring@~4.12.0:
+ version "4.12.0"
+ resolved "https://registry.yarnpkg.com/lodash._basetostring/-/lodash._basetostring-4.12.0.tgz#9327c9dc5158866b7fa4b9d42f4638e5766dd9df"
+ integrity sha512-SwcRIbyxnN6CFEEK4K1y+zuApvWdpQdBHM/swxP962s8HIxPO3alBH5t3m/dl+f4CMUug6sJb7Pww8d13/9WSw==
+
+lodash._baseuniq@~4.6.0:
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/lodash._baseuniq/-/lodash._baseuniq-4.6.0.tgz#0ebb44e456814af7905c6212fa2c9b2d51b841e8"
+ integrity sha512-Ja1YevpHZctlI5beLA7oc5KNDhGcPixFhcqSiORHNsp/1QTv7amAXzw+gu4YOvErqVlMVyIJGgtzeepCnnur0A==
+ dependencies:
+ lodash._createset "~4.0.0"
+ lodash._root "~3.0.0"
+
+lodash._createset@~4.0.0:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/lodash._createset/-/lodash._createset-4.0.3.tgz#0f4659fbb09d75194fa9e2b88a6644d363c9fe26"
+ integrity sha512-GTkC6YMprrJZCYU3zcqZj+jkXkrXzq3IPBcF/fIPpNEAB4hZEtXU8zp/RwKOvZl43NUmwDbyRk3+ZTbeRdEBXA==
+
+lodash._root@~3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
+ integrity sha512-O0pWuFSK6x4EXhM1dhZ8gchNtG7JMqBtrHdoUFUWXD7dJnNSUze1GuyQr5sOs0aCvgGeI3o/OJW8f4ca7FDxmQ==
+
+lodash._stringtopath@~4.8.0:
+ version "4.8.0"
+ resolved "https://registry.yarnpkg.com/lodash._stringtopath/-/lodash._stringtopath-4.8.0.tgz#941bcf0e64266e5fc1d66fed0a6959544c576824"
+ integrity sha512-SXL66C731p0xPDC5LZg4wI5H+dJo/EO4KTqOMwLYCH3+FmmfAKJEZCm6ohGpI+T1xwsDsJCfL4OnhorllvlTPQ==
+ dependencies:
+ lodash._basetostring "~4.12.0"
+
lodash.assignwith@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb"
@@ -4234,7 +4392,15 @@ lodash.union@^4.6.0:
resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
integrity sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==
-lodash@^4.17.21:
+lodash.uniqby@4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.5.0.tgz#a3a17bbf62eeb6240f491846e97c1c4e2a5e1e21"
+ integrity sha512-IRt7cfTtHy6f1aRVA5n7kT8rgN3N1nH6MOWLcHfpWG2SH19E3JksLK38MktLxZDhlAjCP9jpIXkOnRXlu6oByQ==
+ dependencies:
+ lodash._baseiteratee "~4.7.0"
+ lodash._baseuniq "~4.6.0"
+
+lodash@^4.17.20, lodash@^4.17.21:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -4546,7 +4712,7 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
-ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
+ms@2.1.3, ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@@ -4607,6 +4773,11 @@ named-placeholders@^1.1.3:
dependencies:
lru-cache "^7.14.1"
+nanoclone@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/nanoclone/-/nanoclone-0.2.1.tgz#dd4090f8f1a110d26bb32c49ed2f5b9235209ed4"
+ integrity sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==
+
napi-build-utils@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806"
@@ -4808,6 +4979,11 @@ num-sort@^2.0.0:
resolved "https://registry.yarnpkg.com/num-sort/-/num-sort-2.1.0.tgz#1cbb37aed071329fdf41151258bc011898577a9b"
integrity sha512-1MQz1Ed8z2yckoBeSfkQHHO9K1yDRxxtotKSJ9yvcTUUxSvfvzEq5GwBrjjHEpMlq/k5gvXdmJ1SbYxWtpNoVg==
+numbered@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/numbered/-/numbered-1.1.0.tgz#9fcd79564c73a84b9574e8370c3d8e58fe3c133c"
+ integrity sha512-pv/ue2Odr7IfYOO0byC1KgBI10wo5YDauLhxY6/saNzAdAs0r1SotGCPzzCLNPL0xtrAwWRialLu23AAu9xO1g==
+
object-assign@^4, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@@ -5031,6 +5207,11 @@ p-finally@^1.0.0:
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
+p-is-promise@3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-3.0.0.tgz#58e78c7dfe2e163cf2a04ff869e7c1dba64a5971"
+ integrity sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ==
+
p-limit@^3.0.2:
version "3.1.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
@@ -5068,13 +5249,20 @@ p-retry@4:
"@types/retry" "0.12.0"
retry "^0.13.1"
-p-timeout@^3.2.0:
+p-timeout@^3.0.0, p-timeout@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
dependencies:
p-finally "^1.0.0"
+p-wait-for@3:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/p-wait-for/-/p-wait-for-3.2.0.tgz#640429bcabf3b0dd9f492c31539c5718cb6a3f1f"
+ integrity sha512-wpgERjNkLrBiFmkMEjuZJEWKKDrNfHCKA1OhyN1wg1FrLkULbviEy6py1AyJUgZ72YWFbZ38FIpnqvVqAlDUwA==
+ dependencies:
+ p-timeout "^3.0.0"
+
pad-left@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/pad-left/-/pad-left-2.1.0.tgz#16e6a3b2d44a8e138cb0838cc7cb403a4fc9e994"
@@ -5318,6 +5506,11 @@ prop-types@^15.8.1:
object-assign "^4.1.1"
react-is "^16.13.1"
+property-expr@^2.0.4:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.6.tgz#f77bc00d5928a6c748414ad12882e83f24aec1e8"
+ integrity sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA==
+
protobufjs@^6.8.8:
version "6.11.4"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.4.tgz#29a412c38bf70d89e537b6d02d904a6f448173aa"
@@ -5507,6 +5700,11 @@ reflect.getprototypeof@^1.0.4:
globalthis "^1.0.3"
which-builtin-type "^1.1.3"
+regenerator-runtime@^0.14.0:
+ version "0.14.1"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
+ integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
+
regexp.prototype.flags@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334"
@@ -5627,6 +5825,11 @@ safe-stable-stringify@^2.3.1:
resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886"
integrity sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==
+safe-timers@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/safe-timers/-/safe-timers-1.1.0.tgz#c58ae8325db8d3b067322f0a4ef3a0cad67aad83"
+ integrity sha512-9aqY+v5eMvmRaluUEtdRThV1EjlSElzO7HuCj0sTW9xvp++8iJ9t/RWGNWV6/WHcUJLHpyT2SNf/apoKTU2EpA==
+
"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
@@ -6194,6 +6397,11 @@ toidentifier@1.0.1:
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
+toposort@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330"
+ integrity sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==
+
touch@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
@@ -6571,7 +6779,7 @@ winston-transport@^4.7.0:
readable-stream "^3.6.0"
triple-beam "^1.3.0"
-winston@^3.7.2, winston@^3.9.0:
+winston@^3.13.0, winston@^3.7.2, winston@^3.9.0:
version "3.13.0"
resolved "https://registry.yarnpkg.com/winston/-/winston-3.13.0.tgz#e76c0d722f78e04838158c61adc1287201de7ce3"
integrity sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==
@@ -6677,6 +6885,19 @@ yocto-queue@^0.1.0:
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
+yup@0.32.9:
+ version "0.32.9"
+ resolved "https://registry.yarnpkg.com/yup/-/yup-0.32.9.tgz#9367bec6b1b0e39211ecbca598702e106019d872"
+ integrity sha512-Ci1qN+i2H0XpY7syDQ0k5zKQ/DoxO0LzPg8PAR/X4Mpj6DqaeCoIYEEjDJwhArh3Fa7GWbQQVDZKeXYlSH4JMg==
+ dependencies:
+ "@babel/runtime" "^7.10.5"
+ "@types/lodash" "^4.14.165"
+ lodash "^4.17.20"
+ lodash-es "^4.17.15"
+ nanoclone "^0.2.1"
+ property-expr "^2.0.4"
+ toposort "^2.0.2"
+
zip-stream@^4.1.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.1.tgz#1337fe974dbaffd2fa9a1ba09662a66932bd7135"