[BETA] Live document sync (#1719)

* WIP: background workers for live document sync

* Add ability to re-embed specific documents across many workspaces via a background queue
Background worker is gated behind an experimental system-setting flag that must be explicitly enabled
UI for watching/unwatching documents that are embedded.
TODO: UI to easily manage all background tasks and see run results
TODO: UI to enable this feature and background endpoints to manage it

* create frontend views and paths
Move elements to correct experimental scope

* update migration to delete runs on removal of watched document

* Add watch support to YouTube transcripts (#1716)

* Add watch support to YouTube transcripts
refactor how sync is done for supported types

* Watch specific files in Confluence space (#1718)

Add failure-prune check for runs

* create tmp workflow modifications for beta image

* create tmp workflow modifications for beta image

* create tmp workflow modifications for beta image

* dual build
update copy of alert modals

* update job interval

* Add support for live-sync of GitHub files

* update copy for document sync feature

* hide Experimental features from UI

* update docs links

* [FEAT] Implement new settings menu for experimental features (#1735)

* implement new settings menu for experimental features

* remove unused context save bar

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>

* don't run job on boot

* unset workflow changes

* Add persistent encryption service
Relay key to collector so persistent encryption can be used
Encrypt any private data in chunkSources used for replay during resync jobs (flow sketched below)

* update JSDoc

* Linting and organization

* update modal copy for feature

---------

Co-authored-by: Sean Hatfield <seanhatfield5@gmail.com>
Commit dc4ad6b5a9 (parent c176fe38d2)
Authored by Timothy Carambat on 2024-06-21 13:38:50 -07:00; committed by GitHub
58 changed files with 2885 additions and 304 deletions
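In short, the flow this PR implements (sketched from the diffs below; the exact key derivation is an assumption based on the new SIG_KEY/SIG_SALT env vars):
1. The server holds a persistent EncryptionManager key (presumably seeded from SIG_KEY/SIG_SALT).
2. On each collector request, the server private-key encrypts that key with the rolling RSA comKey and relays it via the X-Payload-Signer header.
3. The collector's setDataSigner middleware public-key decrypts the header and builds an EncryptionWorker from the recovered key.
4. Connectors embed replay credentials in chunkSource as an encrypted "payload" query param (AES-256-CBC).
5. On resync, expandPayload() decrypts that payload so the original fetch can be replayed with the same credentials.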

View File

@@ -11,6 +11,7 @@
   "cooldowns",
   "Deduplicator",
   "Dockerized",
+  "docpath",
   "elevenlabs",
   "Embeddable",
   "epub",

View File

@@ -1,18 +1,41 @@
+const { setDataSigner } = require("../middleware/setDataSigner");
 const { verifyPayloadIntegrity } = require("../middleware/verifyIntegrity");
 const { reqBody } = require("../utils/http");
 const { validURL } = require("../utils/url");
+const RESYNC_METHODS = require("./resync");

 function extensions(app) {
   if (!app) return;

   app.post(
-    "/ext/github-repo",
-    [verifyPayloadIntegrity],
+    "/ext/resync-source-document",
+    [verifyPayloadIntegrity, setDataSigner],
     async function (request, response) {
       try {
-        const loadGithubRepo = require("../utils/extensions/GithubRepo");
+        const { type, options } = reqBody(request);
+        if (!RESYNC_METHODS.hasOwnProperty(type))
+          throw new Error(`Type "${type}" is not a valid type to sync.`);
+        return await RESYNC_METHODS[type](options, response);
+      } catch (e) {
+        console.error(e);
+        response.status(200).json({
+          success: false,
+          content: null,
+          reason: e.message || "A processing error occurred.",
+        });
+      }
+      return;
+    }
+  );
+
+  app.post(
+    "/ext/github-repo",
+    [verifyPayloadIntegrity, setDataSigner],
+    async function (request, response) {
+      try {
+        const { loadGithubRepo } = require("../utils/extensions/GithubRepo");
         const { success, reason, data } = await loadGithubRepo(
-          reqBody(request)
+          reqBody(request),
+          response,
         );
         response.status(200).json({
           success,
@@ -67,7 +90,7 @@ function extensions(app) {
     [verifyPayloadIntegrity],
     async function (request, response) {
       try {
-        const loadYouTubeTranscript = require("../utils/extensions/YoutubeTranscript");
+        const { loadYouTubeTranscript } = require("../utils/extensions/YoutubeTranscript");
         const { success, reason, data } = await loadYouTubeTranscript(
           reqBody(request)
         );
@@ -108,12 +131,13 @@ function extensions(app) {
   app.post(
     "/ext/confluence",
-    [verifyPayloadIntegrity],
+    [verifyPayloadIntegrity, setDataSigner],
     async function (request, response) {
       try {
-        const loadConfluence = require("../utils/extensions/Confluence");
+        const { loadConfluence } = require("../utils/extensions/Confluence");
         const { success, reason, data } = await loadConfluence(
-          reqBody(request)
+          reqBody(request),
+          response
         );
         response.status(200).json({ success, reason, data });
       } catch (e) {

View File

@@ -0,0 +1,113 @@
const { getLinkText } = require("../../processLink");
/**
* Fetches the content of a raw link. Returns the content as a text string of the link in question.
* @param {object} data - metadata from document (eg: link)
* @param {import("../../middleware/setDataSigner").ResponseWithSigner} response
*/
async function resyncLink({ link }, response) {
if (!link) throw new Error('Invalid link provided');
try {
const { success, reason = null, content = null } = await getLinkText(link);
if (!success) throw new Error(`Failed to sync link content. ${reason}`);
response.status(200).json({ success, content });
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
content: null,
});
}
}
/**
* Fetches the content of a YouTube link. Returns the content as a text string of the video in question.
* We offer this as there may be some videos where a transcription could be manually edited after initial scraping
* but in general - transcriptions often never change.
* @param {object} data - metadata from document (eg: link)
* @param {import("../../middleware/setDataSigner").ResponseWithSigner} response
*/
async function resyncYouTube({ link }, response) {
if (!link) throw new Error('Invalid link provided');
try {
const { fetchVideoTranscriptContent } = require("../../utils/extensions/YoutubeTranscript");
const { success, reason, content } = await fetchVideoTranscriptContent({ url: link });
if (!success) throw new Error(`Failed to sync YouTube video transcript. ${reason}`);
response.status(200).json({ success, content });
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
content: null,
});
}
}
/**
* Fetches the content of a specific confluence page via its chunkSource.
* Returns the content as a text string of the page in question and only that page.
* @param {object} data - metadata from document (eg: chunkSource)
* @param {import("../../middleware/setDataSigner").ResponseWithSigner} response
*/
async function resyncConfluence({ chunkSource }, response) {
if (!chunkSource) throw new Error('Invalid source property provided');
try {
// Confluence data is `payload` encrypted. So we need to expand its
// encrypted payload back into query params so we can reFetch the page with same access token/params.
const source = response.locals.encryptionWorker.expandPayload(chunkSource);
const { fetchConfluencePage } = require("../../utils/extensions/Confluence");
const { success, reason, content } = await fetchConfluencePage({
pageUrl: `https:${source.pathname}`, // need to add back the real protocol
baseUrl: source.searchParams.get('baseUrl'),
accessToken: source.searchParams.get('token'),
username: source.searchParams.get('username'),
});
if (!success) throw new Error(`Failed to sync Confluence page content. ${reason}`);
response.status(200).json({ success, content });
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
content: null,
});
}
}
/**
* Fetches the content of a specific GitHub file via its chunkSource.
* Returns the content as a text string of the file in question and only that file.
* @param {object} data - metadata from document (eg: chunkSource)
* @param {import("../../middleware/setDataSigner").ResponseWithSigner} response
*/
async function resyncGithub({ chunkSource }, response) {
if (!chunkSource) throw new Error('Invalid source property provided');
try {
// Github file data is `payload` encrypted (might contain PAT). So we need to expand its
// encrypted payload back into query params so we can reFetch the page with same access token/params.
const source = response.locals.encryptionWorker.expandPayload(chunkSource);
const { fetchGithubFile } = require("../../utils/extensions/GithubRepo");
const { success, reason, content } = await fetchGithubFile({
repoUrl: `https:${source.pathname}`, // need to add back the real protocol
branch: source.searchParams.get('branch'),
accessToken: source.searchParams.get('pat'),
sourceFilePath: source.searchParams.get('path'),
});
if (!success) throw new Error(`Failed to sync Github file content. ${reason}`);
response.status(200).json({ success, content });
} catch (e) {
console.error(e);
response.status(200).json({
success: false,
content: null,
});
}
}
module.exports = {
link: resyncLink,
youtube: resyncYouTube,
confluence: resyncConfluence,
github: resyncGithub,
}
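Since the dispatcher in the collector extensions keys off "type", a resync request body pairs one of the exported keys above with the options its handler expects. A hedged sketch of a caller (the real caller is the AnythingLLM server, which also attaches the payload-integrity and X-Payload-Signer headers; the collector URL and port here are illustrative):

// Illustrative only: not the actual server-side sync job.
await fetch("http://localhost:8888/ext/resync-source-document", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    type: "youtube", // one of: link, youtube, confluence, github
    options: { link: "https://www.youtube.com/watch?v=VIDEO_ID" },
  }),
});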

View File

@@ -0,0 +1,41 @@
const { EncryptionWorker } = require("../utils/EncryptionWorker");
const { CommunicationKey } = require("../utils/comKey");
/**
* Express Response Object interface with defined encryptionWorker attached to locals property.
* @typedef {import("express").Response & import("express").Response['locals'] & {encryptionWorker: EncryptionWorker} } ResponseWithSigner
*/
// You can use this middleware to assign the EncryptionWorker to the response locals
// property so that it can be used to encrypt/decrypt arbitrary data via the response object.
// eg: Encrypting API keys in chunk sources.
// The way this functions is that the rolling RSA Communication Key is used server-side to private-key encrypt the raw
// key of the persistent EncryptionManager credentials. Since EncryptionManager credentials do _not_ roll, we should not send them
// even between server<>collector in plaintext because if the user configured the server/collector to be public they could technically
// be exposing the key in transit via the X-Payload-Signer header. Even if this risk is minimal we should not do this.
// This middleware uses the CommunicationKey public key to first decrypt the base64 representation of the EncryptionManager credentials
// and then loads that in to the EncryptionWorker as a buffer so we can use the same credentials across the system. Should we ever break the
// collector out into its own service this would still work without SSL/TLS.
/**
*
* @param {import("express").Request} request
* @param {import("express").Response} response
* @param {import("express").NextFunction} next
*/
function setDataSigner(request, response, next) {
const comKey = new CommunicationKey();
const encryptedPayloadSigner = request.header("X-Payload-Signer");
if (!encryptedPayloadSigner) console.log('Failed to find signed-payload to set encryption worker! Encryption calls will fail.');
const decryptedPayloadSignerKey = !!encryptedPayloadSigner ? comKey.decrypt(encryptedPayloadSigner) : "";
const encryptionWorker = new EncryptionWorker(decryptedPayloadSignerKey);
response.locals.encryptionWorker = encryptionWorker;
next();
}
module.exports = {
setDataSigner
}
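For context, the server side of this handshake is the mirror image: it private-key encrypts the persistent EncryptionManager key with the rolling RSA pair and sends the result as the X-Payload-Signer header. A minimal sketch with plain Node crypto (the key loading and function name are illustrative, not the exact server implementation):

const crypto = require("crypto");
const fs = require("fs");

// Illustrative only: encrypt the persistent key (base64 string) with the rolling RSA
// private key so the collector can recover it via CommunicationKey.decrypt() in the
// comKey diff further below.
function signPayloadKey(persistentKeyBase64, privateKeyPath) {
  const privateKey = fs.readFileSync(privateKeyPath, "utf8");
  return crypto
    .privateEncrypt(privateKey, Buffer.from(persistentKeyBase64))
    .toString("base64");
}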

View File

@@ -0,0 +1,77 @@
const crypto = require("crypto");
// Differs from EncryptionManager in that it does not set or define the keys that will be used
// to encrypt or read data; it must be told the key (as a base64 string) explicitly on creation.
// This key should be the same `key` that is used by the EncryptionManager class.
class EncryptionWorker {
constructor(presetKeyBase64 = "") {
this.key = Buffer.from(presetKeyBase64, "base64");
this.algorithm = "aes-256-cbc";
this.separator = ":";
}
log(text, ...args) {
console.log(`\x1b[36m[EncryptionWorker]\x1b[0m ${text}`, ...args);
}
/**
* Given a chunk source, parse its payload query param and expand that object back into the URL
* as additional query params
* @param {string} chunkSource
* @returns {URL} Javascript URL object with query params decrypted from payload query param.
*/
expandPayload(chunkSource = "") {
try {
const url = new URL(chunkSource);
if (!url.searchParams.has("payload")) return url;
const decryptedPayload = this.decrypt(url.searchParams.get("payload"));
const encodedParams = JSON.parse(decryptedPayload);
url.searchParams.delete("payload"); // remove payload prop
// Add all query params needed to replay as query params
Object.entries(encodedParams).forEach(([key, value]) =>
url.searchParams.append(key, value)
);
return url;
} catch (e) {
console.error(e);
}
return new URL(chunkSource);
}
encrypt(plainTextString = null) {
try {
if (!plainTextString)
throw new Error("Empty string is not valid for this method.");
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(this.algorithm, this.key, iv);
const encrypted = cipher.update(plainTextString, "utf8", "hex");
return [
encrypted + cipher.final("hex"),
Buffer.from(iv).toString("hex"),
].join(this.separator);
} catch (e) {
this.log(e);
return null;
}
}
decrypt(encryptedString) {
try {
const [encrypted, iv] = encryptedString.split(this.separator);
if (!iv) throw new Error("IV not found");
const decipher = crypto.createDecipheriv(
this.algorithm,
this.key,
Buffer.from(iv, "hex")
);
return decipher.update(encrypted, "hex", "utf8") + decipher.final("utf8");
} catch (e) {
this.log(e);
return null;
}
}
}
module.exports = { EncryptionWorker };
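A quick usage sketch of the worker's round trip (the key is a stand-in for the relayed EncryptionManager key; the chunk source mirrors the format the connectors below generate):

const crypto = require("crypto");
const { EncryptionWorker } = require("./EncryptionWorker"); // import path assumed

const presetKey = crypto.randomBytes(32).toString("base64"); // stand-in persistent key
const worker = new EncryptionWorker(presetKey);

// encrypt() emits "<hexCipher>:<hexIV>", which is safe to embed as a query param value.
const payload = worker.encrypt(JSON.stringify({ token: "abc" }));
const url = worker.expandPayload(
  `confluence://https://sub.atlassian.net/wiki/spaces/A/pages/1?payload=${payload}`
);
console.log(url.searchParams.get("token")); // "abc"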

View File

@@ -40,6 +40,15 @@ class CommunicationKey {
     } catch {}
     return false;
   }
+
+  // Use the rolling public-key to decrypt arbitrary data that was encrypted via the private key on the server side CommunicationKey class
+  // that we know was done with the same key-pair and the given input is in base64 format already.
+  // Returns plaintext string of the data that was encrypted.
+  decrypt(base64String = "") {
+    return crypto
+      .publicDecrypt(this.#readPublicKey(), Buffer.from(base64String, "base64"))
+      .toString();
+  }
 }

 module.exports = { CommunicationKey };

View File

@@ -9,7 +9,13 @@ const {
   ConfluencePagesLoader,
 } = require("langchain/document_loaders/web/confluence");

-async function loadConfluence({ pageUrl, username, accessToken }) {
+/**
+ * Load Confluence documents from a spaceID and Confluence credentials
+ * @param {object} args - forwarded request body params
+ * @param {import("../../../middleware/setDataSigner").ResponseWithSigner} response - Express response object with encryptionWorker
+ * @returns
+ */
+async function loadConfluence({ pageUrl, username, accessToken }, response) {
   if (!pageUrl || !username || !accessToken) {
     return {
       success: false,
@@ -79,7 +85,10 @@ async function loadConfluence({ pageUrl, username, accessToken }) {
       docAuthor: subdomain,
       description: doc.metadata.title,
       docSource: `${subdomain} Confluence`,
-      chunkSource: `confluence://${doc.metadata.url}`,
+      chunkSource: generateChunkSource(
+        { doc, baseUrl, accessToken, username },
+        response.locals.encryptionWorker
+      ),
       published: new Date().toLocaleString(),
       wordCount: doc.pageContent.split(" ").length,
       pageContent: doc.pageContent,
@@ -106,6 +115,82 @@ async function loadConfluence({ pageUrl, username, accessToken }) {
   };
 }
/**
* Gets the page content from a specific Confluence page, not all pages in a workspace.
* @returns
*/
async function fetchConfluencePage({
pageUrl,
baseUrl,
username,
accessToken,
}) {
if (!pageUrl || !baseUrl || !username || !accessToken) {
return {
success: false,
content: null,
reason:
"You need either a username and access token, or a personal access token (PAT), to use the Confluence connector.",
};
}
const { valid, result } = validSpaceUrl(pageUrl);
if (!valid) {
return {
success: false,
content: null,
reason:
"Confluence space URL is not in the expected format of https://domain.atlassian.net/wiki/space/~SPACEID/* or https://customDomain/wiki/space/~SPACEID/*",
};
}
console.log(`-- Working Confluence Page ${pageUrl} --`);
const { spaceKey } = result;
const loader = new ConfluencePagesLoader({
baseUrl,
spaceKey,
username,
accessToken,
});
const { docs, error } = await loader
.load()
.then((docs) => {
return { docs, error: null };
})
.catch((e) => {
return {
docs: [],
error: e.message?.split("Error:")?.[1] || e.message,
};
});
if (!docs.length || !!error) {
return {
success: false,
reason: error ?? "No pages found for that Confluence space.",
content: null,
};
}
const targetDocument = docs.find(
(doc) => doc.pageContent && doc.metadata.url === pageUrl
);
if (!targetDocument) {
return {
success: false,
reason: "Target page could not be found in Confluence space.",
content: null,
};
}
return {
success: true,
reason: null,
content: targetDocument.pageContent,
};
}
 /**
  * A match result for a url-pattern of a Confluence URL
  * @typedef {Object} ConfluenceMatchResult
@@ -195,4 +280,29 @@ function validSpaceUrl(spaceUrl = "") {
   return { valid: false, result: null };
 }

-module.exports = loadConfluence;
+/**
* Generate the full chunkSource for a specific Confluence page so that we can resync it later.
* This data is encrypted into a single `payload` query param so we can replay credentials later
* since this was encrypted with the systems persistent password and salt.
* @param {object} chunkSourceInformation
* @param {import("../../EncryptionWorker").EncryptionWorker} encryptionWorker
* @returns {string}
*/
function generateChunkSource(
{ doc, baseUrl, accessToken, username },
encryptionWorker
) {
const payload = {
baseUrl,
token: accessToken,
username,
};
return `confluence://${doc.metadata.url}?payload=${encryptionWorker.encrypt(
JSON.stringify(payload)
)}`;
}
module.exports = {
loadConfluence,
fetchConfluencePage,
};
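The stored chunkSource and the resync handler shown earlier form a round trip; a sketch (doc, baseUrl, and the worker instance are assumed to be in scope, credentials illustrative):

// What generateChunkSource() stores is exactly what resyncConfluence() later
// expands to replay the page fetch with the same credentials.
const chunkSource = generateChunkSource(
  { doc, baseUrl, accessToken: "token-123", username: "me@example.com" },
  encryptionWorker
);
// Later, inside a sync job:
const source = encryptionWorker.expandPayload(chunkSource);
source.searchParams.get("token");    // "token-123"
source.searchParams.get("username"); // "me@example.com"
`https:${source.pathname}`;          // original page URL with its protocol restored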

View File

@@ -150,6 +150,36 @@ class RepoLoader {
     this.branches = [...new Set(branches.flat())];
     return this.#branchPrefSort(this.branches);
   }
+
async fetchSingleFile(sourceFilePath) {
try {
return await fetch(
`https://api.github.com/repos/${this.author}/${this.project}/contents/${sourceFilePath}?ref=${this.branch}`,
{
method: "GET",
headers: {
Accept: "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
...(!!this.accessToken
? { Authorization: `Bearer ${this.accessToken}` }
: {}),
},
}
)
.then((res) => {
if (res.ok) return res.json();
throw new Error(`Failed to fetch from Github API: ${res.statusText}`);
})
.then((json) => {
if (json.hasOwnProperty("status") || !json.hasOwnProperty("content"))
throw new Error(json?.message || "missing content");
return atob(json.content);
});
} catch (e) {
console.error(`RepoLoader.fetchSingleFile`, e);
return null;
}
}
 }

 module.exports = RepoLoader;

View File

@@ -6,7 +6,13 @@ const { v4 } = require("uuid");
 const { writeToServerDocuments } = require("../../files");
 const { tokenizeString } = require("../../tokenizer");

-async function loadGithubRepo(args) {
+/**
+ * Load in a Github Repo recursively or just the top level if no PAT is provided
+ * @param {object} args - forwarded request body params
+ * @param {import("../../../middleware/setDataSigner").ResponseWithSigner} response - Express response object with encryptionWorker
+ * @returns
+ */
+async function loadGithubRepo(args, response) {
   const repo = new RepoLoader(args);
   await repo.init();
@@ -52,7 +58,11 @@ async function loadGithubRepo(args) {
       docAuthor: repo.author,
       description: "No description found.",
       docSource: doc.metadata.source,
-      chunkSource: `link://${doc.metadata.repository}/blob/${doc.metadata.branch}/${doc.metadata.source}`,
+      chunkSource: generateChunkSource(
+        repo,
+        doc,
+        response.locals.encryptionWorker
+      ),
       published: new Date().toLocaleString(),
       wordCount: doc.pageContent.split(" ").length,
       pageContent: doc.pageContent,
@@ -81,4 +91,69 @@ async function loadGithubRepo(args) {
   };
 }

-module.exports = loadGithubRepo;
+/**
* Gets the page content from a specific source file in a given Github repo, not all items in the repo.
* @returns
*/
async function fetchGithubFile({
repoUrl,
branch,
accessToken = null,
sourceFilePath,
}) {
const repo = new RepoLoader({
repo: repoUrl,
branch,
accessToken,
});
await repo.init();
if (!repo.ready)
return {
success: false,
content: null,
reason: "Could not prepare Github repo for loading! Check URL or PAT.",
};
console.log(
`-- Working Github ${repo.author}/${repo.project}:${repo.branch} file:${sourceFilePath} --`
);
const fileContent = await repo.fetchSingleFile(sourceFilePath);
if (!fileContent) {
return {
success: false,
reason: "Target file returned a null content response.",
content: null,
};
}
return {
success: true,
reason: null,
content: fileContent,
};
}
/**
* Generate the full chunkSource for a specific file so that we can resync it later.
* This data is encrypted into a single `payload` query param so we can replay credentials later
* since this was encrypted with the systems persistent password and salt.
* @param {RepoLoader} repo
* @param {import("@langchain/core/documents").Document} doc
* @param {import("../../EncryptionWorker").EncryptionWorker} encryptionWorker
* @returns {string}
*/
function generateChunkSource(repo, doc, encryptionWorker) {
const payload = {
owner: repo.author,
project: repo.project,
branch: repo.branch,
path: doc.metadata.source,
pat: !!repo.accessToken ? repo.accessToken : null,
};
return `github://${repo.repo}?payload=${encryptionWorker.encrypt(
JSON.stringify(payload)
)}`;
}
module.exports = { loadGithubRepo, fetchGithubFile };

View File

@@ -26,11 +26,13 @@ function validYoutubeVideoUrl(link) {
   return false;
 }

-async function loadYouTubeTranscript({ url }) {
+async function fetchVideoTranscriptContent({ url }) {
   if (!validYoutubeVideoUrl(url)) {
     return {
       success: false,
       reason: "Invalid URL. Should be youtu.be or youtube.com/watch.",
+      content: null,
+      metadata: {},
     };
   }
@@ -52,6 +54,8 @@ async function loadYouTubeTranscript({ url }) {
     return {
       success: false,
       reason: error ?? "No transcript found for that YouTube video.",
+      content: null,
+      metadata: {},
     };
   }
@@ -61,9 +65,30 @@ async function loadYouTubeTranscript({ url }) {
     return {
       success: false,
       reason: "No transcript could be parsed for that YouTube video.",
+      content: null,
+      metadata: {},
     };
   }

+  return {
+    success: true,
+    reason: null,
+    content,
+    metadata,
+  };
+}
+
+async function loadYouTubeTranscript({ url }) {
+  const transcriptResults = await fetchVideoTranscriptContent({ url });
+  if (!transcriptResults.success) {
+    return {
+      success: false,
+      reason:
+        transcriptResults.reason ||
+        "An unknown error occurred during transcription retrieval",
+    };
+  }
+  const { content, metadata } = transcriptResults;
+
   const outFolder = slugify(
     `${metadata.author} YouTube transcripts`
   ).toLowerCase();
@@ -86,7 +111,7 @@ async function loadYouTubeTranscript({ url }) {
       docAuthor: metadata.author,
       description: metadata.description,
       docSource: url,
-      chunkSource: `link://${url}`,
+      chunkSource: `youtube://${url}`,
       published: new Date().toLocaleString(),
       wordCount: content.split(" ").length,
       pageContent: content,
@@ -111,4 +136,7 @@ async function loadYouTubeTranscript({ url }) {
   };
 }

-module.exports = loadYouTubeTranscript;
+module.exports = {
+  loadYouTubeTranscript,
+  fetchVideoTranscriptContent,
+};

View File

@@ -2,6 +2,8 @@ SERVER_PORT=3001
 STORAGE_DIR="/app/server/storage"
 UID='1000'
 GID='1000'
+# SIG_KEY='passphrase' # Please generate random string at least 32 chars long.
+# SIG_SALT='salt' # Please generate random string at least 32 chars long.
 # JWT_SECRET="my-random-string-for-seeding" # Only needed if AUTH_TOKEN is set. Please generate random string at least 12 chars long.
 ###########################################

View File

@@ -24,6 +24,7 @@
     "js-levenshtein": "^1.1.6",
     "lodash.debounce": "^4.0.8",
     "markdown-it": "^13.0.1",
+    "moment": "^2.30.1",
     "pluralize": "^8.0.0",
     "react": "^18.2.0",
     "react-device-detect": "^2.2.2",

View File

@@ -57,6 +57,12 @@ const EmbedChats = lazy(() => import("@/pages/GeneralSettings/EmbedChats"));
 const PrivacyAndData = lazy(
   () => import("@/pages/GeneralSettings/PrivacyAndData")
 );
+const ExperimentalFeatures = lazy(
+  () => import("@/pages/Admin/ExperimentalFeatures")
+);
+const LiveDocumentSyncManage = lazy(
+  () => import("@/pages/Admin/ExperimentalFeatures/Features/LiveSync/manage")
+);

 export default function App() {
   return (
@@ -142,6 +148,10 @@ export default function App() {
             path="/settings/appearance"
             element={<ManagerRoute Component={GeneralAppearance} />}
           />
+          <Route
+            path="/settings/beta-features"
+            element={<AdminRoute Component={ExperimentalFeatures} />}
+          />
           <Route
             path="/settings/api-keys"
             element={<AdminRoute Component={GeneralApiKeys} />}
@@ -169,9 +179,16 @@ export default function App() {
           {/* Onboarding Flow */}
           <Route path="/onboarding" element={<OnboardingFlow />} />
           <Route path="/onboarding/:step" element={<OnboardingFlow />} />
+
+          {/* Experimental feature pages */}
+          {/* Live Document Sync feature */}
+          <Route
+            path="/settings/beta-features/live-document-sync/manage"
+            element={<AdminRoute Component={LiveDocumentSyncManage} />}
+          />
         </Routes>
+        <ToastContainer />
       </I18nextProvider>
-      <ToastContainer />
     </PfpProvider>
   </LogoProvider>
 </ContextWrapper>

View File

@@ -8,7 +8,7 @@ const uuidPattern =
 const jsonPattern = /\.json$/;

 // Function to strip UUID v4 and JSON from file names as that will impact search results.
-const stripUuidAndJsonFromString = (input = "") => {
+export const stripUuidAndJsonFromString = (input = "") => {
   return input
     ?.replace(uuidPattern, "") // remove v4 uuid
     ?.replace(jsonPattern, "") // remove trailing .json

View File

@@ -4,9 +4,10 @@ import {
   getFileExtension,
   middleTruncate,
 } from "@/utils/directories";
-import { ArrowUUpLeft, File, PushPin } from "@phosphor-icons/react";
+import { ArrowUUpLeft, Eye, File, PushPin } from "@phosphor-icons/react";
 import Workspace from "@/models/workspace";
 import showToast from "@/utils/toast";
+import System from "@/models/system";
 import { Tooltip } from "react-tooltip";

 export default function WorkspaceFileRow({
@@ -61,6 +62,11 @@ export default function WorkspaceFileRow({
         <div className="w-4 h-4 ml-2 flex-shrink-0" />
       ) : (
         <div className="flex gap-x-2 items-center">
+          <WatchForChanges
+            workspace={workspace}
+            docPath={`${folderName}/${item.name}`}
+            item={item}
+          />
           <PinItemToWorkspace
             workspace={workspace}
             docPath={`${folderName}/${item.name}`}
@@ -129,7 +135,7 @@ const PinItemToWorkspace = memo(({ workspace, docPath, item }) => {
     }
   };

-  if (!item) return <div />;
+  if (!item) return <div className="w-[16px] p-[2px] ml-2" />;

   return (
     <div
@@ -157,6 +163,78 @@ const PinItemToWorkspace = memo(({ workspace, docPath, item }) => {
   );
 });
const WatchForChanges = memo(({ workspace, docPath, item }) => {
const [watched, setWatched] = useState(item?.watched || false);
const [hover, setHover] = useState(false);
const watchEvent = new CustomEvent("watch_document_for_changes");
const updateWatchStatus = async () => {
try {
if (!watched) window.dispatchEvent(watchEvent);
const success =
await System.experimentalFeatures.liveSync.setWatchStatusForDocument(
workspace.slug,
docPath,
!watched
);
if (!success) {
showToast(
`Failed to ${!watched ? "watch" : "unwatch"} document.`,
"error",
{
clear: true,
}
);
return;
}
showToast(
`Document ${
!watched
? "will be watched for changes"
: "will no longer be watched for changes"
}.`,
"success",
{ clear: true }
);
setWatched(!watched);
} catch (error) {
showToast(`Failed to watch document. ${error.message}`, "error", {
clear: true,
});
return;
}
};
if (!item || !item.canWatch) return <div className="w-[16px] p-[2px] ml-2" />;
return (
<div
onMouseEnter={() => setHover(true)}
onMouseLeave={() => setHover(false)}
className="flex gap-x-2 items-center hover:bg-main-gradient p-[2px] rounded ml-2"
>
<Eye
data-tooltip-id={`watch-changes-${item.id}`}
data-tooltip-content={
watched ? "Stop watching for changes" : "Watch document for changes"
}
size={16}
onClick={updateWatchStatus}
weight={hover || watched ? "fill" : "regular"}
className="outline-none text-base font-bold flex-shrink-0 cursor-pointer"
/>
<Tooltip
id={`watch-changes-${item.id}`}
place="bottom"
delayShow={300}
className="tooltip invert !text-xs"
/>
</div>
);
});
const RemoveItemFromWorkspace = ({ item, onClick }) => {
  return (
    <div>

View File

@@ -3,8 +3,10 @@ import { dollarFormat } from "@/utils/numbers";
 import WorkspaceFileRow from "./WorkspaceFileRow";
 import { memo, useEffect, useState } from "react";
 import ModalWrapper from "@/components/ModalWrapper";
-import { PushPin } from "@phosphor-icons/react";
-import { SEEN_DOC_PIN_ALERT } from "@/utils/constants";
+import { Eye, PushPin } from "@phosphor-icons/react";
+import { SEEN_DOC_PIN_ALERT, SEEN_WATCH_ALERT } from "@/utils/constants";
+import paths from "@/utils/paths";
+import { Link } from "react-router-dom";

 function WorkspaceDirectory({
   workspace,
@@ -118,6 +120,7 @@ function WorkspaceDirectory({
         )}
       </div>
       <PinAlert />
+      <DocumentWatchAlert />
     </>
   );
 }
@@ -184,4 +187,75 @@ const PinAlert = memo(() => {
   );
 });
const DocumentWatchAlert = memo(() => {
const [showAlert, setShowAlert] = useState(false);
function dismissAlert() {
setShowAlert(false);
window.localStorage.setItem(SEEN_WATCH_ALERT, "1");
window.removeEventListener("watch_document_for_changes", handlePinEvent);
}
function handlePinEvent() {
if (!!window?.localStorage?.getItem(SEEN_WATCH_ALERT)) return;
setShowAlert(true);
}
useEffect(() => {
if (!window || !!window?.localStorage?.getItem(SEEN_WATCH_ALERT)) return;
window?.addEventListener("watch_document_for_changes", handlePinEvent);
}, []);
return (
<ModalWrapper isOpen={showAlert} noPortal={true}>
<div className="relative w-full max-w-2xl max-h-full">
<div className="relative bg-main-gradient rounded-lg shadow">
<div className="flex items-start justify-between p-4 rounded-t border-gray-500/50">
<div className="flex items-center gap-2">
<Eye
className="text-yellow-600 text-lg w-6 h-6"
weight="regular"
/>
<h3 className="text-xl font-semibold text-white">
What does watching a document do?
</h3>
</div>
</div>
<div className="w-full p-6 text-white text-md flex flex-col gap-y-2">
<p>
When you <b>watch</b> a document in AnythingLLM we will{" "}
<i>automatically</i> sync your document content from its original
source on regular intervals. This will automatically update the
content in every workspace where this file is managed.
</p>
<p>
This feature currently supports online-based content and will not
be available for manually uploaded documents.
</p>
<p>
You can manage what documents are watched from the{" "}
<Link
to={paths.experimental.liveDocumentSync.manage()}
className="text-blue-600 underline"
>
File manager
</Link>{" "}
admin view.
</p>
</div>
<div className="flex w-full justify-between items-center p-6 space-x-2 border-t rounded-b border-gray-500/50">
<button disabled={true} className="invisible" />
<button
onClick={dismissAlert}
className="border border-slate-200 px-4 py-2 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
>
Okay, got it
</button>
</div>
</div>
</div>
</ModalWrapper>
);
});
export default memo(WorkspaceDirectory);

View File

@@ -23,6 +23,7 @@ import {
   SplitVertical,
   Microphone,
   Robot,
+  Flask,
 } from "@phosphor-icons/react";
 import useUser from "@/hooks/useUser";
 import { USER_BACKGROUND_COLOR } from "@/utils/constants";
@@ -30,6 +31,7 @@ import { isMobile } from "react-device-detect";
 import Footer from "../Footer";
 import { Link } from "react-router-dom";
 import { useTranslation } from "react-i18next";
+import showToast from "@/utils/toast";

 export default function SettingsSidebar() {
   const { t } = useTranslation();
@@ -385,5 +387,63 @@ const SidebarOptions = ({ user = null, t }) => (
       flex={true}
       allowedRole={["admin"]}
     />
+    <HoldToReveal>
+      <Option
+        href={paths.settings.experimental()}
+        btnText="Experimental Features"
+        icon={<Flask className="h-5 w-5 flex-shrink-0" />}
+        user={user}
+        flex={true}
+        allowedRole={["admin"]}
+      />
+    </HoldToReveal>
   </>
 );
function HoldToReveal({ children, holdForMs = 3_000 }) {
let timeout;
const [showing, setShowing] = useState(
window.localStorage.getItem(
"anythingllm_experimental_feature_preview_unlocked"
)
);
useEffect(() => {
const onPress = (e) => {
if (!["Control", "Meta"].includes(e.key)) return;
timeout = setTimeout(() => {
setShowing(true);
showToast("Experimental feature previews unlocked!");
window.localStorage.setItem(
"anythingllm_experimental_feature_preview_unlocked",
"enabled"
);
window.removeEventListener("keypress", onPress);
window.removeEventListener("keyup", onRelease);
clearTimeout(timeout);
}, holdForMs);
};
const onRelease = (e) => {
if (!["Control", "Meta"].includes(e.key)) return;
if (showing) {
window.removeEventListener("keypress", onPress);
window.removeEventListener("keyup", onRelease);
clearTimeout(timeout);
return;
}
clearTimeout(timeout);
};
if (!showing) {
window.addEventListener("keydown", onPress);
window.addEventListener("keyup", onRelease);
}
return () => {
window.removeEventListener("keydown", onPress);
window.removeEventListener("keyup", onRelease);
};
}, []);
if (!showing) return null;
return children;
}

View File

@@ -0,0 +1,59 @@
import { API_BASE } from "@/utils/constants";
import { baseHeaders } from "@/utils/request";
const LiveDocumentSync = {
featureFlag: "experimental_live_file_sync",
toggleFeature: async function (updatedStatus = false) {
return await fetch(`${API_BASE}/experimental/toggle-live-sync`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ updatedStatus }),
})
.then((res) => {
if (!res.ok) throw new Error("Could not update status.");
return true;
})
.then((res) => res)
.catch((e) => {
console.error(e);
return false;
});
},
queues: async function () {
return await fetch(`${API_BASE}/experimental/live-sync/queues`, {
headers: baseHeaders(),
})
.then((res) => {
if (!res.ok) throw new Error("Could not update status.");
return res.json();
})
.then((res) => res?.queues || [])
.catch((e) => {
console.error(e);
return [];
});
},
// Should be in Workspaces but is here for now while in preview
setWatchStatusForDocument: async function (slug, docPath, watchStatus) {
return fetch(`${API_BASE}/workspace/${slug}/update-watch-status`, {
method: "POST",
headers: baseHeaders(),
body: JSON.stringify({ docPath, watchStatus }),
})
.then((res) => {
if (!res.ok) {
throw new Error(
res.statusText || "Error setting watch status for document."
);
}
return true;
})
.catch((e) => {
console.error(e);
return false;
});
},
};
export default LiveDocumentSync;

View File

@@ -1,6 +1,7 @@
 import { API_BASE, AUTH_TIMESTAMP, fullApiUrl } from "@/utils/constants";
 import { baseHeaders, safeJsonParse } from "@/utils/request";
 import DataConnector from "./dataConnector";
+import LiveDocumentSync from "./experimental/liveSync";

 const System = {
   cacheKeys: {
@@ -675,6 +676,9 @@ const System = {
       return false;
     });
   },
+  experimentalFeatures: {
+    liveSync: LiveDocumentSync,
+  },
 };

 export default System;

View File

@@ -0,0 +1,44 @@
import { useRef } from "react";
import { Trash } from "@phosphor-icons/react";
import { stripUuidAndJsonFromString } from "@/components/Modals/ManageWorkspace/Documents/Directory/utils";
import moment from "moment";
import System from "@/models/system";
export default function DocumentSyncQueueRow({ queue }) {
const rowRef = useRef(null);
const handleDelete = async () => {
rowRef?.current?.remove();
await System.experimentalFeatures.liveSync.setWatchStatusForDocument(
queue.workspaceDoc.workspace.slug,
queue.workspaceDoc.docpath,
false
);
};
return (
<>
<tr
ref={rowRef}
className="bg-transparent text-white text-opacity-80 text-sm font-medium"
>
<td scope="row" className="px-6 py-4 whitespace-nowrap">
{stripUuidAndJsonFromString(queue.workspaceDoc.filename)}
</td>
<td className="px-6 py-4">{moment(queue.lastSyncedAt).fromNow()}</td>
<td className="px-6 py-4">
{moment(queue.nextSyncAt).format("lll")}
<i className="text-xs px-2">({moment(queue.nextSyncAt).fromNow()})</i>
</td>
<td className="px-6 py-4">{moment(queue.createdAt).format("lll")}</td>
<td className="px-6 py-4 flex items-center gap-x-6">
<button
onClick={handleDelete}
className="font-medium text-red-300 px-2 py-1 rounded-lg hover:bg-red-800 hover:bg-opacity-20"
>
<Trash className="h-5 w-5" />
</button>
</td>
</tr>
</>
);
}

View File

@@ -0,0 +1,92 @@
import { useEffect, useState } from "react";
import Sidebar from "@/components/Sidebar";
import { isMobile } from "react-device-detect";
import * as Skeleton from "react-loading-skeleton";
import "react-loading-skeleton/dist/skeleton.css";
import System from "@/models/system";
import DocumentSyncQueueRow from "./DocumentSyncQueueRow";
export default function LiveDocumentSyncManager() {
return (
<div className="w-screen h-screen overflow-hidden bg-sidebar flex">
<Sidebar />
<div
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
>
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
<div className="items-center flex gap-x-4">
<p className="text-lg leading-6 font-bold text-white">
Watched documents
</p>
</div>
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
These are all the documents that are currently being watched in
your instance. The content of these documents will be periodically
synced.
</p>
</div>
<WatchedDocumentsContainer />
</div>
</div>
</div>
);
}
function WatchedDocumentsContainer() {
const [loading, setLoading] = useState(true);
const [queues, setQueues] = useState([]);
useEffect(() => {
async function fetchData() {
const _queues = await System.experimentalFeatures.liveSync.queues();
setQueues(_queues);
setLoading(false);
}
fetchData();
}, []);
if (loading) {
return (
<Skeleton.default
height="80vh"
width="100%"
highlightColor="#3D4147"
baseColor="#2C2F35"
count={1}
className="w-full p-4 rounded-b-2xl rounded-tr-2xl rounded-tl-sm mt-6"
containerClassName="flex w-full"
/>
);
}
return (
<table className="w-full text-sm text-left rounded-lg mt-6">
<thead className="text-white text-opacity-80 text-xs leading-[18px] font-bold uppercase border-white border-b border-opacity-60">
<tr>
<th scope="col" className="px-6 py-3 rounded-tl-lg">
Document Name
</th>
<th scope="col" className="px-6 py-3">
Last Synced
</th>
<th scope="col" className="px-6 py-3">
Time until next refresh
</th>
<th scope="col" className="px-6 py-3">
Created On
</th>
<th scope="col" className="px-6 py-3 rounded-tr-lg">
{" "}
</th>
</tr>
</thead>
<tbody>
{queues.map((queue) => (
<DocumentSyncQueueRow key={queue.id} queue={queue} />
))}
</tbody>
</table>
);
}

View File

@@ -0,0 +1,90 @@
import System from "@/models/system";
import paths from "@/utils/paths";
import showToast from "@/utils/toast";
import { ArrowSquareOut } from "@phosphor-icons/react";
import { useState } from "react";
import { Link } from "react-router-dom";
export default function LiveSyncToggle({ enabled = false, onToggle }) {
const [status, setStatus] = useState(enabled);
async function toggleFeatureFlag() {
const updated = await System.experimentalFeatures.liveSync.toggleFeature(
!status
);
if (!updated) {
showToast("Failed to update status of feature.", "error", {
clear: true,
});
return false;
}
setStatus(!status);
showToast(
`Live document content sync has been ${
!status ? "enabled" : "disabled"
}.`,
"success",
{ clear: true }
);
onToggle();
}
return (
<div className="p-4">
<div className="flex flex-col gap-y-6 max-w-[500px]">
<div className="flex items-center justify-between">
<h2 className="text-white text-md font-bold">
Automatic Document Content Sync
</h2>
<label className="relative inline-flex cursor-pointer items-center">
<input
type="checkbox"
onClick={toggleFeatureFlag}
checked={status}
className="peer sr-only pointer-events-none"
/>
<div className="pointer-events-none peer h-6 w-11 rounded-full bg-stone-400 after:absolute after:left-[2px] after:top-[2px] after:h-5 after:w-5 after:rounded-full after:shadow-xl after:border after:border-gray-600 after:bg-white after:box-shadow-md after:transition-all after:content-[''] peer-checked:bg-lime-300 peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-blue-800"></div>
</label>
</div>
<div className="flex flex-col space-y-4">
<p className="text-white/90 text-sm">
Enable the ability to specify a document to be "watched". Watched
documents' content will be regularly fetched and updated in
AnythingLLM.
</p>
<p className="text-white/90 text-sm">
Watched documents will automatically update in all workspaces they
are referenced in at the same time of update.
</p>
<p className="text-white/80 text-xs italic">
This feature only applies to web-based content, such as websites,
Confluence, YouTube, and GitHub files.
</p>
</div>
</div>
<div className="mt-8">
<ul className="space-y-2">
<li>
<a
href="https://docs.useanything.com/beta-preview/active-features/live-document-sync"
target="_blank"
className="text-sm text-blue-400 hover:underline flex items-center gap-x-1"
>
<ArrowSquareOut size={14} />
<span>Feature Documentation and Warnings</span>
</a>
</li>
<li>
<Link
to={paths.experimental.liveDocumentSync.manage()}
className="text-sm text-blue-400 hover:underline"
>
Manage Watched Documents &rarr;
</Link>
</li>
</ul>
</div>
</div>
);
}

View File

@@ -0,0 +1,9 @@
import LiveSyncToggle from "./Features/LiveSync/toggle";
export const configurableFeatures = {
experimental_live_file_sync: {
title: "Live Document Sync",
component: LiveSyncToggle,
key: "experimental_live_file_sync",
},
};

View File

@@ -0,0 +1,280 @@
import { useEffect, useState } from "react";
import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import Admin from "@/models/admin";
import { FullScreenLoader } from "@/components/Preloader";
import { CaretRight, Flask } from "@phosphor-icons/react";
import { configurableFeatures } from "./features";
import ModalWrapper from "@/components/ModalWrapper";
import paths from "@/utils/paths";
import showToast from "@/utils/toast";
export default function ExperimentalFeatures() {
const [featureFlags, setFeatureFlags] = useState({});
const [loading, setLoading] = useState(true);
const [selectedFeature, setSelectedFeature] = useState(
"experimental_live_file_sync"
);
useEffect(() => {
async function fetchSettings() {
setLoading(true);
const { settings } = await Admin.systemPreferences();
setFeatureFlags(settings?.feature_flags ?? {});
setLoading(false);
}
fetchSettings();
}, []);
const refresh = async () => {
const { settings } = await Admin.systemPreferences();
setFeatureFlags(settings?.feature_flags ?? {});
};
if (loading) {
return (
<div
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] w-full h-full flex justify-center items-center"
>
<FullScreenLoader />
</div>
);
}
return (
<FeatureLayout>
<div className="flex-1 flex gap-x-6 p-4 mt-10">
{/* Feature settings nav */}
<div className="flex flex-col gap-y-[18px]">
<div className="text-white flex items-center gap-x-2">
<Flask size={24} />
<p className="text-lg font-medium">Experimental Features</p>
</div>
{/* Feature list */}
<FeatureList
features={configurableFeatures}
selectedFeature={selectedFeature}
handleClick={setSelectedFeature}
activeFeatures={Object.keys(featureFlags).filter(
(flag) => featureFlags[flag]
)}
/>
</div>
{/* Selected feature setting panel */}
<FeatureVerification>
<div className="flex-[2] flex flex-col gap-y-[18px] mt-10">
<div className="bg-[#303237] text-white rounded-xl flex-1 p-4">
{selectedFeature ? (
<SelectedFeatureComponent
feature={configurableFeatures[selectedFeature]}
settings={featureFlags}
refresh={refresh}
/>
) : (
<div className="flex flex-col items-center justify-center h-full text-white/60">
<Flask size={40} />
<p className="font-medium">Select an experimental feature</p>
</div>
)}
</div>
</div>
</FeatureVerification>
</div>
</FeatureLayout>
);
}
function FeatureLayout({ children }) {
return (
<div
id="workspace-feature-settings-container"
className="w-screen h-screen overflow-hidden bg-sidebar flex md:mt-0 mt-6"
>
<Sidebar />
<div
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] w-full h-full flex"
>
{children}
</div>
</div>
);
}
function FeatureList({
features = [],
selectedFeature = null,
handleClick = null,
activeFeatures = [],
}) {
if (Object.keys(features).length === 0) return null;
return (
<div
className={`bg-white/5 text-white rounded-xl ${
isMobile ? "w-full" : "min-w-[360px] w-fit"
}`}
>
{Object.entries(features).map(([feature, settings], index) => (
<div
key={feature}
className={`py-3 px-4 flex items-center justify-between ${
index === 0 ? "rounded-t-xl" : ""
} ${
index === Object.keys(features).length - 1
? "rounded-b-xl"
: "border-b border-white/10"
} cursor-pointer transition-all duration-300 hover:bg-white/5 ${
selectedFeature === feature ? "bg-white/10" : ""
}`}
onClick={() => handleClick?.(feature)}
>
<div className="text-sm font-light">{settings.title}</div>
<div className="flex items-center gap-x-2">
<div className="text-sm text-white/60 font-medium">
{activeFeatures.includes(settings.key) ? "On" : "Off"}
</div>
<CaretRight size={14} weight="bold" className="text-white/80" />
</div>
</div>
))}
</div>
);
}
function SelectedFeatureComponent({ feature, settings, refresh }) {
const Component = feature?.component;
return Component ? (
<Component
enabled={settings[feature.key]}
feature={feature.key}
onToggle={refresh}
/>
) : null;
}
function FeatureVerification({ children }) {
if (
!window.localStorage.getItem("anythingllm_tos_experimental_feature_set")
) {
function acceptTos(e) {
e.preventDefault();
window.localStorage.setItem(
"anythingllm_tos_experimental_feature_set",
"accepted"
);
showToast(
"Experimental Feature set enabled. Reloading the page.",
"success"
);
setTimeout(() => {
window.location.reload();
}, 2_500);
return;
}
return (
<>
<ModalWrapper isOpen={true}>
<form
onSubmit={acceptTos}
className="relative w-full max-w-2xl max-h-full"
>
<div className="relative bg-main-gradient rounded-lg shadow">
<div className="flex items-start justify-between p-4 border-b rounded-t border-gray-500/50">
<h3 className="text-xl font-semibold text-white">
Terms of use for experimental features
</h3>
</div>
<div className="p-6 space-y-6 flex h-full w-full">
<div className="w-full flex flex-col gap-y-4 text-white">
<p>
Experimental features of AnythingLLM are features that we
are piloting and are <b>opt-in</b>. We will proactively
warn you of any potential concerns before you approve any
feature.
</p>
<div>
<p>
Use of any feature on this page can result in, but is not
limited to, the following possibilities.
</p>
<ul className="list-disc ml-6 text-sm font-mono">
<li>Loss of data.</li>
<li>Change in quality of results.</li>
<li>Increased storage.</li>
<li>Increased resource consumption.</li>
<li>
Increased cost or use of any connected LLM or embedding
provider.
</li>
<li>Potential bugs or issues using AnythingLLM.</li>
</ul>
</div>
<div>
<p>
Use of an experimental feature also comes with the
following list of non-exhaustive conditions.
</p>
<ul className="list-disc ml-6 text-sm font-mono">
<li>Feature may not exist in future updates.</li>
<li>The feature being used is not currently stable.</li>
<li>
The feature may not be available in future versions,
configurations, or subscriptions of AnythingLLM.
</li>
<li>
Your privacy settings <b>will be honored</b> with use of
any beta feature.
</li>
<li>These conditions may change in future updates.</li>
</ul>
</div>
<p>
Access to any features requires approval of this modal. If
you would like to read more you can refer to{" "}
<a
href="https://docs.useanything.com/beta-preview/overview"
className="underline text-blue-500"
>
docs.useanything.com
</a>{" "}
or email{" "}
<a
href="mailto:team@mintplexlabs.com"
className="underline text-blue-500"
>
team@mintplexlabs.com
</a>
</p>
</div>
</div>
<div className="flex w-full justify-between items-center p-6 space-x-2 border-t rounded-b border-gray-500/50">
<a
href={paths.home()}
className="px-4 py-2 rounded-lg text-white hover:bg-stone-900 transition-all duration-300"
>
Reject & Close
</a>
<button
type="submit"
className="transition-all duration-300 border border-slate-200 px-4 py-2 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
>
I understand
</button>
</div>
</div>
</form>
</ModalWrapper>
{children}
</>
);
}
return <>{children}</>;
}

View File

@@ -5,6 +5,7 @@ export const AUTH_TOKEN = "anythingllm_authToken";
 export const AUTH_TIMESTAMP = "anythingllm_authTimestamp";
 export const COMPLETE_QUESTIONNAIRE = "anythingllm_completed_questionnaire";
 export const SEEN_DOC_PIN_ALERT = "anythingllm_pinned_document_alert";
+export const SEEN_WATCH_ALERT = "anythingllm_watched_document_alert";
 export const USER_BACKGROUND_COLOR = "bg-historical-msg-user";
 export const AI_BACKGROUND_COLOR = "bg-historical-msg-system";

View File

@@ -135,5 +135,13 @@ export default {
     embedChats: () => {
       return `/settings/embed-chats`;
     },
+    experimental: () => {
+      return `/settings/beta-features`;
+    },
+  },
+  experimental: {
+    liveDocumentSync: {
+      manage: () => `/settings/beta-features/live-document-sync/manage`,
+    },
   },
 };

View File

@@ -2464,6 +2464,11 @@ minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2:
   dependencies:
     brace-expansion "^1.1.7"

+moment@^2.30.1:
+  version "2.30.1"
+  resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae"
+  integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==
+
 ms@2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"

View File

@@ -1,5 +1,7 @@
 SERVER_PORT=3001
 JWT_SECRET="my-random-string-for-seeding" # Please generate random string at least 12 chars long.
+SIG_KEY='passphrase' # Please generate random string at least 32 chars long.
+SIG_SALT='salt' # Please generate random string at least 32 chars long.
 ###########################################
 ######## LLM API SElECTION ################

View File

@@ -358,6 +358,7 @@ function adminEndpoints(app) {
         custom_app_name:
           (await SystemSettings.get({ label: "custom_app_name" }))?.value ||
           null,
+        feature_flags: (await SystemSettings.getFeatureFlags()) || {},
       };
       response.status(200).json({ settings });
     } catch (e) {

View File

@@ -0,0 +1,10 @@
const { liveSyncEndpoints } = require("./liveSync");
// All endpoints here are not stable: they can move around, have breaking changes,
// or are opt-in features that are not fully released.
// When a feature is promoted it should be removed from here and added to the appropriate scope.
function experimentalEndpoints(router) {
liveSyncEndpoints(router);
}
module.exports = { experimentalEndpoints };

View File

@@ -0,0 +1,114 @@
const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
const { Document } = require("../../models/documents");
const { EventLogs } = require("../../models/eventLogs");
const { SystemSettings } = require("../../models/systemSettings");
const { Telemetry } = require("../../models/telemetry");
const { reqBody } = require("../../utils/http");
const {
featureFlagEnabled,
} = require("../../utils/middleware/featureFlagEnabled");
const {
flexUserRoleValid,
ROLES,
} = require("../../utils/middleware/multiUserProtected");
const { validWorkspaceSlug } = require("../../utils/middleware/validWorkspace");
const { validatedRequest } = require("../../utils/middleware/validatedRequest");
function liveSyncEndpoints(app) {
if (!app) return;
app.post(
"/experimental/toggle-live-sync",
[validatedRequest, flexUserRoleValid([ROLES.admin])],
async (request, response) => {
try {
const { updatedStatus = false } = reqBody(request);
const newStatus =
SystemSettings.validations.experimental_live_file_sync(updatedStatus);
const currentStatus =
(await SystemSettings.get({ label: "experimental_live_file_sync" }))
?.value || "disabled";
if (currentStatus === newStatus)
return response
.status(200)
.json({ liveSyncEnabled: newStatus === "enabled" });
// Already validated earlier - so can hot update.
await SystemSettings._updateSettings({
experimental_live_file_sync: newStatus,
});
if (newStatus === "enabled") {
await Telemetry.sendTelemetry("experimental_feature_enabled", {
feature: "live_file_sync",
});
await EventLogs.logEvent("experimental_feature_enabled", {
feature: "live_file_sync",
});
DocumentSyncQueue.bootWorkers();
} else {
DocumentSyncQueue.killWorkers();
}
response.status(200).json({ liveSyncEnabled: newStatus === "enabled" });
} catch (e) {
console.error(e);
response.status(500).end();
}
}
);
app.get(
"/experimental/live-sync/queues",
[
validatedRequest,
flexUserRoleValid([ROLES.admin]),
featureFlagEnabled(DocumentSyncQueue.featureKey),
],
async (_, response) => {
const queues = await DocumentSyncQueue.where(
{},
null,
{ createdAt: "asc" },
{
workspaceDoc: {
include: {
workspace: true,
},
},
}
);
response.status(200).json({ queues });
}
);
// Should be in workspace routes, but is here for now.
app.post(
"/workspace/:slug/update-watch-status",
[
validatedRequest,
flexUserRoleValid([ROLES.admin, ROLES.manager]),
validWorkspaceSlug,
featureFlagEnabled(DocumentSyncQueue.featureKey),
],
async (request, response) => {
try {
const { docPath, watchStatus = false } = reqBody(request);
const workspace = response.locals.workspace;
const document = await Document.get({
workspaceId: workspace.id,
docpath: docPath,
});
if (!document) return response.sendStatus(404);
await DocumentSyncQueue.toggleWatchStatus(document, watchStatus);
return response.status(200).end();
} catch (error) {
console.error("Error processing the watch status update:", error);
return response.status(500).end();
}
}
);
}
module.exports = { liveSyncEndpoints };
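For reference, a minimal sketch of how a client could exercise these endpoints; the `/api` prefix and bearer auth header are assumptions about the deployment, not part of this commit:

// Hedged usage sketch - base URL and auth header are assumptions.
const BASE = "http://localhost:3001/api";
const HEADERS = {
  "Content-Type": "application/json",
  Authorization: "Bearer <session-or-api-token>", // assumption: your deployment's auth
};

// Enable the experimental live sync feature (admin only).
await fetch(`${BASE}/experimental/toggle-live-sync`, {
  method: "POST",
  headers: HEADERS,
  body: JSON.stringify({ updatedStatus: true }),
});

// Watch a document in a workspace (admin/manager, gated by the feature flag).
await fetch(`${BASE}/workspace/my-workspace/update-watch-status`, {
  method: "POST",
  headers: HEADERS,
  body: JSON.stringify({
    docPath: "custom-documents/example.json", // hypothetical docpath
    watchStatus: true,
  }),
});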

View File

@ -22,6 +22,7 @@ const { bootHTTP, bootSSL } = require("./utils/boot");
const { workspaceThreadEndpoints } = require("./endpoints/workspaceThreads");
const { documentEndpoints } = require("./endpoints/document");
const { agentWebsocket } = require("./endpoints/agentWebsocket");
const { experimentalEndpoints } = require("./endpoints/experimental");

const app = express();
const apiRouter = express.Router();
const FILE_LIMIT = "3GB";
@ -54,6 +55,7 @@ embedManagementEndpoints(apiRouter);
utilEndpoints(apiRouter);
documentEndpoints(apiRouter);
agentWebsocket(apiRouter);
experimentalEndpoints(apiRouter);
developerEndpoints(app, apiRouter);

// Externally facing embedder endpoints

View File

@ -0,0 +1,30 @@
const path = require('node:path');
const fs = require('node:fs');
const { parentPort } = require('node:worker_threads');
const documentsPath =
process.env.NODE_ENV === "development"
? path.resolve(__dirname, `../../storage/documents`)
: path.resolve(process.env.STORAGE_DIR, `documents`);
function log(stringContent = '') {
if (parentPort) parentPort.postMessage(stringContent);
  else console.log(`parentPort is undefined. Debug: ${stringContent}`);
}
function conclude() {
if (parentPort) parentPort.postMessage('done');
else process.exit(0);
}
function updateSourceDocument(docPath = null, jsonContent = {}) {
const destinationFilePath = path.resolve(documentsPath, docPath);
fs.writeFileSync(destinationFilePath, JSON.stringify(jsonContent, null, 4), {
encoding: "utf-8",
});
}
module.exports = {
log,
conclude,
updateSourceDocument,
}

View File

@ -0,0 +1,153 @@
const { Document } = require('../models/documents.js');
const { DocumentSyncQueue } = require('../models/documentSyncQueue.js');
const { CollectorApi } = require('../utils/collectorApi');
const { fileData } = require("../utils/files");
const { log, conclude, updateSourceDocument } = require('./helpers/index.js');
const { getVectorDbClass } = require('../utils/helpers/index.js');
const { DocumentSyncRun } = require('../models/documentSyncRun.js');
(async () => {
try {
const queuesToProcess = await DocumentSyncQueue.staleDocumentQueues();
if (queuesToProcess.length === 0) {
log('No outstanding documents to sync. Exiting.');
return;
}
const collector = new CollectorApi();
if (!(await collector.online())) {
log('Could not reach collector API. Exiting.');
return;
}
log(`${queuesToProcess.length} watched documents have been found to be stale and will be updated now.`)
for (const queue of queuesToProcess) {
let newContent = null;
const document = queue.workspaceDoc;
const workspace = document.workspace;
const { metadata, type, source } = Document.parseDocumentTypeAndSource(document);
if (!metadata || !DocumentSyncQueue.validFileTypes.includes(type)) {
// Document is either broken, invalid, or not supported so drop it from future queues.
log(`Document ${document.filename} has no metadata, is broken, or invalid and has been removed from all future runs.`)
await DocumentSyncQueue.unwatch(document);
continue;
}
if (type === 'link' || type === 'youtube') {
const response = await collector.forwardExtensionRequest({
endpoint: "/ext/resync-source-document",
method: "POST",
body: JSON.stringify({
type,
options: { link: source }
})
});
newContent = response?.content;
}
if (type === 'confluence' || type === 'github') {
const response = await collector.forwardExtensionRequest({
endpoint: "/ext/resync-source-document",
method: "POST",
body: JSON.stringify({
type,
options: { chunkSource: metadata.chunkSource }
})
});
newContent = response?.content;
}
if (!newContent) {
// Check if the last "x" runs were all failures (not exits!). If so - remove the job entirely since it is broken.
const failedRunCount = (await DocumentSyncRun.where({ queueId: queue.id }, DocumentSyncQueue.maxRepeatFailures, { createdAt: 'desc' })).filter((run) => run.status === DocumentSyncRun.statuses.failed).length;
if (failedRunCount >= DocumentSyncQueue.maxRepeatFailures) {
log(`Document ${document.filename} has failed to refresh ${failedRunCount} times continuously and will now be removed from the watched document set.`)
await DocumentSyncQueue.unwatch(document);
continue;
}
log(`Failed to get a new content response from collector for source ${source}. Skipping, but will retry next worker interval. Attempt ${failedRunCount === 0 ? 1 : failedRunCount}/${DocumentSyncQueue.maxRepeatFailures}`);
await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.failed, { filename: document.filename, workspacesModified: [], reason: 'No content found.' })
continue;
}
const currentDocumentData = await fileData(document.docpath)
if (currentDocumentData.pageContent === newContent) {
const nextSync = DocumentSyncQueue.calcNextSync(queue)
log(`Source ${source} is unchanged and will be skipped. Next sync will be ${nextSync.toLocaleString()}.`);
await DocumentSyncQueue._update(
queue.id,
{
lastSyncedAt: new Date().toISOString(),
nextSyncAt: nextSync.toISOString(),
}
);
await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.exited, { filename: document.filename, workspacesModified: [], reason: 'Content unchanged.' })
continue;
}
// update the defined document and workspace vectorDB with the latest information
// it will skip cache and create a new vectorCache file.
const vectorDatabase = getVectorDbClass();
await vectorDatabase.deleteDocumentFromNamespace(workspace.slug, document.docId);
await vectorDatabase.addDocumentToNamespace(
workspace.slug,
{ ...currentDocumentData, pageContent: newContent, docId: document.docId },
document.docpath,
true
);
updateSourceDocument(
document.docpath,
{
...currentDocumentData,
pageContent: newContent,
docId: document.docId,
published: (new Date).toLocaleString(),
// Todo: Update word count and token_estimate?
}
)
log(`Workspace "${workspace.name}" vectors of ${source} updated. Document and vector cache updated.`)
// Now we can bloom the results to all matching documents in all other workspaces
const workspacesModified = [workspace.slug];
const moreReferences = await Document.where({
id: { not: document.id },
filename: document.filename
}, null, null, { workspace: true });
if (moreReferences.length !== 0) {
log(`${source} is referenced in ${moreReferences.length} other workspaces. Updating those workspaces as well...`)
for (const additionalDocumentRef of moreReferences) {
const additionalWorkspace = additionalDocumentRef.workspace;
workspacesModified.push(additionalWorkspace.slug);
await vectorDatabase.deleteDocumentFromNamespace(additionalWorkspace.slug, additionalDocumentRef.docId);
await vectorDatabase.addDocumentToNamespace(
additionalWorkspace.slug,
{ ...currentDocumentData, pageContent: newContent, docId: additionalDocumentRef.docId },
additionalDocumentRef.docpath,
);
log(`Workspace "${additionalWorkspace.name}" vectors for ${source} was also updated with the new content from cache.`)
}
}
const nextRefresh = DocumentSyncQueue.calcNextSync(queue);
log(`${source} has been refreshed in all workspaces it is currently referenced in. Next refresh will be ${nextRefresh.toLocaleString()}.`)
await DocumentSyncQueue._update(
queue.id,
{
lastSyncedAt: new Date().toISOString(),
nextSyncAt: nextRefresh.toISOString(),
}
);
await DocumentSyncQueue.saveRun(queue.id, DocumentSyncRun.statuses.success, { filename: document.filename, workspacesModified })
}
} catch (e) {
console.error(e)
log(`errored with ${e.message}`)
} finally {
conclude();
}
})();

View File

@ -0,0 +1,237 @@
const { BackgroundService } = require("../utils/BackgroundWorkers");
const prisma = require("../utils/prisma");
const { SystemSettings } = require("./systemSettings");
const { Telemetry } = require("./telemetry");
/**
* @typedef {('link'|'youtube'|'confluence'|'github')} validFileType
*/
const DocumentSyncQueue = {
featureKey: "experimental_live_file_sync",
// update the validFileTypes and .canWatch properties when adding elements here.
validFileTypes: ["link", "youtube", "confluence", "github"],
defaultStaleAfter: 604800000,
maxRepeatFailures: 5, // How many times a run can fail in a row before pruning.
writable: [],
bootWorkers: function () {
new BackgroundService().boot();
},
killWorkers: function () {
new BackgroundService().stop();
},
/** Check if the Document Sync/Watch feature is enabled and can be used. */
enabled: async function () {
return (
(await SystemSettings.get({ label: this.featureKey }))?.value ===
"enabled"
);
},
/**
* @param {import("@prisma/client").document_sync_queues} queueRecord - queue record to calculate for
*/
calcNextSync: function (queueRecord) {
return new Date(Number(new Date()) + queueRecord.staleAfterMs);
},
  canWatch: function ({ title, chunkSource = null } = {}) {
    if (!chunkSource) return false; // guard: records without a chunkSource cannot be watched
    if (chunkSource.startsWith("link://") && title.endsWith(".html"))
      return true; // If is web-link material (prior to this feature, most chunkSources were link:// URLs)
    if (chunkSource.startsWith("youtube://")) return true; // If is a youtube link
    if (chunkSource.startsWith("confluence://")) return true; // If is a confluence document link
    if (chunkSource.startsWith("github://")) return true; // If is a Github file reference
    return false;
  },
/**
* Creates Queue record and updates document watch status to true on Document record
* @param {import("@prisma/client").workspace_documents} document - document record to watch, must have `id`
*/
watch: async function (document = null) {
if (!document) return false;
try {
const { Document } = require("./documents");
// Get all documents that are watched and share the same unique filename. If this value is
// non-zero then we exit early so that we do not have duplicated watch queues for the same file
// across many workspaces.
const workspaceDocIds = (
await Document.where({ filename: document.filename, watched: true })
).map((rec) => rec.id);
const hasRecords =
(await this.count({ workspaceDocId: { in: workspaceDocIds } })) > 0;
if (hasRecords)
throw new Error(
`Cannot watch this document again - it already has a queue set.`
);
const queue = await prisma.document_sync_queues.create({
data: {
workspaceDocId: document.id,
nextSyncAt: new Date(Number(new Date()) + this.defaultStaleAfter),
},
});
await Document._updateAll(
{ filename: document.filename },
{ watched: true }
);
return queue || null;
} catch (error) {
console.error(error.message);
return null;
}
},
/**
* Deletes Queue record and updates document watch status to false on Document record
* @param {import("@prisma/client").workspace_documents} document - document record to unwatch, must have `id`
*/
unwatch: async function (document = null) {
if (!document) return false;
try {
const { Document } = require("./documents");
    // We could have been given a document to unwatch which is a clone of one that is already being watched,
    // but under another workspaceDocument id. In that case we need to delete any queues related to this
    // document by every workspaceDocument id it is referenced by.
const workspaceDocIds = (
await Document.where({ filename: document.filename, watched: true })
).map((rec) => rec.id);
await this.delete({ workspaceDocId: { in: workspaceDocIds } });
await Document._updateAll(
{ filename: document.filename },
{ watched: false }
);
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
_update: async function (id = null, data = {}) {
if (!id) throw new Error("No id provided for update");
try {
await prisma.document_sync_queues.update({
where: { id },
data,
});
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
get: async function (clause = {}) {
try {
const queue = await prisma.document_sync_queues.findFirst({
where: clause,
});
return queue || null;
} catch (error) {
console.error(error.message);
return null;
}
},
where: async function (
clause = {},
limit = null,
orderBy = null,
include = {}
) {
try {
const results = await prisma.document_sync_queues.findMany({
where: clause,
...(limit !== null ? { take: limit } : {}),
...(orderBy !== null ? { orderBy } : {}),
...(include !== null ? { include } : {}),
});
return results;
} catch (error) {
console.error(error.message);
return [];
}
},
count: async function (clause = {}, limit = null) {
try {
const count = await prisma.document_sync_queues.count({
where: clause,
...(limit !== null ? { take: limit } : {}),
});
return count;
} catch (error) {
console.error("FAILED TO COUNT DOCUMENTS.", error.message);
return 0;
}
},
delete: async function (clause = {}) {
try {
await prisma.document_sync_queues.deleteMany({ where: clause });
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
/**
* Gets the "stale" queues where the queue's nextSyncAt is less than the current time
   * @returns {Promise<(
   *  import("@prisma/client").document_sync_queues &
   *  { workspaceDoc: import("@prisma/client").workspace_documents &
   *   { workspace: import("@prisma/client").workspaces }
   *  })[]>}
*/
staleDocumentQueues: async function () {
const queues = await this.where(
{
nextSyncAt: {
lte: new Date().toISOString(),
},
},
null,
null,
{
workspaceDoc: {
include: {
workspace: true,
},
},
}
);
return queues;
},
saveRun: async function (queueId = null, status = null, result = {}) {
const { DocumentSyncRun } = require("./documentSyncRun");
return DocumentSyncRun.save(queueId, status, result);
},
/**
   * Updates a document to be watched/unwatched, creating or deleting any queue records and updating the Document record's `watched` status
* @param {import("@prisma/client").workspace_documents} documentRecord
* @param {boolean} watchStatus - indicate if queue record should be created or not.
* @returns
*/
toggleWatchStatus: async function (documentRecord, watchStatus = false) {
if (!watchStatus) {
await Telemetry.sendTelemetry("document_unwatched");
await this.unwatch(documentRecord);
return;
}
await this.watch(documentRecord);
await Telemetry.sendTelemetry("document_watched");
return;
},
};
module.exports = { DocumentSyncQueue };
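A minimal usage sketch tying this model together; the docId is hypothetical and `Document` is the sibling model changed later in this commit:

// Hedged sketch: watch a document if its chunkSource is a supported live-sync type.
const { Document } = require("./documents");
const { DocumentSyncQueue } = require("./documentSyncQueue");

const doc = await Document.get({ docId: "some-doc-id" }); // hypothetical docId
const { metadata } = Document.parseDocumentTypeAndSource(doc);
if (metadata && DocumentSyncQueue.canWatch(metadata)) {
  // Creates the queue record and flags every copy of this file as watched.
  await DocumentSyncQueue.toggleWatchStatus(doc, true);
}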

View File

@ -0,0 +1,88 @@
const prisma = require("../utils/prisma");
const DocumentSyncRun = {
statuses: {
unknown: "unknown",
exited: "exited",
failed: "failed",
success: "success",
},
save: async function (queueId = null, status = null, result = {}) {
try {
if (!this.statuses.hasOwnProperty(status))
throw new Error(
`DocumentSyncRun status ${status} is not a valid status.`
);
const run = await prisma.document_sync_executions.create({
data: {
queueId: Number(queueId),
status: String(status),
result: JSON.stringify(result),
},
});
return run || null;
} catch (error) {
console.error(error.message);
return null;
}
},
get: async function (clause = {}) {
try {
const queue = await prisma.document_sync_executions.findFirst({
where: clause,
});
return queue || null;
} catch (error) {
console.error(error.message);
return null;
}
},
where: async function (
clause = {},
limit = null,
orderBy = null,
include = {}
) {
try {
const results = await prisma.document_sync_executions.findMany({
where: clause,
...(limit !== null ? { take: limit } : {}),
...(orderBy !== null ? { orderBy } : {}),
...(include !== null ? { include } : {}),
});
return results;
} catch (error) {
console.error(error.message);
return [];
}
},
count: async function (clause = {}, limit = null, orderBy = {}) {
try {
const count = await prisma.document_sync_executions.count({
where: clause,
...(limit !== null ? { take: limit } : {}),
...(orderBy !== null ? { orderBy } : {}),
});
return count;
} catch (error) {
console.error("FAILED TO COUNT DOCUMENTS.", error.message);
return 0;
}
},
delete: async function (clause = {}) {
try {
await prisma.document_sync_executions.deleteMany({ where: clause });
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
};
module.exports = { DocumentSyncRun };
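For context, the failure-prune check in the sync job above reads the most recent runs through this model; a sketch of that query, assuming `queue` is a document_sync_queues record:

// Hedged sketch: count how many of the last N runs failed for a given queue.
const { DocumentSyncRun } = require("./documentSyncRun");

const recentRuns = await DocumentSyncRun.where(
  { queueId: queue.id }, // assumption: `queue` fetched elsewhere
  5, // mirrors DocumentSyncQueue.maxRepeatFailures
  { createdAt: "desc" }
);
const failedRunCount = recentRuns.filter(
  (run) => run.status === DocumentSyncRun.statuses.failed
).length;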

View File

@ -3,9 +3,30 @@ const { getVectorDbClass } = require("../utils/helpers");
const prisma = require("../utils/prisma");
const { Telemetry } = require("./telemetry");
const { EventLogs } = require("./eventLogs");
const { safeJsonParse } = require("../utils/http");

const Document = {
  writable: ["pinned", "watched", "lastUpdatedAt"],
/**
* @param {import("@prisma/client").workspace_documents} document - Document PrismaRecord
* @returns {{
* metadata: (null|object),
* type: import("./documentSyncQueue.js").validFileType,
* source: string
* }}
*/
parseDocumentTypeAndSource: function (document) {
const metadata = safeJsonParse(document.metadata, null);
if (!metadata) return { metadata: null, type: null, source: null };
// Parse the correct type of source and its original source path.
const idx = metadata.chunkSource.indexOf("://");
const [type, source] = [
metadata.chunkSource.slice(0, idx),
metadata.chunkSource.slice(idx + 3),
];
return { metadata, type, source: this._stripSource(source, type) };
},
  forWorkspace: async function (workspaceId = null) {
    if (!workspaceId) return [];
@ -36,7 +57,7 @@ const Document = {
    }
  },

  getOnlyWorkspaceIds: async function (clause = {}) {
    try {
      const workspaceIds = await prisma.workspace_documents.findMany({
        where: clause,
@ -44,19 +65,25 @@ const Document = {
          workspaceId: true,
        },
      });
      return workspaceIds.map((record) => record.workspaceId) || [];
    } catch (error) {
      console.error(error.message);
      return [];
    }
  },
  where: async function (
    clause = {},
    limit = null,
    orderBy = null,
    include = null
  ) {
    try {
      const results = await prisma.workspace_documents.findMany({
        where: clause,
        ...(limit !== null ? { take: limit } : {}),
        ...(orderBy !== null ? { orderBy } : {}),
        ...(include !== null ? { include } : {}),
      });
      return results;
    } catch (error) {
@ -202,6 +229,18 @@ const Document = {
      return { document: null, message: error.message };
    }
  },
_updateAll: async function (clause = {}, data = {}) {
try {
await prisma.workspace_documents.updateMany({
where: clause,
data,
});
return true;
} catch (error) {
console.error(error.message);
return false;
}
},
  content: async function (docId) {
    if (!docId) throw new Error("No workspace docId provided!");
    const document = await this.get({ docId: String(docId) });
@ -211,6 +250,22 @@ const Document = {
    const data = await fileData(document.docpath);
    return { title: data.title, content: data.pageContent };
  },
contentByDocPath: async function (docPath) {
const { fileData } = require("../utils/files");
const data = await fileData(docPath);
return { title: data.title, content: data.pageContent };
},
  // Some data sources have encoded params in them that we don't want to log - so strip those details.
_stripSource: function (sourceString, type) {
if (["confluence", "github"].includes(type)) {
const _src = new URL(sourceString);
_src.search = ""; // remove all search params that are encoded for resync.
return _src.toString();
}
return sourceString;
},
};

module.exports = { Document };

View File

@ -28,6 +28,9 @@ const SystemSettings = {
"default_agent_skills", "default_agent_skills",
"agent_sql_connections", "agent_sql_connections",
"custom_app_name", "custom_app_name",
// beta feature flags
"experimental_live_file_sync",
  ],
  validations: {
    footer_data: (updates) => {
@ -114,6 +117,12 @@ const SystemSettings = {
        return JSON.stringify(existingConnections ?? []);
      }
    },
experimental_live_file_sync: (update) => {
if (typeof update === "boolean")
return update === true ? "enabled" : "disabled";
if (!["enabled", "disabled"].includes(update)) return "disabled";
return String(update);
},
    },
  },
  currentSettings: async function () {
    const { hasVectorCachedFiles } = require("../utils/files");
@ -459,6 +468,13 @@ const SystemSettings = {
      });
    },
  },
getFeatureFlags: async function () {
return {
experimental_live_file_sync:
(await SystemSettings.get({ label: "experimental_live_file_sync" }))
?.value === "enabled",
};
},
};

function mergeConnections(existingConnections = [], updates = []) {

View File

@ -25,6 +25,7 @@
"@datastax/astra-db-ts": "^0.1.3", "@datastax/astra-db-ts": "^0.1.3",
"@google/generative-ai": "^0.7.1", "@google/generative-ai": "^0.7.1",
"@googleapis/youtube": "^9.0.0", "@googleapis/youtube": "^9.0.0",
"@ladjs/graceful": "^3.2.2",
"@langchain/anthropic": "0.1.16", "@langchain/anthropic": "0.1.16",
"@langchain/community": "0.0.53", "@langchain/community": "0.0.53",
"@langchain/core": "0.1.61", "@langchain/core": "0.1.61",
@ -38,6 +39,7 @@
"archiver": "^5.3.1", "archiver": "^5.3.1",
"bcrypt": "^5.1.0", "bcrypt": "^5.1.0",
"body-parser": "^1.20.2", "body-parser": "^1.20.2",
"bree": "^9.2.3",
"chalk": "^4", "chalk": "^4",
"check-disk-space": "^3.4.0", "check-disk-space": "^3.4.0",
"chromadb": "^1.5.2", "chromadb": "^1.5.2",
@ -80,7 +82,8 @@
"uuid": "^9.0.0", "uuid": "^9.0.0",
"uuid-apikey": "^1.5.3", "uuid-apikey": "^1.5.3",
"vectordb": "0.4.11", "vectordb": "0.4.11",
"weaviate-ts-client": "^1.4.0" "weaviate-ts-client": "^1.4.0",
"winston": "^3.13.0"
}, },
"devDependencies": { "devDependencies": {
"@inquirer/prompts": "^4.3.1", "@inquirer/prompts": "^4.3.1",

View File

@ -0,0 +1,26 @@
-- AlterTable
ALTER TABLE "workspace_documents" ADD COLUMN "watched" BOOLEAN DEFAULT false;
-- CreateTable
CREATE TABLE "document_sync_queues" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"staleAfterMs" INTEGER NOT NULL DEFAULT 604800000,
"nextSyncAt" DATETIME NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"lastSyncedAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"workspaceDocId" INTEGER NOT NULL,
CONSTRAINT "document_sync_queues_workspaceDocId_fkey" FOREIGN KEY ("workspaceDocId") REFERENCES "workspace_documents" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateTable
CREATE TABLE "document_sync_executions" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"queueId" INTEGER NOT NULL,
"status" TEXT NOT NULL DEFAULT 'unknown',
"result" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "document_sync_executions_queueId_fkey" FOREIGN KEY ("queueId") REFERENCES "document_sync_queues" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "document_sync_queues_workspaceDocId_key" ON "document_sync_queues"("workspaceDocId");

View File

@ -24,16 +24,18 @@ model api_keys {
}

model workspace_documents {
  id                   Int        @id @default(autoincrement())
  docId                String     @unique
  filename             String
  docpath              String
  workspaceId          Int
  metadata             String?
  pinned               Boolean?   @default(false)
  watched              Boolean?   @default(false)
  createdAt            DateTime   @default(now())
  lastUpdatedAt        DateTime   @default(now())
  workspace            workspaces @relation(fields: [workspaceId], references: [id])
  document_sync_queues document_sync_queues?
}
model invites {
@ -275,3 +277,23 @@ model slash_command_presets {
  @@unique([uid, command])
}
model document_sync_queues {
  id             Int      @id @default(autoincrement())
  staleAfterMs   Int      @default(604800000) // 7 days
  nextSyncAt     DateTime
  createdAt      DateTime @default(now())
  lastSyncedAt   DateTime @default(now())
  workspaceDocId Int      @unique
  workspaceDoc   workspace_documents? @relation(fields: [workspaceDocId], references: [id], onDelete: Cascade)
  runs           document_sync_executions[]
}

model document_sync_executions {
  id        Int      @id @default(autoincrement())
  queueId   Int
  status    String   @default("unknown")
  result    String?
  createdAt DateTime @default(now())
  queue     document_sync_queues @relation(fields: [queueId], references: [id], onDelete: Cascade)
}

View File

@ -0,0 +1,88 @@
const path = require("path");
const Graceful = require("@ladjs/graceful");
const Bree = require("bree");
class BackgroundService {
name = "BackgroundWorkerService";
static _instance = null;
#root = path.resolve(__dirname, "../../jobs");
constructor() {
if (BackgroundService._instance) {
this.#log("SINGLETON LOCK: Using existing BackgroundService.");
return BackgroundService._instance;
}
this.logger = this.getLogger();
BackgroundService._instance = this;
}
#log(text, ...args) {
console.log(`\x1b[36m[${this.name}]\x1b[0m ${text}`, ...args);
}
async boot() {
const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
if (!(await DocumentSyncQueue.enabled())) {
this.#log("Feature is not enabled and will not be started.");
return;
}
this.#log("Starting...");
this.bree = new Bree({
logger: this.logger,
root: this.#root,
jobs: this.jobs(),
errorHandler: this.onError,
workerMessageHandler: this.onWorkerMessageHandler,
});
this.graceful = new Graceful({ brees: [this.bree], logger: this.logger });
this.graceful.listen();
this.bree.start();
this.#log("Service started");
}
async stop() {
this.#log("Stopping...");
if (!!this.graceful && !!this.bree) this.graceful.stopBree(this.bree, 0);
this.bree = null;
this.graceful = null;
this.#log("Service stopped");
}
jobs() {
return [
// Job for auto-sync of documents
// https://github.com/breejs/bree
{
name: "sync-watched-documents",
interval: "1hr",
},
];
}
getLogger() {
const { format, createLogger, transports } = require("winston");
    return createLogger({
level: "info",
format: format.combine(
format.colorize(),
format.printf(({ level, message, service }) => {
return `\x1b[36m[${service}]\x1b[0m ${level}: ${message}`;
})
),
defaultMeta: { service: this.name },
transports: [new transports.Console()],
});
}
onError(error, _workerMetadata) {
this.logger.error(`[${error.name}]: ${error.message}`);
}
onWorkerMessageHandler(message, _workerMetadata) {
this.logger.info(`[${message.name}]: ${message.message}`);
}
}
module.exports.BackgroundService = BackgroundService;
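One non-obvious detail for anyone extending the job list: Bree resolves each job `name` to a script in the `root` directory, so the single entry above executes `jobs/sync-watched-documents.js` in a worker thread. A sketch of registering an additional job under that convention (the second entry is hypothetical):

// Hedged sketch of extending jobs() - Bree maps name -> <root>/<name>.js.
jobs() {
  return [
    { name: "sync-watched-documents", interval: "1hr" },
    // { name: "prune-stale-runs", interval: "1d" }, // hypothetical extra job file
  ];
}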

View File

@ -0,0 +1,85 @@
const crypto = require("crypto");
const { dumpENV } = require("../helpers/updateENV");
// Class that is used to arbitrarily encrypt/decrypt string data via a persistent passphrase/salt that
// is either user defined or is created and saved to the ENV on creation.
class EncryptionManager {
#keyENV = "SIG_KEY";
#saltENV = "SIG_SALT";
#encryptionKey;
#encryptionSalt;
constructor({ key = null, salt = null } = {}) {
this.#loadOrCreateKeySalt(key, salt);
this.key = crypto.scryptSync(this.#encryptionKey, this.#encryptionSalt, 32);
this.algorithm = "aes-256-cbc";
this.separator = ":";
    // Used to send the key to the collector process so it can decrypt data, since the two
    // processes do not share ENVs. This value should be passed through CommunicationKey.encrypt
    // before being sent anywhere outside the server process so it is never sent in its raw format.
this.xPayload = this.key.toString("base64");
}
log(text, ...args) {
console.log(`\x1b[36m[EncryptionManager]\x1b[0m ${text}`, ...args);
}
#loadOrCreateKeySalt(_key = null, _salt = null) {
if (!!_key && !!_salt) {
this.log(
"Pre-assigned key & salt for encrypting arbitrary data was used."
);
this.#encryptionKey = _key;
this.#encryptionSalt = _salt;
return;
}
if (!process.env[this.#keyENV] || !process.env[this.#saltENV]) {
this.log("Self-assigning key & salt for encrypting arbitrary data.");
process.env[this.#keyENV] = crypto.randomBytes(32).toString("hex");
process.env[this.#saltENV] = crypto.randomBytes(32).toString("hex");
if (process.env.NODE_ENV === "production") dumpENV();
} else
this.log("Loaded existing key & salt for encrypting arbitrary data.");
this.#encryptionKey = process.env[this.#keyENV];
this.#encryptionSalt = process.env[this.#saltENV];
return;
}
encrypt(plainTextString = null) {
try {
if (!plainTextString)
throw new Error("Empty string is not valid for this method.");
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(this.algorithm, this.key, iv);
const encrypted = cipher.update(plainTextString, "utf8", "hex");
return [
encrypted + cipher.final("hex"),
Buffer.from(iv).toString("hex"),
].join(this.separator);
} catch (e) {
this.log(e);
return null;
}
}
decrypt(encryptedString) {
try {
const [encrypted, iv] = encryptedString.split(this.separator);
if (!iv) throw new Error("IV not found");
const decipher = crypto.createDecipheriv(
this.algorithm,
this.key,
Buffer.from(iv, "hex")
);
return decipher.update(encrypted, "hex", "utf8") + decipher.final("utf8");
} catch (e) {
this.log(e);
return null;
}
}
}
module.exports = { EncryptionManager };
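A quick roundtrip sketch of the format produced above: `encrypt` returns "<hex ciphertext>:<hex iv>" joined by the separator, and `decrypt` splits on it to recover the IV:

// Hedged roundtrip sketch - key/salt are self-assigned into the ENV on first construction.
const { EncryptionManager } = require("./EncryptionManager");

const manager = new EncryptionManager();
const sealed = manager.encrypt("chunkSource with private query params");
// sealed looks like "<hex ciphertext>:<hex iv>"
console.log(manager.decrypt(sealed)); // -> "chunkSource with private query params"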

View File

@ -1,4 +1,6 @@
const { Telemetry } = require("../../models/telemetry");
const { BackgroundService } = require("../BackgroundWorkers");
const { EncryptionManager } = require("../EncryptionManager");
const { CommunicationKey } = require("../comKey");
const setupTelemetry = require("../telemetry");
@ -18,6 +20,8 @@ function bootSSL(app, port = 3001) {
    .listen(port, async () => {
      await setupTelemetry();
      new CommunicationKey(true);
      new EncryptionManager();
      new BackgroundService().boot();
      console.log(`Primary server in HTTPS mode listening on port ${port}`);
    })
    .on("error", catchSigTerms);
@ -45,6 +49,8 @@ function bootHTTP(app, port = 3001) {
    .listen(port, async () => {
      await setupTelemetry();
      new CommunicationKey(true);
      new EncryptionManager();
      new BackgroundService().boot();
      console.log(`Primary server in HTTP mode listening on port ${port}`);
    })
    .on("error", catchSigTerms);

View File

@ -1,8 +1,9 @@
const { EncryptionManager } = require("../EncryptionManager");

// When running locally will occupy the 0.0.0.0 hostname space but when deployed inside
// of docker this endpoint is not exposed so it is only on the Docker instances internal network
// so no additional security is needed on the endpoint directly. Auth is done however by the express
// middleware prior to leaving the node-side of the application so that is good enough >:)
class CollectorApi {
  constructor() {
    const { CommunicationKey } = require("../comKey");
@ -54,6 +55,9 @@ class CollectorApi {
        headers: {
          "Content-Type": "application/json",
          "X-Integrity": this.comkey.sign(data),
          "X-Payload-Signer": this.comkey.encrypt(
            new EncryptionManager().xPayload
          ),
        },
        body: data,
      })
@ -77,6 +81,9 @@ class CollectorApi {
        headers: {
          "Content-Type": "application/json",
          "X-Integrity": this.comkey.sign(data),
          "X-Payload-Signer": this.comkey.encrypt(
            new EncryptionManager().xPayload
          ),
        },
        body: data,
      })
@ -98,6 +105,9 @@ class CollectorApi {
        headers: {
          "Content-Type": "application/json",
          "X-Integrity": this.comkey.sign(data),
          "X-Payload-Signer": this.comkey.encrypt(
            new EncryptionManager().xPayload
          ),
        },
        body: data,
      })
@ -122,6 +132,9 @@ class CollectorApi {
        headers: {
          "Content-Type": "application/json",
          "X-Integrity": this.comkey.sign(body),
          "X-Payload-Signer": this.comkey.encrypt(
            new EncryptionManager().xPayload
          ),
        },
      })
        .then((res) => {
@ -144,6 +157,9 @@ class CollectorApi {
        headers: {
          "Content-Type": "application/json",
          "X-Integrity": this.comkey.sign(data),
          "X-Payload-Signer": this.comkey.encrypt(
            new EncryptionManager().xPayload
          ),
        },
        body: data,
      })

View File

@ -73,6 +73,14 @@ class CommunicationKey {
.sign("RSA-SHA256", Buffer.from(textData), this.#readPrivateKey()) .sign("RSA-SHA256", Buffer.from(textData), this.#readPrivateKey())
.toString("hex"); .toString("hex");
} }
  // Use the rolling priv-key to encrypt arbitrary text data,
  // returning the encrypted content as a base64 string.
encrypt(textData = "") {
return crypto
.privateEncrypt(this.#readPrivateKey(), Buffer.from(textData, "utf-8"))
.toString("base64");
}
}

module.exports = { CommunicationKey };
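Because this encrypts with the private key, the receiving collector can recover the payload with the matching public key. A hedged sketch of the counterpart (the public-key handling is an assumption, not code from this commit):

// Hypothetical decrypt counterpart using Node's crypto.publicDecrypt.
const crypto = require("crypto");

function decryptXPayload(base64Payload, publicKeyPem) {
  // Assumes publicKeyPem pairs with the server's rolling private key.
  return crypto
    .publicDecrypt(publicKeyPem, Buffer.from(base64Payload, "base64"))
    .toString("utf-8");
}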

View File

@ -2,6 +2,7 @@ const fs = require("fs");
const path = require("path"); const path = require("path");
const { v5: uuidv5 } = require("uuid"); const { v5: uuidv5 } = require("uuid");
const { Document } = require("../../models/documents"); const { Document } = require("../../models/documents");
const { DocumentSyncQueue } = require("../../models/documentSyncQueue");
const documentsPath = const documentsPath =
process.env.NODE_ENV === "development" process.env.NODE_ENV === "development"
? path.resolve(__dirname, `../../storage/documents`) ? path.resolve(__dirname, `../../storage/documents`)
@ -25,7 +26,7 @@ async function fileData(filePath = null) {
async function viewLocalFiles() {
  if (!fs.existsSync(documentsPath)) fs.mkdirSync(documentsPath);
  const liveSyncAvailable = await DocumentSyncQueue.enabled();
  const directory = {
    name: "documents",
    type: "folder",
@ -50,16 +51,28 @@ async function viewLocalFiles() {
      const rawData = fs.readFileSync(filePath, "utf8");
      const cachefilename = `${file}/${subfile}`;
      const { pageContent, ...metadata } = JSON.parse(rawData);
      const pinnedInWorkspaces = await Document.getOnlyWorkspaceIds({
        docpath: cachefilename,
        pinned: true,
      });
      const watchedInWorkspaces = liveSyncAvailable
        ? await Document.getOnlyWorkspaceIds({
            docpath: cachefilename,
            watched: true,
          })
        : [];

      subdocs.items.push({
        name: subfile,
        type: "file",
        ...metadata,
        cached: await cachedVectorInformation(cachefilename, true),
        pinnedWorkspaces: pinnedInWorkspaces,
        canWatch: liveSyncAvailable
          ? DocumentSyncQueue.canWatch(metadata)
          : false,
        // Is file watched in any workspace since sync updates all workspaces where file is referenced
        watched: watchedInWorkspaces.length !== 0,
      });
    }
    directory.items.push(subdocs);

View File

@ -760,6 +760,9 @@ function dumpENV() {
    // and are either managed or manually set ENV key:values.
    "STORAGE_DIR",
    "SERVER_PORT",
    // For persistent data encryption
    "SIG_KEY",
    "SIG_SALT",
    // Password Schema Keys if present.
    "PASSWORDMINCHAR",
    "PASSWORDMAXCHAR",

View File

@ -0,0 +1,24 @@
const { SystemSettings } = require("../../models/systemSettings");
// Explicitly check that a specific feature flag is enabled.
// This should match the key in the SystemSetting label.
function featureFlagEnabled(featureFlagKey = null) {
  return async (_, response, next) => {
    if (!featureFlagKey) return response.sendStatus(401);
    const flagValue = (
      await SystemSettings.get({ label: String(featureFlagKey) })
    )?.value;
    if (flagValue === "enabled") return next();
    return response.sendStatus(401);
  };
}
module.exports = {
featureFlagEnabled,
};

View File

@ -100,7 +100,8 @@ const AstraDB = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -109,40 +110,42 @@ const AstraDB = {
      if (!pageContent || pageContent.length == 0) return false;

      console.log("Adding new vectorized document into namespace", namespace);
      if (!skipCache) {
        const cacheResult = await cachedVectorInformation(fullFilePath);
        if (cacheResult.exists) {
          const { client } = await this.connect();
          const { chunks } = cacheResult;
          const documentVectors = [];
          vectorDimension = chunks[0][0].values.length || null;

          const collection = await this.getOrCreateCollection(
            client,
            namespace,
            vectorDimension
          );
          if (!(await this.isRealCollection(collection)))
            throw new Error("Failed to create new AstraDB collection!", {
              namespace,
            });

          for (const chunk of chunks) {
            // Before sending to Astra and saving the records to our db
            // we need to assign the id of each chunk that is stored in the cached file.
            const newChunks = chunk.map((chunk) => {
              const _id = uuidv4();
              documentVectors.push({ docId, vectorId: _id });
              return {
                _id: _id,
                $vector: chunk.values,
                metadata: chunk.metadata || {},
              };
            });
            await collection.insertMany(newChunks);
          }
          await DocumentVectors.bulkInsert(documentVectors);
          return { vectorized: true, error: null };
        }
      }

      const EmbedderEngine = getEmbeddingEngineSelection();

View File

@ -185,7 +185,8 @@ const Chroma = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -193,43 +194,45 @@ const Chroma = {
      if (!pageContent || pageContent.length == 0) return false;

      console.log("Adding new vectorized document into namespace", namespace);
      if (!skipCache) {
        const cacheResult = await cachedVectorInformation(fullFilePath);
        if (cacheResult.exists) {
          const { client } = await this.connect();
          const collection = await client.getOrCreateCollection({
            name: this.normalize(namespace),
            metadata: { "hnsw:space": "cosine" },
          });
          const { chunks } = cacheResult;
          const documentVectors = [];

          for (const chunk of chunks) {
            const submission = {
              ids: [],
              embeddings: [],
              metadatas: [],
              documents: [],
            };

            // Before sending to Chroma and saving the records to our db
            // we need to assign the id of each chunk that is stored in the cached file.
            chunk.forEach((chunk) => {
              const id = uuidv4();
              const { id: _id, ...metadata } = chunk.metadata;
              documentVectors.push({ docId, vectorId: id });
              submission.ids.push(id);
              submission.embeddings.push(chunk.values);
              submission.metadatas.push(metadata);
              submission.documents.push(metadata.text);
            });

            const additionResult = await collection.add(submission);
            if (!additionResult)
              throw new Error("Error embedding into ChromaDB", additionResult);
          }
          await DocumentVectors.bulkInsert(documentVectors);
          return { vectorized: true, error: null };
        }
      }

      // If we are here then we are going to embed and store a novel document.

View File

@ -153,7 +153,8 @@ const LanceDb = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -161,25 +162,27 @@ const LanceDb = {
      if (!pageContent || pageContent.length == 0) return false;

      console.log("Adding new vectorized document into namespace", namespace);
      if (!skipCache) {
        const cacheResult = await cachedVectorInformation(fullFilePath);
        if (cacheResult.exists) {
          const { client } = await this.connect();
          const { chunks } = cacheResult;
          const documentVectors = [];
          const submissions = [];

          for (const chunk of chunks) {
            chunk.forEach((chunk) => {
              const id = uuidv4();
              const { id: _id, ...metadata } = chunk.metadata;
              documentVectors.push({ docId, vectorId: id });
              submissions.push({ id: id, vector: chunk.values, ...metadata });
            });
          }

          await this.updateOrCreateCollection(client, submissions, namespace);
          await DocumentVectors.bulkInsert(documentVectors);
          return { vectorized: true, error: null };
        }
      }

      // If we are here then we are going to embed and store a novel document.

View File

@ -137,7 +137,8 @@ const Milvus = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -146,38 +147,40 @@ const Milvus = {
      if (!pageContent || pageContent.length == 0) return false;

      console.log("Adding new vectorized document into namespace", namespace);
      if (!skipCache) {
        const cacheResult = await cachedVectorInformation(fullFilePath);
        if (cacheResult.exists) {
          const { client } = await this.connect();
          const { chunks } = cacheResult;
          const documentVectors = [];
          vectorDimension = chunks[0][0].values.length || null;

          await this.getOrCreateCollection(client, namespace, vectorDimension);
          for (const chunk of chunks) {
            // Before sending to Pinecone and saving the records to our db
            // we need to assign the id of each chunk that is stored in the cached file.
            const newChunks = chunk.map((chunk) => {
              const id = uuidv4();
              documentVectors.push({ docId, vectorId: id });
              return { id, vector: chunk.values, metadata: chunk.metadata };
            });
            const insertResult = await client.insert({
              collection_name: this.normalize(namespace),
              data: newChunks,
            });

            if (insertResult?.status.error_code !== "Success") {
              throw new Error(
                `Error embedding into Milvus! Reason:${insertResult?.status.reason}`
              );
            }
          }
          await DocumentVectors.bulkInsert(documentVectors);
          await client.flushSync({
            collection_names: [this.normalize(namespace)],
          });
          return { vectorized: true, error: null };
        }
      }

      const EmbedderEngine = getEmbeddingEngineSelection();

View File

@ -96,7 +96,8 @@ const PineconeDB = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -104,26 +105,28 @@ const PineconeDB = {
      if (!pageContent || pageContent.length == 0) return false;

      console.log("Adding new vectorized document into namespace", namespace);
      if (!skipCache) {
        const cacheResult = await cachedVectorInformation(fullFilePath);
        if (cacheResult.exists) {
          const { pineconeIndex } = await this.connect();
          const pineconeNamespace = pineconeIndex.namespace(namespace);
          const { chunks } = cacheResult;
          const documentVectors = [];

          for (const chunk of chunks) {
            // Before sending to Pinecone and saving the records to our db
            // we need to assign the id of each chunk that is stored in the cached file.
            const newChunks = chunk.map((chunk) => {
              const id = uuidv4();
              documentVectors.push({ docId, vectorId: id });
              return { ...chunk, id };
            });
            await pineconeNamespace.upsert([...newChunks]);
          }
          await DocumentVectors.bulkInsert(documentVectors);
          return { vectorized: true, error: null };
        }
      }

      // If we are here then we are going to embed and store a novel document.

View File

@ -137,7 +137,8 @@ const QDrant = {
  addDocumentToNamespace: async function (
    namespace,
    documentData = {},
    fullFilePath = null,
    skipCache = false
  ) {
    const { DocumentVectors } = require("../../../models/vectors");
    try {
@ -146,59 +147,63 @@ const QDrant = {
if (!pageContent || pageContent.length == 0) return false; if (!pageContent || pageContent.length == 0) return false;
console.log("Adding new vectorized document into namespace", namespace); console.log("Adding new vectorized document into namespace", namespace);
const cacheResult = await cachedVectorInformation(fullFilePath); if (skipCache) {
if (cacheResult.exists) { const cacheResult = await cachedVectorInformation(fullFilePath);
const { client } = await this.connect(); if (cacheResult.exists) {
const { chunks } = cacheResult; const { client } = await this.connect();
const documentVectors = []; const { chunks } = cacheResult;
vectorDimension = const documentVectors = [];
chunks[0][0]?.vector?.length ?? chunks[0][0]?.values?.length ?? null; vectorDimension =
chunks[0][0]?.vector?.length ??
chunks[0][0]?.values?.length ??
null;
const collection = await this.getOrCreateCollection( const collection = await this.getOrCreateCollection(
client, client,
namespace,
vectorDimension
);
if (!collection)
throw new Error("Failed to create new QDrant collection!", {
namespace, namespace,
}); vectorDimension
);
if (!collection)
throw new Error("Failed to create new QDrant collection!", {
namespace,
});
for (const chunk of chunks) { for (const chunk of chunks) {
const submission = { const submission = {
ids: [], ids: [],
vectors: [], vectors: [],
payloads: [], payloads: [],
}; };
// Before sending to Qdrant and saving the records to our db // Before sending to Qdrant and saving the records to our db
// we need to assign the id of each chunk that is stored in the cached file. // we need to assign the id of each chunk that is stored in the cached file.
// The id property must be defined or else it will be unable to be managed by ALLM. // The id property must be defined or else it will be unable to be managed by ALLM.
chunk.forEach((chunk) => { chunk.forEach((chunk) => {
const id = uuidv4(); const id = uuidv4();
if (chunk?.payload?.hasOwnProperty("id")) { if (chunk?.payload?.hasOwnProperty("id")) {
const { id: _id, ...payload } = chunk.payload; const { id: _id, ...payload } = chunk.payload;
documentVectors.push({ docId, vectorId: id }); documentVectors.push({ docId, vectorId: id });
submission.ids.push(id); submission.ids.push(id);
submission.vectors.push(chunk.vector); submission.vectors.push(chunk.vector);
submission.payloads.push(payload); submission.payloads.push(payload);
} else { } else {
console.error( console.error(
"The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection." "The 'id' property is not defined in chunk.payload - it will be omitted from being inserted in QDrant collection."
); );
} }
}); });
const additionResult = await client.upsert(namespace, { const additionResult = await client.upsert(namespace, {
wait: true, wait: true,
batch: { ...submission }, batch: { ...submission },
}); });
if (additionResult?.status !== "completed") if (additionResult?.status !== "completed")
throw new Error("Error embedding into QDrant", additionResult); throw new Error("Error embedding into QDrant", additionResult);
}
await DocumentVectors.bulkInsert(documentVectors);
return { vectorized: true, error: null };
} }
await DocumentVectors.bulkInsert(documentVectors);
return { vectorized: true, error: null };
} }
// If we are here then we are going to embed and store a novel document. // If we are here then we are going to embed and store a novel document.
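Every vector-db provider in this commit picks up the same fourth parameter. A minimal sketch of how a resync job might drive it; the helper names getVectorDbClass and deleteDocumentFromNamespace are assumptions drawn from the surrounding codebase, not part of this hunk, and the require path is illustrative.

const { getVectorDbClass } = require("../../helpers"); // path is an assumption

// Hypothetical resync flow: purge the stale vectors, then force a fresh
// embed of the updated content by passing skipCache = true.
async function reembedDocument(namespace, documentData, fullFilePath) {
  const VectorDb = getVectorDbClass();
  await VectorDb.deleteDocumentFromNamespace(namespace, documentData.docId);
  return await VectorDb.addDocumentToNamespace(
    namespace,
    documentData,
    fullFilePath,
    true // skipCache: bypass the cached-chunk replay shown above
  );
}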

View File

@@ -179,7 +179,8 @@ const Weaviate = {
   addDocumentToNamespace: async function (
     namespace,
     documentData = {},
-    fullFilePath = null
+    fullFilePath = null,
+    skipCache = false
   ) {
     const { DocumentVectors } = require("../../../models/vectors");
     try {
@@ -192,55 +193,57 @@ const Weaviate = {
       if (!pageContent || pageContent.length == 0) return false;

       console.log("Adding new vectorized document into namespace", namespace);
-      const cacheResult = await cachedVectorInformation(fullFilePath);
-      if (cacheResult.exists) {
-        const { client } = await this.connect();
-        const weaviateClassExits = await this.hasNamespace(namespace);
-        if (!weaviateClassExits) {
-          await client.schema
-            .classCreator()
-            .withClass({
-              class: camelCase(namespace),
-              description: `Class created by AnythingLLM named ${camelCase(
-                namespace
-              )}`,
-              vectorizer: "none",
-            })
-            .do();
-        }
-
-        const { chunks } = cacheResult;
-        const documentVectors = [];
-        const vectors = [];
-
-        for (const chunk of chunks) {
-          // Before sending to Weaviate and saving the records to our db
-          // we need to assign the id of each chunk that is stored in the cached file.
-          chunk.forEach((chunk) => {
-            const id = uuidv4();
-            const flattenedMetadata = this.flattenObjectForWeaviate(
-              chunk.properties ?? chunk.metadata
-            );
-            documentVectors.push({ docId, vectorId: id });
-            const vectorRecord = {
-              id,
-              class: camelCase(namespace),
-              vector: chunk.vector || chunk.values || [],
-              properties: { ...flattenedMetadata },
-            };
-            vectors.push(vectorRecord);
-          });
-
-          const { success: additionResult, errors = [] } =
-            await this.addVectors(client, vectors);
-          if (!additionResult) {
-            console.error("Weaviate::addVectors failed to insert", errors);
-            throw new Error("Error embedding into Weaviate");
-          }
-        }
-
-        await DocumentVectors.bulkInsert(documentVectors);
-        return { vectorized: true, error: null };
+      if (!skipCache) {
+        const cacheResult = await cachedVectorInformation(fullFilePath);
+        if (cacheResult.exists) {
+          const { client } = await this.connect();
+          const weaviateClassExits = await this.hasNamespace(namespace);
+          if (!weaviateClassExits) {
+            await client.schema
+              .classCreator()
+              .withClass({
+                class: camelCase(namespace),
+                description: `Class created by AnythingLLM named ${camelCase(
+                  namespace
+                )}`,
+                vectorizer: "none",
+              })
+              .do();
+          }
+
+          const { chunks } = cacheResult;
+          const documentVectors = [];
+          const vectors = [];
+
+          for (const chunk of chunks) {
+            // Before sending to Weaviate and saving the records to our db
+            // we need to assign the id of each chunk that is stored in the cached file.
+            chunk.forEach((chunk) => {
+              const id = uuidv4();
+              const flattenedMetadata = this.flattenObjectForWeaviate(
+                chunk.properties ?? chunk.metadata
+              );
+              documentVectors.push({ docId, vectorId: id });
+              const vectorRecord = {
+                id,
+                class: camelCase(namespace),
+                vector: chunk.vector || chunk.values || [],
+                properties: { ...flattenedMetadata },
+              };
+              vectors.push(vectorRecord);
+            });
+
+            const { success: additionResult, errors = [] } =
+              await this.addVectors(client, vectors);
+            if (!additionResult) {
+              console.error("Weaviate::addVectors failed to insert", errors);
+              throw new Error("Error embedding into Weaviate");
+            }
+          }
+
+          await DocumentVectors.bulkInsert(documentVectors);
+          return { vectorized: true, error: null };
+        }
       }

       // If we are here then we are going to embed and store a novel document.
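Weaviate only accepts property values that are primitives or arrays of primitives, which is why the hunk routes metadata through flattenObjectForWeaviate before building each vector record. A rough sketch of the guarantee such a helper provides (an illustration, not the implementation from this commit):

// Drop nested objects and mixed arrays so every property value handed to
// Weaviate is a primitive or an array of primitives.
function flattenObjectForWeaviateSketch(obj = {}) {
  const flattened = {};
  for (const [key, value] of Object.entries(obj)) {
    if (value === null || value === undefined) continue;
    if (Array.isArray(value)) {
      if (value.every((v) => typeof v !== "object")) flattened[key] = value;
    } else if (typeof value !== "object") {
      flattened[key] = value;
    }
  }
  return flattened;
}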

View File

@@ -138,7 +138,8 @@ const Zilliz = {
   addDocumentToNamespace: async function (
     namespace,
     documentData = {},
-    fullFilePath = null
+    fullFilePath = null,
+    skipCache = false
   ) {
     const { DocumentVectors } = require("../../../models/vectors");
     try {
@@ -147,38 +148,40 @@ const Zilliz = {
       if (!pageContent || pageContent.length == 0) return false;

       console.log("Adding new vectorized document into namespace", namespace);
-      const cacheResult = await cachedVectorInformation(fullFilePath);
-      if (cacheResult.exists) {
-        const { client } = await this.connect();
-        const { chunks } = cacheResult;
-        const documentVectors = [];
-        vectorDimension = chunks[0][0].values.length || null;
-
-        await this.getOrCreateCollection(client, namespace, vectorDimension);
-        for (const chunk of chunks) {
-          // Before sending to Pinecone and saving the records to our db
-          // we need to assign the id of each chunk that is stored in the cached file.
-          const newChunks = chunk.map((chunk) => {
-            const id = uuidv4();
-            documentVectors.push({ docId, vectorId: id });
-            return { id, vector: chunk.values, metadata: chunk.metadata };
-          });
-
-          const insertResult = await client.insert({
-            collection_name: this.normalize(namespace),
-            data: newChunks,
-          });
-          if (insertResult?.status.error_code !== "Success") {
-            throw new Error(
-              `Error embedding into Zilliz! Reason:${insertResult?.status.reason}`
-            );
-          }
-        }
-
-        await DocumentVectors.bulkInsert(documentVectors);
-        await client.flushSync({
-          collection_names: [this.normalize(namespace)],
-        });
-        return { vectorized: true, error: null };
+      if (!skipCache) {
+        const cacheResult = await cachedVectorInformation(fullFilePath);
+        if (cacheResult.exists) {
+          const { client } = await this.connect();
+          const { chunks } = cacheResult;
+          const documentVectors = [];
+          vectorDimension = chunks[0][0].values.length || null;
+
+          await this.getOrCreateCollection(client, namespace, vectorDimension);
+          for (const chunk of chunks) {
+            // Before sending to Pinecone and saving the records to our db
+            // we need to assign the id of each chunk that is stored in the cached file.
+            const newChunks = chunk.map((chunk) => {
+              const id = uuidv4();
+              documentVectors.push({ docId, vectorId: id });
+              return { id, vector: chunk.values, metadata: chunk.metadata };
+            });
+
+            const insertResult = await client.insert({
+              collection_name: this.normalize(namespace),
+              data: newChunks,
+            });
+            if (insertResult?.status.error_code !== "Success") {
+              throw new Error(
+                `Error embedding into Zilliz! Reason:${insertResult?.status.reason}`
+              );
+            }
+          }
+
+          await DocumentVectors.bulkInsert(documentVectors);
+          await client.flushSync({
+            collection_names: [this.normalize(namespace)],
+          });
+          return { vectorized: true, error: null };
+        }
       }

       const EmbedderEngine = getEmbeddingEngineSelection();
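Every provider above leans on the same cache contract: embeddings are written to a local cache keyed by the document's storage path, so adding the same document to another workspace replays vectors instead of paying for embedding again. A sketch of that replay-or-embed decision; cachedVectorInformation and storeVectorResult are the names used in the hunks, but the require path and function shape here are assumptions for illustration:

const { cachedVectorInformation, storeVectorResult } = require("../../files"); // path assumed

async function replayOrEmbed(fullFilePath, skipCache, embedFreshFn) {
  if (!skipCache) {
    const cache = await cachedVectorInformation(fullFilePath);
    if (cache.exists) return cache.chunks; // replay cached vectors, no embed cost
  }
  const chunks = await embedFreshFn(); // novel or updated document
  await storeVectorResult(chunks, fullFilePath); // refresh the cache for next time
  return chunks;
}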

View File

@@ -272,6 +272,18 @@
     "@azure/logger" "^1.0.3"
     tslib "^2.4.0"

+"@babel/runtime@^7.10.5":
+  version "7.24.7"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.7.tgz#f4f0d5530e8dbdf59b3451b9b3e594b6ba082e12"
+  integrity sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==
+  dependencies:
+    regenerator-runtime "^0.14.0"
+
+"@breejs/later@^4.2.0":
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/@breejs/later/-/later-4.2.0.tgz#669661f3a02535ef900f360c74e48c3f5483c786"
+  integrity sha512-EVMD0SgJtOuFeg0lAVbCwa+qeTKILb87jqvLyUtQswGD9+ce2nB52Y5zbTF1Hc0MDFfbydcMcxb47jSdhikVHA==
+
 "@colors/colors@1.6.0", "@colors/colors@^1.6.0":
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
@@ -567,6 +579,14 @@
   resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919"
   integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==

+"@ladjs/graceful@^3.2.2":
+  version "3.2.2"
+  resolved "https://registry.yarnpkg.com/@ladjs/graceful/-/graceful-3.2.2.tgz#1b141a9dc2604df99177d6714dbe4a0bff5e2ddf"
+  integrity sha512-GyL5Cpgh2RlndFW2e4AUHrEDe0tzyXKpAs92wrAQhNKcY0y++qfK8PC+6TOHzN9zvxPY9j1KAU29Gfa9vxWzDg==
+  dependencies:
+    lil-http-terminator "^1.2.2"
+    p-is-promise "3"
+
 "@lancedb/vectordb-darwin-arm64@0.4.11":
   version "0.4.11"
   resolved "https://registry.yarnpkg.com/@lancedb/vectordb-darwin-arm64/-/vectordb-darwin-arm64-0.4.11.tgz#390549891e03f28ba0c1b741f30730b2d09227da"
@@ -1114,6 +1134,11 @@
   dependencies:
     "@types/node" "*"

+"@types/lodash@^4.14.165":
+  version "4.17.5"
+  resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.5.tgz#e6c29b58e66995d57cd170ce3e2a61926d55ee04"
+  integrity sha512-MBIOHVZqVqgfro1euRDWX7OO0fBVUUMrN6Pwm8LQsz8cWhEpihlvR70ENj3f40j58TNxZaWv2ndSkInykNBBJw==
+
 "@types/long@^4.0.1":
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.2.tgz#b74129719fc8d11c01868010082d483b7545591a"
@@ -1708,6 +1733,11 @@ boolbase@^1.0.0:
   resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
   integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==

+boolean@^3.2.0:
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/boolean/-/boolean-3.2.0.tgz#9e5294af4e98314494cbb17979fa54ca159f116b"
+  integrity sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==
+
 bottleneck@^2.15.3:
   version "2.19.5"
   resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91"
@@ -1735,6 +1765,22 @@ braces@~3.0.2:
   dependencies:
     fill-range "^7.0.1"

+bree@^9.2.3:
+  version "9.2.3"
+  resolved "https://registry.yarnpkg.com/bree/-/bree-9.2.3.tgz#8c47402efcc79ed6da31637f84092ef59743d395"
+  integrity sha512-iCVyLVcqql8rFogVX5gzkofdo6OZu8mxe5dUSkAZyaR43UdNfP0DOj3jJk31yogy6lfnRMhGvO5Gj1ypLeInuA==
+  dependencies:
+    "@breejs/later" "^4.2.0"
+    boolean "^3.2.0"
+    combine-errors "^3.0.3"
+    cron-validate "^1.4.5"
+    human-interval "^2.0.1"
+    is-string-and-not-blank "^0.0.2"
+    is-valid-path "^0.1.1"
+    ms "^2.1.3"
+    p-wait-for "3"
+    safe-timers "^1.1.0"
+
 bson@^6.2.0:
   version "6.6.0"
   resolved "https://registry.yarnpkg.com/bson/-/bson-6.6.0.tgz#f225137eb49fe19bee4d87949a0515c05176e2ad"
@@ -2040,6 +2086,14 @@ colorspace@1.1.x:
     color "^3.1.3"
     text-hex "1.0.x"

+combine-errors@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/combine-errors/-/combine-errors-3.0.3.tgz#f4df6740083e5703a3181110c2b10551f003da86"
+  integrity sha512-C8ikRNRMygCwaTx+Ek3Yr+OuZzgZjduCOfSQBjbM8V3MfgcjSTeto/GXP6PAwKvJz/v15b7GHZvx5rOlczFw/Q==
+  dependencies:
+    custom-error-instance "2.1.1"
+    lodash.uniqby "4.5.0"
+
 combined-stream@^1.0.8:
   version "1.0.8"
   resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
@@ -2165,6 +2219,13 @@ crc32-stream@^4.0.2:
     crc-32 "^1.2.0"
     readable-stream "^3.4.0"

+cron-validate@^1.4.5:
+  version "1.4.5"
+  resolved "https://registry.yarnpkg.com/cron-validate/-/cron-validate-1.4.5.tgz#eceb221f7558e6302e5f84c7b3a454fdf4d064c3"
+  integrity sha512-nKlOJEnYKudMn/aNyNH8xxWczlfpaazfWV32Pcx/2St51r2bxWbGhZD7uwzMcRhunA/ZNL+Htm/i0792Z59UMQ==
+  dependencies:
+    yup "0.32.9"
+
 cross-env@^7.0.3:
   version "7.0.3"
   resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
@@ -2209,6 +2270,11 @@ css-what@^6.1.0:
   resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
   integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==

+custom-error-instance@2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/custom-error-instance/-/custom-error-instance-2.1.1.tgz#3cf6391487a6629a6247eb0ca0ce00081b7e361a"
+  integrity sha512-p6JFxJc3M4OTD2li2qaHkDCw9SfMw82Ldr6OC9Je1aXiGfhx2W8p3GaoeaGrPJTUN9NirTM/KTxHWMUdR1rsUg==
+
 data-view-buffer@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/data-view-buffer/-/data-view-buffer-1.0.1.tgz#8ea6326efec17a2e42620696e671d7d5a8bc66b2"
@@ -3521,6 +3587,13 @@ https-proxy-agent@^7.0.0:
     agent-base "^7.0.2"
     debug "4"

+human-interval@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/human-interval/-/human-interval-2.0.1.tgz#655baf606c7067bb26042dcae14ec777b099af15"
+  integrity sha512-r4Aotzf+OtKIGQCB3odUowy4GfUDTy3aTWTfLd7ZF2gBCy3XW3v/dJLRefZnOFFnjqs5B1TypvS8WarpBkYUNQ==
+  dependencies:
+    numbered "^1.1.0"
+
 human-signals@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0"
@@ -3708,6 +3781,11 @@ is-docker@^2.0.0, is-docker@^2.1.1:
   resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa"
   integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==

+is-extglob@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0"
+  integrity sha512-7Q+VbVafe6x2T+Tu6NcOf6sRklazEPmBoB3IWk3WdGZM2iGUwU/Oe3Wtq5lSEkDTTlpp8yx+5t4pzO/i9Ty1ww==
+
 is-extglob@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
@@ -3732,6 +3810,13 @@ is-generator-function@^1.0.10:
   dependencies:
     has-tostringtag "^1.0.0"

+is-glob@^2.0.0:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863"
+  integrity sha512-a1dBeB19NXsf/E0+FHqkagizel/LQw2DjSQpvQrj3zT+jYPpaUCryPnrQajXKFLCMuf4I6FhRpaGtw4lPrG6Eg==
+  dependencies:
+    is-extglob "^1.0.0"
+
 is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
   version "4.0.3"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
@@ -3744,6 +3829,13 @@ is-interactive@^2.0.0:
   resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-2.0.0.tgz#40c57614593826da1100ade6059778d597f16e90"
   integrity sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==

+is-invalid-path@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/is-invalid-path/-/is-invalid-path-0.1.0.tgz#307a855b3cf1a938b44ea70d2c61106053714f34"
+  integrity sha512-aZMG0T3F34mTg4eTdszcGXx54oiZ4NtHSft3hWNJMGJXUUqdIj3cOZuHcU0nCWWcY3jd7yRe/3AEm3vSNTpBGQ==
+  dependencies:
+    is-glob "^2.0.0"
+
 is-lambda@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5"
@@ -3806,6 +3898,18 @@ is-stream@^2.0.0:
   resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
   integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==

+is-string-and-not-blank@^0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/is-string-and-not-blank/-/is-string-and-not-blank-0.0.2.tgz#cd19eded2ca4a514f79ca528915f1fb28e5dd38a"
+  integrity sha512-FyPGAbNVyZpTeDCTXnzuwbu9/WpNXbCfbHXLpCRpN4GANhS00eEIP5Ef+k5HYSNIzIhdN9zRDoBj6unscECvtQ==
+  dependencies:
+    is-string-blank "^1.0.1"
+
+is-string-blank@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/is-string-blank/-/is-string-blank-1.0.1.tgz#866dca066d41d2894ebdfd2d8fe93e586e583a03"
+  integrity sha512-9H+ZBCVs3L9OYqv8nuUAzpcT9OTgMD1yAWrG7ihlnibdkbtB850heAmYWxHuXc4CHy4lKeK69tN+ny1K7gBIrw==
+
 is-string@^1.0.5, is-string@^1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd"
@@ -3832,6 +3936,13 @@ is-unicode-supported@^1.1.0, is-unicode-supported@^1.3.0:
   resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz#d824984b616c292a2e198207d4a609983842f714"
   integrity sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==

+is-valid-path@^0.1.1:
+  version "0.1.1"
+  resolved "https://registry.yarnpkg.com/is-valid-path/-/is-valid-path-0.1.1.tgz#110f9ff74c37f663e1ec7915eb451f2db93ac9df"
+  integrity sha512-+kwPrVDu9Ms03L90Qaml+79+6DZHqHyRoANI6IsZJ/g8frhnfchDOBCa0RbQ6/kdHt5CS5OeIEyrYznNuVN+8A==
+  dependencies:
+    is-invalid-path "^0.1.0"
+
 is-weakmap@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.2.tgz#bf72615d649dfe5f699079c54b83e47d1ae19cfd"
@@ -4157,6 +4268,11 @@ levn@^0.4.1:
     prelude-ls "^1.2.1"
     type-check "~0.4.0"

+lil-http-terminator@^1.2.2:
+  version "1.2.3"
+  resolved "https://registry.yarnpkg.com/lil-http-terminator/-/lil-http-terminator-1.2.3.tgz#594ef0f3c2b2f7d43a8f2989b2b3de611bf507eb"
+  integrity sha512-vQcHSwAFq/kTR2cG6peOVS7SjgksGgSPeH0G2lkw+buue33thE/FCHdn10wJXXshc5RswFy0Iaz48qA2Busw5Q==
+
 locate-path@^6.0.0:
   version "6.0.0"
   resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
@@ -4164,6 +4280,48 @@ locate-path@^6.0.0:
   dependencies:
     p-locate "^5.0.0"

+lodash-es@^4.17.15:
+  version "4.17.21"
+  resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee"
+  integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==
+
+lodash._baseiteratee@~4.7.0:
+  version "4.7.0"
+  resolved "https://registry.yarnpkg.com/lodash._baseiteratee/-/lodash._baseiteratee-4.7.0.tgz#34a9b5543572727c3db2e78edae3c0e9e66bd102"
+  integrity sha512-nqB9M+wITz0BX/Q2xg6fQ8mLkyfF7MU7eE+MNBNjTHFKeKaZAPEzEg+E8LWxKWf1DQVflNEn9N49yAuqKh2mWQ==
+  dependencies:
+    lodash._stringtopath "~4.8.0"
+
+lodash._basetostring@~4.12.0:
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/lodash._basetostring/-/lodash._basetostring-4.12.0.tgz#9327c9dc5158866b7fa4b9d42f4638e5766dd9df"
+  integrity sha512-SwcRIbyxnN6CFEEK4K1y+zuApvWdpQdBHM/swxP962s8HIxPO3alBH5t3m/dl+f4CMUug6sJb7Pww8d13/9WSw==
+
+lodash._baseuniq@~4.6.0:
+  version "4.6.0"
+  resolved "https://registry.yarnpkg.com/lodash._baseuniq/-/lodash._baseuniq-4.6.0.tgz#0ebb44e456814af7905c6212fa2c9b2d51b841e8"
+  integrity sha512-Ja1YevpHZctlI5beLA7oc5KNDhGcPixFhcqSiORHNsp/1QTv7amAXzw+gu4YOvErqVlMVyIJGgtzeepCnnur0A==
+  dependencies:
+    lodash._createset "~4.0.0"
+    lodash._root "~3.0.0"
+
+lodash._createset@~4.0.0:
+  version "4.0.3"
+  resolved "https://registry.yarnpkg.com/lodash._createset/-/lodash._createset-4.0.3.tgz#0f4659fbb09d75194fa9e2b88a6644d363c9fe26"
+  integrity sha512-GTkC6YMprrJZCYU3zcqZj+jkXkrXzq3IPBcF/fIPpNEAB4hZEtXU8zp/RwKOvZl43NUmwDbyRk3+ZTbeRdEBXA==
+
+lodash._root@~3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
+  integrity sha512-O0pWuFSK6x4EXhM1dhZ8gchNtG7JMqBtrHdoUFUWXD7dJnNSUze1GuyQr5sOs0aCvgGeI3o/OJW8f4ca7FDxmQ==
+
+lodash._stringtopath@~4.8.0:
+  version "4.8.0"
+  resolved "https://registry.yarnpkg.com/lodash._stringtopath/-/lodash._stringtopath-4.8.0.tgz#941bcf0e64266e5fc1d66fed0a6959544c576824"
+  integrity sha512-SXL66C731p0xPDC5LZg4wI5H+dJo/EO4KTqOMwLYCH3+FmmfAKJEZCm6ohGpI+T1xwsDsJCfL4OnhorllvlTPQ==
+  dependencies:
+    lodash._basetostring "~4.12.0"
+
 lodash.assignwith@^4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb"
@@ -4234,7 +4392,15 @@ lodash.union@^4.6.0:
   resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
   integrity sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw==

+lodash.uniqby@4.5.0:
+  version "4.5.0"
+  resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.5.0.tgz#a3a17bbf62eeb6240f491846e97c1c4e2a5e1e21"
+  integrity sha512-IRt7cfTtHy6f1aRVA5n7kT8rgN3N1nH6MOWLcHfpWG2SH19E3JksLK38MktLxZDhlAjCP9jpIXkOnRXlu6oByQ==
+  dependencies:
+    lodash._baseiteratee "~4.7.0"
+    lodash._baseuniq "~4.6.0"
+
-lodash@^4.17.21:
+lodash@^4.17.20, lodash@^4.17.21:
   version "4.17.21"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
   integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -4546,7 +4712,7 @@ ms@2.1.2:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==

-ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
+ms@2.1.3, ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
   version "2.1.3"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@@ -4607,6 +4773,11 @@ named-placeholders@^1.1.3:
   dependencies:
     lru-cache "^7.14.1"

+nanoclone@^0.2.1:
+  version "0.2.1"
+  resolved "https://registry.yarnpkg.com/nanoclone/-/nanoclone-0.2.1.tgz#dd4090f8f1a110d26bb32c49ed2f5b9235209ed4"
+  integrity sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==
+
 napi-build-utils@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806"
@@ -4808,6 +4979,11 @@ num-sort@^2.0.0:
   resolved "https://registry.yarnpkg.com/num-sort/-/num-sort-2.1.0.tgz#1cbb37aed071329fdf41151258bc011898577a9b"
   integrity sha512-1MQz1Ed8z2yckoBeSfkQHHO9K1yDRxxtotKSJ9yvcTUUxSvfvzEq5GwBrjjHEpMlq/k5gvXdmJ1SbYxWtpNoVg==

+numbered@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/numbered/-/numbered-1.1.0.tgz#9fcd79564c73a84b9574e8370c3d8e58fe3c133c"
+  integrity sha512-pv/ue2Odr7IfYOO0byC1KgBI10wo5YDauLhxY6/saNzAdAs0r1SotGCPzzCLNPL0xtrAwWRialLu23AAu9xO1g==
+
 object-assign@^4, object-assign@^4.1.1:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@@ -5031,6 +5207,11 @@ p-finally@^1.0.0:
   resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
   integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==

+p-is-promise@3:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-3.0.0.tgz#58e78c7dfe2e163cf2a04ff869e7c1dba64a5971"
+  integrity sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ==
+
 p-limit@^3.0.2:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
@@ -5068,13 +5249,20 @@ p-retry@4:
     "@types/retry" "0.12.0"
     retry "^0.13.1"

-p-timeout@^3.2.0:
+p-timeout@^3.0.0, p-timeout@^3.2.0:
   version "3.2.0"
   resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
   integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
   dependencies:
     p-finally "^1.0.0"

+p-wait-for@3:
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/p-wait-for/-/p-wait-for-3.2.0.tgz#640429bcabf3b0dd9f492c31539c5718cb6a3f1f"
+  integrity sha512-wpgERjNkLrBiFmkMEjuZJEWKKDrNfHCKA1OhyN1wg1FrLkULbviEy6py1AyJUgZ72YWFbZ38FIpnqvVqAlDUwA==
+  dependencies:
+    p-timeout "^3.0.0"
+
 pad-left@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/pad-left/-/pad-left-2.1.0.tgz#16e6a3b2d44a8e138cb0838cc7cb403a4fc9e994"
@@ -5318,6 +5506,11 @@ prop-types@^15.8.1:
     object-assign "^4.1.1"
     react-is "^16.13.1"

+property-expr@^2.0.4:
+  version "2.0.6"
+  resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.6.tgz#f77bc00d5928a6c748414ad12882e83f24aec1e8"
+  integrity sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA==
+
 protobufjs@^6.8.8:
   version "6.11.4"
   resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.4.tgz#29a412c38bf70d89e537b6d02d904a6f448173aa"
@@ -5507,6 +5700,11 @@ reflect.getprototypeof@^1.0.4:
     globalthis "^1.0.3"
     which-builtin-type "^1.1.3"

+regenerator-runtime@^0.14.0:
+  version "0.14.1"
+  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f"
+  integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==
+
 regexp.prototype.flags@^1.5.2:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334"
@@ -5627,6 +5825,11 @@ safe-stable-stringify@^2.3.1:
   resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886"
   integrity sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==

+safe-timers@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/safe-timers/-/safe-timers-1.1.0.tgz#c58ae8325db8d3b067322f0a4ef3a0cad67aad83"
+  integrity sha512-9aqY+v5eMvmRaluUEtdRThV1EjlSElzO7HuCj0sTW9xvp++8iJ9t/RWGNWV6/WHcUJLHpyT2SNf/apoKTU2EpA==
+
 "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0":
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
@@ -6194,6 +6397,11 @@ toidentifier@1.0.1:
   resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
   integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==

+toposort@^2.0.2:
+  version "2.0.2"
+  resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330"
+  integrity sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==
+
 touch@^3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
@@ -6571,7 +6779,7 @@ winston-transport@^4.7.0:
     readable-stream "^3.6.0"
     triple-beam "^1.3.0"

-winston@^3.7.2, winston@^3.9.0:
+winston@^3.13.0, winston@^3.7.2, winston@^3.9.0:
   version "3.13.0"
   resolved "https://registry.yarnpkg.com/winston/-/winston-3.13.0.tgz#e76c0d722f78e04838158c61adc1287201de7ce3"
   integrity sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==
@@ -6677,6 +6885,19 @@ yocto-queue@^0.1.0:
   resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
   integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==

+yup@0.32.9:
+  version "0.32.9"
+  resolved "https://registry.yarnpkg.com/yup/-/yup-0.32.9.tgz#9367bec6b1b0e39211ecbca598702e106019d872"
+  integrity sha512-Ci1qN+i2H0XpY7syDQ0k5zKQ/DoxO0LzPg8PAR/X4Mpj6DqaeCoIYEEjDJwhArh3Fa7GWbQQVDZKeXYlSH4JMg==
+  dependencies:
+    "@babel/runtime" "^7.10.5"
+    "@types/lodash" "^4.14.165"
+    lodash "^4.17.20"
+    lodash-es "^4.17.15"
+    nanoclone "^0.2.1"
+    property-expr "^2.0.4"
+    toposort "^2.0.2"
+
 zip-stream@^4.1.0:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.1.tgz#1337fe974dbaffd2fa9a1ba09662a66932bd7135"