* WIP: background workers for live document sync
* Add the ability to re-embed specific documents across many workspaces via a background queue. The background worker is gated behind an experimental system-setting flag that must be explicitly enabled. Adds UI for watching/unwatching embedded documents. TODO: UI to easily manage all background tasks and see run results. TODO: UI to enable this feature and background endpoints to manage it.
* Create frontend views and paths; move elements to the correct experimental scope
* Update migration to delete runs on removal of a watched document
* Add watch support to YouTube transcripts (#1716); refactor how sync is done for supported types
* Watch specific files in a Confluence space (#1718); add failure-prune check for runs
* Create temporary workflow modifications for the beta image
* Dual build; update copy of alert modals
* Update job interval
* Add support for live-sync of GitHub files
* Update copy for the document sync feature
* Hide experimental features from the UI
* Update docs links
* [FEAT] Implement new settings menu for experimental features (#1735); remove unused context save bar
* Don't run the job on boot
* Unset workflow changes
* Add persistent encryption service: relay the key to the collector so persistent encryption can be used; encrypt any private data in chunkSources used for replay during resync jobs
* Update JSDoc
* Linting and organization
* Update modal copy for the feature

Co-authored-by: timothycarambat <rambat1010@gmail.com>
Co-authored-by: Sean Hatfield <seanhatfield5@gmail.com>
160 lines
4.3 KiB
JavaScript
const { setDataSigner } = require("../middleware/setDataSigner");
const { verifyPayloadIntegrity } = require("../middleware/verifyIntegrity");
const { reqBody } = require("../utils/http");
const { validURL } = require("../utils/url");
const RESYNC_METHODS = require("./resync");
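
/**
 * Registers the collector's /ext/* extension endpoints on the provided
 * Express-style app. Every route expects a JSON POST body verified by
 * verifyPayloadIntegrity; routes whose documents can be live-synced
 * (GitHub repos, Confluence spaces, resyncs) also run setDataSigner so
 * private data in chunkSources can be encrypted for replay during resync
 * jobs.
 */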
function extensions(app) {
  if (!app) return;
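  // Re-runs the sync for an already-embedded document. The body must include a
  // `type` matching one of the RESYNC_METHODS handlers exported by ./resync,
  // plus the `options` that handler expects; unknown types are rejected.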
  app.post(
    "/ext/resync-source-document",
    [verifyPayloadIntegrity, setDataSigner],
    async function (request, response) {
      try {
        const { type, options } = reqBody(request);
        if (!RESYNC_METHODS.hasOwnProperty(type))
          throw new Error(`Type "${type}" is not a valid type to sync.`);
        return await RESYNC_METHODS[type](options, response);
      } catch (e) {
        console.error(e);
        response.status(200).json({
          success: false,
          content: null,
          reason: e.message || "A processing error occurred.",
        });
      }
      return;
    }
  );
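  // Loads and chunks the files of a GitHub repository. The request body is
  // forwarded to loadGithubRepo as-is; setDataSigner is attached so private
  // data in the resulting chunkSources can be encrypted for later resync.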
  app.post(
    "/ext/github-repo",
    [verifyPayloadIntegrity, setDataSigner],
    async function (request, response) {
      try {
        const { loadGithubRepo } = require("../utils/extensions/GithubRepo");
        const { success, reason, data } = await loadGithubRepo(
          reqBody(request),
          response,
        );
        response.status(200).json({
          success,
          reason,
          data,
        });
      } catch (e) {
        console.error(e);
        response.status(200).json({
          success: false,
          reason: e.message || "A processing error occurred.",
          data: {},
        });
      }
      return;
    }
  );
  // gets all branches for a specific repo
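  // The request body is handed to RepoLoader as-is; no data signer is attached
  // since this route persists nothing.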
  app.post(
    "/ext/github-repo/branches",
    [verifyPayloadIntegrity],
    async function (request, response) {
      try {
        const GithubRepoLoader = require("../utils/extensions/GithubRepo/RepoLoader");
        const allBranches = await new GithubRepoLoader(
          reqBody(request)
        ).getRepoBranches();
        response.status(200).json({
          success: true,
          reason: null,
          data: {
            branches: allBranches,
          },
        });
      } catch (e) {
        console.error(e);
        response.status(400).json({
          success: false,
          reason: e.message,
          data: {
            branches: [],
          },
        });
      }
      return;
    }
  );
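  // Fetches the transcript of a YouTube video. Failures respond with a 400
  // and a null title/author payload.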
  app.post(
    "/ext/youtube-transcript",
    [verifyPayloadIntegrity],
    async function (request, response) {
      try {
        const { loadYouTubeTranscript } = require("../utils/extensions/YoutubeTranscript");
        const { success, reason, data } = await loadYouTubeTranscript(
          reqBody(request)
        );
        response.status(200).json({ success, reason, data });
      } catch (e) {
        console.error(e);
        response.status(400).json({
          success: false,
          reason: e.message,
          data: {
            title: null,
            author: null,
          },
        });
      }
      return;
    }
  );
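  // Scrapes a website starting from `url`, following links up to `depth`
  // levels deep and at most `maxLinks` pages (defaults: depth 1, 20 links).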
  app.post(
    "/ext/website-depth",
    [verifyPayloadIntegrity],
    async function (request, response) {
      try {
        const websiteDepth = require("../utils/extensions/WebsiteDepth");
        const { url, depth = 1, maxLinks = 20 } = reqBody(request);
        if (!validURL(url))
          return response
            .status(400)
            .json({ success: false, reason: "Not a valid URL." });

        const scrapedData = await websiteDepth(url, depth, maxLinks);
        response.status(200).json({ success: true, data: scrapedData });
      } catch (e) {
        console.error(e);
        response.status(400).json({ success: false, reason: e.message });
      }
      return;
    }
  );
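  // Loads pages from a Confluence space. Like the GitHub repo route, it uses
  // setDataSigner so stored chunk sources can be encrypted for later resync.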
  app.post(
    "/ext/confluence",
    [verifyPayloadIntegrity, setDataSigner],
    async function (request, response) {
      try {
        const { loadConfluence } = require("../utils/extensions/Confluence");
        const { success, reason, data } = await loadConfluence(
          reqBody(request),
          response
        );
        response.status(200).json({ success, reason, data });
      } catch (e) {
        console.error(e);
        response.status(400).json({
          success: false,
          reason: e.message,
          data: {
            title: null,
            author: null,
          },
        });
      }
      return;
    }
  );
}

module.exports = extensions;
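
/**
 * Adding another extension follows the same shape as the routes above. A
 * minimal sketch (the "/ext/example" path, the `loadExample` helper, and its
 * module path are hypothetical, not part of this codebase):
 *
 *   app.post(
 *     "/ext/example",
 *     [verifyPayloadIntegrity], // add setDataSigner if the source is resyncable
 *     async function (request, response) {
 *       try {
 *         const { loadExample } = require("../utils/extensions/Example");
 *         const { success, reason, data } = await loadExample(reqBody(request));
 *         response.status(200).json({ success, reason, data });
 *       } catch (e) {
 *         console.error(e);
 *         response.status(400).json({ success: false, reason: e.message, data: {} });
 *       }
 *       return;
 *     }
 *   );
 */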