Mirror of https://github.com/Mintplex-Labs/anything-llm.git
Synced 2024-11-19 20:50:09 +01:00

merge with master
Patch LLM selection for native to be disabled
This commit is contained in: 75ced7e65a
@@ -34,6 +34,7 @@
     "mime": "^3.0.0",
     "moment": "^2.29.4",
     "multer": "^1.4.5-lts.1",
+    "node-html-parser": "^6.1.13",
     "officeparser": "^4.0.5",
     "openai": "^3.2.1",
     "pdf-parse": "^1.1.1",
@@ -42,11 +43,10 @@
     "url-pattern": "^1.0.3",
     "uuid": "^9.0.0",
     "wavefile": "^11.0.0",
-    "youtube-transcript": "^1.0.6",
     "youtubei.js": "^9.1.0"
   },
   "devDependencies": {
     "nodemon": "^2.0.22",
     "prettier": "^2.4.1"
   }
 }
@@ -0,0 +1,90 @@
+/*
+ * This is just a custom implementation of the Langchain JS YouTubeLoader class,
+ * as the dependency for YoutubeTranscript is quite fickle and it's a rat race to keep it up,
+ * and instead of waiting for patches we can just bring this simple script in-house and at least
+ * be able to patch it since it's so flaky. When we have more connectors we can kill this because
+ * it will be a pain to maintain over time.
+ */
+class YoutubeLoader {
+  #videoId;
+  #language;
+  #addVideoInfo;
+
+  constructor({ videoId = null, language = null, addVideoInfo = false } = {}) {
+    if (!videoId) throw new Error("Invalid video id!");
+    this.#videoId = videoId;
+    this.#language = language;
+    this.#addVideoInfo = addVideoInfo;
+  }
+
+  /**
+   * Extracts the videoId from a YouTube video URL.
+   * @param url The URL of the YouTube video.
+   * @returns The videoId of the YouTube video.
+   */
+  static getVideoID(url) {
+    const match = url.match(
+      /.*(?:youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=)([^#&?]*).*/
+    );
+    if (match !== null && match[1].length === 11) {
+      return match[1];
+    } else {
+      throw new Error("Failed to get youtube video id from the url");
+    }
+  }
+
+  /**
+   * Creates a new instance of the YoutubeLoader class from a YouTube video
+   * URL.
+   * @param url The URL of the YouTube video.
+   * @param config Optional configuration options for the YoutubeLoader instance, excluding the videoId.
+   * @returns A new instance of the YoutubeLoader class.
+   */
+  static createFromUrl(url, config = {}) {
+    const videoId = YoutubeLoader.getVideoID(url);
+    return new YoutubeLoader({ ...config, videoId });
+  }
+
+  /**
+   * Loads the transcript and video metadata from the specified YouTube
+   * video. It uses the youtube-transcript library to fetch the transcript
+   * and the youtubei.js library to fetch the video metadata.
+   * @returns Langchain-like doc that is 1 element with pageContent and metadata.
+   */
+  async load() {
+    let transcript;
+    const metadata = {
+      source: this.#videoId,
+    };
+    try {
+      const { YoutubeTranscript } = require("./youtube-transcript");
+      transcript = await YoutubeTranscript.fetchTranscript(this.#videoId, {
+        lang: this.#language,
+      });
+      if (!transcript) {
+        throw new Error("Transcription not found");
+      }
+      if (this.#addVideoInfo) {
+        const { Innertube } = require("youtubei.js");
+        const youtube = await Innertube.create();
+        const info = (await youtube.getBasicInfo(this.#videoId)).basic_info;
+        metadata.description = info.short_description;
+        metadata.title = info.title;
+        metadata.view_count = info.view_count;
+        metadata.author = info.author;
+      }
+    } catch (e) {
+      throw new Error(
+        `Failed to get YouTube video transcription: ${e?.message}`
+      );
+    }
+    return [
+      {
+        pageContent: transcript,
+        metadata,
+      },
+    ];
+  }
+}
+
+module.exports.YoutubeLoader = YoutubeLoader;
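For context, a minimal usage sketch of the loader added above. This is not part of the diff; the relative require path and the example URL are illustrative only, and the API (createFromUrl, load returning a single-element array of { pageContent, metadata }) follows the class as shown.

  // Hypothetical standalone usage of the in-house YoutubeLoader above.
  // Adjust the require path to wherever the file lives in the collector.
  const { YoutubeLoader } = require("./YoutubeLoader");

  async function main() {
    const loader = YoutubeLoader.createFromUrl(
      "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
      { language: "en", addVideoInfo: true }
    );
    const [doc] = await loader.load();
    console.log(doc.metadata.title, doc.pageContent.length);
  }

  main().catch(console.error);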
@@ -0,0 +1,115 @@
+const { parse } = require("node-html-parser");
+const RE_YOUTUBE =
+  /(?:youtube\.com\/(?:[^\/]+\/.+\/|(?:v|e(?:mbed)?)\/|.*[?&]v=)|youtu\.be\/)([^"&?\/\s]{11})/i;
+const USER_AGENT =
+  "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36,gzip(gfe)";
+
+class YoutubeTranscriptError extends Error {
+  constructor(message) {
+    super(`[YoutubeTranscript] ${message}`);
+  }
+}
+
+/**
+ * Class to retrieve a transcript if one exists.
+ */
+class YoutubeTranscript {
+  /**
+   * Fetch transcript from a YouTube video.
+   * @param videoId Video url or video identifier
+   * @param config Object with lang param (eg: en, es, hk, uk) format.
+   * Will just grab the first caption it can find, so no special lang caption support.
+   */
+  static async fetchTranscript(videoId, config = {}) {
+    const identifier = this.retrieveVideoId(videoId);
+    const lang = config?.lang ?? "en";
+    try {
+      const transcriptUrl = await fetch(
+        `https://www.youtube.com/watch?v=${identifier}`,
+        {
+          headers: {
+            "User-Agent": USER_AGENT,
+          },
+        }
+      )
+        .then((res) => res.text())
+        .then((html) => parse(html))
+        .then((html) => this.#parseTranscriptEndpoint(html, lang));
+
+      if (!transcriptUrl)
+        throw new Error("Failed to locate a transcript for this video!");
+
+      // Result is hopefully some XML.
+      const transcriptXML = await fetch(transcriptUrl)
+        .then((res) => res.text())
+        .then((xml) => parse(xml));
+
+      let transcript = "";
+      const chunks = transcriptXML.getElementsByTagName("text");
+      for (const chunk of chunks) {
+        transcript += chunk.textContent;
+      }
+
+      return transcript;
+    } catch (e) {
+      throw new YoutubeTranscriptError(e);
+    }
+  }
+
+  static #parseTranscriptEndpoint(document, langCode = null) {
+    try {
+      // Get all script tags on document page
+      const scripts = document.getElementsByTagName("script");
+
+      // find the player data script.
+      const playerScript = scripts.find((script) =>
+        script.textContent.includes("var ytInitialPlayerResponse = {")
+      );
+
+      const dataString =
+        playerScript.textContent
+          ?.split("var ytInitialPlayerResponse = ")?.[1] // get the start of the object {....
+          ?.split("};")?.[0] + // chunk off any code after object closure.
+        "}"; // add back that curly brace we just cut.
+
+      const data = JSON.parse(dataString.trim()); // Attempt a JSON parse
+      const availableCaptions =
+        data?.captions?.playerCaptionsTracklistRenderer?.captionTracks || [];
+
+      // If languageCode was specified then search for its code, otherwise get the first.
+      let captionTrack = availableCaptions?.[0];
+      if (langCode)
+        captionTrack =
+          availableCaptions.find((track) =>
+            track.languageCode.includes(langCode)
+          ) ?? availableCaptions?.[0];
+
+      return captionTrack?.baseUrl;
+    } catch (e) {
+      console.error(`YoutubeTranscript.#parseTranscriptEndpoint ${e.message}`);
+      return null;
+    }
+  }
+
+  /**
+   * Retrieve video id from url or string
+   * @param videoId video url or video id
+   */
+  static retrieveVideoId(videoId) {
+    if (videoId.length === 11) {
+      return videoId;
+    }
+    const matchId = videoId.match(RE_YOUTUBE);
+    if (matchId && matchId.length) {
+      return matchId[1];
+    }
+    throw new YoutubeTranscriptError(
+      "Impossible to retrieve Youtube video ID."
+    );
+  }
+}
+
+module.exports = {
+  YoutubeTranscript,
+  YoutubeTranscriptError,
+};
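A small sketch of how the fetcher above could be called on its own. This is not part of the diff; it relies on the global fetch available in Node 18+, and the require path and video id are illustrative only.

  // Hypothetical direct use of the YoutubeTranscript helper shown above.
  const { YoutubeTranscript, YoutubeTranscriptError } = require("./youtube-transcript");

  (async () => {
    try {
      // Accepts either an 11-character video id or a full watch/youtu.be URL.
      const text = await YoutubeTranscript.fetchTranscript("dQw4w9WgXcQ", { lang: "en" });
      console.log(`Transcript length: ${text.length} characters`);
    } catch (e) {
      if (e instanceof YoutubeTranscriptError) console.error(e.message);
      else throw e;
    }
  })();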
@@ -1,17 +1,17 @@
-const { YoutubeLoader } = require("langchain/document_loaders/web/youtube");
 const fs = require("fs");
 const path = require("path");
 const { default: slugify } = require("slugify");
 const { v4 } = require("uuid");
 const { writeToServerDocuments, documentsFolder } = require("../../files");
 const { tokenizeString } = require("../../tokenizer");
+const { YoutubeLoader } = require("./YoutubeLoader");

 function validYoutubeVideoUrl(link) {
   const UrlPattern = require("url-pattern");
   const opts = new URL(link);
-  const url = `${opts.protocol}//${opts.host}${
-    opts.pathname
-  }?v=${opts.searchParams.get("v")}`;
+  const url = `${opts.protocol}//${opts.host}${opts.pathname}${
+    opts.searchParams.has("v") ? `?v=${opts.searchParams.get("v")}` : ""
+  }`;

   const shortPatternMatch = new UrlPattern(
     "https\\://(www.)youtu.be/(:videoId)"
@@ -56,9 +56,7 @@ async function loadYouTubeTranscript({ url }) {
   }

   const metadata = docs[0].metadata;
-  let content = "";
-  docs.forEach((doc) => (content = content.concat(doc.pageContent)));
-
+  const content = docs[0].pageContent;
   if (!content.length) {
     return {
       success: false,
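To illustrate the URL-normalization change in the hunk above (an informal example, not part of the diff): the old template always appended a ?v= query, even when no v parameter existed, while the new one keeps the pathname and only adds ?v= when it is present.

  // Illustrative only: effect of the new template on a youtu.be short link,
  // which has no ?v= query parameter.
  const opts = new URL("https://youtu.be/abc12345678");

  const oldUrl = `${opts.protocol}//${opts.host}${
    opts.pathname
  }?v=${opts.searchParams.get("v")}`;
  // -> "https://youtu.be/abc12345678?v=null"

  const newUrl = `${opts.protocol}//${opts.host}${opts.pathname}${
    opts.searchParams.has("v") ? `?v=${opts.searchParams.get("v")}` : ""
  }`;
  // -> "https://youtu.be/abc12345678"

  console.log(oldUrl, newUrl);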
@@ -84,7 +84,7 @@ async function wipeCollectorStorage() {
       if (file === "__HOTDIR__.md") continue;
       try {
         fs.rmSync(path.join(directory, file));
-      } catch { }
+      } catch {}
     }
     resolve();
   });
@@ -99,7 +99,7 @@ async function wipeCollectorStorage() {
      if (file === ".placeholder") continue;
      try {
        fs.rmSync(path.join(directory, file));
-     } catch { }
+     } catch {}
    }
    resolve();
  });
@@ -503,6 +503,11 @@ body-parser@^1.20.2:
     type-is "~1.6.18"
     unpipe "1.0.0"

+boolbase@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
+  integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==
+
 brace-expansion@^1.1.7:
   version "1.1.11"
   resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
@@ -589,11 +594,6 @@ camelcase@6:
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
   integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==

-centra@^2.6.0:
-  version "2.6.0"
-  resolved "https://registry.yarnpkg.com/centra/-/centra-2.6.0.tgz#79117998ee6908642258db263871381aa5d1204a"
-  integrity sha512-dgh+YleemrT8u85QL11Z6tYhegAs3MMxsaWAq/oXeAmYJ7VxL3SI9TZtnfaEvNDMAPolj25FXIb3S+HCI4wQaQ==
-
 chalk@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
@@ -796,6 +796,22 @@ crypt@0.0.2:
   resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
   integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==

+css-select@^5.1.0:
+  version "5.1.0"
+  resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6"
+  integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==
+  dependencies:
+    boolbase "^1.0.0"
+    css-what "^6.1.0"
+    domhandler "^5.0.2"
+    domutils "^3.0.1"
+    nth-check "^2.0.1"
+
+css-what@^6.1.0:
+  version "6.1.0"
+  resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
+  integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==
+
 data-uri-to-buffer@^6.0.0:
   version "6.0.1"
   resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-6.0.1.tgz#540bd4c8753a25ee129035aebdedf63b078703c7"
@@ -2244,6 +2260,14 @@ node-forge@^1.3.1:
   resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
   integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==

+node-html-parser@^6.1.13:
+  version "6.1.13"
+  resolved "https://registry.yarnpkg.com/node-html-parser/-/node-html-parser-6.1.13.tgz#a1df799b83df5c6743fcd92740ba14682083b7e4"
+  integrity sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==
+  dependencies:
+    css-select "^5.1.0"
+    he "1.2.0"
+
 nodemailer@6.9.3:
   version "6.9.3"
   resolved "https://registry.yarnpkg.com/nodemailer/-/nodemailer-6.9.3.tgz#e4425b85f05d83c43c5cd81bf84ab968f8ef5cbe"
@@ -2294,6 +2318,13 @@ npmlog@^5.0.1:
     gauge "^3.0.0"
     set-blocking "^2.0.0"

+nth-check@^2.0.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d"
+  integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==
+  dependencies:
+    boolbase "^1.0.0"
+
 num-sort@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/num-sort/-/num-sort-2.1.0.tgz#1cbb37aed071329fdf41151258bc011898577a9b"
@@ -2522,13 +2553,6 @@ pend@~1.2.0:
   resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
   integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==

-phin@^3.5.0:
-  version "3.7.0"
-  resolved "https://registry.yarnpkg.com/phin/-/phin-3.7.0.tgz#eeeff7660408515d8cf0c6252901012d4ab7153b"
-  integrity sha512-DqnVNrpYhKGBZppNKprD+UJylMeEKOZxHgPB+ZP6mGzf3uA2uox4Ep9tUm+rUc8WLIdHT3HcAE4X8fhwQA9JKg==
-  dependencies:
-    centra "^2.6.0"
-
 picomatch@^2.0.4, picomatch@^2.2.1:
   version "2.3.1"
   resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
@@ -3421,13 +3445,6 @@ yauzl@^2.10.0, yauzl@^2.4.2:
     buffer-crc32 "~0.2.3"
     fd-slicer "~1.1.0"

-youtube-transcript@^1.0.6:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/youtube-transcript/-/youtube-transcript-1.0.6.tgz#8414c04380d3ef1102bd00ca3729e94c46ae7a14"
-  integrity sha512-k/6uxB9voj/5astl6+q+VArX/aWHhnmle8BucvUCTYTQQEOSVlBiXkrI0KD3o8A0b44MV6q0bmVNiJFIpTlcZA==
-  dependencies:
-    phin "^3.5.0"
-
 youtubei.js@^9.1.0:
   version "9.1.0"
   resolved "https://registry.yarnpkg.com/youtubei.js/-/youtubei.js-9.1.0.tgz#bcf154c9fa21d3c8c1d00a5e10360d0a065c660e"
frontend/.gitignore (vendored)
@@ -12,6 +12,7 @@ dist
 lib
 dist-ssr
 *.local
+!frontend/components/lib

 # Editor directories and files
 .vscode/*
@@ -35,16 +35,13 @@ const GeneralTranscriptionPreference = lazy(
 const GeneralEmbeddingPreference = lazy(
   () => import("@/pages/GeneralSettings/EmbeddingPreference")
 );
+const EmbeddingTextSplitterPreference = lazy(
+  () => import("@/pages/GeneralSettings/EmbeddingTextSplitterPreference")
+);
 const GeneralVectorDatabase = lazy(
   () => import("@/pages/GeneralSettings/VectorDatabase")
 );
 const GeneralSecurity = lazy(() => import("@/pages/GeneralSettings/Security"));
-const DataConnectors = lazy(
-  () => import("@/pages/GeneralSettings/DataConnectors")
-);
-const DataConnectorSetup = lazy(
-  () => import("@/pages/GeneralSettings/DataConnectors/Connectors")
-);
 const WorkspaceSettings = lazy(() => import("@/pages/WorkspaceSettings"));
 const EmbedConfigSetup = lazy(
   () => import("@/pages/GeneralSettings/EmbedConfigs")
@@ -92,6 +89,12 @@ export default function App() {
               path="/settings/embedding-preference"
               element={<AdminRoute Component={GeneralEmbeddingPreference} />}
             />
+            <Route
+              path="/settings/text-splitter-preference"
+              element={
+                <AdminRoute Component={EmbeddingTextSplitterPreference} />
+              }
+            />
             <Route
               path="/settings/vector-database"
               element={<AdminRoute Component={GeneralVectorDatabase} />}
@@ -145,15 +148,6 @@ export default function App() {
               path="/settings/workspaces"
               element={<ManagerRoute Component={AdminWorkspaces} />}
             />
-            <Route
-              path="/settings/data-connectors"
-              element={<ManagerRoute Component={DataConnectors} />}
-            />
-            <Route
-              path="/settings/data-connectors/:connector"
-              element={<ManagerRoute Component={DataConnectorSetup} />}
-            />
-
             {/* Onboarding Flow */}
             <Route path="/onboarding" element={<OnboardingFlow />} />
             <Route path="/onboarding/:step" element={<OnboardingFlow />} />
@@ -1,6 +1,3 @@
-import paths from "@/utils/paths";
-import ConnectorImages from "./media";
-
 export default function DataConnectorOption({ slug }) {
   if (!DATA_CONNECTORS.hasOwnProperty(slug)) return null;
   const { path, image, name, description, link } = DATA_CONNECTORS[slug];
@@ -26,22 +23,3 @@ export default function DataConnectorOption({ slug }) {
     </a>
   );
 }
-
-export const DATA_CONNECTORS = {
-  github: {
-    name: "GitHub Repo",
-    path: paths.settings.dataConnectors.github(),
-    image: ConnectorImages.github,
-    description:
-      "Import an entire public or private Github repository in a single click.",
-    link: "https://github.com",
-  },
-  "youtube-transcript": {
-    name: "YouTube Transcript",
-    path: paths.settings.dataConnectors.youtubeTranscript(),
-    image: ConnectorImages.youtube,
-    description:
-      "Import the transcription of an entire YouTube video from a link.",
-    link: "https://youtube.com",
-  },
-};
Binary file not shown. (Before: 22 KiB)
@@ -0,0 +1,4 @@
+<svg width="38" height="39" viewBox="0 0 38 39" fill="none" xmlns="http://www.w3.org/2000/svg">
+<rect y="0.696777" width="37.9922" height="37.9922" rx="5.42746" fill="white"/>
+<path d="M27.9829 16.8445V17.6583C27.9812 19.0353 27.4813 20.3652 26.5756 21.4024C25.6699 22.4395 24.4194 23.114 23.0552 23.3012C23.6121 24.0137 23.9143 24.8922 23.9138 25.7965V29.8656C23.9138 30.0815 23.8281 30.2885 23.6754 30.4411C23.5228 30.5937 23.3158 30.6794 23.1 30.6794H16.5894C16.3736 30.6794 16.1666 30.5937 16.014 30.4411C15.8613 30.2885 15.7756 30.0815 15.7756 29.8656V28.238H13.3341C12.255 28.238 11.22 27.8093 10.4569 27.0462C9.69375 26.2831 9.26505 25.2481 9.26505 24.1689C9.26505 23.5214 9.00782 22.9004 8.54996 22.4425C8.0921 21.9847 7.4711 21.7274 6.82359 21.7274C6.60775 21.7274 6.40075 21.6417 6.24813 21.4891C6.09551 21.3364 6.00977 21.1294 6.00977 20.9136C6.00977 20.6978 6.09551 20.4908 6.24813 20.3381C6.40075 20.1855 6.60775 20.0998 6.82359 20.0998C7.35795 20.0998 7.88708 20.205 8.38076 20.4095C8.87445 20.614 9.32302 20.9137 9.70087 21.2916C10.0787 21.6694 10.3785 22.118 10.5829 22.6117C10.7874 23.1054 10.8927 23.6345 10.8927 24.1689C10.8927 24.8164 11.1499 25.4374 11.6078 25.8953C12.0656 26.3531 12.6866 26.6103 13.3341 26.6103H15.7756V25.7965C15.7751 24.8922 16.0773 24.0137 16.6342 23.3012C15.27 23.114 14.0196 22.4395 13.1138 21.4024C12.2081 20.3652 11.7082 19.0353 11.7065 17.6583V16.8445C11.7166 15.8331 11.986 14.8412 12.4888 13.9636C12.24 13.1612 12.1602 12.3159 12.2544 11.4811C12.3486 10.6463 12.6148 9.84005 13.0361 9.11322C13.1075 8.98948 13.2103 8.88673 13.334 8.8153C13.4578 8.74387 13.5982 8.70628 13.7411 8.70631C14.689 8.70433 15.6242 8.92407 16.472 9.34799C17.3199 9.77191 18.0568 10.3883 18.624 11.1478H21.0654C21.6326 10.3883 22.3695 9.77191 23.2174 9.34799C24.0652 8.92407 25.0005 8.70433 25.9484 8.70631C26.0912 8.70628 26.2316 8.74387 26.3554 8.8153C26.4791 8.88673 26.5819 8.98948 26.6533 9.11322C27.0747 9.84003 27.3408 10.6463 27.4348 11.4812C27.5289 12.316 27.4488 13.1613 27.1996 13.9636C27.7034 14.8409 27.9731 15.8329 27.9829 16.8445Z" fill="#222628"/>
+</svg>
(After: 2.0 KiB)
@@ -1,5 +1,5 @@
-import Github from "./github.png";
-import YouTube from "./youtube.png";
+import Github from "./github.svg";
+import YouTube from "./youtube.svg";

 const ConnectorImages = {
   github: Github,
Binary file not shown. (Before: 5.3 KiB)
File diff suppressed because one or more lines are too long. (After: 26 KiB)
@@ -1,26 +1,6 @@
-import { Info } from "@phosphor-icons/react";
-import paths from "@/utils/paths";
-
-export default function AnthropicAiOptions({ settings, showAlert = false }) {
+export default function AnthropicAiOptions({ settings }) {
   return (
     <div className="w-full flex flex-col">
-      {showAlert && (
-        <div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-6 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
-          <div className="gap-x-2 flex items-center">
-            <Info size={12} className="hidden md:visible" />
-            <p className="text-sm md:text-base">
-              Anthropic as your LLM requires you to set an embedding service to
-              use.
-            </p>
-          </div>
-          <a
-            href={paths.settings.embeddingPreference()}
-            className="text-sm md:text-base my-2 underline"
-          >
-            Manage embedding →
-          </a>
-        </div>
-      )}
       <div className="w-full flex items-center gap-4">
         <div className="flex flex-col w-60">
           <label className="text-white text-sm font-semibold block mb-4">
@@ -38,32 +18,34 @@ export default function AnthropicAiOptions({ settings, showAlert = false }) {
           />
         </div>

+        {!settings?.credentialsOnly && (
          <div className="flex flex-col w-60">
            <label className="text-white text-sm font-semibold block mb-4">
              Chat Model Selection
            </label>
            <select
              name="AnthropicModelPref"
              defaultValue={settings?.AnthropicModelPref || "claude-2"}
              required={true}
              className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
            >
              {[
                "claude-instant-1.2",
                "claude-2.0",
                "claude-2.1",
                "claude-3-haiku-20240307",
                "claude-3-opus-20240229",
                "claude-3-sonnet-20240229",
              ].map((model) => {
                return (
                  <option key={model} value={model}>
                    {model}
                  </option>
                );
              })}
            </select>
          </div>
+        )}
       </div>
     </div>
   );
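The same credentialsOnly guard is applied to every provider option component in this commit. A reduced sketch of the pattern follows; it is illustrative only, and ProviderOptions, ApiKeyField, and ProviderModelSelection are stand-in names, not components from the codebase.

  // Illustrative reduction only (not code from this commit): the provider
  // components keep their API-key inputs but skip rendering the model and
  // token-limit controls when settings?.credentialsOnly is set.
  export default function ProviderOptions({ settings }) {
    return (
      <div className="w-full flex flex-col">
        <ApiKeyField settings={settings} />
        {!settings?.credentialsOnly && (
          <ProviderModelSelection settings={settings} />
        )}
      </div>
    );
  }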
@@ -18,25 +18,27 @@ export default function GeminiLLMOptions({ settings }) {
           />
         </div>

+        {!settings?.credentialsOnly && (
          <div className="flex flex-col w-60">
            <label className="text-white text-sm font-semibold block mb-4">
              Chat Model Selection
            </label>
            <select
              name="GeminiLLMModelPref"
              defaultValue={settings?.GeminiLLMModelPref || "gemini-pro"}
              required={true}
              className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
            >
              {["gemini-pro"].map((model) => {
                return (
                  <option key={model} value={model}>
                    {model}
                  </option>
                );
              })}
            </select>
          </div>
+        )}
       </div>
     </div>
   );
@@ -17,25 +17,27 @@ export default function GroqAiOptions({ settings }) {
           />
         </div>

+        {!settings?.credentialsOnly && (
          <div className="flex flex-col w-60">
            <label className="text-white text-sm font-semibold block mb-4">
              Chat Model Selection
            </label>
            <select
              name="GroqModelPref"
              defaultValue={settings?.GroqModelPref || "llama2-70b-4096"}
              required={true}
              className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
            >
              {["llama2-70b-4096", "mixtral-8x7b-32768"].map((model) => {
                return (
                  <option key={model} value={model}>
                    {model}
                  </option>
                );
              })}
            </select>
          </div>
+        )}
       </div>
     </div>
   );
 }
@@ -21,7 +21,7 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
             </p>
           </div>
           <a
-            href={paths.settings.embeddingPreference()}
+            href={paths.settings.embedder.modelPreference()}
             className="text-sm md:text-base my-2 underline"
           >
             Manage embedding →
@@ -46,23 +46,27 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
             onBlur={() => setBasePath(basePathValue)}
           />
         </div>
+        {!settings?.credentialsOnly && (
+          <>
            <LMStudioModelSelection settings={settings} basePath={basePath} />
            <div className="flex flex-col w-60">
              <label className="text-white text-sm font-semibold block mb-4">
                Token context window
              </label>
              <input
                type="number"
                name="LMStudioTokenLimit"
                className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                placeholder="4096"
                min={1}
                onScroll={(e) => e.target.blur()}
                defaultValue={settings?.LMStudioTokenLimit}
                required={true}
                autoComplete="off"
              />
            </div>
+          </>
+        )}
       </div>
     </div>
   );
@@ -21,7 +21,7 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             </p>
           </div>
           <a
-            href={paths.settings.embeddingPreference()}
+            href={paths.settings.embedder.modelPreference()}
             className="text-sm md:text-base my-2 underline"
           >
             Manage embedding →
@@ -46,27 +46,31 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             onBlur={() => setBasePath(basePathValue)}
           />
         </div>
+        {!settings?.credentialsOnly && (
+          <>
            <LocalAIModelSelection
              settings={settings}
              basePath={basePath}
              apiKey={apiKey}
            />
            <div className="flex flex-col w-60">
              <label className="text-white text-sm font-semibold block mb-4">
                Token context window
              </label>
              <input
                type="number"
                name="LocalAiTokenLimit"
                className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                placeholder="4096"
                min={1}
                onScroll={(e) => e.target.blur()}
                defaultValue={settings?.LocalAiTokenLimit}
                required={true}
                autoComplete="off"
              />
            </div>
+          </>
+        )}
       </div>
       <div className="w-full flex items-center gap-4">
         <div className="flex flex-col w-60">
@@ -24,7 +24,9 @@ export default function MistralOptions({ settings }) {
           onBlur={() => setMistralKey(inputValue)}
         />
       </div>
-      <MistralModelSelection settings={settings} apiKey={mistralKey} />
+      {!settings?.credentialsOnly && (
+        <MistralModelSelection settings={settings} apiKey={mistralKey} />
+      )}
     </div>
   );
 }
@@ -27,23 +27,27 @@ export default function OllamaLLMOptions({ settings }) {
             onBlur={() => setBasePath(basePathValue)}
           />
         </div>
+        {!settings?.credentialsOnly && (
+          <>
            <OllamaLLMModelSelection settings={settings} basePath={basePath} />
            <div className="flex flex-col w-60">
              <label className="text-white text-sm font-semibold block mb-4">
                Token context window
              </label>
              <input
                type="number"
                name="OllamaLLMTokenLimit"
                className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                placeholder="4096"
                min={1}
                onScroll={(e) => e.target.blur()}
                defaultValue={settings?.OllamaLLMTokenLimit}
                required={true}
                autoComplete="off"
              />
            </div>
+          </>
+        )}
       </div>
     </div>
   );
@@ -24,7 +24,9 @@ export default function OpenAiOptions({ settings }) {
           onBlur={() => setOpenAIKey(inputValue)}
         />
       </div>
-      <OpenAIModelSelection settings={settings} apiKey={openAIKey} />
+      {!settings?.credentialsOnly && (
+        <OpenAIModelSelection settings={settings} apiKey={openAIKey} />
+      )}
     </div>
   );
 }
@@ -19,7 +19,9 @@ export default function OpenRouterOptions({ settings }) {
           spellCheck={false}
         />
       </div>
-      <OpenRouterModelSelection settings={settings} />
+      {!settings?.credentialsOnly && (
+        <OpenRouterModelSelection settings={settings} />
+      )}
     </div>
   );
 }
@@ -84,7 +86,7 @@ function OpenRouterModelSelection({ settings }) {
                 <option
                   key={model.id}
                   value={model.id}
-                  selected={settings.OpenRouterModelPref === model.id}
+                  selected={settings?.OpenRouterModelPref === model.id}
                 >
                   {model.name}
                 </option>
@@ -19,7 +19,9 @@ export default function PerplexityOptions({ settings }) {
           spellCheck={false}
         />
       </div>
-      <PerplexityModelSelection settings={settings} />
+      {!settings?.credentialsOnly && (
+        <PerplexityModelSelection settings={settings} />
+      )}
     </div>
   );
 }
@@ -19,7 +19,9 @@ export default function TogetherAiOptions({ settings }) {
           spellCheck={false}
         />
       </div>
-      <TogetherAiModelSelection settings={settings} />
+      {!settings?.credentialsOnly && (
+        <TogetherAiModelSelection settings={settings} />
+      )}
     </div>
   );
 }
@@ -84,7 +86,7 @@ function TogetherAiModelSelection({ settings }) {
                 <option
                   key={model.id}
                   value={model.id}
-                  selected={settings.OpenRouterModelPref === model.id}
+                  selected={settings?.OpenRouterModelPref === model.id}
                 >
                   {model.name}
                 </option>
@@ -0,0 +1,25 @@
+export default function ConnectorOption({
+  slug,
+  selectedConnector,
+  setSelectedConnector,
+  image,
+  name,
+  description,
+}) {
+  return (
+    <button
+      onClick={() => setSelectedConnector(slug)}
+      className={`flex text-left gap-x-3.5 items-center py-2 px-4 hover:bg-white/10 ${
+        selectedConnector === slug ? "bg-white/10" : ""
+      } rounded-lg cursor-pointer w-full`}
+    >
+      <img src={image} alt={name} className="w-[40px] h-[40px] rounded-md" />
+      <div className="flex flex-col">
+        <div className="text-white font-bold text-[14px]">{name}</div>
+        <div>
+          <p className="text-[12px] text-white/60">{description}</p>
+        </div>
+      </div>
+    </button>
+  );
+}
@@ -0,0 +1,271 @@
+import React, { useEffect, useState } from "react";
+import System from "@/models/system";
+import showToast from "@/utils/toast";
+import pluralize from "pluralize";
+import { TagsInput } from "react-tag-input-component";
+import { Warning } from "@phosphor-icons/react";
+import { Tooltip } from "react-tooltip";
+
+const DEFAULT_BRANCHES = ["main", "master"];
+export default function GithubOptions() {
+  const [loading, setLoading] = useState(false);
+  const [repo, setRepo] = useState(null);
+  const [accessToken, setAccessToken] = useState(null);
+  const [ignores, setIgnores] = useState([]);
+
+  const [settings, setSettings] = useState({
+    repo: null,
+    accessToken: null,
+  });
+
+  const handleSubmit = async (e) => {
+    e.preventDefault();
+    const form = new FormData(e.target);
+
+    try {
+      setLoading(true);
+      showToast(
+        "Fetching all files for repo - this may take a while.",
+        "info",
+        { clear: true, autoClose: false }
+      );
+      const { data, error } = await System.dataConnectors.github.collect({
+        repo: form.get("repo"),
+        accessToken: form.get("accessToken"),
+        branch: form.get("branch"),
+        ignorePaths: ignores,
+      });
+
+      if (!!error) {
+        showToast(error, "error", { clear: true });
+        setLoading(false);
+        return;
+      }
+
+      showToast(
+        `${data.files} ${pluralize("file", data.files)} collected from ${
+          data.author
+        }/${data.repo}:${data.branch}. Output folder is ${data.destination}.`,
+        "success",
+        { clear: true }
+      );
+      e.target.reset();
+      setLoading(false);
+      return;
+    } catch (e) {
+      console.error(e);
+      showToast(e.message, "error", { clear: true });
+      setLoading(false);
+    }
+  };
+
+  return (
+    <div className="flex w-full">
+      <div className="flex flex-col w-full px-1 md:pb-6 pb-16">
+        <form className="w-full" onSubmit={handleSubmit}>
+          <div className="w-full flex flex-col py-2">
+            <div className="w-full flex flex-col gap-4">
+              <div className="flex flex-col pr-10">
+                <div className="flex flex-col gap-y-1 mb-4">
+                  <label className="text-white text-sm font-bold">
+                    GitHub Repo URL
+                  </label>
+                  <p className="text-xs font-normal text-white/50">
+                    Url of the GitHub repo you wish to collect.
+                  </p>
+                </div>
+                <input
+                  type="url"
+                  name="repo"
+                  className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+                  placeholder="https://github.com/Mintplex-Labs/anything-llm"
+                  required={true}
+                  autoComplete="off"
+                  onChange={(e) => setRepo(e.target.value)}
+                  onBlur={() => setSettings({ ...settings, repo })}
+                  spellCheck={false}
+                />
+              </div>
+              <div className="flex flex-col pr-10">
+                <div className="flex flex-col gap-y-1 mb-4">
+                  <label className="text-white font-bold text-sm flex gap-x-2 items-center">
+                    <p className="font-bold text-white">Github Access Token</p>{" "}
+                    <p className="text-xs text-white/50 font-light flex items-center">
+                      optional
+                      {!accessToken && (
+                        <Warning
+                          size={14}
+                          className="ml-1 text-orange-500 cursor-pointer"
+                          data-tooltip-id="access-token-tooltip"
+                          data-tooltip-place="right"
+                        />
+                      )}
+                      <Tooltip
+                        delayHide={300}
+                        id="access-token-tooltip"
+                        className="max-w-xs"
+                        clickable={true}
+                      >
+                        <p className="text-sm">
+                          Without a{" "}
+                          <a
+                            href="https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens"
+                            rel="noreferrer"
+                            target="_blank"
+                            className="underline"
+                            onClick={(e) => e.stopPropagation()}
+                          >
+                            Personal Access Token
+                          </a>
+                          , the GitHub API may limit the number of files that
+                          can be collected due to rate limits. You can{" "}
+                          <a
+                            href="https://github.com/settings/personal-access-tokens/new"
+                            rel="noreferrer"
+                            target="_blank"
+                            className="underline"
+                            onClick={(e) => e.stopPropagation()}
+                          >
+                            create a temporary Access Token
+                          </a>{" "}
+                          to avoid this issue.
+                        </p>
+                      </Tooltip>
+                    </p>
+                  </label>
+                  <p className="text-xs font-normal text-white/50">
+                    Access Token to prevent rate limiting.
+                  </p>
+                </div>
+                <input
+                  type="text"
+                  name="accessToken"
+                  className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+                  placeholder="github_pat_1234_abcdefg"
+                  required={false}
+                  autoComplete="off"
+                  spellCheck={false}
+                  onChange={(e) => setAccessToken(e.target.value)}
+                  onBlur={() => setSettings({ ...settings, accessToken })}
+                />
+              </div>
+              <GitHubBranchSelection
+                repo={settings.repo}
+                accessToken={settings.accessToken}
+              />
+            </div>
+
+            <div className="flex flex-col w-full py-4 pr-10">
+              <div className="flex flex-col gap-y-1 mb-4">
+                <label className="text-white text-sm flex gap-x-2 items-center">
+                  <p className="text-white text-sm font-bold">File Ignores</p>
+                </label>
+                <p className="text-xs font-normal text-white/50">
+                  List in .gitignore format to ignore specific files during
+                  collection. Press enter after each entry you want to save.
+                </p>
+              </div>
+              <TagsInput
+                value={ignores}
+                onChange={setIgnores}
+                name="ignores"
+                placeholder="!*.js, images/*, .DS_Store, bin/*"
+                classNames={{
+                  tag: "bg-blue-300/10 text-zinc-800",
+                  input:
+                    "flex bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white",
+                }}
+              />
+            </div>
+          </div>
+
+          <div className="flex flex-col gap-y-2 w-full pr-10">
+            <button
+              type="submit"
+              disabled={loading}
+              className="mt-2 w-full justify-center border border-slate-200 px-4 py-2 rounded-lg text-[#222628] text-sm font-bold items-center flex gap-x-2 bg-slate-200 hover:bg-slate-300 hover:text-slate-800 disabled:bg-slate-300 disabled:cursor-not-allowed"
+            >
+              {loading ? "Collecting files..." : "Submit"}
+            </button>
+            {loading && (
+              <p className="text-xs text-white/50">
+                Once complete, all files will be available for embedding into
+                workspaces in the document picker.
+              </p>
+            )}
+          </div>
+        </form>
+      </div>
+    </div>
+  );
+}
+
+function GitHubBranchSelection({ repo, accessToken }) {
+  const [allBranches, setAllBranches] = useState(DEFAULT_BRANCHES);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function fetchAllBranches() {
+      if (!repo) {
+        setAllBranches(DEFAULT_BRANCHES);
+        setLoading(false);
+        return;
+      }
+
+      setLoading(true);
+      const { branches } = await System.dataConnectors.github.branches({
+        repo,
+        accessToken,
+      });
+      setAllBranches(branches.length > 0 ? branches : DEFAULT_BRANCHES);
+      setLoading(false);
+    }
+    fetchAllBranches();
+  }, [repo, accessToken]);
+
+  if (loading) {
+    return (
+      <div className="flex flex-col w-60">
+        <div className="flex flex-col gap-y-1 mb-4">
+          <label className="text-white text-sm font-bold">Branch</label>
+          <p className="text-xs font-normal text-white/50">
+            Branch you wish to collect files from.
+          </p>
+        </div>
+        <select
+          name="branch"
+          required={true}
+          className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            -- loading available branches --
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <div className="flex flex-col gap-y-1 mb-4">
+        <label className="text-white text-sm font-bold">Branch</label>
+        <p className="text-xs font-normal text-white/50">
+          Branch you wish to collect files from.
+        </p>
+      </div>
+      <select
+        name="branch"
+        required={true}
+        className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {allBranches.map((branch) => {
+          return (
+            <option key={branch} value={branch}>
+              {branch}
+            </option>
+          );
+        })}
+      </select>
+    </div>
+  );
+}
@@ -0,0 +1,91 @@
+import React, { useState } from "react";
+import System from "@/models/system";
+import showToast from "@/utils/toast";
+
+export default function YoutubeOptions() {
+  const [loading, setLoading] = useState(false);
+
+  const handleSubmit = async (e) => {
+    e.preventDefault();
+    const form = new FormData(e.target);
+
+    try {
+      setLoading(true);
+      showToast("Fetching transcript for YouTube video.", "info", {
+        clear: true,
+        autoClose: false,
+      });
+
+      const { data, error } = await System.dataConnectors.youtube.transcribe({
+        url: form.get("url"),
+      });
+
+      if (!!error) {
+        showToast(error, "error", { clear: true });
+        setLoading(false);
+        return;
+      }
+
+      showToast(
+        `${data.title} by ${data.author} transcription completed. Output folder is ${data.destination}.`,
+        "success",
+        { clear: true }
+      );
+      e.target.reset();
+      setLoading(false);
+      return;
+    } catch (e) {
+      console.error(e);
+      showToast(e.message, "error", { clear: true });
+      setLoading(false);
+    }
+  };
+
+  return (
+    <div className="flex w-full">
+      <div className="flex flex-col w-full px-1 md:pb-6 pb-16">
+        <form className="w-full" onSubmit={handleSubmit}>
+          <div className="w-full flex flex-col py-2">
+            <div className="w-full flex flex-col gap-4">
+              <div className="flex flex-col pr-10">
+                <div className="flex flex-col gap-y-1 mb-4">
+                  <label className="text-white text-sm font-bold">
+                    YouTube Video URL
+                  </label>
+                  <p className="text-xs font-normal text-white/50">
+                    URL of the YouTube video you wish to transcribe.
+                  </p>
+                </div>
+                <input
+                  type="url"
+                  name="url"
+                  className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+                  placeholder="https://youtube.com/watch?v=abc123"
+                  required={true}
+                  autoComplete="off"
+                  spellCheck={false}
+                />
+              </div>
+            </div>
+          </div>
+
+          <div className="flex flex-col gap-y-2 w-full pr-10">
+            <button
+              type="submit"
+              disabled={loading}
+              className="mt-2 w-full justify-center border border-slate-200 px-4 py-2 rounded-lg text-[#222628] text-sm font-bold items-center flex gap-x-2 bg-slate-200 hover:bg-slate-300 hover:text-slate-800 disabled:bg-slate-300 disabled:cursor-not-allowed"
+            >
+              {loading ? "Collecting transcript..." : "Collect transcript"}
+            </button>
+            {loading && (
+              <p className="text-xs text-white/50 max-w-sm">
+                Once complete, the transcription will be available for embedding
+                into workspaces in the document picker.
+              </p>
+            )}
+          </div>
+        </form>
+      </div>
+    </div>
+  );
+}
@@ -0,0 +1,77 @@
import ConnectorImages from "@/components/DataConnectorOption/media";
import { MagnifyingGlass } from "@phosphor-icons/react";
import GithubOptions from "./Connectors/Github";
import YoutubeOptions from "./Connectors/Youtube";
import { useState } from "react";
import ConnectorOption from "./ConnectorOption";

export const DATA_CONNECTORS = {
  github: {
    name: "GitHub Repo",
    image: ConnectorImages.github,
    description:
      "Import an entire public or private Github repository in a single click.",
    options: <GithubOptions />,
  },
  "youtube-transcript": {
    name: "YouTube Transcript",
    image: ConnectorImages.youtube,
    description:
      "Import the transcription of an entire YouTube video from a link.",
    options: <YoutubeOptions />,
  },
};

export default function DataConnectors() {
  const [selectedConnector, setSelectedConnector] = useState("github");
  const [searchQuery, setSearchQuery] = useState("");

  const filteredConnectors = Object.keys(DATA_CONNECTORS).filter((slug) =>
    DATA_CONNECTORS[slug].name.toLowerCase().includes(searchQuery.toLowerCase())
  );

  return (
    <div className="flex upload-modal -mt-10 relative min-h-[80vh] w-[70vw]">
      <div className="w-full p-4 top-0 z-20">
        <div className="w-full flex items-center sticky top-0 z-50">
          <MagnifyingGlass
            size={16}
            weight="bold"
            className="absolute left-4 z-30 text-white"
          />
          <input
            type="text"
            placeholder="Search data connectors"
            className="border-none bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
            autoComplete="off"
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
          />
        </div>
        <div className="mt-2 flex flex-col gap-y-2">
          {filteredConnectors.length > 0 ? (
            filteredConnectors.map((slug, index) => (
              <ConnectorOption
                key={index}
                slug={slug}
                selectedConnector={selectedConnector}
                setSelectedConnector={setSelectedConnector}
                image={DATA_CONNECTORS[slug].image}
                name={DATA_CONNECTORS[slug].name}
                description={DATA_CONNECTORS[slug].description}
              />
            ))
          ) : (
            <div className="text-white text-center mt-4">
              No data connectors found.
            </div>
          )}
        </div>
      </div>
      <div className="xl:block hidden absolute left-1/2 top-0 bottom-0 w-[0.5px] bg-white/20 -translate-x-1/2"></div>
      <div className="w-full p-4 top-0 text-white min-w-[500px]">
        {DATA_CONNECTORS[selectedConnector].options}
      </div>
    </div>
  );
}
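Registering another connector only needs a new DATA_CONNECTORS entry plus an options component. A hypothetical sketch for illustration; the "confluence" key, image asset, and ConfluenceOptions component are made-up names and are not part of this change.

const EXTENDED_DATA_CONNECTORS = {
  ...DATA_CONNECTORS,
  confluence: {
    name: "Confluence Space",
    image: ConnectorImages.confluence, // assumed asset key
    description: "Import pages from a Confluence space.",
    options: <ConfluenceOptions />, // hypothetical options component
  },
};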
@@ -6,12 +6,15 @@ import System from "../../../models/system";
 import { isMobile } from "react-device-detect";
 import useUser from "../../../hooks/useUser";
 import DocumentSettings from "./Documents";
+import DataConnectors from "./DataConnectors";

 const noop = () => {};
 const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
   const { slug } = useParams();
+  const { user } = useUser();
   const [workspace, setWorkspace] = useState(null);
   const [settings, setSettings] = useState({});
+  const [selectedTab, setSelectedTab] = useState("documents");

   useEffect(() => {
     async function getSettings() {
@@ -67,7 +70,6 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
       <div className="absolute max-h-full w-fit transition duration-300 z-20 md:overflow-y-auto py-10">
         <div className="relative bg-main-gradient rounded-[12px] shadow border-2 border-slate-300/10">
           <div className="flex items-start justify-between p-2 rounded-t border-gray-500/50 relative">
-            <div />
             <button
               onClick={hideModal}
               type="button"
@@ -76,7 +78,19 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
               <X className="text-gray-300 text-lg" />
             </button>
           </div>
-          <DocumentSettings workspace={workspace} systemSettings={settings} />
+          {user?.role !== "default" && (
+            <ModalTabSwitcher
+              selectedTab={selectedTab}
+              setSelectedTab={setSelectedTab}
+            />
+          )}
+
+          {selectedTab === "documents" ? (
+            <DocumentSettings workspace={workspace} systemSettings={settings} />
+          ) : (
+            <DataConnectors workspace={workspace} systemSettings={settings} />
+          )}
         </div>
       </div>
     </div>
@@ -84,6 +98,35 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
 };

 export default memo(ManageWorkspace);

+const ModalTabSwitcher = ({ selectedTab, setSelectedTab }) => {
+  return (
+    <div className="w-full flex justify-center z-10 relative">
+      <div className="gap-x-2 flex justify-center -mt-[68px] mb-10 bg-sidebar-button p-1 rounded-xl shadow border-2 border-slate-300/10 w-fit">
+        <button
+          onClick={() => setSelectedTab("documents")}
+          className={`px-4 py-2 rounded-[8px] font-semibold text-white hover:bg-switch-selected hover:bg-opacity-60 ${
+            selectedTab === "documents"
+              ? "bg-switch-selected shadow-md font-bold"
+              : "bg-sidebar-button text-white/20 font-medium hover:text-white"
+          }`}
+        >
+          Documents
+        </button>
+        <button
+          onClick={() => setSelectedTab("dataConnectors")}
+          className={`px-4 py-2 rounded-[8px] font-semibold text-white hover:bg-switch-selected hover:bg-opacity-60 ${
+            selectedTab === "dataConnectors"
+              ? "bg-switch-selected shadow-md font-bold"
+              : "bg-sidebar-button text-white/20 font-medium hover:text-white"
+          }`}
+        >
+          Data Connectors
+        </button>
+      </div>
+    </div>
+  );
+};
 export function useManageWorkspaceModal() {
   const { user } = useUser();
   const [showing, setShowing] = useState(false);
@@ -15,12 +15,12 @@ import {
   House,
   List,
   FileCode,
-  Plugs,
   Notepad,
   CodeBlock,
   Barcode,
   ClosedCaptioning,
   EyeSlash,
+  SplitVertical,
 } from "@phosphor-icons/react";
 import useUser from "@/hooks/useUser";
 import { USER_BACKGROUND_COLOR } from "@/utils/constants";
@@ -289,12 +289,25 @@ const SidebarOptions = ({ user = null }) => (
         allowedRole={["admin"]}
       />
       <Option
-        href={paths.settings.embeddingPreference()}
-        btnText="Embedding Model"
+        href={paths.settings.embedder.modelPreference()}
+        childLinks={[paths.settings.embedder.chunkingPreference()]}
+        btnText="Embedder Preferences"
         icon={<FileCode className="h-5 w-5 flex-shrink-0" />}
         user={user}
         flex={true}
         allowedRole={["admin"]}
+        subOptions={
+          <>
+            <Option
+              href={paths.settings.embedder.chunkingPreference()}
+              btnText="Text Splitter & Chunking"
+              icon={<SplitVertical className="h-5 w-5 flex-shrink-0" />}
+              user={user}
+              flex={true}
+              allowedRole={["admin"]}
+            />
+          </>
+        }
       />
       <Option
         href={paths.settings.vectorDatabase()}
@@ -304,14 +317,6 @@ const SidebarOptions = ({ user = null }) => (
         flex={true}
         allowedRole={["admin"]}
       />
-      <Option
-        href={paths.settings.dataConnectors.list()}
-        btnText="Data Connectors"
-        icon={<Plugs className="h-5 w-5 flex-shrink-0" />}
-        user={user}
-        flex={true}
-        allowedRole={["admin", "manager"]}
-      />
       <Option
         href={paths.settings.embedSetup()}
         childLinks={[paths.settings.embedChats()]}
@@ -72,7 +72,7 @@ export default function ActiveWorkspaces() {
   }

   return (
-    <div role="list" aria-label="Workspaces">
+    <div role="list" aria-label="Workspaces" className="flex flex-col gap-y-2">
       {workspaces.map((workspace) => {
         const isActive = workspace.slug === slug;
         const isHovered = hoverStates[workspace.id];
@@ -99,13 +99,15 @@ export function SidebarMobileHeader() {

   return (
     <>
-      <div className="fixed top-0 left-0 right-0 z-10 flex justify-between items-center px-4 py-2 bg-sidebar text-slate-200 shadow-lg h-16">
+      <div
+        aria-label="Show sidebar"
+        className="fixed top-0 left-0 right-0 z-10 flex justify-between items-center px-4 py-2 bg-sidebar text-slate-200 shadow-lg h-16"
+      >
         <button
           onClick={() => setShowSidebar(true)}
           className="rounded-md p-2 flex items-center justify-center text-slate-200"
         >
           <List className="h-6 w-6" />
-          aria-label="Show sidebar"
         </button>
         <div className="flex items-center justify-center flex-grow">
           <img
frontend/src/components/lib/CTAButton/index.jsx (new file, 16 lines)
@@ -0,0 +1,16 @@
export default function CTAButton({
  children,
  disabled = false,
  onClick,
  className = "",
}) {
  return (
    <button
      disabled={disabled}
      onClick={() => onClick?.()}
      className={`text-xs px-4 py-1 font-semibold rounded-lg bg-[#46C8FF] hover:bg-[#2C2F36] hover:text-white h-[34px] -mr-8 whitespace-nowrap shadow-[0_4px_14px_rgba(0,0,0,0.25)] w-fit ${className}`}
    >
      <div className="flex items-center justify-center gap-2">{children}</div>
    </button>
  );
}
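Typical usage, mirroring the admin screens later in this diff: the button sits in a right-aligned wrapper and takes an icon plus label as children.

<div className="w-full justify-end flex">
  <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-14 z-10">
    <PlusCircle className="h-4 w-4" weight="bold" /> Generate New API Key
  </CTAButton>
</div>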
@@ -2,7 +2,7 @@ import System from "@/models/system";
 import { useEffect, useState } from "react";

 // Providers which cannot use this feature for workspace<>model selection
-export const DISABLED_PROVIDERS = ["azure", "lmstudio"];
+export const DISABLED_PROVIDERS = ["azure", "lmstudio", "native"];
 const PROVIDER_DEFAULT_MODELS = {
   openai: [
     "gpt-3.5-turbo",
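A minimal sketch of how a consumer of this constant can gate per-workspace model selection for the newly disabled "native" provider; the component and prop names below are illustrative, not the exact code in the repository.

function WorkspaceModelSelection({ provider }) {
  // "azure", "lmstudio", and now "native" cannot pick a per-workspace model.
  if (DISABLED_PROVIDERS.includes(provider)) return null;
  return <ModelDropdown provider={provider} />; // hypothetical dropdown component
}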
@@ -10,6 +10,7 @@ import InviteRow from "./InviteRow";
 import NewInviteModal from "./NewInviteModal";
 import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminInvites() {
   const { isOpen, openModal, closeModal } = useModal();
@@ -21,25 +22,24 @@ export default function AdminInvites() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center flex gap-x-4">
               <p className="text-lg leading-6 font-bold text-white">
                 Invitations
               </p>
-              <button
-                onClick={openModal}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                <EnvelopeSimple className="h-4 w-4" />
-                Create Invite Link
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               Create invitation links for people in your organization to accept
               and sign up with. Invitations can only be used by a single user.
             </p>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-12 z-10">
+              <EnvelopeSimple className="h-4 w-4" weight="bold" /> Create Invite
+              Link
+            </CTAButton>
+          </div>
           <InvitationsContainer />
         </div>
         <ModalWrapper isOpen={isOpen}>
@@ -6,6 +6,7 @@ import { isMobile } from "react-device-detect";
 import * as Skeleton from "react-loading-skeleton";
 import LogRow from "./LogRow";
 import showToast from "@/utils/toast";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminLogs() {
   const handleResetLogs = async () => {
@@ -32,24 +33,26 @@ export default function AdminLogs() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="flex gap-x-4 items-center">
               <p className="text-lg leading-6 font-bold text-white">
                 Event Logs
               </p>
-              <button
-                onClick={handleResetLogs}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                Clear event logs
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               View all actions and events happening on this instance for
               monitoring.
             </p>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton
+              onClick={handleResetLogs}
+              className="mt-3 mr-0 -mb-14 z-10"
+            >
+              Clear Event Logs
+            </CTAButton>
+          </div>
           <LogsContainer />
         </div>
       </div>
@@ -3,6 +3,7 @@ import Sidebar from "@/components/SettingsSidebar";
 import { isMobile } from "react-device-detect";
 import Admin from "@/models/admin";
 import showToast from "@/utils/toast";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminSystem() {
   const [saving, setSaving] = useState(false);
@@ -49,7 +50,7 @@ export default function AdminSystem() {
         <form
           onSubmit={handleSubmit}
           onChange={() => setHasChanges(true)}
-          className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16"
+          className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16"
         >
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center">
@@ -62,8 +63,14 @@ export default function AdminSystem() {
               instance.
             </p>
           </div>
-          <div className="mt-6 mb-8">
+          {hasChanges && (
+            <div className="flex justify-end">
+              <CTAButton onClick={handleSubmit} className="mt-3 mr-0">
+                {saving ? "Saving..." : "Save changes"}
+              </CTAButton>
+            </div>
+          )}
+          <div className="mt-4 mb-8">
             <div className="flex flex-col gap-y-1">
               <h2 className="text-base leading-6 font-bold text-white">
                 Users can delete workspaces
@@ -141,18 +148,6 @@ export default function AdminSystem() {
             </div>
           )}
           </div>
-
-          {hasChanges && (
-            <div className="flex justify-start">
-              <button
-                type="submit"
-                disabled={saving}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                {saving ? "Saving..." : "Save changes"}
-              </button>
-            </div>
-          )}
         </form>
       </div>
     </div>
@@ -10,6 +10,7 @@ import useUser from "@/hooks/useUser";
 import NewUserModal from "./NewUserModal";
 import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminUsers() {
   const { isOpen, openModal, closeModal } = useModal();
@@ -21,16 +22,10 @@ export default function AdminUsers() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center flex gap-x-4">
               <p className="text-lg leading-6 font-bold text-white">Users</p>
-              <button
-                onClick={openModal}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                <UserPlus className="h-4 w-4" /> Add user
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               These are all the accounts which have an account on this instance.
@@ -38,6 +33,11 @@ export default function AdminUsers() {
               instance.
             </p>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-6 z-10">
+              <UserPlus className="h-4 w-4" weight="bold" /> Add user
+            </CTAButton>
+          </div>
           <UsersContainer />
         </div>
         <ModalWrapper isOpen={isOpen}>
@@ -9,6 +9,7 @@ import WorkspaceRow from "./WorkspaceRow";
 import NewWorkspaceModal from "./NewWorkspaceModal";
 import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminWorkspaces() {
   const { isOpen, openModal, closeModal } = useModal();
@@ -20,24 +21,23 @@ export default function AdminWorkspaces() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center flex gap-x-4">
               <p className="text-lg leading-6 font-bold text-white">
                 Instance Workspaces
               </p>
-              <button
-                onClick={openModal}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                <BookOpen className="h-4 w-4" /> New Workspace
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               These are all the workspaces that exist on this instance. Removing
               a workspace will delete all of it's associated chats and settings.
             </p>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-14 z-10">
+              <BookOpen className="h-4 w-4" weight="bold" /> New Workspace
+            </CTAButton>
+          </div>
           <WorkspacesContainer />
         </div>
         <ModalWrapper isOpen={isOpen}>
@@ -12,6 +12,7 @@ import { userFromStorage } from "@/utils/request";
 import System from "@/models/system";
 import ModalWrapper from "@/components/ModalWrapper";
 import { useModal } from "@/hooks/useModal";
+import CTAButton from "@/components/lib/CTAButton";

 export default function AdminApiKeys() {
   const { isOpen, openModal, closeModal } = useModal();
@@ -23,16 +24,10 @@ export default function AdminApiKeys() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center flex gap-x-4">
               <p className="text-lg leading-6 font-bold text-white">API Keys</p>
-              <button
-                onClick={openModal}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                <PlusCircle className="h-4 w-4" /> Generate New API Key
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               API keys allow the holder to programmatically access and manage
@@ -47,6 +42,12 @@ export default function AdminApiKeys() {
               Read the API documentation →
             </a>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-14 z-10">
+              <PlusCircle className="h-4 w-4" weight="bold" /> Generate New API
+              Key
+            </CTAButton>
+          </div>
           <ApiKeysContainer />
         </div>
         <ModalWrapper isOpen={isOpen}>
@@ -90,7 +90,7 @@ export default function WorkspaceChats() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="flex gap-x-4 items-center">
               <p className="text-lg leading-6 font-bold text-white">
@@ -1,293 +0,0 @@ (entire file removed)
import React, { useEffect, useState } from "react";
import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import { DATA_CONNECTORS } from "@/components/DataConnectorOption";
import System from "@/models/system";
import showToast from "@/utils/toast";
import pluralize from "pluralize";
import { TagsInput } from "react-tag-input-component";
import { Info } from "@phosphor-icons/react";

const DEFAULT_BRANCHES = ["main", "master"];
export default function GithubConnectorSetup() {
  const { image } = DATA_CONNECTORS.github;
  const [loading, setLoading] = useState(false);
  const [repo, setRepo] = useState(null);
  const [accessToken, setAccessToken] = useState(null);
  const [ignores, setIgnores] = useState([]);

  const [settings, setSettings] = useState({
    repo: null,
    accessToken: null,
  });

  const handleSubmit = async (e) => {
    e.preventDefault();
    const form = new FormData(e.target);

    try {
      setLoading(true);
      showToast(
        "Fetching all files for repo - this may take a while.",
        "info",
        { clear: true, autoClose: false }
      );
      const { data, error } = await System.dataConnectors.github.collect({
        repo: form.get("repo"),
        accessToken: form.get("accessToken"),
        branch: form.get("branch"),
        ignorePaths: ignores,
      });

      if (!!error) {
        showToast(error, "error", { clear: true });
        setLoading(false);
        return;
      }

      showToast(
        `${data.files} ${pluralize("file", data.files)} collected from ${
          data.author
        }/${data.repo}:${data.branch}. Output folder is ${data.destination}.`,
        "success",
        { clear: true }
      );
      e.target.reset();
      setLoading(false);
      return;
    } catch (e) {
      console.error(e);
      showToast(e.message, "error", { clear: true });
      setLoading(false);
    }
  };

  return (
    <div className="w-screen h-screen overflow-hidden bg-sidebar flex">
      <Sidebar />
      <div
        style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
        className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
      >
        <div className="flex w-full">
          <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
            <div className="flex w-full gap-x-4 items-center pb-6 border-white border-b-2 border-opacity-10">
              <img src={image} alt="Github" className="rounded-lg h-16 w-16" />
              <div className="w-full flex flex-col gap-y-1">
                <div className="items-center">
                  <p className="text-lg leading-6 font-bold text-white">
                    Import GitHub Repository
                  </p>
                </div>
                <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                  Import all files from a public or private Github repository
                  and have its files be available in your workspace.
                </p>
              </div>
            </div>

            <form className="w-full" onSubmit={handleSubmit}>
              {!accessToken && (
                <div className="flex flex-col gap-y-1 py-4">
                  <div className="flex flex-col w-fit gap-y-2 bg-blue-600/20 rounded-lg px-4 py-2">
                    <div className="flex items-center gap-x-2">
                      <Info size={20} className="shrink-0 text-blue-400" />
                      <p className="text-blue-400 text-sm">
                        Trying to collect a GitHub repo without a{" "}
                        <a
                          href="https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens"
                          rel="noreferrer"
                          target="_blank"
                          className="underline"
                        >
                          Personal Access Token
                        </a>{" "}
                        will fail to collect all files due to GitHub API limits.
                      </p>
                    </div>
                    <a
                      href="https://github.com/settings/personal-access-tokens/new"
                      rel="noreferrer"
                      target="_blank"
                      className="text-blue-400 hover:underline"
                    >
                      Create a temporary Access Token for this data connector
                      →
                    </a>
                  </div>
                </div>
              )}

              <div className="w-full flex flex-col py-2">
                <div className="w-full flex items-center gap-4">
                  <div className="flex flex-col w-60">
                    <div className="flex flex-col gap-y-1 mb-4">
                      <label className="text-white text-sm font-semibold block">
                        GitHub Repo URL
                      </label>
                      <p className="text-xs text-zinc-300">
                        Url of the GitHub repo you wish to collect.
                      </p>
                    </div>
                    <input
                      type="url"
                      name="repo"
                      className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                      placeholder="https://github.com/Mintplex-Labs/anything-llm"
                      required={true}
                      autoComplete="off"
                      onChange={(e) => setRepo(e.target.value)}
                      onBlur={() => setSettings({ ...settings, repo })}
                      spellCheck={false}
                    />
                  </div>
                  <div className="flex flex-col w-60">
                    <div className="flex flex-col gap-y-1 mb-4">
                      <label className="text-white text-sm block flex gap-x-2 items-center">
                        <p className="font-semibold ">Github Access Token</p>{" "}
                        <p className="text-xs text-zinc-300 font-base!">
                          <i>optional</i>
                        </p>
                      </label>
                      <p className="text-xs text-zinc-300 flex gap-x-2">
                        Access Token to prevent rate limiting.
                      </p>
                    </div>
                    <input
                      type="text"
                      name="accessToken"
                      className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                      placeholder="github_pat_1234_abcdefg"
                      required={false}
                      autoComplete="off"
                      spellCheck={false}
                      onChange={(e) => setAccessToken(e.target.value)}
                      onBlur={() => setSettings({ ...settings, accessToken })}
                    />
                  </div>
                  <GitHubBranchSelection
                    repo={settings.repo}
                    accessToken={settings.accessToken}
                  />
                </div>

                <div className="flex flex-col w-1/2 py-4">
                  <div className="flex flex-col gap-y-1 mb-4">
                    <label className="text-white text-sm block flex gap-x-2 items-center">
                      <p className="font-semibold ">File Ignores</p>
                    </label>
                    <p className="text-xs text-zinc-300 flex gap-x-2">
                      List in .gitignore format to ignore specific files during
                      collection. Press enter after each entry you want to save.
                    </p>
                  </div>
                  <TagsInput
                    value={ignores}
                    onChange={setIgnores}
                    name="ignores"
                    placeholder="!*.js, images/*, .DS_Store, bin/*"
                    classNames={{
                      tag: "bg-blue-300/10 text-zinc-800 m-1",
                      input:
                        "flex bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white p-2.5",
                    }}
                  />
                </div>
              </div>

              <div className="flex flex-col gap-y-2 w-fit">
                <button
                  type="submit"
                  disabled={loading}
                  className="mt-2 text-lg w-fit border border-slate-200 px-4 py-1 rounded-lg text-slate-200 items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 disabled:bg-slate-200 disabled:text-slate-800"
                >
                  {loading
                    ? "Collecting files..."
                    : "Collect all files from GitHub repo"}
                </button>
                {loading && (
                  <p className="text-xs text-zinc-300">
                    Once complete, all files will be available for embedding
                    into workspaces in the document picker.
                  </p>
                )}
              </div>
            </form>
          </div>
        </div>
      </div>
    </div>
  );
}

function GitHubBranchSelection({ repo, accessToken }) {
  const [allBranches, setAllBranches] = useState(DEFAULT_BRANCHES);
  const [loading, setLoading] = useState(true);

  useEffect(() => {
    async function fetchAllBranches() {
      if (!repo) {
        setAllBranches(DEFAULT_BRANCHES);
        setLoading(false);
        return;
      }

      setLoading(true);
      const { branches } = await System.dataConnectors.github.branches({
        repo,
        accessToken,
      });
      setAllBranches(branches.length > 0 ? branches : DEFAULT_BRANCHES);
      setLoading(false);
    }
    fetchAllBranches();
  }, [repo, accessToken]);

  if (loading) {
    return (
      <div className="flex flex-col w-60">
        <div className="flex flex-col gap-y-1 mb-4">
          <label className="text-white text-sm font-semibold block">
            Branch
          </label>
          <p className="text-xs text-zinc-300">
            Branch you wish to collect files of
          </p>
        </div>
        <select
          name="branch"
          required={true}
          className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
        >
          <option disabled={true} selected={true}>
            -- loading available models --
          </option>
        </select>
      </div>
    );
  }

  return (
    <div className="flex flex-col w-60">
      <div className="flex flex-col gap-y-1 mb-4">
        <label className="text-white text-sm font-semibold block">Branch</label>
        <p className="text-xs text-zinc-300">
          Branch you wish to collect files of
        </p>
      </div>
      <select
        name="branch"
        required={true}
        className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
      >
        {allBranches.map((branch) => {
          return (
            <option key={branch} value={branch}>
              {branch}
            </option>
          );
        })}
      </select>
    </div>
  );
}
@@ -1,113 +0,0 @@ (entire file removed)
import React, { useState } from "react";
import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import { DATA_CONNECTORS } from "@/components/DataConnectorOption";
import System from "@/models/system";
import showToast from "@/utils/toast";

export default function YouTubeTranscriptConnectorSetup() {
  const { image } = DATA_CONNECTORS["youtube-transcript"];
  const [loading, setLoading] = useState(false);
  const handleSubmit = async (e) => {
    e.preventDefault();
    const form = new FormData(e.target);

    try {
      setLoading(true);
      showToast("Fetching transcript for YouTube video.", "info", {
        clear: true,
        autoClose: false,
      });
      const { data, error } = await System.dataConnectors.youtube.transcribe({
        url: form.get("url"),
      });

      if (!!error) {
        showToast(error, "error", { clear: true });
        setLoading(false);
        return;
      }

      showToast(
        `${data.title} by ${data.author} transcription completed. Output folder is ${data.destination}.`,
        "success",
        { clear: true }
      );
      e.target.reset();
      setLoading(false);
      return;
    } catch (e) {
      console.error(e);
      showToast(e.message, "error", { clear: true });
      setLoading(false);
    }
  };

  return (
    <div className="w-screen h-screen overflow-hidden bg-sidebar flex">
      <Sidebar />
      <div
        style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
        className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
      >
        <div className="flex w-full">
          <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
            <div className="flex w-full gap-x-4 items-center pb-6 border-white border-b-2 border-opacity-10">
              <img src={image} alt="YouTube" className="rounded-lg h-16 w-16" />
              <div className="w-full flex flex-col gap-y-1">
                <div className="items-center">
                  <p className="text-lg leading-6 font-bold text-white">
                    Import YouTube transcription
                  </p>
                </div>
                <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                  From a youtube link, import the entire transcript of that
                  video for embedding.
                </p>
              </div>
            </div>

            <form className="w-full" onSubmit={handleSubmit}>
              <div className="w-full flex flex-col py-2">
                <div className="w-full flex items-center gap-4">
                  <div className="flex flex-col w-60">
                    <div className="flex flex-col gap-y-1 mb-4">
                      <label className="text-white text-sm font-semibold block">
                        YouTube video URL
                      </label>
                    </div>
                    <input
                      type="url"
                      name="url"
                      className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
                      placeholder="https://youtube.com/watch?v=abc123"
                      required={true}
                      autoComplete="off"
                      spellCheck={false}
                    />
                  </div>
                </div>
              </div>

              <div className="flex flex-col gap-y-2 w-fit">
                <button
                  type="submit"
                  disabled={loading}
                  className="mt-2 text-lg w-fit border border-slate-200 px-4 py-1 rounded-lg text-slate-200 items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 disabled:bg-slate-200 disabled:text-slate-800"
                >
                  {loading ? "Collecting transcript..." : "Collect transcript"}
                </button>
                {loading && (
                  <p className="text-xs text-zinc-300">
                    Once complete, the transcription will be available for
                    embedding into workspaces in the document picker.
                  </p>
                )}
              </div>
            </form>
          </div>
        </div>
      </div>
    </div>
  );
}
@@ -1,21 +0,0 @@ (entire file removed)
import paths from "@/utils/paths";
import { lazy } from "react";
import { useParams } from "react-router-dom";
const Github = lazy(() => import("./Github"));
const YouTubeTranscript = lazy(() => import("./Youtube"));

const CONNECTORS = {
  github: Github,
  "youtube-transcript": YouTubeTranscript,
};

export default function DataConnectorSetup() {
  const { connector } = useParams();
  if (!connector || !CONNECTORS.hasOwnProperty(connector)) {
    window.location = paths.home();
    return;
  }

  const Page = CONNECTORS[connector];
  return <Page />;
}
@@ -1,43 +0,0 @@ (entire file removed)
import React from "react";
import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import DataConnectorOption from "@/components/DataConnectorOption";

export default function DataConnectors() {
  return (
    <div className="w-screen h-screen overflow-hidden bg-sidebar flex">
      <Sidebar />
      <div
        style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
        className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
      >
        <div className="flex w-full">
          <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
            <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
              <div className="items-center">
                <p className="text-lg leading-6 font-bold text-white">
                  Data Connectors
                </p>
              </div>
              <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                Verified data connectors allow you to add more content to your
                AnythingLLM workspaces with no custom code or complexity.
                <br />
                Guaranteed to work with your AnythingLLM instance.
              </p>
            </div>
            <div className="text-sm font-medium text-white mt-6 mb-4">
              Available Data Connectors
            </div>
            <div className="w-full">
              <div className="py-4 w-full flex md:flex-wrap overflow-x-scroll gap-4 max-w-full">
                <DataConnectorOption slug="github" />
                <DataConnectorOption slug="youtube-transcript" />
              </div>
            </div>
          </div>
        </div>
      </div>
    </div>
  );
}
@@ -9,6 +9,7 @@ import NewEmbedModal from "./NewEmbedModal";
 import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
 import Embed from "@/models/embed";
+import CTAButton from "@/components/lib/CTAButton";

 export default function EmbedConfigs() {
   const { isOpen, openModal, closeModal } = useModal();
@@ -20,18 +21,12 @@ export default function EmbedConfigs() {
         style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
         className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
       >
-        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+        <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
           <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
             <div className="items-center flex gap-x-4">
               <p className="text-lg leading-6 font-bold text-white">
                 Embeddable Chat Widgets
               </p>
-              <button
-                onClick={openModal}
-                className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-              >
-                <CodeBlock className="h-4 w-4" /> Create embed
-              </button>
             </div>
             <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
               Embeddable chat widgets are public facing chat interfaces that are
@@ -39,6 +34,11 @@ export default function EmbedConfigs() {
               that then you can publish to the world.
             </p>
           </div>
+          <div className="w-full justify-end flex">
+            <CTAButton onClick={openModal} className="mt-3 mr-0 -mb-14 z-10">
+              <CodeBlock className="h-4 w-4" weight="bold" /> Create embed
+            </CTAButton>
+          </div>
           <EmbedContainer />
         </div>
         <ModalWrapper isOpen={isOpen}>
@@ -19,6 +19,7 @@ import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
 import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
 import { useModal } from "@/hooks/useModal";
 import ModalWrapper from "@/components/ModalWrapper";
+import CTAButton from "@/components/lib/CTAButton";

 export default function GeneralEmbeddingPreference() {
   const [saving, setSaving] = useState(false);
@@ -165,21 +166,12 @@ export default function GeneralEmbeddingPreference() {
           onSubmit={handleSubmit}
           className="flex w-full"
         >
-          <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+          <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
             <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
               <div className="flex gap-x-4 items-center">
                 <p className="text-lg leading-6 font-bold text-white">
                   Embedding Preference
                 </p>
-                {hasChanges && (
-                  <button
-                    type="submit"
-                    disabled={saving}
-                    className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-                  >
-                    {saving ? "Saving..." : "Save changes"}
-                  </button>
-                )}
               </div>
               <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                 When using an LLM that does not natively support an embedding
@@ -191,6 +183,16 @@ export default function GeneralEmbeddingPreference() {
                 format which AnythingLLM can use to process.
               </p>
             </div>
+            <div className="w-full justify-end flex">
+              {hasChanges && (
+                <CTAButton
+                  onClick={() => handleSubmit()}
+                  className="mt-3 mr-0 -mb-14 z-10"
+                >
+                  {saving ? "Saving..." : "Save changes"}
+                </CTAButton>
+              )}
+            </div>
             <div className="text-base font-bold text-white mt-6 mb-4">
               Embedding Provider
             </div>
@@ -0,0 +1,180 @@
import React, { useEffect, useState } from "react";
import Sidebar from "@/components/SettingsSidebar";
import { isMobile } from "react-device-detect";
import PreLoader from "@/components/Preloader";
import CTAButton from "@/components/lib/CTAButton";
import Admin from "@/models/admin";
import showToast from "@/utils/toast";
import { nFormatter, numberWithCommas } from "@/utils/numbers";

function isNullOrNaN(value) {
  if (value === null) return true;
  return isNaN(value);
}

export default function EmbeddingTextSplitterPreference() {
  const [settings, setSettings] = useState({});
  const [loading, setLoading] = useState(true);
  const [saving, setSaving] = useState(false);
  const [hasChanges, setHasChanges] = useState(false);

  const handleSubmit = async (e) => {
    e.preventDefault();
    const form = new FormData(e.target);

    if (
      Number(form.get("text_splitter_chunk_overlap")) >=
      Number(form.get("text_splitter_chunk_size"))
    ) {
      showToast(
        "Chunk overlap cannot be larger or equal to chunk size.",
        "error"
      );
      return;
    }

    setSaving(true);
    await Admin.updateSystemPreferences({
      text_splitter_chunk_size: isNullOrNaN(
        form.get("text_splitter_chunk_size")
      )
        ? 1000
        : Number(form.get("text_splitter_chunk_size")),
      text_splitter_chunk_overlap: isNullOrNaN(
        form.get("text_splitter_chunk_overlap")
      )
        ? 1000
        : Number(form.get("text_splitter_chunk_overlap")),
    });
    setSaving(false);
    setHasChanges(false);
    showToast("Text chunking strategy settings saved.", "success");
  };

  useEffect(() => {
    async function fetchSettings() {
      const _settings = (await Admin.systemPreferences())?.settings;
      setSettings(_settings ?? {});
      setLoading(false);
    }
    fetchSettings();
  }, []);

  return (
    <div className="w-screen h-screen overflow-hidden bg-sidebar flex">
      <Sidebar />
      {loading ? (
        <div
          style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
          className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
        >
          <div className="w-full h-full flex justify-center items-center">
            <PreLoader />
          </div>
        </div>
      ) : (
        <div
          style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
          className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
        >
          <form
            onSubmit={handleSubmit}
            onChange={() => setHasChanges(true)}
            className="flex w-full"
          >
            <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
              <div className="w-full flex flex-col gap-y-1 pb-4 border-white border-b-2 border-opacity-10">
                <div className="flex gap-x-4 items-center">
                  <p className="text-lg leading-6 font-bold text-white">
                    Text splitting & Chunking Preferences
                  </p>
                </div>
                <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                  Sometimes, you may want to change the default way that new
                  documents are split and chunked before being inserted into
                  your vector database. <br />
                  You should only modify this setting if you understand how text
                  splitting works and it's side effects.
                </p>
                <p className="text-xs leading-[18px] font-semibold text-white/80">
                  Changes here will only apply to{" "}
                  <i>newly embedded documents</i>, not existing documents.
                </p>
              </div>
              <div className="w-full justify-end flex">
                {hasChanges && (
                  <CTAButton className="mt-3 mr-0 -mb-14 z-10">
                    {saving ? "Saving..." : "Save changes"}
                  </CTAButton>
                )}
              </div>

              <div className="flex flex-col gap-y-4 mt-8">
                <div className="flex flex-col max-w-[300px]">
                  <div className="flex flex-col gap-y-2 mb-4">
                    <label className="text-white text-sm font-semibold block">
                      Text Chunk Size
                    </label>
                    <p className="text-xs text-white/60">
                      This is the maximum length of characters that can be
                      present in a single vector.
                    </p>
                  </div>
                  <input
||||||
|
type="number"
|
||||||
|
name="text_splitter_chunk_size"
|
||||||
|
min={1}
|
||||||
|
max={settings?.max_embed_chunk_size || 1000}
|
||||||
|
onWheel={(e) => e?.currentTarget?.blur()}
|
||||||
|
className="border-none bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||||
|
placeholder="maximum length of vectorized text"
|
||||||
|
defaultValue={
|
||||||
|
isNullOrNaN(settings?.text_splitter_chunk_size)
|
||||||
|
? 1000
|
||||||
|
: Number(settings?.text_splitter_chunk_size)
|
||||||
|
}
|
||||||
|
required={true}
|
||||||
|
autoComplete="off"
|
||||||
|
/>
|
||||||
|
<p className="text-xs text-white/40">
|
||||||
|
Embed model maximum length is{" "}
|
||||||
|
{numberWithCommas(settings?.max_embed_chunk_size || 1000)}.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex flex-col gap-y-4 mt-8">
|
||||||
|
<div className="flex flex-col max-w-[300px]">
|
||||||
|
<div className="flex flex-col gap-y-2 mb-4">
|
||||||
|
<label className="text-white text-sm font-semibold block">
|
||||||
|
Text Chunk Overlap
|
||||||
|
</label>
|
||||||
|
<p className="text-xs text-white/60">
|
||||||
|
This is the maximum overlap of characters that occurs
|
||||||
|
during chunking between two adjacent text chunks.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<input
|
||||||
|
type="number"
|
||||||
|
name="text_splitter_chunk_overlap"
|
||||||
|
min={0}
|
||||||
|
onWheel={(e) => e?.currentTarget?.blur()}
|
||||||
|
className="border-none bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||||
|
placeholder="maximum length of vectorized text"
|
||||||
|
defaultValue={
|
||||||
|
isNullOrNaN(settings?.text_splitter_chunk_overlap)
|
||||||
|
? 20
|
||||||
|
: Number(settings?.text_splitter_chunk_overlap)
|
||||||
|
}
|
||||||
|
required={true}
|
||||||
|
autoComplete="off"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
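
Note on the two settings above: this page only persists text_splitter_chunk_size and text_splitter_chunk_overlap as system preferences; the splitting itself happens later, at embed time, in code this hunk does not touch. As a rough mental model (an illustrative sketch under that assumption — splitIntoChunks is a made-up helper, not AnythingLLM's splitter), a character-based splitter consumes the two values like this, and the overlap guard mirrors the check in handleSubmit:

// Illustrative sketch only — not part of this patch.
// Shows how chunkSize and chunkOverlap typically interact in a character splitter.
function splitIntoChunks(text, chunkSize = 1000, chunkOverlap = 20) {
  if (chunkOverlap >= chunkSize)
    throw new Error("Chunk overlap cannot be larger or equal to chunk size.");
  const chunks = [];
  const step = chunkSize - chunkOverlap; // how far the window advances each pass
  for (let start = 0; start < text.length; start += step) {
    chunks.push(text.slice(start, start + chunkSize));
    if (start + chunkSize >= text.length) break; // last window reached the end
  }
  return chunks;
}
// splitIntoChunks("a".repeat(2500), 1000, 20) -> 3 chunks, each <= 1000 chars,
// with 20 characters repeated at every chunk boundary.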
@@ -33,6 +33,131 @@ import GroqAiOptions from "@/components/LLMSelection/GroqAiOptions";
 
 import LLMItem from "@/components/LLMSelection/LLMItem";
 import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
+import CTAButton from "@/components/lib/CTAButton";
+
+export const AVAILABLE_LLM_PROVIDERS = [
+  {
+    name: "OpenAI",
+    value: "openai",
+    logo: OpenAiLogo,
+    options: (settings) => <OpenAiOptions settings={settings} />,
+    description: "The standard option for most non-commercial use.",
+    requiredConfig: ["OpenAiKey"],
+  },
+  {
+    name: "Azure OpenAI",
+    value: "azure",
+    logo: AzureOpenAiLogo,
+    options: (settings) => <AzureAiOptions settings={settings} />,
+    description: "The enterprise option of OpenAI hosted on Azure services.",
+    requiredConfig: ["AzureOpenAiEndpoint"],
+  },
+  {
+    name: "Anthropic",
+    value: "anthropic",
+    logo: AnthropicLogo,
+    options: (settings) => <AnthropicAiOptions settings={settings} />,
+    description: "A friendly AI Assistant hosted by Anthropic.",
+    requiredConfig: ["AnthropicApiKey"],
+  },
+  {
+    name: "Gemini",
+    value: "gemini",
+    logo: GeminiLogo,
+    options: (settings) => <GeminiLLMOptions settings={settings} />,
+    description: "Google's largest and most capable AI model",
+    requiredConfig: ["GeminiLLMApiKey"],
+  },
+  {
+    name: "HuggingFace",
+    value: "huggingface",
+    logo: HuggingFaceLogo,
+    options: (settings) => <HuggingFaceOptions settings={settings} />,
+    description:
+      "Access 150,000+ open-source LLMs and the world's AI community",
+    requiredConfig: [
+      "HuggingFaceLLMEndpoint",
+      "HuggingFaceLLMAccessToken",
+      "HuggingFaceLLMTokenLimit",
+    ],
+  },
+  {
+    name: "Ollama",
+    value: "ollama",
+    logo: OllamaLogo,
+    options: (settings) => <OllamaLLMOptions settings={settings} />,
+    description: "Run LLMs locally on your own machine.",
+    requiredConfig: ["OllamaLLMBasePath"],
+  },
+  {
+    name: "LM Studio",
+    value: "lmstudio",
+    logo: LMStudioLogo,
+    options: (settings) => <LMStudioOptions settings={settings} />,
+    description:
+      "Discover, download, and run thousands of cutting edge LLMs in a few clicks.",
+    requiredConfig: ["LMStudioBasePath"],
+  },
+  {
+    name: "Local AI",
+    value: "localai",
+    logo: LocalAiLogo,
+    options: (settings) => <LocalAiOptions settings={settings} />,
+    description: "Run LLMs locally on your own machine.",
+    requiredConfig: ["LocalAiApiKey", "LocalAiBasePath", "LocalAiTokenLimit"],
+  },
+  {
+    name: "Together AI",
+    value: "togetherai",
+    logo: TogetherAILogo,
+    options: (settings) => <TogetherAiOptions settings={settings} />,
+    description: "Run open source models from Together AI.",
+    requiredConfig: ["TogetherAiApiKey"],
+  },
+  {
+    name: "Mistral",
+    value: "mistral",
+    logo: MistralLogo,
+    options: (settings) => <MistralOptions settings={settings} />,
+    description: "Run open source models from Mistral AI.",
+    requiredConfig: ["MistralApiKey"],
+  },
+  {
+    name: "Perplexity AI",
+    value: "perplexity",
+    logo: PerplexityLogo,
+    options: (settings) => <PerplexityOptions settings={settings} />,
+    description:
+      "Run powerful and internet-connected models hosted by Perplexity AI.",
+    requiredConfig: ["PerplexityApiKey"],
+  },
+  {
+    name: "OpenRouter",
+    value: "openrouter",
+    logo: OpenRouterLogo,
+    options: (settings) => <OpenRouterOptions settings={settings} />,
+    description: "A unified interface for LLMs.",
+    requiredConfig: ["OpenRouterApiKey"],
+  },
+  {
+    name: "Groq",
+    value: "groq",
+    logo: GroqLogo,
+    options: (settings) => <GroqAiOptions settings={settings} />,
+    description:
+      "The fastest LLM inferencing available for real-time AI applications.",
+    requiredConfig: ["GroqApiKey"],
+  },
+  // {
+  //   name: "Native",
+  //   value: "native",
+  //   logo: AnythingLLMIcon,
+  //   options: (settings) => <NativeLLMOptions settings={settings} />,
+  //   description:
+  //     "Use a downloaded custom Llama model for chatting on this AnythingLLM instance.",
+  //   requiredConfig: [],
+  // },
+];
+
 export default function GeneralLLMPreference() {
   const [saving, setSaving] = useState(false);
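
The new requiredConfig field is what downstream UI uses to decide whether a provider can be selected as-is or still needs credentials; the check itself appears later in this commit, in WorkspaceLLMItem's handleProviderSelection. Distilled to a one-liner (the helper name is mine, but the logic is taken verbatim from that component):

// A provider needs setup when any of its required settings keys is unset.
// Same logic as handleProviderSelection in WorkspaceLLMItem further down.
function providerNeedsSetup(llm, settings = {}) {
  return (llm.requiredConfig || []).some((key) => !settings[key]);
}
// providerNeedsSetup({ requiredConfig: ["OpenAiKey"] }, {})                      // true
// providerNeedsSetup({ requiredConfig: ["OpenAiKey"] }, { OpenAiKey: "set" })    // false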
@@ -92,112 +217,15 @@ export default function GeneralLLMPreference() {
   }, []);
 
   useEffect(() => {
-    const filtered = LLMS.filter((llm) =>
+    const filtered = AVAILABLE_LLM_PROVIDERS.filter((llm) =>
       llm.name.toLowerCase().includes(searchQuery.toLowerCase())
     );
     setFilteredLLMs(filtered);
   }, [searchQuery, selectedLLM]);
 
-  const LLMS = [
-    {
-      name: "OpenAI",
-      value: "openai",
-      logo: OpenAiLogo,
-      options: <OpenAiOptions settings={settings} />,
-      description: "The standard option for most non-commercial use.",
-    },
-    {
-      name: "Azure OpenAI",
-      value: "azure",
-      logo: AzureOpenAiLogo,
-      options: <AzureAiOptions settings={settings} />,
-      description: "The enterprise option of OpenAI hosted on Azure services.",
-    },
-    {
-      name: "Anthropic",
-      value: "anthropic",
-      logo: AnthropicLogo,
-      options: <AnthropicAiOptions settings={settings} />,
-      description: "A friendly AI Assistant hosted by Anthropic.",
-    },
-    {
-      name: "Gemini",
-      value: "gemini",
-      logo: GeminiLogo,
-      options: <GeminiLLMOptions settings={settings} />,
-      description: "Google's largest and most capable AI model",
-    },
-    {
-      name: "HuggingFace",
-      value: "huggingface",
-      logo: HuggingFaceLogo,
-      options: <HuggingFaceOptions settings={settings} />,
-      description:
-        "Access 150,000+ open-source LLMs and the world's AI community",
-    },
-    {
-      name: "Ollama",
-      value: "ollama",
-      logo: OllamaLogo,
-      options: <OllamaLLMOptions settings={settings} />,
-      description: "Run LLMs locally on your own machine.",
-    },
-    {
-      name: "LM Studio",
-      value: "lmstudio",
-      logo: LMStudioLogo,
-      options: <LMStudioOptions settings={settings} />,
-      description:
-        "Discover, download, and run thousands of cutting edge LLMs in a few clicks.",
-    },
-    {
-      name: "Local AI",
-      value: "localai",
-      logo: LocalAiLogo,
-      options: <LocalAiOptions settings={settings} />,
-      description: "Run LLMs locally on your own machine.",
-    },
-    {
-      name: "Together AI",
-      value: "togetherai",
-      logo: TogetherAILogo,
-      options: <TogetherAiOptions settings={settings} />,
-      description: "Run open source models from Together AI.",
-    },
-    {
-      name: "Mistral",
-      value: "mistral",
-      logo: MistralLogo,
-      options: <MistralOptions settings={settings} />,
-      description: "Run open source models from Mistral AI.",
-    },
-    {
-      name: "Perplexity AI",
-      value: "perplexity",
-      logo: PerplexityLogo,
-      options: <PerplexityOptions settings={settings} />,
-      description:
-        "Run powerful and internet-connected models hosted by Perplexity AI.",
-    },
-    {
-      name: "OpenRouter",
-      value: "openrouter",
-      logo: OpenRouterLogo,
-      options: <OpenRouterOptions settings={settings} />,
-      description: "A unified interface for LLMs.",
-    },
-    {
-      name: "Groq",
-      value: "groq",
-      logo: GroqLogo,
-      options: <GroqAiOptions settings={settings} />,
-      description:
-        "The fastest LLM inferencing available for real-time AI applications.",
-    },
-  ];
-
-  const selectedLLMObject = LLMS.find((llm) => llm.value === selectedLLM);
+  const selectedLLMObject = AVAILABLE_LLM_PROVIDERS.find(
+    (llm) => llm.value === selectedLLM
+  );
 
   return (
     <div className="w-screen h-screen overflow-hidden bg-sidebar flex">
       <Sidebar />
@@ -216,21 +244,12 @@ export default function GeneralLLMPreference() {
           className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
         >
           <form onSubmit={handleSubmit} className="flex w-full">
-            <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
+            <div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
              <div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
                 <div className="flex gap-x-4 items-center">
                   <p className="text-lg leading-6 font-bold text-white">
                     LLM Preference
                   </p>
-                  {hasChanges && (
-                    <button
-                      type="submit"
-                      disabled={saving}
-                      className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
-                    >
-                      {saving ? "Saving..." : "Save changes"}
-                    </button>
-                  )}
                 </div>
                 <p className="text-xs leading-[18px] font-base text-white text-opacity-60">
                   These are the credentials and settings for your preferred LLM
@@ -239,6 +258,16 @@ export default function GeneralLLMPreference() {
                   properly.
                 </p>
               </div>
+              <div className="w-full justify-end flex">
+                {hasChanges && (
+                  <CTAButton
+                    onClick={() => handleSubmit()}
+                    className="mt-3 mr-0 -mb-14 z-10"
+                  >
+                    {saving ? "Saving..." : "Save changes"}
+                  </CTAButton>
+                )}
+              </div>
               <div className="text-base font-bold text-white mt-6 mb-4">
                 LLM Provider
               </div>
@@ -329,7 +358,9 @@ export default function GeneralLLMPreference() {
                   className="mt-4 flex flex-col gap-y-1"
                 >
                   {selectedLLM &&
-                    LLMS.find((llm) => llm.value === selectedLLM)?.options}
+                    AVAILABLE_LLM_PROVIDERS.find(
+                      (llm) => llm.value === selectedLLM
+                    )?.options?.(settings)}
                 </div>
               </div>
             </form>
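
Because options is now a function rather than a pre-rendered element, resolving what to render for the selected provider reduces to a small lookup. A minimal standalone restatement of the inline expression above (illustrative only — renderSelectedOptions is not a name used by the patch):

// Equivalent of the inline render expression above, pulled out for readability.
function renderSelectedOptions(providers, selectedValue, settings) {
  const llm = providers.find((p) => p.value === selectedValue);
  return llm?.options?.(settings) ?? null;
}
// e.g. renderSelectedOptions(AVAILABLE_LLM_PROVIDERS, "openai", settings)
// yields <OpenAiOptions settings={settings} />, or null when nothing matches.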
|
@ -29,16 +29,16 @@ export default function PrivacyAndDataHandling() {
|
|||||||
<Sidebar />
|
<Sidebar />
|
||||||
<div
|
<div
|
||||||
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
|
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
|
||||||
className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline"
|
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
|
||||||
>
|
>
|
||||||
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
|
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
|
||||||
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
||||||
<div className="items-center flex gap-x-4">
|
<div className="items-center flex gap-x-4">
|
||||||
<p className="text-2xl font-semibold text-white">
|
<p className="text-lg leading-6 font-bold text-white">
|
||||||
Privacy & Data-Handling
|
Privacy & Data-Handling
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
<p className="text-sm font-base text-white text-opacity-60">
|
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
||||||
This is your configuration for how connected third party providers
|
This is your configuration for how connected third party providers
|
||||||
and AnythingLLM handle your data.
|
and AnythingLLM handle your data.
|
||||||
</p>
|
</p>
|
||||||
|
@ -6,6 +6,7 @@ import System from "@/models/system";
|
|||||||
import paths from "@/utils/paths";
|
import paths from "@/utils/paths";
|
||||||
import { AUTH_TIMESTAMP, AUTH_TOKEN, AUTH_USER } from "@/utils/constants";
|
import { AUTH_TIMESTAMP, AUTH_TOKEN, AUTH_USER } from "@/utils/constants";
|
||||||
import PreLoader from "@/components/Preloader";
|
import PreLoader from "@/components/Preloader";
|
||||||
|
import CTAButton from "@/components/lib/CTAButton";
|
||||||
|
|
||||||
export default function GeneralSecurity() {
|
export default function GeneralSecurity() {
|
||||||
return (
|
return (
|
||||||
@ -13,7 +14,7 @@ export default function GeneralSecurity() {
|
|||||||
<Sidebar />
|
<Sidebar />
|
||||||
<div
|
<div
|
||||||
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
|
style={{ height: isMobile ? "100%" : "calc(100% - 32px)" }}
|
||||||
className="transition-all duration-500 relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll border-2 border-outline"
|
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
|
||||||
>
|
>
|
||||||
<MultiUserMode />
|
<MultiUserMode />
|
||||||
<PasswordProtection />
|
<PasswordProtection />
|
||||||
@ -32,7 +33,7 @@ function MultiUserMode() {
|
|||||||
const handleSubmit = async (e) => {
|
const handleSubmit = async (e) => {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
setSaving(true);
|
setSaving(true);
|
||||||
|
setHasChanges(false);
|
||||||
if (useMultiUserMode) {
|
if (useMultiUserMode) {
|
||||||
const form = new FormData(e.target);
|
const form = new FormData(e.target);
|
||||||
const data = {
|
const data = {
|
||||||
@ -83,27 +84,30 @@ function MultiUserMode() {
|
|||||||
<form
|
<form
|
||||||
onSubmit={handleSubmit}
|
onSubmit={handleSubmit}
|
||||||
onChange={() => setHasChanges(true)}
|
onChange={() => setHasChanges(true)}
|
||||||
className="flex w-full"
|
className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16"
|
||||||
>
|
>
|
||||||
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
|
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
||||||
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
<div className="w-full flex flex-col gap-y-1">
|
||||||
<div className="items-center flex gap-x-4">
|
<div className="items-center flex gap-x-4">
|
||||||
<p className="text-2xl font-semibold text-white">Multi-User Mode</p>
|
<p className="text-lg leading-6 font-bold text-white">
|
||||||
{hasChanges && (
|
Multi-User Mode
|
||||||
<button
|
</p>
|
||||||
type="submit"
|
|
||||||
disabled={saving}
|
|
||||||
className="border border-slate-200 px-4 py-1 rounded-lg text-slate-200 text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800"
|
|
||||||
>
|
|
||||||
{saving ? "Saving..." : "Save changes"}
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
<p className="text-sm font-base text-white text-opacity-60">
|
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
||||||
Set up your instance to support your team by activating Multi-User
|
Set up your instance to support your team by activating Multi-User
|
||||||
Mode.
|
Mode.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
{hasChanges && (
|
||||||
|
<div className="flex justify-end">
|
||||||
|
<CTAButton
|
||||||
|
onClick={() => handleSubmit()}
|
||||||
|
className="mt-3 mr-0 -mb-20 z-10"
|
||||||
|
>
|
||||||
|
{saving ? "Saving..." : "Save changes"}
|
||||||
|
</CTAButton>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
<div className="relative w-full max-h-full">
|
<div className="relative w-full max-h-full">
|
||||||
<div className="relative rounded-lg">
|
<div className="relative rounded-lg">
|
||||||
<div className="flex items-start justify-between px-6 py-4"></div>
|
<div className="flex items-start justify-between px-6 py-4"></div>
|
||||||
@ -198,6 +202,7 @@ function PasswordProtection() {
|
|||||||
if (multiUserModeEnabled) return false;
|
if (multiUserModeEnabled) return false;
|
||||||
|
|
||||||
setSaving(true);
|
setSaving(true);
|
||||||
|
setHasChanges(false);
|
||||||
const form = new FormData(e.target);
|
const form = new FormData(e.target);
|
||||||
const data = {
|
const data = {
|
||||||
usePassword,
|
usePassword,
|
||||||
@ -248,29 +253,30 @@ function PasswordProtection() {
|
|||||||
<form
|
<form
|
||||||
onSubmit={handleSubmit}
|
onSubmit={handleSubmit}
|
||||||
onChange={() => setHasChanges(true)}
|
onChange={() => setHasChanges(true)}
|
||||||
className="flex w-full"
|
className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16"
|
||||||
>
|
>
|
||||||
<div className="flex flex-col w-full px-1 md:px-20 md:py-12 py-16">
|
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
||||||
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
<div className="w-full flex flex-col gap-y-1">
|
||||||
<div className="items-center flex gap-x-4">
|
<div className="items-center flex gap-x-4">
|
||||||
<p className="text-2xl font-semibold text-white">
|
<p className="text-lg leading-6 font-bold text-white">
|
||||||
Password Protection
|
Password Protection
|
||||||
</p>
|
</p>
|
||||||
{hasChanges && (
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
disabled={saving}
|
|
||||||
className="border border-slate-200 px-4 py-1 rounded-lg text-slate-200 text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800"
|
|
||||||
>
|
|
||||||
{saving ? "Saving..." : "Save changes"}
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
<p className="text-sm font-base text-white text-opacity-60">
|
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
||||||
Protect your AnythingLLM instance with a password. If you forget
|
Protect your AnythingLLM instance with a password. If you forget
|
||||||
this there is no recovery method so ensure you save this password.
|
this there is no recovery method so ensure you save this password.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
{hasChanges && (
|
||||||
|
<div className="flex justify-end">
|
||||||
|
<CTAButton
|
||||||
|
onClick={() => handleSubmit()}
|
||||||
|
className="mt-3 mr-0 -mb-20 z-10"
|
||||||
|
>
|
||||||
|
{saving ? "Saving..." : "Save changes"}
|
||||||
|
</CTAButton>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
<div className="relative w-full max-h-full">
|
<div className="relative w-full max-h-full">
|
||||||
<div className="relative rounded-lg">
|
<div className="relative rounded-lg">
|
||||||
<div className="flex items-start justify-between px-6 py-4"></div>
|
<div className="flex items-start justify-between px-6 py-4"></div>
|
||||||
|
@ -10,6 +10,7 @@ import OpenAiWhisperOptions from "@/components/TranscriptionSelection/OpenAiOpti
|
|||||||
import NativeTranscriptionOptions from "@/components/TranscriptionSelection/NativeTranscriptionOptions";
|
import NativeTranscriptionOptions from "@/components/TranscriptionSelection/NativeTranscriptionOptions";
|
||||||
import LLMItem from "@/components/LLMSelection/LLMItem";
|
import LLMItem from "@/components/LLMSelection/LLMItem";
|
||||||
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
|
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
|
||||||
|
import CTAButton from "@/components/lib/CTAButton";
|
||||||
|
|
||||||
export default function TranscriptionModelPreference() {
|
export default function TranscriptionModelPreference() {
|
||||||
const [saving, setSaving] = useState(false);
|
const [saving, setSaving] = useState(false);
|
||||||
@ -114,21 +115,12 @@ export default function TranscriptionModelPreference() {
|
|||||||
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
|
className="relative md:ml-[2px] md:mr-[16px] md:my-[16px] md:rounded-[16px] bg-main-gradient w-full h-full overflow-y-scroll"
|
||||||
>
|
>
|
||||||
<form onSubmit={handleSubmit} className="flex w-full">
|
<form onSubmit={handleSubmit} className="flex w-full">
|
||||||
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
|
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
|
||||||
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
||||||
<div className="flex gap-x-4 items-center">
|
<div className="flex gap-x-4 items-center">
|
||||||
<p className="text-lg leading-6 font-bold text-white">
|
<p className="text-lg leading-6 font-bold text-white">
|
||||||
Transcription Model Preference
|
Transcription Model Preference
|
||||||
</p>
|
</p>
|
||||||
{hasChanges && (
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
disabled={saving}
|
|
||||||
className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
|
|
||||||
>
|
|
||||||
{saving ? "Saving..." : "Save changes"}
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
||||||
These are the credentials and settings for your preferred
|
These are the credentials and settings for your preferred
|
||||||
@ -137,6 +129,16 @@ export default function TranscriptionModelPreference() {
|
|||||||
transcribe.
|
transcribe.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
<div className="w-full justify-end flex">
|
||||||
|
{hasChanges && (
|
||||||
|
<CTAButton
|
||||||
|
onClick={() => handleSubmit()}
|
||||||
|
className="mt-3 mr-0 -mb-14 z-10"
|
||||||
|
>
|
||||||
|
{saving ? "Saving..." : "Save changes"}
|
||||||
|
</CTAButton>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
<div className="text-base font-bold text-white mt-6 mb-4">
|
<div className="text-base font-bold text-white mt-6 mb-4">
|
||||||
Transcription Provider
|
Transcription Provider
|
||||||
</div>
|
</div>
|
||||||
|
@ -25,6 +25,7 @@ import ZillizCloudOptions from "@/components/VectorDBSelection/ZillizCloudOption
|
|||||||
import { useModal } from "@/hooks/useModal";
|
import { useModal } from "@/hooks/useModal";
|
||||||
import ModalWrapper from "@/components/ModalWrapper";
|
import ModalWrapper from "@/components/ModalWrapper";
|
||||||
import AstraDBOptions from "@/components/VectorDBSelection/AstraDBOptions";
|
import AstraDBOptions from "@/components/VectorDBSelection/AstraDBOptions";
|
||||||
|
import CTAButton from "@/components/lib/CTAButton";
|
||||||
|
|
||||||
export default function GeneralVectorDatabase() {
|
export default function GeneralVectorDatabase() {
|
||||||
const [saving, setSaving] = useState(false);
|
const [saving, setSaving] = useState(false);
|
||||||
@ -189,21 +190,12 @@ export default function GeneralVectorDatabase() {
|
|||||||
onSubmit={handleSubmit}
|
onSubmit={handleSubmit}
|
||||||
className="flex w-full"
|
className="flex w-full"
|
||||||
>
|
>
|
||||||
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[86px] md:py-6 py-16">
|
<div className="flex flex-col w-full px-1 md:pl-6 md:pr-[50px] md:py-6 py-16">
|
||||||
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
<div className="w-full flex flex-col gap-y-1 pb-6 border-white border-b-2 border-opacity-10">
|
||||||
<div className="flex gap-x-4 items-center">
|
<div className="flex gap-x-4 items-center">
|
||||||
<p className="text-lg leading-6 font-bold text-white">
|
<p className="text-lg leading-6 font-bold text-white">
|
||||||
Vector Database
|
Vector Database
|
||||||
</p>
|
</p>
|
||||||
{hasChanges && (
|
|
||||||
<button
|
|
||||||
type="submit"
|
|
||||||
disabled={saving}
|
|
||||||
className="flex items-center gap-x-2 px-4 py-2 rounded-lg bg-[#2C2F36] text-white text-sm hover:bg-[#3D4147] shadow-md border border-[#3D4147]"
|
|
||||||
>
|
|
||||||
{saving ? "Saving..." : "Save changes"}
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
<p className="text-xs leading-[18px] font-base text-white text-opacity-60">
|
||||||
These are the credentials and settings for how your
|
These are the credentials and settings for how your
|
||||||
@ -211,6 +203,16 @@ export default function GeneralVectorDatabase() {
|
|||||||
are current and correct.
|
are current and correct.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
<div className="w-full justify-end flex">
|
||||||
|
{hasChanges && (
|
||||||
|
<CTAButton
|
||||||
|
onClick={() => handleSubmit()}
|
||||||
|
className="mt-3 mr-0 -mb-14 z-10"
|
||||||
|
>
|
||||||
|
{saving ? "Saving..." : "Save changes"}
|
||||||
|
</CTAButton>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
<div className="text-base font-bold text-white mt-6 mb-4">
|
<div className="text-base font-bold text-white mt-6 mb-4">
|
||||||
Vector Database Provider
|
Vector Database Provider
|
||||||
</div>
|
</div>
|
||||||
|
@@ -3,21 +3,20 @@ import useGetProviderModels, {
 } from "@/hooks/useGetProvidersModels";
 
 export default function ChatModelSelection({
-  settings,
+  provider,
   workspace,
   setHasChanges,
 }) {
-  const { defaultModels, customModels, loading } = useGetProviderModels(
-    settings?.LLMProvider
-  );
-  if (DISABLED_PROVIDERS.includes(settings?.LLMProvider)) return null;
+  const { defaultModels, customModels, loading } =
+    useGetProviderModels(provider);
+  if (DISABLED_PROVIDERS.includes(provider)) return null;
 
   if (loading) {
     return (
       <div>
         <div className="flex flex-col">
           <label htmlFor="name" className="block input-label">
-            Chat model
+            Workspace Chat model
           </label>
           <p className="text-white text-opacity-60 text-xs font-medium py-1.5">
             The specific chat model that will be used for this workspace. If
@@ -42,8 +41,7 @@ export default function ChatModelSelection({
     <div>
       <div className="flex flex-col">
         <label htmlFor="name" className="block input-label">
-          Chat model{" "}
-          <span className="font-normal">({settings?.LLMProvider})</span>
+          Workspace Chat model
         </label>
         <p className="text-white text-opacity-60 text-xs font-medium py-1.5">
           The specific chat model that will be used for this workspace. If
@@ -59,9 +57,6 @@ export default function ChatModelSelection({
           }}
           className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
         >
-          <option disabled={true} selected={workspace?.chatModel === null}>
-            System default
-          </option>
           {defaultModels.length > 0 && (
             <optgroup label="General models">
               {defaultModels.map((model) => {
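
For context, the new provider prop is supplied by the WorkspaceLLMSelection component added later in this same commit, so the model list now follows the workspace-level provider choice instead of the system-wide settings?.LLMProvider:

// Call site from WorkspaceLLMSelection (further down in this commit):
<ChatModelSelection
  provider={selectedLLM}
  workspace={workspace}
  setHasChanges={setHasChanges}
/>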
|
@ -0,0 +1,151 @@
|
|||||||
|
// This component differs from the main LLMItem in that it shows if a provider is
|
||||||
|
// "ready for use" and if not - will then highjack the click handler to show a modal
|
||||||
|
// of the provider options that must be saved to continue.
|
||||||
|
import { createPortal } from "react-dom";
|
||||||
|
import ModalWrapper from "@/components/ModalWrapper";
|
||||||
|
import { useModal } from "@/hooks/useModal";
|
||||||
|
import { X } from "@phosphor-icons/react";
|
||||||
|
import System from "@/models/system";
|
||||||
|
import showToast from "@/utils/toast";
|
||||||
|
|
||||||
|
export default function WorkspaceLLM({
|
||||||
|
llm,
|
||||||
|
availableLLMs,
|
||||||
|
settings,
|
||||||
|
checked,
|
||||||
|
onClick,
|
||||||
|
}) {
|
||||||
|
const { isOpen, openModal, closeModal } = useModal();
|
||||||
|
const { name, value, logo, description } = llm;
|
||||||
|
|
||||||
|
function handleProviderSelection() {
|
||||||
|
// Determine if provider needs additional setup because its minimum required keys are
|
||||||
|
// not yet set in settings.
|
||||||
|
const requiresAdditionalSetup = (llm.requiredConfig || []).some(
|
||||||
|
(key) => !settings[key]
|
||||||
|
);
|
||||||
|
if (requiresAdditionalSetup) {
|
||||||
|
openModal();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
onClick(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<div
|
||||||
|
onClick={handleProviderSelection}
|
||||||
|
className={`w-full p-2 rounded-md hover:cursor-pointer hover:bg-white/10 ${
|
||||||
|
checked ? "bg-white/10" : ""
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
value={value}
|
||||||
|
className="peer hidden"
|
||||||
|
checked={checked}
|
||||||
|
readOnly={true}
|
||||||
|
formNoValidate={true}
|
||||||
|
/>
|
||||||
|
<div className="flex gap-x-4 items-center">
|
||||||
|
<img
|
||||||
|
src={logo}
|
||||||
|
alt={`${name} logo`}
|
||||||
|
className="w-10 h-10 rounded-md"
|
||||||
|
/>
|
||||||
|
<div className="flex flex-col">
|
||||||
|
<div className="text-sm font-semibold text-white">{name}</div>
|
||||||
|
<div className="mt-1 text-xs text-[#D2D5DB]">{description}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<SetupProvider
|
||||||
|
availableLLMs={availableLLMs}
|
||||||
|
isOpen={isOpen}
|
||||||
|
provider={value}
|
||||||
|
closeModal={closeModal}
|
||||||
|
postSubmit={onClick}
|
||||||
|
/>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function SetupProvider({
|
||||||
|
availableLLMs,
|
||||||
|
isOpen,
|
||||||
|
provider,
|
||||||
|
closeModal,
|
||||||
|
postSubmit,
|
||||||
|
}) {
|
||||||
|
if (!isOpen) return null;
|
||||||
|
const LLMOption = availableLLMs.find((llm) => llm.value === provider);
|
||||||
|
if (!LLMOption) return null;
|
||||||
|
|
||||||
|
async function handleUpdate(e) {
|
||||||
|
e.preventDefault();
|
||||||
|
e.stopPropagation();
|
||||||
|
const data = {};
|
||||||
|
const form = new FormData(e.target);
|
||||||
|
for (var [key, value] of form.entries()) data[key] = value;
|
||||||
|
const { error } = await System.updateSystem(data);
|
||||||
|
if (error) {
|
||||||
|
showToast(`Failed to save ${LLMOption.name} settings: ${error}`, "error");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
closeModal();
|
||||||
|
postSubmit();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cannot do nested forms, it will cause all sorts of issues, so we portal this out
|
||||||
|
// to the parent container form so we don't have nested forms.
|
||||||
|
return createPortal(
|
||||||
|
<ModalWrapper isOpen={isOpen}>
|
||||||
|
<div className="relative w-fit max-w-1/2 max-h-full">
|
||||||
|
<div className="relative bg-main-gradient rounded-xl shadow-[0_4px_14px_rgba(0,0,0,0.25)]">
|
||||||
|
<div className="flex items-start justify-between p-4 border-b rounded-t border-gray-500/50">
|
||||||
|
<h3 className="text-xl font-semibold text-white">
|
||||||
|
Setup {LLMOption.name}
|
||||||
|
</h3>
|
||||||
|
<button
|
||||||
|
onClick={closeModal}
|
||||||
|
type="button"
|
||||||
|
className="transition-all duration-300 text-gray-400 bg-transparent hover:border-white/60 rounded-lg text-sm p-1.5 ml-auto inline-flex items-center bg-sidebar-button hover:bg-menu-item-selected-gradient hover:border-slate-100 hover:border-opacity-50 border-transparent border"
|
||||||
|
data-modal-hide="staticModal"
|
||||||
|
>
|
||||||
|
<X className="text-gray-300 text-lg" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<form id="provider-form" onSubmit={handleUpdate}>
|
||||||
|
<div className="py-[17px] px-[20px] flex flex-col gap-y-6">
|
||||||
|
<p className="text-sm text-white">
|
||||||
|
To use {LLMOption.name} as this workspace's LLM you need to set
|
||||||
|
it up first.
|
||||||
|
</p>
|
||||||
|
<div>{LLMOption.options({ credentialsOnly: true })}</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex w-full justify-between items-center p-3 space-x-2 border-t rounded-b border-gray-500/50">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={closeModal}
|
||||||
|
className="text-xs px-2 py-1 font-semibold rounded-lg bg-white hover:bg-transparent border-2 border-transparent hover:border-white hover:text-white h-[32px] w-fit -mr-8 whitespace-nowrap shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="submit"
|
||||||
|
form="provider-form"
|
||||||
|
className="text-xs px-2 py-1 font-semibold rounded-lg bg-[#46C8FF] hover:bg-[#2C2F36] border-2 border-transparent hover:border-[#46C8FF] hover:text-white h-[32px] w-fit -mr-8 whitespace-nowrap shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
|
||||||
|
>
|
||||||
|
Save {LLMOption.name} settings
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</ModalWrapper>,
|
||||||
|
document.getElementById("workspace-chat-settings-container")
|
||||||
|
);
|
||||||
|
}
|
@ -0,0 +1,158 @@
|
|||||||
|
import React, { useEffect, useRef, useState } from "react";
|
||||||
|
import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
|
||||||
|
import WorkspaceLLMItem from "./WorkspaceLLMItem";
|
||||||
|
import { AVAILABLE_LLM_PROVIDERS } from "@/pages/GeneralSettings/LLMPreference";
|
||||||
|
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
|
||||||
|
import ChatModelSelection from "../ChatModelSelection";
|
||||||
|
|
||||||
|
const DISABLED_PROVIDERS = ["azure", "lmstudio", "native"];
|
||||||
|
const LLM_DEFAULT = {
|
||||||
|
name: "System default",
|
||||||
|
value: "default",
|
||||||
|
logo: AnythingLLMIcon,
|
||||||
|
options: () => <React.Fragment />,
|
||||||
|
description: "Use the system LLM preference for this workspace.",
|
||||||
|
requiredConfig: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
export default function WorkspaceLLMSelection({
|
||||||
|
settings,
|
||||||
|
workspace,
|
||||||
|
setHasChanges,
|
||||||
|
}) {
|
||||||
|
const [filteredLLMs, setFilteredLLMs] = useState([]);
|
||||||
|
const [selectedLLM, setSelectedLLM] = useState(
|
||||||
|
workspace?.chatProvider ?? "default"
|
||||||
|
);
|
||||||
|
const [searchQuery, setSearchQuery] = useState("");
|
||||||
|
const [searchMenuOpen, setSearchMenuOpen] = useState(false);
|
||||||
|
const searchInputRef = useRef(null);
|
||||||
|
const LLMS = [LLM_DEFAULT, ...AVAILABLE_LLM_PROVIDERS].filter(
|
||||||
|
(llm) => !DISABLED_PROVIDERS.includes(llm.value)
|
||||||
|
);
|
||||||
|
|
||||||
|
function updateLLMChoice(selection) {
|
||||||
|
setSearchQuery("");
|
||||||
|
setSelectedLLM(selection);
|
||||||
|
setSearchMenuOpen(false);
|
||||||
|
setHasChanges(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleXButton() {
|
||||||
|
if (searchQuery.length > 0) {
|
||||||
|
setSearchQuery("");
|
||||||
|
if (searchInputRef.current) searchInputRef.current.value = "";
|
||||||
|
} else {
|
||||||
|
setSearchMenuOpen(!searchMenuOpen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const filtered = LLMS.filter((llm) =>
|
||||||
|
llm.name.toLowerCase().includes(searchQuery.toLowerCase())
|
||||||
|
);
|
||||||
|
setFilteredLLMs(filtered);
|
||||||
|
}, [LLMS, searchQuery, selectedLLM]);
|
||||||
|
|
||||||
|
const selectedLLMObject = LLMS.find((llm) => llm.value === selectedLLM);
|
||||||
|
return (
|
||||||
|
<div className="border-b border-white/40 pb-8">
|
||||||
|
<div className="flex flex-col">
|
||||||
|
<label htmlFor="name" className="block input-label">
|
||||||
|
Workspace LLM Provider
|
||||||
|
</label>
|
||||||
|
<p className="text-white text-opacity-60 text-xs font-medium py-1.5">
|
||||||
|
The specific LLM provider & model that will be used for this
|
||||||
|
workspace. By default, it uses the system LLM provider and settings.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="relative">
|
||||||
|
<input type="hidden" name="chatProvider" value={selectedLLM} />
|
||||||
|
{searchMenuOpen && (
|
||||||
|
<div
|
||||||
|
className="fixed top-0 left-0 w-full h-full bg-black bg-opacity-70 backdrop-blur-sm z-10"
|
||||||
|
onClick={() => setSearchMenuOpen(false)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
{searchMenuOpen ? (
|
||||||
|
<div className="absolute top-0 left-0 w-full max-w-[640px] max-h-[310px] overflow-auto white-scrollbar min-h-[64px] bg-[#18181B] rounded-lg flex flex-col justify-between cursor-pointer border-2 border-[#46C8FF] z-20">
|
||||||
|
<div className="w-full flex flex-col gap-y-1">
|
||||||
|
<div className="flex items-center sticky top-0 border-b border-[#9CA3AF] mx-4 bg-[#18181B]">
|
||||||
|
<MagnifyingGlass
|
||||||
|
size={20}
|
||||||
|
weight="bold"
|
||||||
|
className="absolute left-4 z-30 text-white -ml-4 my-2"
|
||||||
|
/>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
name="llm-search"
|
||||||
|
autoComplete="off"
|
||||||
|
placeholder="Search all LLM providers"
|
||||||
|
className="-ml-4 my-2 bg-transparent z-20 pl-12 h-[38px] w-full px-4 py-1 text-sm outline-none focus:border-white text-white placeholder:text-white placeholder:font-medium"
|
||||||
|
onChange={(e) => setSearchQuery(e.target.value)}
|
||||||
|
ref={searchInputRef}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === "Enter") e.preventDefault();
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
<X
|
||||||
|
size={20}
|
||||||
|
weight="bold"
|
||||||
|
className="cursor-pointer text-white hover:text-[#9CA3AF]"
|
||||||
|
onClick={handleXButton}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="flex-1 pl-4 pr-2 flex flex-col gap-y-1 overflow-y-auto white-scrollbar pb-4">
|
||||||
|
{filteredLLMs.map((llm) => {
|
||||||
|
return (
|
||||||
|
<WorkspaceLLMItem
|
||||||
|
llm={llm}
|
||||||
|
key={llm.name}
|
||||||
|
availableLLMs={LLMS}
|
||||||
|
settings={settings}
|
||||||
|
checked={selectedLLM === llm.value}
|
||||||
|
onClick={() => updateLLMChoice(llm.value)}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<button
|
||||||
|
className="w-full max-w-[640px] h-[64px] bg-[#18181B] rounded-lg flex items-center p-[14px] justify-between cursor-pointer border-2 border-transparent hover:border-[#46C8FF] transition-all duration-300"
|
||||||
|
type="button"
|
||||||
|
onClick={() => setSearchMenuOpen(true)}
|
||||||
|
>
|
||||||
|
<div className="flex gap-x-4 items-center">
|
||||||
|
<img
|
||||||
|
src={selectedLLMObject.logo}
|
||||||
|
alt={`${selectedLLMObject.name} logo`}
|
||||||
|
className="w-10 h-10 rounded-md"
|
||||||
|
/>
|
||||||
|
<div className="flex flex-col text-left">
|
||||||
|
<div className="text-sm font-semibold text-white">
|
||||||
|
{selectedLLMObject.name}
|
||||||
|
</div>
|
||||||
|
<div className="mt-1 text-xs text-[#D2D5DB]">
|
||||||
|
{selectedLLMObject.description}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<CaretUpDown size={24} weight="bold" className="text-white" />
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{selectedLLM !== "default" && (
|
||||||
|
<div className="mt-4 flex flex-col gap-y-1">
|
||||||
|
<ChatModelSelection
|
||||||
|
provider={selectedLLM}
|
||||||
|
workspace={workspace}
|
||||||
|
setHasChanges={setHasChanges}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
@ -3,11 +3,11 @@ import Workspace from "@/models/workspace";
|
|||||||
import showToast from "@/utils/toast";
|
import showToast from "@/utils/toast";
|
||||||
import { castToType } from "@/utils/types";
|
import { castToType } from "@/utils/types";
|
||||||
import { useEffect, useRef, useState } from "react";
|
import { useEffect, useRef, useState } from "react";
|
||||||
import ChatModelSelection from "./ChatModelSelection";
|
|
||||||
import ChatHistorySettings from "./ChatHistorySettings";
|
import ChatHistorySettings from "./ChatHistorySettings";
|
||||||
import ChatPromptSettings from "./ChatPromptSettings";
|
import ChatPromptSettings from "./ChatPromptSettings";
|
||||||
import ChatTemperatureSettings from "./ChatTemperatureSettings";
|
import ChatTemperatureSettings from "./ChatTemperatureSettings";
|
||||||
import ChatModeSelection from "./ChatModeSelection";
|
import ChatModeSelection from "./ChatModeSelection";
|
||||||
|
import WorkspaceLLMSelection from "./WorkspaceLLMSelection";
|
||||||
|
|
||||||
export default function ChatSettings({ workspace }) {
|
export default function ChatSettings({ workspace }) {
|
||||||
const [settings, setSettings] = useState({});
|
const [settings, setSettings] = useState({});
|
||||||
@ -44,35 +44,45 @@ export default function ChatSettings({ workspace }) {
|
|||||||
|
|
||||||
if (!workspace) return null;
|
if (!workspace) return null;
|
||||||
return (
|
return (
|
||||||
<form
|
<div id="workspace-chat-settings-container">
|
||||||
ref={formEl}
|
<form
|
||||||
onSubmit={handleUpdate}
|
ref={formEl}
|
||||||
className="w-1/2 flex flex-col gap-y-6"
|
onSubmit={handleUpdate}
|
||||||
>
|
id="chat-settings-form"
|
||||||
<ChatModeSelection workspace={workspace} setHasChanges={setHasChanges} />
|
className="w-1/2 flex flex-col gap-y-6"
|
||||||
<ChatModelSelection
|
>
|
||||||
settings={settings}
|
<WorkspaceLLMSelection
|
||||||
workspace={workspace}
|
settings={settings}
|
||||||
setHasChanges={setHasChanges}
|
workspace={workspace}
|
||||||
/>
|
setHasChanges={setHasChanges}
|
||||||
<ChatHistorySettings
|
/>
|
||||||
workspace={workspace}
|
<ChatModeSelection
|
||||||
setHasChanges={setHasChanges}
|
workspace={workspace}
|
||||||
/>
|
setHasChanges={setHasChanges}
|
||||||
<ChatPromptSettings workspace={workspace} setHasChanges={setHasChanges} />
|
/>
|
||||||
<ChatTemperatureSettings
|
<ChatHistorySettings
|
||||||
settings={settings}
|
workspace={workspace}
|
||||||
workspace={workspace}
|
setHasChanges={setHasChanges}
|
||||||
setHasChanges={setHasChanges}
|
/>
|
||||||
/>
|
<ChatPromptSettings
|
||||||
{hasChanges && (
|
workspace={workspace}
|
||||||
<button
|
setHasChanges={setHasChanges}
|
||||||
type="submit"
|
/>
|
||||||
className="w-fit transition-all duration-300 border border-slate-200 px-5 py-2.5 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
|
<ChatTemperatureSettings
|
||||||
>
|
settings={settings}
|
||||||
{saving ? "Updating..." : "Update workspace"}
|
workspace={workspace}
|
||||||
</button>
|
setHasChanges={setHasChanges}
|
||||||
)}
|
/>
|
||||||
</form>
|
{hasChanges && (
|
||||||
|
<button
|
||||||
|
type="submit"
|
||||||
|
form="chat-settings-form"
|
||||||
|
className="w-fit transition-all duration-300 border border-slate-200 px-5 py-2.5 rounded-lg text-white text-sm items-center flex gap-x-2 hover:bg-slate-200 hover:text-slate-800 focus:ring-gray-800"
|
||||||
|
>
|
||||||
|
{saving ? "Updating..." : "Update workspace"}
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -138,15 +138,17 @@ export default function AddMemberModal({ closeModal, workspace, users }) {
|
|||||||
</div>
|
</div>
|
||||||
<p className="text-white text-sm font-medium">Select All</p>
|
<p className="text-white text-sm font-medium">Select All</p>
|
||||||
</button>
|
</button>
|
||||||
<button
|
{selectedUsers.length > 0 && (
|
||||||
type="button"
|
<button
|
||||||
onClick={handleUnselect}
|
type="button"
|
||||||
className="flex items-center gap-x-2 ml-2"
|
onClick={handleUnselect}
|
||||||
>
|
className="flex items-center gap-x-2 ml-2"
|
||||||
<p className="text-white/60 text-sm font-medium hover:text-white">
|
>
|
||||||
Unselect
|
<p className="text-white/60 text-sm font-medium hover:text-white">
|
||||||
</p>
|
Unselect
|
||||||
</button>
|
</p>
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="submit"
|
type="submit"
|
||||||
|
@ -5,6 +5,7 @@ import { useEffect, useState } from "react";
|
|||||||
import * as Skeleton from "react-loading-skeleton";
|
import * as Skeleton from "react-loading-skeleton";
|
||||||
import AddMemberModal from "./AddMemberModal";
|
import AddMemberModal from "./AddMemberModal";
|
||||||
import WorkspaceMemberRow from "./WorkspaceMemberRow";
|
import WorkspaceMemberRow from "./WorkspaceMemberRow";
|
||||||
|
import CTAButton from "@/components/lib/CTAButton";
|
||||||
|
|
||||||
export default function Members({ workspace }) {
|
export default function Members({ workspace }) {
|
||||||
const [loading, setLoading] = useState(true);
|
const [loading, setLoading] = useState(true);
|
||||||
@ -77,14 +78,7 @@ export default function Members({ workspace }) {
|
|||||||
)}
|
)}
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
<CTAButton onClick={openModal}>Manage Users</CTAButton>
|
||||||
<button
|
|
||||||
onClick={openModal}
|
|
||||||
className="text-xs px-2 py-1 font-semibold rounded-lg bg-[#46C8FF] hover:bg-[#2C2F36] hover:text-white h-[34px] w-[100px] -mr-8 whitespace-nowrap shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
|
|
||||||
>
|
|
||||||
Manage Users
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<ModalWrapper isOpen={isOpen}>
|
<ModalWrapper isOpen={isOpen}>
|
||||||
<AddMemberModal
|
<AddMemberModal
|
||||||
closeModal={closeModal}
|
closeModal={closeModal}
|
||||||
|
@@ -98,6 +98,10 @@ export default {
     transcriptionPreference: () => {
       return "/settings/transcription-preference";
     },
+    embedder: {
+      modelPreference: () => "/settings/embedding-preference",
+      chunkingPreference: () => "/settings/text-splitter-preference",
+    },
     embeddingPreference: () => {
       return "/settings/embedding-preference";
     },
@@ -125,16 +129,5 @@ export default {
     embedChats: () => {
       return `/settings/embed-chats`;
     },
-    dataConnectors: {
-      list: () => {
-        return "/settings/data-connectors";
-      },
-      github: () => {
-        return "/settings/data-connectors/github";
-      },
-      youtubeTranscript: () => {
-        return "/settings/data-connectors/youtube-transcript";
-      },
-    },
   },
 };
|
@@ -8,7 +8,10 @@ const { User } = require("../models/user");
 const { DocumentVectors } = require("../models/vectors");
 const { Workspace } = require("../models/workspace");
 const { WorkspaceChats } = require("../models/workspaceChats");
-const { getVectorDbClass } = require("../utils/helpers");
+const {
+  getVectorDbClass,
+  getEmbeddingEngineSelection,
+} = require("../utils/helpers");
 const {
   validRoleSelection,
   canModifyAdmin,
@@ -311,6 +314,7 @@ function adminEndpoints(app) {
     }
   );
 
+  // TODO: Allow specification of which props to get instead of returning all of them all the time.
   app.get(
     "/admin/system-preferences",
     [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
@@ -333,6 +337,16 @@ function adminEndpoints(app) {
           support_email:
             (await SystemSettings.get({ label: "support_email" }))?.value ||
             null,
+          text_splitter_chunk_size:
+            (await SystemSettings.get({ label: "text_splitter_chunk_size" }))
+              ?.value ||
+            getEmbeddingEngineSelection()?.embeddingMaxChunkLength ||
+            null,
+          text_splitter_chunk_overlap:
+            (await SystemSettings.get({ label: "text_splitter_chunk_overlap" }))
+              ?.value || null,
+          max_embed_chunk_size:
+            getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1000,
         };
         response.status(200).json({ settings });
       } catch (e) {
|
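For reference, a hedged sketch of the JSON shape the /admin/system-preferences endpoint returns after this change. The field values here are illustrative placeholders, not taken from the diff.

// Illustrative payload only; actual values depend on the instance's settings and embedder.
const exampleSystemPreferencesResponse = {
  settings: {
    support_email: "admin@example.com", // hypothetical address, or null when unset
    text_splitter_chunk_size: 1000, // stored setting, else the embedder max, else null
    text_splitter_chunk_overlap: 20, // stored setting, else null
    max_embed_chunk_size: 1000, // hard cap reported by the active embedding engine
  },
};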
@@ -3,10 +3,10 @@ const fs = require("fs");
 process.env.NODE_ENV === "development"
   ? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
   : require("dotenv").config({
       path: process.env.STORAGE_DIR
         ? path.resolve(process.env.STORAGE_DIR, ".env")
         : path.resolve(__dirname, ".env"),
     });
 
 const { viewLocalFiles, normalizePath } = require("../utils/files");
 const { purgeDocument, purgeFolder } = require("../utils/files/purgeDocument");
@@ -508,7 +508,7 @@ function workspaceEndpoints(app) {
         if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
       }
 
-      const { workspace, message } = await Workspace.update(
+      const { workspace, message } = await Workspace._update(
         workspaceRecord.id,
         {
           pfpFilename: uploadedFileName,
@@ -547,7 +547,7 @@ function workspaceEndpoints(app) {
         if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);
       }
 
-      const { workspace, message } = await Workspace.update(
+      const { workspace, message } = await Workspace._update(
         workspaceRecord.id,
         {
           pfpFilename: null,
@@ -4,10 +4,15 @@ const { isValidUrl } = require("../utils/http");
 process.env.NODE_ENV === "development"
   ? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
   : require("dotenv").config({
       path: process.env.STORAGE_DIR
         ? path.resolve(process.env.STORAGE_DIR, ".env")
         : path.resolve(__dirname, ".env"),
     });
 
+function isNullOrNaN(value) {
+  if (value === null) return true;
+  return isNaN(value);
+}
+
 const SystemSettings = {
   protectedFields: ["multi_user_mode"],
@@ -19,6 +24,8 @@ const SystemSettings = {
     "telemetry_id",
     "footer_data",
     "support_email",
+    "text_splitter_chunk_size",
+    "text_splitter_chunk_overlap",
   ],
   validations: {
     footer_data: (updates) => {
@@ -32,6 +39,32 @@ const SystemSettings = {
         return JSON.stringify([]);
       }
     },
+    text_splitter_chunk_size: (update) => {
+      try {
+        if (isNullOrNaN(update)) throw new Error("Value is not a number.");
+        if (Number(update) <= 0) throw new Error("Value must be non-zero.");
+        return Number(update);
+      } catch (e) {
+        console.error(
+          `Failed to run validation function on text_splitter_chunk_size`,
+          e.message
+        );
+        return 1000;
+      }
+    },
+    text_splitter_chunk_overlap: (update) => {
+      try {
+        if (isNullOrNaN(update)) throw new Error("Value is not a number");
+        if (Number(update) < 0) throw new Error("Value cannot be less than 0.");
+        return Number(update);
+      } catch (e) {
+        console.error(
+          `Failed to run validation function on text_splitter_chunk_overlap`,
+          e.message
+        );
+        return 20;
+      }
+    },
   },
   currentSettings: async function () {
     const llmProvider = process.env.LLM_PROVIDER;
@@ -61,103 +94,13 @@ const SystemSettings = {
       // VectorDB Provider Selection Settings & Configs
       // --------------------------------------------------------
       VectorDB: vectorDB,
-      // Pinecone DB Keys
-      PineConeKey: !!process.env.PINECONE_API_KEY,
-      PineConeIndex: process.env.PINECONE_INDEX,
-
-      // Chroma DB Keys
-      ChromaEndpoint: process.env.CHROMA_ENDPOINT,
-      ChromaApiHeader: process.env.CHROMA_API_HEADER,
-      ChromaApiKey: !!process.env.CHROMA_API_KEY,
-
-      // Weaviate DB Keys
-      WeaviateEndpoint: process.env.WEAVIATE_ENDPOINT,
-      WeaviateApiKey: process.env.WEAVIATE_API_KEY,
-
-      // QDrant DB Keys
-      QdrantEndpoint: process.env.QDRANT_ENDPOINT,
-      QdrantApiKey: process.env.QDRANT_API_KEY,
-
-      // Milvus DB Keys
-      MilvusAddress: process.env.MILVUS_ADDRESS,
-      MilvusUsername: process.env.MILVUS_USERNAME,
-      MilvusPassword: !!process.env.MILVUS_PASSWORD,
-
-      // Zilliz DB Keys
-      ZillizEndpoint: process.env.ZILLIZ_ENDPOINT,
-      ZillizApiToken: process.env.ZILLIZ_API_TOKEN,
-
-      // AstraDB Keys
-      AstraDBApplicationToken: process?.env?.ASTRA_DB_APPLICATION_TOKEN,
-      AstraDBEndpoint: process?.env?.ASTRA_DB_ENDPOINT,
+      ...this.vectorDBPreferenceKeys(),
 
       // --------------------------------------------------------
       // LLM Provider Selection Settings & Configs
       // --------------------------------------------------------
       LLMProvider: llmProvider,
-      // OpenAI Keys
-      OpenAiKey: !!process.env.OPEN_AI_KEY,
-      OpenAiModelPref: process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo",
-
-      // Azure + OpenAI Keys
-      AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
-      AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
-      AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
-      AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
-      AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
-
-      // Anthropic Keys
-      AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
-      AnthropicModelPref: process.env.ANTHROPIC_MODEL_PREF || "claude-2",
-
-      // Gemini Keys
-      GeminiLLMApiKey: !!process.env.GEMINI_API_KEY,
-      GeminiLLMModelPref: process.env.GEMINI_LLM_MODEL_PREF || "gemini-pro",
-
-      // LMStudio Keys
-      LMStudioBasePath: process.env.LMSTUDIO_BASE_PATH,
-      LMStudioTokenLimit: process.env.LMSTUDIO_MODEL_TOKEN_LIMIT,
-      LMStudioModelPref: process.env.LMSTUDIO_MODEL_PREF,
-
-      // LocalAI Keys
-      LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
-      LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
-      LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
-      LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
-
-      // Ollama LLM Keys
-      OllamaLLMBasePath: process.env.OLLAMA_BASE_PATH,
-      OllamaLLMModelPref: process.env.OLLAMA_MODEL_PREF,
-      OllamaLLMTokenLimit: process.env.OLLAMA_MODEL_TOKEN_LIMIT,
-
-      // TogetherAI Keys
-      TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
-      TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,
-
-      // Perplexity AI Keys
-      PerplexityApiKey: !!process.env.PERPLEXITY_API_KEY,
-      PerplexityModelPref: process.env.PERPLEXITY_MODEL_PREF,
-
-      // OpenRouter Keys
-      OpenRouterApiKey: !!process.env.OPENROUTER_API_KEY,
-      OpenRouterModelPref: process.env.OPENROUTER_MODEL_PREF,
-
-      // Mistral AI (API) Keys
-      MistralApiKey: !!process.env.MISTRAL_API_KEY,
-      MistralModelPref: process.env.MISTRAL_MODEL_PREF,
-
-      // Groq AI API Keys
-      GroqApiKey: !!process.env.GROQ_API_KEY,
-      GroqModelPref: process.env.GROQ_MODEL_PREF,
-
-      // Native LLM Keys
-      NativeLLMModelPref: process.env.NATIVE_LLM_MODEL_PREF,
-      NativeLLMTokenLimit: process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT,
-
-      // HuggingFace Dedicated Inference
-      HuggingFaceLLMEndpoint: process.env.HUGGING_FACE_LLM_ENDPOINT,
-      HuggingFaceLLMAccessToken: !!process.env.HUGGING_FACE_LLM_API_KEY,
-      HuggingFaceLLMTokenLimit: process.env.HUGGING_FACE_LLM_TOKEN_LIMIT,
+      ...this.llmPreferenceKeys(),
 
       // --------------------------------------------------------
       // Whisper (Audio transcription) Selection Settings & Configs
@@ -178,6 +121,15 @@ const SystemSettings = {
     }
   },
 
+  getValueOrFallback: async function (clause = {}, fallback = null) {
+    try {
+      return (await this.get(clause))?.value ?? fallback;
+    } catch (error) {
+      console.error(error.message);
+      return fallback;
+    }
+  },
+
   where: async function (clause = {}, limit) {
     try {
       const settings = await prisma.system_settings.findMany({
@@ -277,6 +229,108 @@ const SystemSettings = {
       return false;
     }
   },
+
+  vectorDBPreferenceKeys: function () {
+    return {
+      // Pinecone DB Keys
+      PineConeKey: !!process.env.PINECONE_API_KEY,
+      PineConeIndex: process.env.PINECONE_INDEX,
+
+      // Chroma DB Keys
+      ChromaEndpoint: process.env.CHROMA_ENDPOINT,
+      ChromaApiHeader: process.env.CHROMA_API_HEADER,
+      ChromaApiKey: !!process.env.CHROMA_API_KEY,
+
+      // Weaviate DB Keys
+      WeaviateEndpoint: process.env.WEAVIATE_ENDPOINT,
+      WeaviateApiKey: process.env.WEAVIATE_API_KEY,
+
+      // QDrant DB Keys
+      QdrantEndpoint: process.env.QDRANT_ENDPOINT,
+      QdrantApiKey: process.env.QDRANT_API_KEY,
+
+      // Milvus DB Keys
+      MilvusAddress: process.env.MILVUS_ADDRESS,
+      MilvusUsername: process.env.MILVUS_USERNAME,
+      MilvusPassword: !!process.env.MILVUS_PASSWORD,
+
+      // Zilliz DB Keys
+      ZillizEndpoint: process.env.ZILLIZ_ENDPOINT,
+      ZillizApiToken: process.env.ZILLIZ_API_TOKEN,
+
+      // AstraDB Keys
+      AstraDBApplicationToken: process?.env?.ASTRA_DB_APPLICATION_TOKEN,
+      AstraDBEndpoint: process?.env?.ASTRA_DB_ENDPOINT,
+    };
+  },
+
+  llmPreferenceKeys: function () {
+    return {
+      // OpenAI Keys
+      OpenAiKey: !!process.env.OPEN_AI_KEY,
+      OpenAiModelPref: process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo",
+
+      // Azure + OpenAI Keys
+      AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
+      AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
+      AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
+      AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
+      AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
+
+      // Anthropic Keys
+      AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
+      AnthropicModelPref: process.env.ANTHROPIC_MODEL_PREF || "claude-2",
+
+      // Gemini Keys
+      GeminiLLMApiKey: !!process.env.GEMINI_API_KEY,
+      GeminiLLMModelPref: process.env.GEMINI_LLM_MODEL_PREF || "gemini-pro",
+
+      // LMStudio Keys
+      LMStudioBasePath: process.env.LMSTUDIO_BASE_PATH,
+      LMStudioTokenLimit: process.env.LMSTUDIO_MODEL_TOKEN_LIMIT,
+      LMStudioModelPref: process.env.LMSTUDIO_MODEL_PREF,
+
+      // LocalAI Keys
+      LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
+      LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
+      LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
+      LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
+
+      // Ollama LLM Keys
+      OllamaLLMBasePath: process.env.OLLAMA_BASE_PATH,
+      OllamaLLMModelPref: process.env.OLLAMA_MODEL_PREF,
+      OllamaLLMTokenLimit: process.env.OLLAMA_MODEL_TOKEN_LIMIT,
+
+      // TogetherAI Keys
+      TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
+      TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,
+
+      // Perplexity AI Keys
+      PerplexityApiKey: !!process.env.PERPLEXITY_API_KEY,
+      PerplexityModelPref: process.env.PERPLEXITY_MODEL_PREF,
+
+      // OpenRouter Keys
+      OpenRouterApiKey: !!process.env.OPENROUTER_API_KEY,
+      OpenRouterModelPref: process.env.OPENROUTER_MODEL_PREF,
+
+      // Mistral AI (API) Keys
+      MistralApiKey: !!process.env.MISTRAL_API_KEY,
+      MistralModelPref: process.env.MISTRAL_MODEL_PREF,
+
+      // Groq AI API Keys
+      GroqApiKey: !!process.env.GROQ_API_KEY,
+      GroqModelPref: process.env.GROQ_MODEL_PREF,
+
+      // Native LLM Keys
+      NativeLLMModelPref: process.env.NATIVE_LLM_MODEL_PREF,
+      NativeLLMTokenLimit: process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT,
+
+      // HuggingFace Dedicated Inference
+      HuggingFaceLLMEndpoint: process.env.HUGGING_FACE_LLM_ENDPOINT,
+      HuggingFaceLLMAccessToken: !!process.env.HUGGING_FACE_LLM_API_KEY,
+      HuggingFaceLLMTokenLimit: process.env.HUGGING_FACE_LLM_TOKEN_LIMIT,
+    };
+  },
 };
 
 module.exports.SystemSettings = SystemSettings;
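A hedged usage sketch of the new text-splitter settings helpers above. The calling function and require path are assumptions for illustration; only the SystemSettings methods come from the diff.

// Minimal sketch, assuming SystemSettings is imported from the model above.
const { SystemSettings } = require("./models/systemSettings"); // illustrative path

async function readSplitterPrefs() {
  // getValueOrFallback returns the stored value, or the fallback when the row
  // is missing or the lookup throws.
  const chunkSize = await SystemSettings.getValueOrFallback(
    { label: "text_splitter_chunk_size" },
    1000
  );
  const chunkOverlap = await SystemSettings.getValueOrFallback(
    { label: "text_splitter_chunk_overlap" },
    20
  );
  return { chunkSize, chunkOverlap };
}

// The validations above coerce bad input back to safe defaults, e.g.
// text_splitter_chunk_size("abc") resolves to 1000 and text_splitter_chunk_overlap(-5) to 20.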
@@ -19,6 +19,7 @@ const Workspace = {
     "lastUpdatedAt",
     "openAiPrompt",
     "similarityThreshold",
+    "chatProvider",
     "chatModel",
     "topN",
     "chatMode",
@@ -52,19 +53,42 @@ const Workspace = {
     }
   },
 
-  update: async function (id = null, data = {}) {
+  update: async function (id = null, updates = {}) {
     if (!id) throw new Error("No workspace id provided for update");
 
-    const validKeys = Object.keys(data).filter((key) =>
+    const validFields = Object.keys(updates).filter((key) =>
       this.writable.includes(key)
     );
-    if (validKeys.length === 0)
+
+    Object.entries(updates).forEach(([key]) => {
+      if (validFields.includes(key)) return;
+      delete updates[key];
+    });
+
+    if (Object.keys(updates).length === 0)
       return { workspace: { id }, message: "No valid fields to update!" };
 
+    // If the user unset the chatProvider we will need
+    // to then clear the chatModel as well to prevent confusion during
+    // LLM loading.
+    if (updates?.chatProvider === "default") {
+      updates.chatProvider = null;
+      updates.chatModel = null;
+    }
+
+    return this._update(id, updates);
+  },
+
+  // Explicit update of settings + key validations.
+  // Only use this method when directly setting a key value
+  // that takes no user input for the keys being modified.
+  _update: async function (id = null, data = {}) {
+    if (!id) throw new Error("No workspace id provided for update");
+
     try {
       const workspace = await prisma.workspaces.update({
         where: { id },
-        data, // TODO: strict validation on writables here.
+        data,
       });
       return { workspace, message: null };
     } catch (error) {
@@ -229,47 +253,40 @@ const Workspace = {
     }
   },
 
-  resetWorkspaceChatModels: async () => {
-    try {
-      await prisma.workspaces.updateMany({
-        data: {
-          chatModel: null,
-        },
-      });
-      return { success: true, error: null };
-    } catch (error) {
-      console.error("Error resetting workspace chat models:", error.message);
-      return { success: false, error: error.message };
-    }
-  },
-
   trackChange: async function (prevData, newData, user) {
     try {
-      const { Telemetry } = require("./telemetry");
-      const { EventLogs } = require("./eventLogs");
-      if (
-        !newData?.openAiPrompt ||
-        newData?.openAiPrompt === this.defaultPrompt ||
-        newData?.openAiPrompt === prevData?.openAiPrompt
-      )
-        return;
-
-      await Telemetry.sendTelemetry("workspace_prompt_changed");
-      await EventLogs.logEvent(
-        "workspace_prompt_changed",
-        {
-          workspaceName: prevData?.name,
-          prevSystemPrompt: prevData?.openAiPrompt || this.defaultPrompt,
-          newSystemPrompt: newData?.openAiPrompt,
-        },
-        user?.id
-      );
+      await this._trackWorkspacePromptChange(prevData, newData, user);
       return;
     } catch (error) {
       console.error("Error tracking workspace change:", error.message);
       return;
     }
   },
+
+  // We are only tracking this change to determine the need to a prompt library or
+  // prompt assistant feature. If this is something you would like to see - tell us on GitHub!
+  _trackWorkspacePromptChange: async function (prevData, newData, user) {
+    const { Telemetry } = require("./telemetry");
+    const { EventLogs } = require("./eventLogs");
+    if (
+      !newData?.openAiPrompt ||
+      newData?.openAiPrompt === this.defaultPrompt ||
+      newData?.openAiPrompt === prevData?.openAiPrompt
+    )
+      return;
+
+    await Telemetry.sendTelemetry("workspace_prompt_changed");
+    await EventLogs.logEvent(
+      "workspace_prompt_changed",
+      {
+        workspaceName: prevData?.name,
+        prevSystemPrompt: prevData?.openAiPrompt || this.defaultPrompt,
+        newSystemPrompt: newData?.openAiPrompt,
+      },
+      user?.id
+    );
+    return;
+  },
 };
 
 module.exports = { Workspace };
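A small illustrative sketch of how the reworked update path above behaves. The workspace id and field values are made up for the example.

// Non-writable keys are stripped in place before the write, and picking the
// "default" provider clears both chatProvider and chatModel before delegating
// to the unvalidated _update.
await Workspace.update(1, {
  chatProvider: "default", // stored as null
  chatModel: "gpt-4", // also reset to null because the provider was unset
  notAWritableField: "ignored", // hypothetical key, deleted from the payload
});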
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "workspaces" ADD COLUMN "chatProvider" TEXT;
@@ -98,6 +98,7 @@ model workspaces {
   lastUpdatedAt       DateTime @default(now())
   openAiPrompt        String?
   similarityThreshold Float?   @default(0.25)
+  chatProvider        String?
   chatModel           String?
   topN                Int?     @default(4)
   chatMode            String?  @default("chat")
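If you are pulling this change into an existing install, the new column lands through the normal Prisma migration flow; the exact workflow depends on your setup, but it is typically one of the standard commands shown below.

// Shell commands, shown as comments for context (run from the server package):
//   npx prisma migrate deploy   // apply pending migrations on a production install
//   npx prisma migrate dev      // apply and regenerate the client during local development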
@@ -17,7 +17,9 @@ class AzureOpenAiEmbedder {
     // Limit of how many strings we can process in a single pass to stay with resource or network limits
     // https://learn.microsoft.com/en-us/azure/ai-services/openai/faq#i-am-trying-to-use-embeddings-and-received-the-error--invalidrequesterror--too-many-inputs--the-max-number-of-inputs-is-1---how-do-i-fix-this-:~:text=consisting%20of%20up%20to%2016%20inputs%20per%20API%20request
     this.maxConcurrentChunks = 16;
-    this.embeddingMaxChunkLength = 1_000;
+
+    // https://learn.microsoft.com/en-us/answers/questions/1188074/text-embedding-ada-002-token-context-length
+    this.embeddingMaxChunkLength = 2048;
   }
 
   async embedTextInput(textInput) {
@@ -4,6 +4,12 @@ const { toChunks } = require("../../helpers");
 const { v4 } = require("uuid");
 
 class NativeEmbedder {
+  // This is a folder that Mintplex Labs hosts for those who cannot capture the HF model download
+  // endpoint for various reasons. This endpoint is not guaranteed to be active or maintained
+  // and may go offline at any time at Mintplex Labs's discretion.
+  #fallbackHost =
+    "https://s3.us-west-1.amazonaws.com/public.useanything.com/support/models/";
+
   constructor() {
     // Model Card: https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2
     this.model = "Xenova/all-MiniLM-L6-v2";
@@ -13,6 +19,7 @@ class NativeEmbedder {
         : path.resolve(__dirname, `../../../storage/models`)
     );
     this.modelPath = path.resolve(this.cacheDir, "Xenova", "all-MiniLM-L6-v2");
+    this.modelDownloaded = fs.existsSync(this.modelPath);
 
     // Limit of how many strings we can process in a single pass to stay with resource or network limits
     this.maxConcurrentChunks = 25;
@@ -20,6 +27,11 @@ class NativeEmbedder {
 
     // Make directory when it does not exist in existing installations
     if (!fs.existsSync(this.cacheDir)) fs.mkdirSync(this.cacheDir);
+    this.log("Initialized");
+  }
+
+  log(text, ...args) {
+    console.log(`\x1b[36m[NativeEmbedder]\x1b[0m ${text}`, ...args);
   }
 
   #tempfilePath() {
@@ -39,41 +51,73 @@ class NativeEmbedder {
     }
   }
 
-  async embedderClient() {
-    if (!fs.existsSync(this.modelPath)) {
-      console.log(
-        "\x1b[34m[INFO]\x1b[0m The native embedding model has never been run and will be downloaded right now. Subsequent runs will be faster. (~23MB)\n\n"
-      );
-    }
-
+  async #fetchWithHost(hostOverride = null) {
     try {
       // Convert ESM to CommonJS via import so we can load this library.
       const pipeline = (...args) =>
-        import("@xenova/transformers").then(({ pipeline }) =>
-          pipeline(...args)
-        );
-      return await pipeline("feature-extraction", this.model, {
-        cache_dir: this.cacheDir,
-        ...(!fs.existsSync(this.modelPath)
-          ? {
-              // Show download progress if we need to download any files
-              progress_callback: (data) => {
-                if (!data.hasOwnProperty("progress")) return;
-                console.log(
-                  `\x1b[34m[Embedding - Downloading Model Files]\x1b[0m ${
-                    data.file
-                  } ${~~data?.progress}%`
-                );
-              },
-            }
-          : {}),
-      });
+        import("@xenova/transformers").then(({ pipeline, env }) => {
+          if (!this.modelDownloaded) {
+            // if model is not downloaded, we will log where we are fetching from.
+            if (hostOverride) {
+              env.remoteHost = hostOverride;
+              env.remotePathTemplate = "{model}/"; // Our S3 fallback url does not support revision File structure.
+            }
+            this.log(`Downloading ${this.model} from ${env.remoteHost}`);
+          }
+          return pipeline(...args);
+        });
+      return {
+        pipeline: await pipeline("feature-extraction", this.model, {
+          cache_dir: this.cacheDir,
+          ...(!this.modelDownloaded
+            ? {
+                // Show download progress if we need to download any files
+                progress_callback: (data) => {
+                  if (!data.hasOwnProperty("progress")) return;
+                  console.log(
+                    `\x1b[36m[NativeEmbedder - Downloading model]\x1b[0m ${
+                      data.file
+                    } ${~~data?.progress}%`
+                  );
+                },
+              }
+            : {}),
+        }),
+        retry: false,
+        error: null,
+      };
     } catch (error) {
-      console.error("Failed to load the native embedding model:", error);
-      throw error;
+      return {
+        pipeline: null,
+        retry: hostOverride === null ? this.#fallbackHost : false,
+        error,
+      };
     }
   }
 
+  // This function will do a single fallback attempt (not recursive on purpose) to try to grab the embedder model on first embed
+  // since at time, some clients cannot properly download the model from HF servers due to a number of reasons (IP, VPN, etc).
+  // Given this model is critical and nobody reads the GitHub issues before submitting the bug, we get the same bug
+  // report 20 times a day: https://github.com/Mintplex-Labs/anything-llm/issues/821
+  // So to attempt to monkey-patch this we have a single fallback URL to help alleviate duplicate bug reports.
+  async embedderClient() {
+    if (!this.modelDownloaded)
+      this.log(
+        "The native embedding model has never been run and will be downloaded right now. Subsequent runs will be faster. (~23MB)"
+      );
+
+    let fetchResponse = await this.#fetchWithHost();
+    if (fetchResponse.pipeline !== null) return fetchResponse.pipeline;
+
+    this.log(
+      `Failed to download model from primary URL. Using fallback ${fetchResponse.retry}`
+    );
+    if (!!fetchResponse.retry)
+      fetchResponse = await this.#fetchWithHost(fetchResponse.retry);
+    if (fetchResponse.pipeline !== null) return fetchResponse.pipeline;
+    throw fetchResponse.error;
+  }
+
   async embedTextInput(textInput) {
     const result = await this.embedChunks(textInput);
     return result?.[0] || [];
@@ -89,6 +133,7 @@ class NativeEmbedder {
   // during a very large document (>100K words) but can spike up to 70% before gc.
   // This seems repeatable for all document sizes.
   // While this does take a while, it is zero set up and is 100% free and on-instance.
+  // It still may crash depending on other elements at play - so no promises it works under all conditions.
   async embedChunks(textChunks = []) {
     const tmpFilePath = this.#tempfilePath();
     const chunks = toChunks(textChunks, this.maxConcurrentChunks);
@@ -112,7 +157,7 @@ class NativeEmbedder {
 
       data = JSON.stringify(output.tolist());
       await this.#writeToTempfile(tmpFilePath, data);
-      console.log(`\x1b[34m[Embedded Chunk ${idx + 1} of ${chunkLen}]\x1b[0m`);
+      this.log(`Embedded Chunk ${idx + 1} of ${chunkLen}`);
       if (chunkLen - 1 !== idx) await this.#writeToTempfile(tmpFilePath, ",");
      if (chunkLen - 1 === idx) await this.#writeToTempfile(tmpFilePath, "]");
       pipeline = null;
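The primary-then-fallback download logic above boils down to a small pattern. Here is a hedged, stand-alone sketch of that pattern; the loader function and fallback host are placeholders, not the real transformers.js calls.

// Generic "try primary, retry once against a fallback host" helper.
// `load` is any async function that resolves to a client or throws.
async function loadWithSingleFallback(load, fallbackHost) {
  try {
    return await load(null); // primary host
  } catch (primaryError) {
    console.error("Primary download failed, retrying via fallback:", primaryError.message);
    return await load(fallbackHost); // one retry only, mirroring #fetchWithHost above
  }
}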
@@ -18,12 +18,28 @@ class OllamaEmbedder {
     console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
   }
 
+  async #isAlive() {
+    return await fetch(process.env.EMBEDDING_BASE_PATH, {
+      method: "HEAD",
+    })
+      .then((res) => res.ok)
+      .catch((e) => {
+        this.log(e.message);
+        return false;
+      });
+  }
+
   async embedTextInput(textInput) {
     const result = await this.embedChunks([textInput]);
     return result?.[0] || [];
   }
 
   async embedChunks(textChunks = []) {
+    if (!(await this.#isAlive()))
+      throw new Error(
+        `Ollama service could not be reached. Is Ollama running?`
+      );
+
     const embeddingRequests = [];
     this.log(
       `Embedding ${textChunks.length} chunks of text with ${this.model}.`
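The HEAD-request liveness check added above is a general pattern worth reusing before batch work against a local service. A hedged sketch follows; the function name and default URL are assumptions for illustration (the URL shown is commonly Ollama's default address).

// Returns true when the service answers the HEAD request with an ok status.
async function serviceIsAlive(baseUrl = "http://127.0.0.1:11434") {
  return fetch(baseUrl, { method: "HEAD" })
    .then((res) => res.ok)
    .catch(() => false);
}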
@@ -13,7 +13,9 @@ class OpenAiEmbedder {
 
     // Limit of how many strings we can process in a single pass to stay with resource or network limits
     this.maxConcurrentChunks = 500;
-    this.embeddingMaxChunkLength = 1_000;
+
+    // https://platform.openai.com/docs/guides/embeddings/embedding-models
+    this.embeddingMaxChunkLength = 8_191;
   }
 
   async embedTextInput(textInput) {

server/utils/TextSplitter/index.js (new file, 84 lines)
@@ -0,0 +1,84 @@
+function isNullOrNaN(value) {
+  if (value === null) return true;
+  return isNaN(value);
+}
+
+class TextSplitter {
+  #splitter;
+  constructor(config = {}) {
+    /*
+      config can be a ton of things depending on what is required or optional by the specific splitter.
+      Non-splitter related keys
+      {
+        splitByFilename: string, // TODO
+      }
+      ------
+      Default: "RecursiveCharacterTextSplitter"
+      Config: {
+        chunkSize: number,
+        chunkOverlap: number,
+      }
+      ------
+    */
+    this.config = config;
+    this.#splitter = this.#setSplitter(config);
+  }
+
+  log(text, ...args) {
+    console.log(`\x1b[35m[TextSplitter]\x1b[0m ${text}`, ...args);
+  }
+
+  // Does a quick check to determine the text chunk length limit.
+  // Embedder models have hard-set limits that cannot be exceeded, just like an LLM context
+  // so here we want to allow override of the default 1000, but up to the models maximum, which is
+  // sometimes user defined.
+  static determineMaxChunkSize(preferred = null, embedderLimit = 1000) {
+    const prefValue = isNullOrNaN(preferred)
+      ? Number(embedderLimit)
+      : Number(preferred);
+    const limit = Number(embedderLimit);
+    if (prefValue > limit)
+      console.log(
+        `\x1b[43m[WARN]\x1b[0m Text splitter chunk length of ${prefValue} exceeds embedder model max of ${embedderLimit}. Will use ${embedderLimit}.`
+      );
+    return prefValue > limit ? limit : prefValue;
+  }
+
+  #setSplitter(config = {}) {
+    // if (!config?.splitByFilename) {// TODO do something when specific extension is present? }
+    return new RecursiveSplitter({
+      chunkSize: isNaN(config?.chunkSize) ? 1_000 : Number(config?.chunkSize),
+      chunkOverlap: isNaN(config?.chunkOverlap)
+        ? 20
+        : Number(config?.chunkOverlap),
+    });
+  }
+
+  async splitText(documentText) {
+    return this.#splitter._splitText(documentText);
+  }
+}
+
+// Wrapper for Langchain default RecursiveCharacterTextSplitter class.
+class RecursiveSplitter {
+  constructor({ chunkSize, chunkOverlap }) {
+    const {
+      RecursiveCharacterTextSplitter,
+    } = require("langchain/text_splitter");
+    this.log(`Will split with`, { chunkSize, chunkOverlap });
+    this.engine = new RecursiveCharacterTextSplitter({
+      chunkSize,
+      chunkOverlap,
+    });
+  }
+
+  log(text, ...args) {
+    console.log(`\x1b[35m[RecursiveSplitter]\x1b[0m ${text}`, ...args);
+  }
+
+  async _splitText(documentText) {
+    return this.engine.splitText(documentText);
+  }
+}
+
+module.exports.TextSplitter = TextSplitter;
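A short usage sketch of the new TextSplitter wrapper above. The preferred size of 1500 and the embedder limit of 1000 are made-up values, and the require path is shown only for illustration.

const { TextSplitter } = require("./server/utils/TextSplitter"); // illustrative path

async function splitForEmbedding(documentText) {
  // The static helper clamps a user preference to the embedder's hard limit,
  // so 1500 is reduced to 1000 here and a warning is logged.
  const chunkSize = TextSplitter.determineMaxChunkSize(1500, 1000);
  const splitter = new TextSplitter({ chunkSize, chunkOverlap: 20 });
  return await splitter.splitText(documentText);
}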
@@ -28,7 +28,9 @@ async function streamChatWithForEmbed(
     embed.workspace.openAiTemp = parseFloat(temperatureOverride);
 
   const uuid = uuidv4();
-  const LLMConnector = getLLMProvider(chatModel ?? embed.workspace?.chatModel);
+  const LLMConnector = getLLMProvider({
+    model: chatModel ?? embed.workspace?.chatModel,
+  });
   const VectorDb = getVectorDbClass();
   const { safe, reasons = [] } = await LLMConnector.isSafe(message);
   if (!safe) {
@@ -37,7 +37,10 @@ async function chatWithWorkspace(
     return await VALID_COMMANDS[command](workspace, message, uuid, user);
   }
 
-  const LLMConnector = getLLMProvider(workspace?.chatModel);
+  const LLMConnector = getLLMProvider({
+    provider: workspace?.chatProvider,
+    model: workspace?.chatModel,
+  });
   const VectorDb = getVectorDbClass();
   const { safe, reasons = [] } = await LLMConnector.isSafe(message);
   if (!safe) {
@@ -35,7 +35,10 @@ async function streamChatWithWorkspace(
     return;
   }
 
-  const LLMConnector = getLLMProvider(workspace?.chatModel);
+  const LLMConnector = getLLMProvider({
+    provider: workspace?.chatProvider,
+    model: workspace?.chatModel,
+  });
   const VectorDb = getVectorDbClass();
   const { safe, reasons = [] } = await LLMConnector.isSafe(message);
   if (!safe) {
@@ -9,7 +9,7 @@ const { v4 } = require("uuid");
 // be out of bounds and the `hotdir` is always inside of the collector folder. It is not mounted
 // with the rest of the storage.
 // This line is only relevant for Render/Railway.
-const RENDER_STORAGE = path.resolve(__dirname, `../../../collector/hotdir`)
+const RENDER_STORAGE = path.resolve(__dirname, `../../../collector/hotdir`);
 
 // Handle File uploads for auto-uploading.
 const fileUploadStorage = multer.diskStorage({
@@ -30,52 +30,53 @@ function getVectorDbClass() {
   }
 }
 
-function getLLMProvider(modelPreference = null) {
-  const vectorSelection = process.env.LLM_PROVIDER || "openai";
+function getLLMProvider({ provider = null, model = null } = {}) {
+  const LLMSelection = provider ?? process.env.LLM_PROVIDER ?? "openai";
   const embedder = getEmbeddingEngineSelection();
-  switch (vectorSelection) {
+
+  switch (LLMSelection) {
     case "openai":
       const { OpenAiLLM } = require("../AiProviders/openAi");
-      return new OpenAiLLM(embedder, modelPreference);
+      return new OpenAiLLM(embedder, model);
     case "azure":
       const { AzureOpenAiLLM } = require("../AiProviders/azureOpenAi");
-      return new AzureOpenAiLLM(embedder, modelPreference);
+      return new AzureOpenAiLLM(embedder, model);
     case "anthropic":
       const { AnthropicLLM } = require("../AiProviders/anthropic");
-      return new AnthropicLLM(embedder, modelPreference);
+      return new AnthropicLLM(embedder, model);
     case "gemini":
       const { GeminiLLM } = require("../AiProviders/gemini");
-      return new GeminiLLM(embedder, modelPreference);
+      return new GeminiLLM(embedder, model);
     case "lmstudio":
       const { LMStudioLLM } = require("../AiProviders/lmStudio");
-      return new LMStudioLLM(embedder, modelPreference);
+      return new LMStudioLLM(embedder, model);
     case "localai":
       const { LocalAiLLM } = require("../AiProviders/localAi");
-      return new LocalAiLLM(embedder, modelPreference);
+      return new LocalAiLLM(embedder, model);
     case "ollama":
       const { OllamaAILLM } = require("../AiProviders/ollama");
-      return new OllamaAILLM(embedder, modelPreference);
+      return new OllamaAILLM(embedder, model);
     case "togetherai":
       const { TogetherAiLLM } = require("../AiProviders/togetherAi");
-      return new TogetherAiLLM(embedder, modelPreference);
+      return new TogetherAiLLM(embedder, model);
     case "perplexity":
       const { PerplexityLLM } = require("../AiProviders/perplexity");
-      return new PerplexityLLM(embedder, modelPreference);
+      return new PerplexityLLM(embedder, model);
     case "openrouter":
       const { OpenRouterLLM } = require("../AiProviders/openRouter");
-      return new OpenRouterLLM(embedder, modelPreference);
+      return new OpenRouterLLM(embedder, model);
     case "mistral":
       const { MistralLLM } = require("../AiProviders/mistral");
-      return new MistralLLM(embedder, modelPreference);
+      return new MistralLLM(embedder, model);
     case "native":
       const { NativeLLM } = require("../AiProviders/native");
-      return new NativeLLM(embedder, modelPreference);
+      return new NativeLLM(embedder, model);
     case "huggingface":
       const { HuggingFaceLLM } = require("../AiProviders/huggingface");
-      return new HuggingFaceLLM(embedder, modelPreference);
+      return new HuggingFaceLLM(embedder, model);
     case "groq":
       const { GroqLLM } = require("../AiProviders/groq");
-      return new GroqLLM(embedder, modelPreference);
+      return new GroqLLM(embedder, model);
     default:
       throw new Error("ENV: No LLM_PROVIDER value found in environment!");
   }
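A quick call-site sketch of the new getLLMProvider signature. The workspace object here is a hypothetical record used only for illustration.

// Workspace-level overrides win; the ENV-configured provider is used when
// provider is null or undefined.
const workspace = { chatProvider: "ollama", chatModel: "llama2" }; // hypothetical record
const LLMConnector = getLLMProvider({
  provider: workspace?.chatProvider,
  model: workspace?.chatModel,
});

// With no arguments it behaves as before and reads process.env.LLM_PROVIDER.
const defaultConnector = getLLMProvider();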
@@ -100,7 +101,6 @@ function getEmbeddingEngineSelection() {
       return new OllamaEmbedder();
     case "native":
       const { NativeEmbedder } = require("../EmbeddingEngines/native");
-      console.log("\x1b[34m[INFO]\x1b[0m Using Native Embedder");
       return new NativeEmbedder();
     default:
       return null;
@@ -2,7 +2,6 @@ const KEY_MAPPING = {
   LLMProvider: {
     envKey: "LLM_PROVIDER",
     checks: [isNotEmpty, supportedLLM],
-    postUpdate: [wipeWorkspaceModelPreference],
   },
   // OpenAI Settings
   OpenAiKey: {
@@ -493,15 +492,6 @@ function validHuggingFaceEndpoint(input = "") {
     : null;
 }
 
-// If the LLMProvider has changed we need to reset all workspace model preferences to
-// null since the provider<>model name combination will be invalid for whatever the new
-// provider is.
-async function wipeWorkspaceModelPreference(key, prev, next) {
-  if (prev === next) return;
-  const { Workspace } = require("../../models/workspace");
-  await Workspace.resetWorkspaceChatModels();
-}
-
 // This will force update .env variables which for any which reason were not able to be parsed or
 // read from an ENV file as this seems to be a complicating step for many so allowing people to write
 // to the process will at least alleviate that issue. It does not perform comprehensive validity checks or sanity checks
@@ -1,5 +1,6 @@
 const { AstraDB: AstraClient } = require("@datastax/astra-db-ts");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 const {
@@ -147,10 +148,17 @@ const AstraDB = {
         return { vectorized: true, error: null };
       }
 
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
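The same splitter swap is repeated for each vector database below (Chroma, LanceDB, Milvus, Pinecone, Qdrant, Weaviate, Zilliz). The resolution order it implements can be read as one small helper; this sketch is illustrative and is not code from the diff.

// Preferred chunk size comes from system settings but is clamped to the
// embedder's hard limit; overlap comes from settings with a default of 20.
async function resolveSplitterConfig() {
  const embedderLimit =
    getEmbeddingEngineSelection()?.embeddingMaxChunkLength; // undefined falls back to 1000 inside determineMaxChunkSize
  return {
    chunkSize: TextSplitter.determineMaxChunkSize(
      await SystemSettings.getValueOrFallback({ label: "text_splitter_chunk_size" }),
      embedderLimit
    ),
    chunkOverlap: await SystemSettings.getValueOrFallback(
      { label: "text_splitter_chunk_overlap" },
      20
    ),
  };
}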
@@ -1,5 +1,6 @@
 const { ChromaClient } = require("chromadb");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 const {
@@ -180,10 +181,17 @@ const Chroma = {
       // We have to do this manually as opposed to using LangChains `Chroma.fromDocuments`
       // because we then cannot atomically control our namespace to granularly find/remove documents
       // from vectordb.
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
@@ -5,7 +5,8 @@ const {
   getEmbeddingEngineSelection,
 } = require("../../helpers");
 const { OpenAIEmbeddings } = require("langchain/embeddings/openai");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 
@@ -180,10 +181,17 @@ const LanceDb = {
       // We have to do this manually as opposed to using LangChains `xyz.fromDocuments`
       // because we then cannot atomically control our namespace to granularly find/remove documents
       // from vectordb.
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
@@ -4,7 +4,8 @@ const {
   IndexType,
   MilvusClient,
 } = require("@zilliz/milvus2-sdk-node");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { v4: uuidv4 } = require("uuid");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const {
@@ -182,10 +183,17 @@ const Milvus = {
       return { vectorized: true, error: null };
     }
 
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
@@ -1,5 +1,6 @@
 const { Pinecone } = require("@pinecone-database/pinecone");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 const {
@@ -125,10 +126,17 @@ const PineconeDB = {
       // because we then cannot atomically control our namespace to granularly find/remove documents
       // from vectordb.
       // https://github.com/hwchase17/langchainjs/blob/2def486af734c0ca87285a48f1a04c057ab74bdf/langchain/src/vectorstores/pinecone.ts#L167
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
@@ -1,5 +1,6 @@
 const { QdrantClient } = require("@qdrant/js-client-rest");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 const {
@@ -198,10 +199,17 @@ const QDrant = {
       // We have to do this manually as opposed to using LangChains `Qdrant.fromDocuments`
       // because we then cannot atomically control our namespace to granularly find/remove documents
       // from vectordb.
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
 
@@ -1,5 +1,6 @@
 const { default: weaviate } = require("weaviate-ts-client");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const { v4: uuidv4 } = require("uuid");
 const {
@@ -241,10 +242,17 @@ const Weaviate = {
       // We have to do this manually as opposed to using LangChains `Chroma.fromDocuments`
       // because we then cannot atomically control our namespace to granularly find/remove documents
       // from vectordb.
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
@@ -4,7 +4,8 @@ const {
   IndexType,
   MilvusClient,
 } = require("@zilliz/milvus2-sdk-node");
-const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
+const { TextSplitter } = require("../../TextSplitter");
+const { SystemSettings } = require("../../../models/systemSettings");
 const { v4: uuidv4 } = require("uuid");
 const { storeVectorResult, cachedVectorInformation } = require("../../files");
 const {
@@ -183,10 +184,17 @@ const Zilliz = {
         return { vectorized: true, error: null };
       }
 
-      const textSplitter = new RecursiveCharacterTextSplitter({
-        chunkSize:
-          getEmbeddingEngineSelection()?.embeddingMaxChunkLength || 1_000,
-        chunkOverlap: 20,
+      const textSplitter = new TextSplitter({
+        chunkSize: TextSplitter.determineMaxChunkSize(
+          await SystemSettings.getValueOrFallback({
+            label: "text_splitter_chunk_size",
+          }),
+          getEmbeddingEngineSelection()?.embeddingMaxChunkLength
+        ),
+        chunkOverlap: await SystemSettings.getValueOrFallback(
+          { label: "text_splitter_chunk_overlap" },
+          20
+        ),
       });
       const textChunks = await textSplitter.splitText(pageContent);
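All four hunks also lean on SystemSettings.getValueOrFallback to read the chunking knobs that used to be hard-coded. A bare-bones, in-memory stand-in for that lookup pattern is sketched below; it is hypothetical, and the real model in models/systemSettings is persistence-backed and may behave differently.

// Hypothetical stand-in for the labeled-setting lookup used in the hunks above.
// Returns the stored value when one exists, otherwise the provided fallback.
const fakeSettingsStore = new Map([["text_splitter_chunk_overlap", "20"]]);

async function getValueOrFallback({ label } = {}, fallback = null) {
  const value = fakeSettingsStore.get(label);
  return value ?? fallback;
}

// Mirrors the call made in every provider above.
(async () => {
  const overlap = await getValueOrFallback(
    { label: "text_splitter_chunk_overlap" },
    20
  );
  console.log(overlap); // "20" from the store, or 20 when the label is unset
})();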