Merge branch 'master' of github.com:Mintplex-Labs/anything-llm into render

timothycarambat 2024-09-27 09:51:13 -07:00
commit 6d7f8b71cf
28 changed files with 936 additions and 139 deletions

View File

@@ -87,6 +87,7 @@ AnythingLLM divides your documents into objects called `workspaces`. A Workspace
- [Fireworks AI (chat models)](https://fireworks.ai/)
- [Perplexity (chat models)](https://www.perplexity.ai/)
- [OpenRouter (chat models)](https://openrouter.ai/)
+ - [DeepSeek (chat models)](https://deepseek.com/)
- [Mistral](https://mistral.ai/)
- [Groq](https://groq.com/)
- [Cohere](https://cohere.com/)

View File

@@ -15,6 +15,7 @@
"lint": "yarn prettier --ignore-path ../.prettierignore --write ./processSingleFile ./processLink ./utils index.js"
},
"dependencies": {
+ "@langchain/community": "^0.2.23",
"@xenova/transformers": "^2.11.0",
"bcrypt": "^5.1.0",
"body-parser": "^1.20.2",
@@ -30,7 +31,6 @@
"mammoth": "^1.6.0",
"mbox-parser": "^1.0.1",
"mime": "^3.0.0",
- "minimatch": "5.1.0",
"moment": "^2.29.4",
"node-html-parser": "^6.1.13",
"officeparser": "^4.0.5",

View File

@@ -13,6 +13,7 @@ class ConfluencePagesLoader {
limit = 25,
expand = "body.storage,version",
personalAccessToken,
+ cloud = true,
}) {
this.baseUrl = baseUrl;
this.spaceKey = spaceKey;
@@ -21,6 +22,7 @@
this.limit = limit;
this.expand = expand;
this.personalAccessToken = personalAccessToken;
+ this.cloud = cloud;
}
get authorizationHeader() {
@@ -74,7 +76,11 @@
// https://developer.atlassian.com/cloud/confluence/rest/v2/intro/#auth
async fetchAllPagesInSpace(start = 0, limit = this.limit) {
- const url = `${this.baseUrl}/wiki/rest/api/content?spaceKey=${this.spaceKey}&limit=${limit}&start=${start}&expand=${this.expand}`;
+ const url = `${this.baseUrl}${
+ this.cloud ? "/wiki" : ""
+ }/rest/api/content?spaceKey=${
+ this.spaceKey
+ }&limit=${limit}&start=${start}&expand=${this.expand}`;
const data = await this.fetchConfluenceData(url);
if (data.size === 0) {
return [];
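In practice the new `cloud` flag only toggles the `/wiki` prefix on the content API path. A rough sketch of the resulting URLs (the hostname and space key below are made-up examples, not from this commit):

```js
// Illustration only: how the loader's content URL changes with the new flag.
function contentUrl(baseUrl, spaceKey, cloud, limit = 25, start = 0) {
  return `${baseUrl}${cloud ? "/wiki" : ""}/rest/api/content?spaceKey=${spaceKey}&limit=${limit}&start=${start}&expand=body.storage,version`;
}

contentUrl("https://example.atlassian.net", "DOCS", true);
// -> https://example.atlassian.net/wiki/rest/api/content?spaceKey=DOCS&limit=25&start=0&expand=body.storage,version
contentUrl("https://confluence.internal.example", "DOCS", false);
// -> https://confluence.internal.example/rest/api/content?spaceKey=DOCS&limit=25&start=0&expand=body.storage,version
```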

View File

@@ -13,7 +13,13 @@ const { ConfluencePagesLoader } = require("./ConfluenceLoader");
 * @returns
 */
async function loadConfluence(
- { baseUrl = null, spaceKey = null, username = null, accessToken = null },
+ {
+ baseUrl = null,
+ spaceKey = null,
+ username = null,
+ accessToken = null,
+ cloud = true,
+ },
response
) {
if (!baseUrl || !spaceKey || !username || !accessToken) {
@@ -45,6 +51,7 @@ async function loadConfluence(
spaceKey,
username,
accessToken,
+ cloud,
});
const { docs, error } = await loader
@@ -66,7 +73,7 @@
};
}
const outFolder = slugify(
- `confluence-${origin}-${v4().slice(0, 4)}`
+ `confluence-${hostname}-${v4().slice(0, 4)}`
).toLowerCase();
const outFolderPath = path.resolve(documentsFolder, outFolder);
if (!fs.existsSync(outFolderPath))
@@ -83,7 +90,7 @@
description: doc.metadata.title,
docSource: `${origin} Confluence`,
chunkSource: generateChunkSource(
- { doc, baseUrl: origin, spaceKey, accessToken, username },
+ { doc, baseUrl: origin, spaceKey, accessToken, username, cloud },
response.locals.encryptionWorker
),
published: new Date().toLocaleString(),
@@ -122,6 +129,7 @@ async function fetchConfluencePage({
spaceKey,
username,
accessToken,
+ cloud = true,
}) {
if (!pageUrl || !baseUrl || !spaceKey || !username || !accessToken) {
return {
@@ -154,6 +162,7 @@
spaceKey,
username,
accessToken,
+ cloud,
});
const { docs, error } = await loader
@@ -217,7 +226,7 @@ function validBaseUrl(baseUrl) {
 * @returns {string}
 */
function generateChunkSource(
- { doc, baseUrl, spaceKey, accessToken, username },
+ { doc, baseUrl, spaceKey, accessToken, username, cloud },
encryptionWorker
) {
const payload = {
@@ -225,6 +234,7 @@ function generateChunkSource(
spaceKey,
token: accessToken,
username,
+ cloud,
};
return `confluence://${doc.metadata.url}?payload=${encryptionWorker.encrypt(
JSON.stringify(payload)

View File

@@ -105,7 +105,7 @@ class GitHubRepoLoader {
if (!this.ready) throw new Error("[Github Loader]: not in ready state!");
const {
GithubRepoLoader: LCGithubLoader,
- } = require("langchain/document_loaders/web/github");
+ } = require("@langchain/community/document_loaders/web/github");
if (this.accessToken)
console.log(
@@ -113,17 +113,16 @@
);
const loader = new LCGithubLoader(this.repo, {
- accessToken: this.accessToken,
branch: this.branch,
recursive: !!this.accessToken, // Recursive will hit rate limits.
maxConcurrency: 5,
- unknown: "ignore",
+ unknown: "warn",
+ accessToken: this.accessToken,
ignorePaths: this.ignorePaths,
verbose: true,
});
- const docs = [];
- for await (const doc of loader.loadAsStream()) docs.push(doc);
+ const docs = await loader.load();
return docs;
}

View File

@@ -1,4 +1,4 @@
- const minimatch = require("minimatch");
+ const ignore = require("ignore");
/**
 * @typedef {Object} RepoLoaderArgs
@@ -6,6 +6,7 @@ const minimatch = require("minimatch");
 * @property {string} [branch] - The branch to load from (optional).
 * @property {string} [accessToken] - GitLab access token for authentication (optional).
 * @property {string[]} [ignorePaths] - Array of paths to ignore when loading (optional).
+ * @property {boolean} [fetchIssues] - Should issues be fetched (optional).
 */
/**
@@ -33,6 +34,8 @@ class GitLabRepoLoader {
this.branch = args?.branch;
this.accessToken = args?.accessToken || null;
this.ignorePaths = args?.ignorePaths || [];
+ this.ignoreFilter = ignore().add(this.ignorePaths);
+ this.withIssues = args?.fetchIssues || false;
this.projectId = null;
this.apiBase = "https://gitlab.com";
@@ -123,22 +126,44 @@
if (this.accessToken)
console.log(
- `[Gitlab Loader]: Access token set! Recursive loading enabled!`
+ `[Gitlab Loader]: Access token set! Recursive loading enabled for ${this.repo}!`
);
- const files = await this.fetchFilesRecursive();
const docs = [];
- for (const file of files) {
- if (this.ignorePaths.some((path) => file.path.includes(path))) continue;
- const content = await this.fetchSingleFileContents(file.path);
- if (content) {
- docs.push({
- pageContent: content,
- metadata: { source: file.path },
- });
- }
- }
+ console.log(`[Gitlab Loader]: Fetching files.`);
+ const files = await this.fetchFilesRecursive();
+ console.log(`[Gitlab Loader]: Fetched ${files.length} files.`);
+ for (const file of files) {
+ if (this.ignoreFilter.ignores(file.path)) continue;
+ docs.push({
+ pageContent: file.content,
+ metadata: {
+ source: file.path,
+ url: `${this.repo}/-/blob/${this.branch}/${file.path}`,
+ },
+ });
+ }
+ if (this.withIssues) {
+ console.log(`[Gitlab Loader]: Fetching issues.`);
+ const issues = await this.fetchIssues();
+ console.log(
+ `[Gitlab Loader]: Fetched ${issues.length} issues with discussions.`
+ );
+ docs.push(
+ ...issues.map((issue) => ({
+ issue,
+ metadata: {
+ source: `issue-${this.repo}-${issue.iid}`,
+ url: issue.web_url,
+ },
+ }))
+ );
+ }
return docs;
@@ -160,51 +185,14 @@
if (!this.#validGitlabUrl() || !this.projectId) return [];
await this.#validateAccessToken();
this.branches = [];
- let fetching = true;
- let page = 1;
- let perPage = 50;
- while (fetching) {
- try {
- const params = new URLSearchParams({
- per_page: perPage,
- page,
- });
- const response = await fetch(
- `${this.apiBase}/api/v4/projects/${
- this.projectId
- }/repository/branches?${params.toString()}`,
- {
- method: "GET",
- headers: {
- Accepts: "application/json",
- ...(this.accessToken
- ? { "PRIVATE-TOKEN": this.accessToken }
- : {}),
- },
- }
- )
- .then((res) => res.json())
- .then((branches) => {
- if (!Array.isArray(branches) || branches.length === 0) {
- fetching = false;
- return [];
- }
- return branches.map((b) => b.name);
- })
- .catch((e) => {
- console.error(e);
- fetching = false;
- return [];
- });
- this.branches.push(...response);
- page++;
- } catch (err) {
- console.log(`RepoLoader.getRepoBranches`, err);
- fetching = false;
- return [];
- }
- }
+ const branchesRequestData = {
+ endpoint: `/api/v4/projects/${this.projectId}/repository/branches`,
+ };
+ let branchesPage = [];
+ while ((branchesPage = await this.fetchNextPage(branchesRequestData))) {
+ this.branches.push(...branchesPage.map((branch) => branch.name));
+ }
return this.#branchPrefSort(this.branches);
}
@@ -215,62 +203,91 @@
 */
async fetchFilesRecursive() {
const files = [];
- let perPage = 100;
- let fetching = true;
- let page = 1;
- while (fetching) {
- try {
- const params = new URLSearchParams({
- ref: this.branch,
- recursive: true,
- per_page: perPage,
- page,
- });
- const queryUrl = `${this.apiBase}/api/v4/projects/${
- this.projectId
- }/repository/tree?${params.toString()}`;
- const response = await fetch(queryUrl, {
- method: "GET",
- headers: this.accessToken
- ? { "PRIVATE-TOKEN": this.accessToken }
- : {},
- });
- const totalPages = Number(response.headers.get("x-total-pages"));
- const nextPage = Number(response.headers.get("x-next-page"));
- const data = await response.json();
- /** @type {FileTreeObject[]} */
- const objects = Array.isArray(data)
- ? data.filter((item) => item.type === "blob")
- : []; // only get files, not paths or submodules
- // Apply ignore path rules to found objects. If any rules match it is an invalid file path.
- console.log(
- `Found ${objects.length} blobs from repo from pg ${page}/${totalPages}`
- );
- for (const file of objects) {
- const isIgnored = this.ignorePaths.some((ignorePattern) =>
- minimatch(file.path, ignorePattern, { matchBase: true })
- );
- if (!isIgnored) files.push(file);
- }
- if (page === totalPages) {
- fetching = false;
- break;
- }
- page = Number(nextPage);
- } catch (e) {
- console.error(`RepoLoader.getRepositoryTree`, e);
- fetching = false;
- break;
- }
- }
+ const filesRequestData = {
+ endpoint: `/api/v4/projects/${this.projectId}/repository/tree`,
+ queryParams: {
+ ref: this.branch,
+ recursive: true,
+ },
+ };
+ let filesPage = null;
+ let pagePromises = [];
+ while ((filesPage = await this.fetchNextPage(filesRequestData))) {
+ // Fetch all the files that are not ignored in parallel.
+ pagePromises = filesPage
+ .filter((file) => {
+ if (file.type !== "blob") return false;
+ return !this.ignoreFilter.ignores(file.path);
+ })
+ .map(async (file) => {
+ const content = await this.fetchSingleFileContents(file.path);
+ if (!content) return null;
+ return {
+ path: file.path,
+ content,
+ };
+ });
+ const pageFiles = await Promise.all(pagePromises);
+ files.push(...pageFiles.filter((item) => item !== null));
+ console.log(`Fetched ${files.length} files.`);
+ }
+ console.log(`Total files fetched: ${files.length}`);
return files;
}
/**
* Fetches all issues from the repository.
* @returns {Promise<Issue[]>} An array of issue objects.
*/
async fetchIssues() {
const issues = [];
const issuesRequestData = {
endpoint: `/api/v4/projects/${this.projectId}/issues`,
};
let issuesPage = null;
let pagePromises = [];
while ((issuesPage = await this.fetchNextPage(issuesRequestData))) {
// Fetch all the issues in parallel.
pagePromises = issuesPage.map(async (issue) => {
const discussionsRequestData = {
endpoint: `/api/v4/projects/${this.projectId}/issues/${issue.iid}/discussions`,
};
let discussionPage = null;
const discussions = [];
while (
(discussionPage = await this.fetchNextPage(discussionsRequestData))
) {
discussions.push(
...discussionPage.map(({ notes }) =>
notes.map(
({ body, author, created_at }) =>
`${author.username} at ${created_at}:
${body}`
)
)
);
}
const result = {
...issue,
discussions,
};
return result;
});
const pageIssues = await Promise.all(pagePromises);
issues.push(...pageIssues);
console.log(`Fetched ${issues.length} issues.`);
}
console.log(`Total issues fetched: ${issues.length}`);
return issues;
}
/**
 * Fetches the content of a single file from the repository.
 * @param {string} sourceFilePath - The path to the file in the repository.
@@ -301,6 +318,59 @@ class GitLabRepoLoader {
return null;
}
}
/**
* Fetches the next page of data from the API.
* @param {Object} requestData - The request data.
* @returns {Promise<Array<Object>|null>} The next page of data, or null if no more pages.
*/
async fetchNextPage(requestData) {
try {
if (requestData.page === -1) return null;
if (!requestData.page) requestData.page = 1;
const { endpoint, perPage = 100, queryParams = {} } = requestData;
const params = new URLSearchParams({
...queryParams,
per_page: perPage,
page: requestData.page,
});
const url = `${this.apiBase}${endpoint}?${params.toString()}`;
const response = await fetch(url, {
method: "GET",
headers: this.accessToken ? { "PRIVATE-TOKEN": this.accessToken } : {},
});
// Rate limits get hit very often if no PAT is provided
if (response.status === 401) {
console.warn(`Rate limit hit for ${endpoint}. Skipping.`);
return null;
}
const totalPages = Number(response.headers.get("x-total-pages"));
const data = await response.json();
if (!Array.isArray(data)) {
console.warn(`Unexpected response format for ${endpoint}:`, data);
return [];
}
console.log(
`Gitlab RepoLoader: fetched ${endpoint} page ${requestData.page}/${totalPages} with ${data.length} records.`
);
if (totalPages === requestData.page) {
requestData.page = -1;
} else {
requestData.page = Number(response.headers.get("x-next-page"));
}
return data;
} catch (e) {
console.error(`RepoLoader.fetchNextPage`, e);
return null;
}
}
}
module.exports = GitLabRepoLoader;
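A minimal sketch of the contract the new fetchNextPage helper exposes, based on the implementation above (the project id and endpoint below are placeholders for illustration):

```js
// Sketch only: fetchNextPage advances requestData.page itself and returns null when exhausted.
const requestData = {
  endpoint: `/api/v4/projects/12345/issues`, // "12345" is a placeholder project id
  queryParams: {},                           // optional extra query params
  perPage: 100,                              // optional, defaults to 100
};

let page;
while ((page = await this.fetchNextPage(requestData))) {
  // `page` is an array of GitLab API records; requestData.page is set to -1
  // once x-total-pages is reached, so the next call returns null and ends the loop.
  console.log(`Got ${page.length} records`);
}
```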

View File

@@ -43,13 +43,12 @@ async function loadGitlabRepo(args, response) {
fs.mkdirSync(outFolderPath, { recursive: true });
for (const doc of docs) {
- if (!doc.pageContent) continue;
+ if (!doc.metadata || (!doc.pageContent && !doc.issue)) continue;
+ let pageContent = null;
const data = {
id: v4(),
url: "gitlab://" + doc.metadata.source,
- title: doc.metadata.source,
- docAuthor: repo.author,
- description: "No description found.",
docSource: doc.metadata.source,
chunkSource: generateChunkSource(
repo,
@@ -57,13 +56,32 @@
response.locals.encryptionWorker
),
published: new Date().toLocaleString(),
- wordCount: doc.pageContent.split(" ").length,
- pageContent: doc.pageContent,
- token_count_estimate: tokenizeString(doc.pageContent).length,
};
if (doc.pageContent) {
pageContent = doc.pageContent;
data.title = doc.metadata.source;
data.docAuthor = repo.author;
data.description = "No description found.";
} else if (doc.issue) {
pageContent = issueToMarkdown(doc.issue);
data.title = `Issue ${doc.issue.iid}: ${doc.issue.title}`;
data.docAuthor = doc.issue.author.username;
data.description = doc.issue.description;
} else {
continue;
}
data.wordCount = pageContent.split(" ").length;
data.token_count_estimate = tokenizeString(pageContent).length;
data.pageContent = pageContent;
console.log(
`[GitLab Loader]: Saving ${doc.metadata.source} to ${outFolder}`
);
writeToServerDocuments(
data,
`${slugify(doc.metadata.source)}-${data.id}`,
@@ -135,4 +153,93 @@ function generateChunkSource(repo, doc, encryptionWorker) {
)}`;
}
function issueToMarkdown(issue) {
const metadata = {};
const userFields = ["author", "assignees", "closed_by"];
const userToUsername = ({ username }) => username;
for (const userField of userFields) {
if (issue[userField]) {
if (Array.isArray(issue[userField])) {
metadata[userField] = issue[userField].map(userToUsername);
} else {
metadata[userField] = userToUsername(issue[userField]);
}
}
}
const singleValueFields = [
"web_url",
"state",
"created_at",
"updated_at",
"closed_at",
"due_date",
"type",
"merge_request_count",
"upvotes",
"downvotes",
"labels",
"has_tasks",
"task_status",
"confidential",
"severity",
];
for (const singleValueField of singleValueFields) {
metadata[singleValueField] = issue[singleValueField];
}
if (issue.milestone) {
metadata.milestone = `${issue.milestone.title} (${issue.milestone.id})`;
}
if (issue.time_stats) {
const timeFields = ["time_estimate", "total_time_spent"];
for (const timeField of timeFields) {
const fieldName = `human_${timeField}`;
if (issue?.time_stats[fieldName]) {
metadata[timeField] = issue.time_stats[fieldName];
}
}
}
const metadataString = Object.entries(metadata)
.map(([name, value]) => {
if (!value || value?.length < 1) {
return null;
}
let result = `- ${name.replace("_", " ")}:`;
if (!Array.isArray(value)) {
result += ` ${value}`;
} else {
result += "\n" + value.map((s) => ` - ${s}`).join("\n");
}
return result;
})
.filter((item) => item != null)
.join("\n");
let markdown = `# ${issue.title} (${issue.iid})
${issue.description}
## Metadata
${metadataString}`;
if (issue.discussions.length > 0) {
markdown += `
## Activity
${issue.discussions.join("\n\n")}
`;
}
return markdown;
}
module.exports = { loadGitlabRepo, fetchGitlabFile };
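For reference, the markdown that issueToMarkdown emits for a single issue looks roughly like the following (all values below are invented for illustration):

```
# Loader crashes on empty ignore list (42)

Steps to reproduce: run the GitLab connector with no ignore paths...

## Metadata
- author: jdoe
- web url: https://gitlab.com/acme/widgets/-/issues/42
- state: opened
- created at: 2024-08-01T12:00:00.000Z
- labels:
 - bug
 - connector

## Activity
jdoe at 2024-08-02T09:30:00.000Z:
Reproduced on main.
```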

View File

@@ -64,6 +64,23 @@
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
integrity sha512-/KPde26khDUIPkTGU82jdtTW9UAuvUTumCAbFs/7giR0SxsvZC4hru51PBvpijH6BVkHcROcvZM/lpy5h1jRRA==
"@langchain/community@^0.2.23":
version "0.2.23"
resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.2.23.tgz#20560e107bcc8432c42e499f1b9292d41a3732f2"
integrity sha512-p1n/zZ1F+O5l51RzeoUeJyhpzq6Wp11tkqKOj8oThKOQJgLhO7q6iFIvmKThzL7mZCNNuPM5r1OPnU4wO6iF/A==
dependencies:
"@langchain/core" ">=0.2.16 <0.3.0"
"@langchain/openai" ">=0.1.0 <0.3.0"
binary-extensions "^2.2.0"
expr-eval "^2.0.2"
flat "^5.0.2"
js-yaml "^4.1.0"
langchain "~0.2.3"
langsmith "~0.1.30"
uuid "^10.0.0"
zod "^3.22.3"
zod-to-json-schema "^3.22.5"
"@langchain/community@~0.0.47": "@langchain/community@~0.0.47":
version "0.0.53" version "0.0.53"
resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.0.53.tgz#a9aaedffa0ed2977e8d302d74e9f90a49a6da037" resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.0.53.tgz#a9aaedffa0ed2977e8d302d74e9f90a49a6da037"
@ -78,6 +95,23 @@
zod "^3.22.3" zod "^3.22.3"
zod-to-json-schema "^3.22.5" zod-to-json-schema "^3.22.5"
"@langchain/core@>=0.2.11 <0.3.0", "@langchain/core@>=0.2.16 <0.3.0":
version "0.2.20"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.20.tgz#5115781b0a86db3ce4b697e473405892c09621ca"
integrity sha512-WPBjrzOj79/yqjloDUIw1GDhuRQfHis07TyyDj+qS81nHh0svSasetKcqAZ3L5JoPcBmEL7rRBtM+OcyC3mLVg==
dependencies:
ansi-styles "^5.0.0"
camelcase "6"
decamelize "1.2.0"
js-tiktoken "^1.0.12"
langsmith "~0.1.39"
mustache "^4.2.0"
p-queue "^6.6.2"
p-retry "4"
uuid "^10.0.0"
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
"@langchain/core@~0.1", "@langchain/core@~0.1.56", "@langchain/core@~0.1.60": "@langchain/core@~0.1", "@langchain/core@~0.1.56", "@langchain/core@~0.1.60":
version "0.1.61" version "0.1.61"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.61.tgz#9313363e04f1c6981a938b2909c44ce6fceb2736" resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.61.tgz#9313363e04f1c6981a938b2909c44ce6fceb2736"
@ -96,6 +130,17 @@
zod "^3.22.4" zod "^3.22.4"
zod-to-json-schema "^3.22.3" zod-to-json-schema "^3.22.3"
"@langchain/openai@>=0.1.0 <0.3.0":
version "0.2.5"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.2.5.tgz#e85b983986a7415ea743d4c854bb0674134334d4"
integrity sha512-gQXS5VBFyAco0jgSnUVan6fYVSIxlffmDaeDGpXrAmz2nQPgiN/h24KYOt2NOZ1zRheRzRuO/CfRagMhyVUaFA==
dependencies:
"@langchain/core" ">=0.2.16 <0.3.0"
js-tiktoken "^1.0.12"
openai "^4.49.1"
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
"@langchain/openai@~0.0.28": "@langchain/openai@~0.0.28":
version "0.0.28" version "0.0.28"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.28.tgz#afaeec61b44816935db9ae937496c964c81ab571" resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.28.tgz#afaeec61b44816935db9ae937496c964c81ab571"
@ -559,13 +604,6 @@ brace-expansion@^1.1.7:
balanced-match "^1.0.0" balanced-match "^1.0.0"
concat-map "0.0.1" concat-map "0.0.1"
brace-expansion@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae"
integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==
dependencies:
balanced-match "^1.0.0"
braces@~3.0.2: braces@~3.0.2:
version "3.0.2" version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
@@ -1769,6 +1807,13 @@ js-tiktoken@^1.0.11, js-tiktoken@^1.0.7, js-tiktoken@^1.0.8:
dependencies:
base64-js "^1.5.1"
js-tiktoken@^1.0.12:
version "1.0.12"
resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.12.tgz#af0f5cf58e5e7318240d050c8413234019424211"
integrity sha512-L7wURW1fH9Qaext0VzaUDpFGVQgjkdE3Dgsy9/+yXyGEpBKnylTd0mU0bfbNkKDlXRb6TEsZkwuflu1B8uQbJQ==
dependencies:
base64-js "^1.5.1"
js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@@ -1844,6 +1889,28 @@ langchain@0.1.36:
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
langchain@~0.2.3:
version "0.2.12"
resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.2.12.tgz#3fac0b9519a070689b6dd679d5854abc57824dcf"
integrity sha512-ZHtJrHUpridZ7IQu7N/wAQ6iMAAO7VLzkupHqKP79S6p+alrPbn1BjRnh+PeGm92YiY5DafTCuvchmujxx7bCQ==
dependencies:
"@langchain/core" ">=0.2.11 <0.3.0"
"@langchain/openai" ">=0.1.0 <0.3.0"
"@langchain/textsplitters" "~0.0.0"
binary-extensions "^2.2.0"
js-tiktoken "^1.0.12"
js-yaml "^4.1.0"
jsonpointer "^5.0.1"
langchainhub "~0.0.8"
langsmith "~0.1.30"
ml-distance "^4.0.0"
openapi-types "^12.1.3"
p-retry "4"
uuid "^10.0.0"
yaml "^2.2.1"
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
langchainhub@~0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.8.tgz#fd4b96dc795e22e36c1a20bad31b61b0c33d3110"
@@ -1860,6 +1927,18 @@ langsmith@~0.1.1, langsmith@~0.1.7:
p-retry "4"
uuid "^9.0.0"
langsmith@~0.1.30, langsmith@~0.1.39:
version "0.1.40"
resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.40.tgz#9708889386a5b9d0eb43dd3a9eba93513b57101d"
integrity sha512-11E2WLbh/+41+Qc0w8fJJTC/iz91BA+zXRMX/Wz0KSstnfzIPBoiWa++Kp2X8yCIDNywWWLJhy/B8gYzm7VKig==
dependencies:
"@types/uuid" "^9.0.1"
commander "^10.0.1"
p-queue "^6.6.2"
p-retry "4"
semver "^7.6.3"
uuid "^9.0.0"
leac@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/leac/-/leac-0.6.0.tgz#dcf136e382e666bd2475f44a1096061b70dc0912"
@@ -2082,13 +2161,6 @@ mimic-response@^3.1.0:
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9"
integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==
- minimatch@5.1.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7"
- integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==
- dependencies:
- brace-expansion "^2.0.1"
minimatch@^3.1.1, minimatch@^3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
@@ -2417,6 +2489,19 @@ openai@^4.32.1:
node-fetch "^2.6.7"
web-streams-polyfill "^3.2.1"
openai@^4.49.1:
version "4.54.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.54.0.tgz#eeb209c6892b997e524181b6ddb7e27bf4d09389"
integrity sha512-e/12BdtTtj+tXs7iHm+Dm7H7WjEWnw7O52B2wSfCQ6lD5F6cvjzo7cANXy5TJ1Q3/qc8YRPT5wBTTFtP5sBp1g==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
openapi-types@^12.1.3:
version "12.1.3"
resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3"
@@ -2863,6 +2948,11 @@ semver@^7.3.5, semver@^7.5.4:
dependencies:
lru-cache "^6.0.0"
semver@^7.6.3:
version "7.6.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
semver@~7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
@@ -3336,6 +3426,11 @@ utils-merge@1.0.1:
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==
uuid@^10.0.0:
version "10.0.0"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-10.0.0.tgz#5a95aa454e6e002725c79055fd42aaba30ca6294"
integrity sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==
uuid@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"

View File

@@ -0,0 +1,100 @@
import { useState, useEffect } from "react";
import System from "@/models/system";
export default function DeepSeekOptions({ settings }) {
const [inputValue, setInputValue] = useState(settings?.DeepSeekApiKey);
const [deepSeekApiKey, setDeepSeekApiKey] = useState(
settings?.DeepSeekApiKey
);
return (
<div className="flex gap-[36px] mt-1.5">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
API Key
</label>
<input
type="password"
name="DeepSeekApiKey"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="DeepSeek API Key"
defaultValue={settings?.DeepSeekApiKey ? "*".repeat(20) : ""}
required={true}
autoComplete="off"
spellCheck={false}
onChange={(e) => setInputValue(e.target.value)}
onBlur={() => setDeepSeekApiKey(inputValue)}
/>
</div>
{!settings?.credentialsOnly && (
<DeepSeekModelSelection settings={settings} apiKey={deepSeekApiKey} />
)}
</div>
);
}
function DeepSeekModelSelection({ apiKey, settings }) {
const [models, setModels] = useState([]);
const [loading, setLoading] = useState(true);
useEffect(() => {
async function findCustomModels() {
if (!apiKey) {
setModels([]);
setLoading(true);
return;
}
setLoading(true);
const { models } = await System.customModels(
"deepseek",
typeof apiKey === "boolean" ? null : apiKey
);
setModels(models || []);
setLoading(false);
}
findCustomModels();
}, [apiKey]);
if (loading) {
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="DeepSeekModelPref"
disabled={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option disabled={true} selected={true}>
-- loading available models --
</option>
</select>
</div>
);
}
return (
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="DeepSeekModelPref"
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
{models.map((model) => (
<option
key={model.id}
value={model.id}
selected={settings?.DeepSeekModelPref === model.id}
>
{model.name}
</option>
))}
</select>
</div>
);
}

View File

@@ -26,6 +26,7 @@ export default function ConfluenceOptions() {
spaceKey: form.get("spaceKey"),
username: form.get("username"),
accessToken: form.get("accessToken"),
+ cloud: form.get("isCloud") === "true",
});
if (!!error) {
@@ -54,6 +55,31 @@
<form className="w-full" onSubmit={handleSubmit}>
<div className="w-full flex flex-col py-2">
<div className="w-full flex flex-col gap-4">
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center">
<p className="font-bold text-white">
Confluence deployment type
</p>
</label>
<p className="text-xs font-normal text-white/50">
Determine if your Confluence instance is hosted on Atlassian
cloud or self-hosted.
</p>
</div>
<select
name="isCloud"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
required={true}
autoComplete="off"
spellCheck={false}
defaultValue="true"
>
<option value="true">Atlassian Cloud</option>
<option value="false">Self-hosted</option>
</select>
</div>
<div className="flex flex-col pr-10"> <div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4"> <div className="flex flex-col gap-y-1 mb-4">
<label className="text-white text-sm font-bold flex gap-x-2 items-center"> <label className="text-white text-sm font-bold flex gap-x-2 items-center">
@ -103,7 +129,7 @@ export default function ConfluenceOptions() {
</p> </p>
</div> </div>
<input <input
type="email" type="text"
name="username" name="username"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5" className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="jdoe@example.com" placeholder="jdoe@example.com"

View File

@@ -34,6 +34,7 @@ export default function GitlabOptions() {
accessToken: form.get("accessToken"),
branch: form.get("branch"),
ignorePaths: ignores,
+ fetchIssues: form.get("fetchIssues"),
});
if (!!error) {
@@ -112,6 +113,30 @@
onBlur={() => setSettings({ ...settings, accessToken })}
/>
</div>
<div className="flex flex-col pr-10">
<div className="flex flex-col gap-y-1 mb-4">
<label className="text-white font-bold text-sm flex gap-x-2 items-center">
<p className="font-bold text-white">Settings</p>{" "}
</label>
<p className="text-xs font-normal text-white/50">
Select additional entities to fetch from the GitLab API.
</p>
</div>
<div className="flex items-center gap-x-2">
<label className="relative inline-flex cursor-pointer items-center">
<input
type="checkbox"
name="fetchIssues"
value={true}
className="peer sr-only"
/>
<div className="pointer-events-none peer h-6 w-11 rounded-full bg-stone-400 after:absolute after:left-[2px] after:top-[2px] after:h-5 after:w-5 after:rounded-full after:shadow-xl after:border after:border-gray-600 after:bg-white after:box-shadow-md after:transition-all after:content-[''] peer-checked:bg-lime-300 peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-blue-800"></div>
<span className="ml-3 text-sm font-medium text-white">
Fetch Issues as Documents
</span>
</label>
</div>
</div>
<GitLabBranchSelection <GitLabBranchSelection
repo={settings.repo} repo={settings.repo}
accessToken={settings.accessToken} accessToken={settings.accessToken}

Binary file not shown. (new image added; 30 KiB)

View File

@@ -64,11 +64,23 @@ const DataConnector = {
return { branches: [], error: e.message };
});
},
- collect: async function ({ repo, accessToken, branch, ignorePaths = [] }) {
+ collect: async function ({
+ repo,
+ accessToken,
+ branch,
+ ignorePaths = [],
+ fetchIssues = false,
+ }) {
return await fetch(`${API_BASE}/ext/gitlab/repo`, {
method: "POST",
headers: baseHeaders(),
- body: JSON.stringify({ repo, accessToken, branch, ignorePaths }),
+ body: JSON.stringify({
+ repo,
+ accessToken,
+ branch,
+ ignorePaths,
+ fetchIssues,
+ }),
})
.then((res) => res.json())
.then((res) => {
@@ -119,7 +131,13 @@
},
confluence: {
- collect: async function ({ baseUrl, spaceKey, username, accessToken }) {
+ collect: async function ({
+ baseUrl,
+ spaceKey,
+ username,
+ accessToken,
+ cloud,
+ }) {
return await fetch(`${API_BASE}/ext/confluence`, {
method: "POST",
headers: baseHeaders(),
@@ -128,6 +146,7 @@
spaceKey,
username,
accessToken,
+ cloud,
}),
})
.then((res) => res.json())

View File

@@ -24,6 +24,7 @@ import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
+ import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import PreLoader from "@/components/Preloader";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -46,6 +47,7 @@ import KoboldCPPOptions from "@/components/LLMSelection/KoboldCPPOptions";
import TextGenWebUIOptions from "@/components/LLMSelection/TextGenWebUIOptions";
import LiteLLMOptions from "@/components/LLMSelection/LiteLLMOptions";
import AWSBedrockLLMOptions from "@/components/LLMSelection/AwsBedrockLLMOptions";
+ import DeepSeekOptions from "@/components/LLMSelection/DeepSeekOptions";
import LLMItem from "@/components/LLMSelection/LLMItem";
import { CaretUpDown, MagnifyingGlass, X } from "@phosphor-icons/react";
@@ -209,6 +211,14 @@
description: "Run LiteLLM's OpenAI compatible proxy for various LLMs.",
requiredConfig: ["LiteLLMBasePath"],
},
{
name: "DeepSeek",
value: "deepseek",
logo: DeepSeekLogo,
options: (settings) => <DeepSeekOptions settings={settings} />,
description: "Run DeepSeek's powerful LLMs.",
requiredConfig: ["DeepSeekApiKey"],
},
{
name: "Generic OpenAI",
value: "generic-openai",

View File

@@ -20,6 +20,7 @@ import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png";
import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
+ import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import ZillizLogo from "@/media/vectordbs/zilliz.png";
@@ -196,6 +197,11 @@ export const LLM_SELECTION_PRIVACY = {
],
logo: AWSBedrockLogo,
},
+ deepseek: {
+ name: "DeepSeek",
+ description: ["Your model and chat contents are visible to DeepSeek"],
+ logo: DeepSeekLogo,
+ },
};
export const VECTOR_DB_PRIVACY = {

View File

@@ -19,6 +19,7 @@ import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png";
import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png";
import LiteLLMLogo from "@/media/llmprovider/litellm.png";
import AWSBedrockLogo from "@/media/llmprovider/bedrock.png";
+ import DeepSeekLogo from "@/media/llmprovider/deepseek.png";
import CohereLogo from "@/media/llmprovider/cohere.png";
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
@@ -41,6 +42,7 @@ import KoboldCPPOptions from "@/components/LLMSelection/KoboldCPPOptions";
import TextGenWebUIOptions from "@/components/LLMSelection/TextGenWebUIOptions";
import LiteLLMOptions from "@/components/LLMSelection/LiteLLMOptions";
import AWSBedrockLLMOptions from "@/components/LLMSelection/AwsBedrockLLMOptions";
+ import DeepSeekOptions from "@/components/LLMSelection/DeepSeekOptions";
import LLMItem from "@/components/LLMSelection/LLMItem";
import System from "@/models/system";
@@ -184,6 +186,13 @@
options: (settings) => <LiteLLMOptions settings={settings} />,
description: "Run LiteLLM's OpenAI compatible proxy for various LLMs.",
},
{
name: "DeepSeek",
value: "deepseek",
logo: DeepSeekLogo,
options: (settings) => <DeepSeekOptions settings={settings} />,
description: "Run DeepSeek's powerful LLMs.",
},
{
name: "Generic OpenAI",
value: "generic-openai",

View File

@@ -23,6 +23,7 @@ const ENABLED_PROVIDERS = [
"generic-openai",
"bedrock",
"fireworksai",
+ "deepseek",
// TODO: More agent support.
// "cohere", // Has tool calling and will need to build explicit support
// "huggingface" // Can be done but already has issues with no-chat templated. Needs to be tested.

View File

@@ -511,6 +511,10 @@ const SystemSettings = {
// VoyageAi API Keys
VoyageAiApiKey: !!process.env.VOYAGEAI_API_KEY,
+ // DeepSeek API Keys
+ DeepSeekApiKey: !!process.env.DEEPSEEK_API_KEY,
+ DeepSeekModelPref: process.env.DEEPSEEK_MODEL_PREF,
};
},

View File

@@ -0,0 +1,127 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const {
handleDefaultStreamResponseV2,
} = require("../../helpers/chat/responses");
const { MODEL_MAP } = require("../modelMap");
class DeepSeekLLM {
constructor(embedder = null, modelPreference = null) {
if (!process.env.DEEPSEEK_API_KEY)
throw new Error("No DeepSeek API key was set.");
const { OpenAI: OpenAIApi } = require("openai");
this.openai = new OpenAIApi({
apiKey: process.env.DEEPSEEK_API_KEY,
baseURL: "https://api.deepseek.com/v1",
});
this.model =
modelPreference || process.env.DEEPSEEK_MODEL_PREF || "deepseek-chat";
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
user: this.promptWindowLimit() * 0.7,
};
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
}
#appendContext(contextTexts = []) {
if (!contextTexts || !contextTexts.length) return "";
return (
"\nContext:\n" +
contextTexts
.map((text, i) => {
return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
})
.join("")
);
}
streamingEnabled() {
return "streamGetChatCompletion" in this;
}
static promptWindowLimit(modelName) {
return MODEL_MAP.deepseek[modelName] ?? 8192;
}
promptWindowLimit() {
return MODEL_MAP.deepseek[this.model] ?? 8192;
}
async isValidChatCompletionModel(modelName = "") {
const models = await this.openai.models.list().catch(() => ({ data: [] }));
return models.data.some((model) => model.id === modelName);
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
}
async getChatCompletion(messages = null, { temperature = 0.7 }) {
if (!(await this.isValidChatCompletionModel(this.model)))
throw new Error(
`DeepSeek chat: ${this.model} is not valid for chat completion!`
);
const result = await this.openai.chat.completions
.create({
model: this.model,
messages,
temperature,
})
.catch((e) => {
throw new Error(e.message);
});
if (!result.hasOwnProperty("choices") || result.choices.length === 0)
return null;
return result.choices[0].message.content;
}
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
if (!(await this.isValidChatCompletionModel(this.model)))
throw new Error(
`DeepSeek chat: ${this.model} is not valid for chat completion!`
);
const streamRequest = await this.openai.chat.completions.create({
model: this.model,
stream: true,
messages,
temperature,
});
return streamRequest;
}
handleStream(response, stream, responseProps) {
return handleDefaultStreamResponseV2(response, stream, responseProps);
}
async embedTextInput(textInput) {
return await this.embedder.embedTextInput(textInput);
}
async embedChunks(textChunks = []) {
return await this.embedder.embedChunks(textChunks);
}
async compressMessages(promptArgs = {}, rawHistory = []) {
const { messageArrayCompressor } = require("../../helpers/chat");
const messageArray = this.constructPrompt(promptArgs);
return await messageArrayCompressor(this, messageArray, rawHistory);
}
}
module.exports = {
DeepSeekLLM,
};
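A minimal usage sketch of the new class, assuming DEEPSEEK_API_KEY is already set in the server environment (the getLLMProvider wiring for this appears later in the commit):

```js
// Sketch only; argument shapes mirror the class definition above.
const { DeepSeekLLM } = require("../AiProviders/deepseek");

const llm = new DeepSeekLLM(/* embedder */ null, "deepseek-chat");
const messages = llm.constructPrompt({
  systemPrompt: "You are a helpful assistant.",
  contextTexts: [],
  chatHistory: [],
  userPrompt: "Hello!",
});
// getChatCompletion validates the model id against the /models endpoint before sending.
const reply = await llm.getChatCompletion(messages, { temperature: llm.defaultTemp });
```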

View File

@@ -53,6 +53,10 @@ const MODEL_MAP = {
"gpt-4": 8_192,
"gpt-4-32k": 32_000,
},
+ deepseek: {
+ "deepseek-chat": 128_000,
+ "deepseek-coder": 128_000,
+ },
};
module.exports = { MODEL_MAP };
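With that entry in place, the context-window lookups in the DeepSeek provider resolve as follows (the 8192 fallback comes from the class added above):

```js
MODEL_MAP.deepseek["deepseek-chat"];  // 128000
MODEL_MAP.deepseek["deepseek-coder"]; // 128000
// DeepSeekLLM.promptWindowLimit("some-unknown-model") -> 8192 via the ?? fallback
```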

View File

@@ -783,6 +783,8 @@ ${this.getHistory({ to: route.to })
return new Providers.AWSBedrockProvider({});
case "fireworksai":
return new Providers.FireworksAIProvider({ model: config.model });
+ case "deepseek":
+ return new Providers.DeepSeekProvider({ model: config.model });
default:
throw new Error(
View File

@@ -174,6 +174,14 @@ class Provider {
apiKey: process.env.TEXT_GEN_WEB_UI_API_KEY ?? "not-used",
...config,
});
+ case "deepseek":
+ return new ChatOpenAI({
+ configuration: {
+ baseURL: "https://api.deepseek.com/v1",
+ },
+ apiKey: process.env.DEEPSEEK_API_KEY ?? null,
+ ...config,
+ });
default:
throw new Error(`Unsupported provider ${provider} for this task.`);
}

View File

@@ -0,0 +1,118 @@
const OpenAI = require("openai");
const Provider = require("./ai-provider.js");
const InheritMultiple = require("./helpers/classes.js");
const UnTooled = require("./helpers/untooled.js");
const { toValidNumber } = require("../../../http/index.js");
class DeepSeekProvider extends InheritMultiple([Provider, UnTooled]) {
model;
constructor(config = {}) {
super();
const { model = "deepseek-chat" } = config;
const client = new OpenAI({
baseURL: "https://api.deepseek.com/v1",
apiKey: process.env.DEEPSEEK_API_KEY ?? null,
maxRetries: 3,
});
this._client = client;
this.model = model;
this.verbose = true;
this.maxTokens = process.env.DEEPSEEK_MAX_TOKENS
? toValidNumber(process.env.DEEPSEEK_MAX_TOKENS, 1024)
: 1024;
}
get client() {
return this._client;
}
async #handleFunctionCallChat({ messages = [] }) {
return await this.client.chat.completions
.create({
model: this.model,
temperature: 0,
messages,
max_tokens: this.maxTokens,
})
.then((result) => {
if (!result.hasOwnProperty("choices"))
throw new Error("DeepSeek chat: No results!");
if (result.choices.length === 0)
throw new Error("DeepSeek chat: No results length!");
return result.choices[0].message.content;
})
.catch((_) => {
return null;
});
}
/**
* Create a completion based on the received messages.
*
* @param messages A list of messages to send to the API.
* @param functions
* @returns The completion.
*/
async complete(messages, functions = null) {
try {
let completion;
if (functions.length > 0) {
const { toolCall, text } = await this.functionCall(
messages,
functions,
this.#handleFunctionCallChat.bind(this)
);
if (toolCall !== null) {
this.providerLog(`Valid tool call found - running ${toolCall.name}.`);
this.deduplicator.trackRun(toolCall.name, toolCall.arguments);
return {
result: null,
functionCall: {
name: toolCall.name,
arguments: toolCall.arguments,
},
cost: 0,
};
}
completion = { content: text };
}
if (!completion?.content) {
this.providerLog(
"Will assume chat completion without tool call inputs."
);
const response = await this.client.chat.completions.create({
model: this.model,
messages: this.cleanMsgs(messages),
});
completion = response.choices[0].message;
}
// The UnTooled class inherited Deduplicator is mostly useful to prevent the agent
// from calling the exact same function over and over in a loop within a single chat exchange
// _but_ we should enable it to call previously used tools in a new chat interaction.
this.deduplicator.reset("runs");
return {
result: completion.content,
cost: 0,
};
} catch (error) {
throw error;
}
}
/**
* Get the cost of the completion.
*
* @param _usage The completion to get the cost for.
* @returns The cost of the completion.
*/
getCost(_usage) {
return 0;
}
}
module.exports = DeepSeekProvider;

View File

@@ -14,6 +14,7 @@ const PerplexityProvider = require("./perplexity.js");
const TextWebGenUiProvider = require("./textgenwebui.js");
const AWSBedrockProvider = require("./bedrock.js");
const FireworksAIProvider = require("./fireworksai.js");
+ const DeepSeekProvider = require("./deepseek.js");
module.exports = {
OpenAIProvider,
@@ -28,6 +29,7 @@
OpenRouterProvider,
MistralProvider,
GenericOpenAiProvider,
+ DeepSeekProvider,
PerplexityProvider,
TextWebGenUiProvider,
AWSBedrockProvider,

View File

@@ -162,6 +162,10 @@
"FireworksAI API Key must be provided to use agents."
);
break;
+ case "deepseek":
+ if (!process.env.DEEPSEEK_API_KEY)
+ throw new Error("DeepSeek API Key must be provided to use agents.");
+ break;
default:
throw new Error(
@@ -206,6 +210,8 @@
return null;
case "fireworksai":
return null;
+ case "deepseek":
+ return "deepseek-chat";
default:
return "unknown";
}

View File

@@ -18,6 +18,7 @@ const SUPPORT_CUSTOM_MODELS = [
"litellm",
"elevenlabs-tts",
"groq",
+ "deepseek",
];
async function getCustomModels(provider = "", apiKey = null, basePath = null) {
@@ -53,6 +54,8 @@
return await getElevenLabsModels(apiKey);
case "groq":
return await getGroqAiModels(apiKey);
+ case "deepseek":
+ return await getDeepSeekModels(apiKey);
default:
return { models: [], error: "Invalid provider for custom models" };
}
@@ -419,6 +422,31 @@ async function getElevenLabsModels(apiKey = null) {
return { models, error: null };
}
async function getDeepSeekModels(apiKey = null) {
const { OpenAI: OpenAIApi } = require("openai");
const openai = new OpenAIApi({
apiKey: apiKey || process.env.DEEPSEEK_API_KEY,
baseURL: "https://api.deepseek.com/v1",
});
const models = await openai.models
.list()
.then((results) => results.data)
.then((models) =>
models.map((model) => ({
id: model.id,
name: model.id,
organization: model.owned_by,
}))
)
.catch((e) => {
console.error(`DeepSeek:listModels`, e.message);
return [];
});
if (models.length > 0 && !!apiKey) process.env.DEEPSEEK_API_KEY = apiKey;
return { models, error: null };
}
module.exports = {
getCustomModels,
};

View File

@@ -159,6 +159,9 @@ function getLLMProvider({ provider = null, model = null } = {}) {
case "bedrock":
const { AWSBedrockLLM } = require("../AiProviders/bedrock");
return new AWSBedrockLLM(embedder, model);
+ case "deepseek":
+ const { DeepSeekLLM } = require("../AiProviders/deepseek");
+ return new DeepSeekLLM(embedder, model);
default:
throw new Error(
`ENV: No valid LLM_PROVIDER value found in environment! Using ${process.env.LLM_PROVIDER}`

View File

@@ -501,6 +501,16 @@ const KEY_MAPPING = {
envKey: "TTS_PIPER_VOICE_MODEL",
checks: [],
},
+ // DeepSeek Options
+ DeepSeekApiKey: {
+ envKey: "DEEPSEEK_API_KEY",
+ checks: [isNotEmpty],
+ },
+ DeepSeekModelPref: {
+ envKey: "DEEPSEEK_MODEL_PREF",
+ checks: [isNotEmpty],
+ },
};
function isNotEmpty(input = "") {
@@ -602,6 +612,7 @@ function supportedLLM(input = "") {
"litellm",
"generic-openai",
"bedrock",
+ "deepseek",
].includes(input);
return validSelection ? null : `${input} is not a valid LLM provider.`;
}
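Taken together with the SystemSettings and KEY_MAPPING entries above, selecting DeepSeek server-side comes down to something like the following .env values (the key itself is a placeholder):

```
LLM_PROVIDER='deepseek'
DEEPSEEK_API_KEY='your-deepseek-api-key'
DEEPSEEK_MODEL_PREF='deepseek-chat'
```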