patch workspace chat history window to keep the most recent chats, not the first n

timothycarambat 2023-11-01 14:12:27 -07:00
parent 67c85f1550
commit 24823cb5e2
2 changed files with 34 additions and 15 deletions
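
In plain terms: both history loaders previously returned the first `limit` rows in ascending id order, so a long-running workspace kept replaying its oldest chats into the prompt. After this patch the chat route asks for the newest rows instead and reverses them back into chronological order before building the history. A minimal sketch of the pattern, assuming an async handler with `prisma` and a `workspace` object in scope; the `where` filter is illustrative only, not the model's exact query:

// Sketch: take the most recent `limit` chats, newest first, then reverse
// so the prompt history still reads oldest -> newest.
const limit = workspace?.openAiHistory || 20;
const newestFirst = await prisma.workspace_chats.findMany({
  where: { workspaceId: workspace.id }, // assumed filter, for illustration
  take: limit,
  orderBy: { id: "desc" },
});
const rawHistory = newestFirst.reverse();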


@@ -21,7 +21,8 @@ const WorkspaceChats = {
   forWorkspaceByUser: async function (
     workspaceId = null,
     userId = null,
-    limit = null
+    limit = null,
+    orderBy = null
   ) {
     if (!workspaceId || !userId) return [];
     try {
@@ -32,9 +33,7 @@ const WorkspaceChats = {
         include: true,
       },
       ...(limit !== null ? { take: limit } : {}),
-      orderBy: {
-        id: "asc",
-      },
+      ...(orderBy !== null ? { orderBy } : { orderBy: { id: "asc" } }),
     });
     return chats;
   } catch (error) {
@@ -43,7 +42,11 @@ const WorkspaceChats = {
     }
   },
 
-  forWorkspace: async function (workspaceId = null, limit = null) {
+  forWorkspace: async function (
+    workspaceId = null,
+    limit = null,
+    orderBy = null
+  ) {
     if (!workspaceId) return [];
     try {
       const chats = await prisma.workspace_chats.findMany({
@@ -52,9 +55,7 @@ const WorkspaceChats = {
         include: true,
       },
       ...(limit !== null ? { take: limit } : {}),
-      orderBy: {
-        id: "asc",
-      },
+      ...(orderBy !== null ? { orderBy } : { orderBy: { id: "asc" } }),
     });
     return chats;
   } catch (error) {
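
With the extra `orderBy` parameter, callers can override the default `{ id: "asc" }` ordering without touching existing call sites. A hedged usage sketch against the updated model (the `workspace` object and the limit of 20 are placeholders):

// Existing callers, unchanged: first 20 chats, oldest first (default asc order).
const oldestWindow = await WorkspaceChats.forWorkspace(workspace.id, 20);

// New chat-route usage: the 20 most recent chats, fetched newest-first and
// reversed back into chronological order before prompting.
const recentWindow = (
  await WorkspaceChats.forWorkspace(workspace.id, 20, { id: "desc" })
).reverse();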


@@ -87,10 +87,22 @@ async function chatWithWorkspace(
     };
   }
 
+  const messageLimit = workspace?.openAiHistory || 20;
   const hasVectorizedSpace = await VectorDb.hasNamespace(workspace.slug);
   const embeddingsCount = await VectorDb.namespaceCount(workspace.slug);
   if (!hasVectorizedSpace || embeddingsCount === 0) {
-    const rawHistory = await WorkspaceChats.forWorkspace(workspace.id);
+    const rawHistory = (
+      user
+        ? await WorkspaceChats.forWorkspaceByUser(
+            workspace.id,
+            user.id,
+            messageLimit,
+            { id: "desc" }
+          )
+        : await WorkspaceChats.forWorkspace(workspace.id, messageLimit, {
+            id: "desc",
+          })
+    ).reverse();
     const chatHistory = convertToPromptHistory(rawHistory);
     const response = await LLMConnector.sendChat(
       chatHistory,
@@ -114,12 +126,18 @@ async function chatWithWorkspace(
       error: null,
     };
   } else {
-    var messageLimit = workspace?.openAiHistory;
-
-    const rawHistory = await WorkspaceChats.forWorkspace(
-      workspace.id,
-      messageLimit
-    );
+    const rawHistory = (
+      user
+        ? await WorkspaceChats.forWorkspaceByUser(
+            workspace.id,
+            user.id,
+            messageLimit,
+            { id: "desc" }
+          )
+        : await WorkspaceChats.forWorkspace(workspace.id, messageLimit, {
+            id: "desc",
+          })
+    ).reverse();
     const chatHistory = convertToPromptHistory(rawHistory);
     const {
       response,