[FEAT] Persist query mode refusal responses as chat history (#1727)

* log query refusals to workspace chats but hide them in the UI

* linting

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
Sean Hatfield 2024-06-20 15:44:19 -07:00 committed by GitHub
parent 1d675d09fb
commit c2523a9593
3 changed files with 71 additions and 12 deletions


@@ -7,6 +7,7 @@ const WorkspaceChats = {
     response = {},
     user = null,
     threadId = null,
+    include = true,
   }) {
     try {
       const chat = await prisma.workspace_chats.create({
@@ -16,6 +17,7 @@ const WorkspaceChats = {
           response: JSON.stringify(response),
           user_id: user?.id || null,
           thread_id: threadId,
+          include,
         },
       });
       return { chat, message: null };
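
The new `include` column defaults to true, so normal chats are unaffected; the refusal rows written below use `include: false` so the UI can skip them while the record survives for logging and export. As a minimal sketch of the read side (assuming the same Prisma client; this helper name is hypothetical and not part of the commit):

// Hypothetical read-side helper (not part of this commit): fetch only the
// chats that should render in the UI, skipping hidden refusal rows.
async function visibleChatsFor(workspaceId, userId = null) {
  return await prisma.workspace_chats.findMany({
    where: {
      workspaceId,
      user_id: userId,
      include: true, // refusal rows are stored with include: false
    },
    orderBy: { id: "asc" },
  });
}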


@@ -77,15 +77,30 @@ async function chatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
@@ -172,15 +187,30 @@ async function chatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return {
       id: uuid,
       type: "textResponse",
       sources: [],
       close: true,
       error: null,
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
     };
   }
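
With both early exits above, the refusal is persisted via WorkspaceChats.new before the function returns, so a query that produced no context still leaves an auditable record. Illustratively (all values fabricated for this sketch), the stored row would look roughly like:

// Illustrative row (fabricated values) created by WorkspaceChats.new for a refusal.
// Note that response is a JSON string, per JSON.stringify(response) in the model change above.
{
  prompt: "What does the quarterly report conclude?",
  response: '{"text":"There is no relevant information in this workspace to answer your query.","sources":[],"type":"query"}',
  user_id: null,
  thread_id: null,
  include: false, // hidden from the chat UI, kept in workspace history
}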


@@ -75,16 +75,29 @@ async function streamChatWithWorkspace(
   // User is trying to query-mode chat a workspace that has no data in it - so
   // we should exit early as no information can be found under these conditions.
   if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
@@ -177,16 +190,30 @@ async function streamChatWithWorkspace(
   // If in query mode and no context chunks are found from search, backfill, or pins - do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
   if (chatMode === "query" && contextTexts.length === 0) {
+    const textResponse =
+      workspace?.queryRefusalResponse ??
+      "There is no relevant information in this workspace to answer your query.";
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
-      textResponse:
-        workspace?.queryRefusalResponse ??
-        "There is no relevant information in this workspace to answer your query.",
+      textResponse,
       sources: [],
       close: true,
       error: null,
     });
+    await WorkspaceChats.new({
+      workspaceId: workspace.id,
+      prompt: message,
+      response: {
+        text: textResponse,
+        sources: [],
+        type: chatMode,
+      },
+      threadId: thread?.id || null,
+      include: false,
+      user,
+    });
     return;
   }
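
The streaming path mirrors the blocking path, except the refusal text is flushed to the client with writeResponseChunk before the chat is persisted. For context, a writeResponseChunk-style helper for server-sent events could look like the sketch below (an assumption for illustration; the project's actual helper may differ):

// Assumed SSE chunk writer (illustrative only, not the project's actual code):
// serializes one payload object as a single server-sent event.
function writeResponseChunk(response, data) {
  response.write(`data: ${JSON.stringify(data)}\n\n`);
}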