diff --git a/server/utils/chats/embed.js b/server/utils/chats/embed.js
index 533ea0c3..98b096fb 100644
--- a/server/utils/chats/embed.js
+++ b/server/utils/chats/embed.js
@@ -131,7 +131,11 @@ async function streamChatWithForEmbed(
 
   // If in query mode and no sources are found, do not
   // let the LLM try to hallucinate a response or use general knowledge
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index 38ce6c9b..76f98e0d 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -140,9 +140,13 @@ async function chatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode and no sources are found from the vector search and no pinned documents, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    vectorSearchResults.sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     return {
       id: uuid,
       type: "textResponse",
diff --git a/server/utils/chats/stream.js b/server/utils/chats/stream.js
index 57f32666..ba4dea16 100644
--- a/server/utils/chats/stream.js
+++ b/server/utils/chats/stream.js
@@ -160,9 +160,13 @@ async function streamChatWithWorkspace(
   contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
   sources = [...sources, ...vectorSearchResults.sources];
 
-  // If in query mode and no sources are found, do not
+  // If in query mode and no sources are found from the vector search and no pinned documents, do not
   // let the LLM try to hallucinate a response or use general knowledge and exit early
-  if (chatMode === "query" && sources.length === 0) {
+  if (
+    chatMode === "query" &&
+    sources.length === 0 &&
+    pinnedDocIdentifiers.length === 0
+  ) {
     writeResponseChunk(response, {
       id: uuid,
       type: "textResponse",