const { v4: uuidv4 } = require("uuid");
const { WorkspaceChats } = require("../../models/workspaceChats");
const { resetMemory } = require("./commands/reset");
const { getVectorDbClass, getLLMProvider } = require("../helpers");
const { convertToPromptHistory } = require("../helpers/chat/responses");
const { DocumentManager } = require("../DocumentManager");

const VALID_COMMANDS = {
  "/reset": resetMemory,
};
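
/**
 * Matches an incoming message against the known slash commands and returns
 * the matching command key (e.g. "/reset"), or null when none matched.
 */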
function grepCommand(message) {
  const availableCommands = Object.keys(VALID_COMMANDS);
  for (let i = 0; i < availableCommands.length; i++) {
    const cmd = availableCommands[i];
    const re = new RegExp(`^(${cmd})`, "i");
    if (re.test(message)) {
      return cmd;
    }
  }
  return null;
}
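
/**
 * Runs the full chat pipeline for a workspace: slash-command handling,
 * moderation, context retrieval (pinned documents plus vector search),
 * prompt assembly, completion, and persistence of the exchange.
 *
 * A minimal usage sketch; it assumes a `workspace` record and optional
 * `user` were loaded elsewhere:
 *
 * @example
 *   const result = await chatWithWorkspace(workspace, "What do my docs say?", "query", user);
 *   if (result.type === "abort") console.error(result.error);
 *   else console.log(result.textResponse, result.sources);
 */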
async function chatWithWorkspace(
  workspace,
  message,
  chatMode = "chat",
  user = null,
  thread = null
) {
  const uuid = uuidv4();
  const command = grepCommand(message);
  if (!!command && Object.keys(VALID_COMMANDS).includes(command)) {
    return await VALID_COMMANDS[command](workspace, message, uuid, user);
  }
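
  // Resolve the LLM provider (honoring any per-workspace chat model
  // override) and the configured vector database used for retrieval.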
  const LLMConnector = getLLMProvider(workspace?.chatModel);
  const VectorDb = getVectorDbClass();

  const { safe, reasons = [] } = await LLMConnector.isSafe(message);
  if (!safe) {
    return {
      id: uuid,
      type: "abort",
      textResponse: null,
      sources: [],
      close: true,
      error: `This message was moderated and will not be allowed. Violations for ${reasons.join(", ")} found.`,
    };
  }
  const messageLimit = workspace?.openAiHistory || 20;
  const hasVectorizedSpace = await VectorDb.hasNamespace(workspace.slug);
  const embeddingsCount = await VectorDb.namespaceCount(workspace.slug);

  // The user is trying to query-mode chat a workspace that has no data in it,
  // so exit early since no information can be found under these conditions.
  if ((!hasVectorizedSpace || embeddingsCount === 0) && chatMode === "query") {
    return {
      id: uuid,
      type: "textResponse",
      sources: [],
      close: true,
      error: null,
      textResponse:
        "There is no relevant information in this workspace to answer your query.",
    };
  }
  // If we are here, we know that we are in a workspace that is:
  // 1. Chatting in "chat" mode and may or may _not_ have embeddings
  // 2. Chatting in "query" mode and has at least 1 embedding
  let contextTexts = [];
  let sources = [];
  const { rawHistory, chatHistory } = await recentChatHistory({
    user,
    workspace,
    thread,
    messageLimit,
    chatMode,
  });
  // Gather any documents the user has pinned to this workspace. We still run
  // a vector search afterwards: pinning is a supplemental tool, and it should
  // be used with caution since it can easily blow up a context window.
  await new DocumentManager({
    workspace,
    maxTokens: LLMConnector.limits.system,
  })
    .pinnedDocs()
    .then((pinnedDocs) => {
      pinnedDocs.forEach((doc) => {
        const { pageContent, ...metadata } = doc;
        contextTexts.push(pageContent);
        sources.push({
          text:
            pageContent.slice(0, 1_000) +
            "...continued on in source document...",
          ...metadata,
        });
      });
    });
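
  // Only run a similarity search when the namespace actually has embeddings;
  // otherwise fall back to an empty result shape so the merge below is safe.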
  const vectorSearchResults =
    embeddingsCount !== 0
      ? await VectorDb.performSimilaritySearch({
          namespace: workspace.slug,
          input: message,
          LLMConnector,
          similarityThreshold: workspace?.similarityThreshold,
          topN: workspace?.topN,
        })
      : {
          contextTexts: [],
          sources: [],
          message: null,
        };
  // Abort if the similarity search ran and surfaced an error message.
  if (!!vectorSearchResults.message) {
    return {
      id: uuid,
      type: "abort",
      textResponse: null,
      sources: [],
      close: true,
      error: vectorSearchResults.message,
    };
  }
  contextTexts = [...contextTexts, ...vectorSearchResults.contextTexts];
  sources = [...sources, ...vectorSearchResults.sources];

  // If in query mode and no sources were found, exit early rather than let
  // the LLM hallucinate a response or answer from general knowledge.
  if (chatMode === "query" && sources.length === 0) {
    return {
      id: uuid,
      type: "textResponse",
      sources: [],
      close: true,
      error: null,
      textResponse:
        "There is no relevant information in this workspace to answer your query.",
    };
  }
  // Compress and assemble the prompt so it fits within the token limit with
  // room for the response, and build system messages from the inputs and history.
  const messages = await LLMConnector.compressMessages(
    {
      systemPrompt: chatPrompt(workspace),
      userPrompt: message,
      contextTexts,
      chatHistory,
    },
    rawHistory
  );

  // Send the text completion.
  const textResponse = await LLMConnector.getChatCompletion(messages, {
    temperature: workspace?.openAiTemp ?? LLMConnector.defaultTemp,
  });
  if (!textResponse) {
    return {
      id: uuid,
      type: "abort",
      textResponse: null,
      sources: [],
      close: true,
      error: "No text completion could be generated from this input.",
    };
  }
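
  // Persist the exchange so it appears in workspace history and the client
  // can reference it by chatId.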
  const { chat } = await WorkspaceChats.new({
    workspaceId: workspace.id,
    prompt: message,
    response: { text: textResponse, sources, type: chatMode },
    threadId: thread?.id || null,
    user,
  });
  return {
    id: uuid,
    type: "textResponse",
    close: true,
    error: null,
    chatId: chat.id,
    textResponse,
    sources,
  };
}
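
/**
 * Fetches the most recent chats for a workspace, optionally scoped to a user
 * and thread, returning both the raw records and a prompt-ready history.
 * Query mode intentionally returns no history, since each query should stand
 * alone against its retrieved context.
 *
 * A brief usage sketch, assuming `user`, `workspace`, and `thread` records
 * were loaded elsewhere:
 *
 * @example
 *   const { chatHistory } = await recentChatHistory({ user, workspace, thread, messageLimit: 20 });
 */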
async function recentChatHistory({
  user = null,
  workspace,
  thread = null,
  messageLimit = 20,
  chatMode = null,
}) {
  if (chatMode === "query") return { rawHistory: [], chatHistory: [] };
  const rawHistory = (
    await WorkspaceChats.where(
      {
        workspaceId: workspace.id,
        user_id: user?.id || null,
        thread_id: thread?.id || null,
        include: true,
      },
      messageLimit,
      { id: "desc" }
    )
  ).reverse();
  return { rawHistory, chatHistory: convertToPromptHistory(rawHistory) };
}
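
/**
 * Returns the system prompt for a workspace, falling back to a general
 * default when no custom `openAiPrompt` is set.
 */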
function chatPrompt(workspace) {
  return (
    workspace?.openAiPrompt ??
    "Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the user's instructions as needed."
  );
}
module.exports = {
  recentChatHistory,
  chatWithWorkspace,
  chatPrompt,
  grepCommand,
  VALID_COMMANDS,
};