/**
 * A service that provides an AI client to create a completion.
 */
const { ChatOpenAI } = require("@langchain/openai");
const { ChatAnthropic } = require("@langchain/anthropic");
// Fallback system prompt used by Provider.systemPrompt() when no
// provider-specific prompt applies.
const DEFAULT_WORKSPACE_PROMPT =
  "You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions.";
/**
 * Abstract base for agent LLM providers. Subclasses wrap a concrete
 * chat client and expose it through the `client` getter.
 */
class Provider {
  _client;

  /**
   * @param {object} client - Concrete chat client instance supplied by a subclass.
   */
  constructor(client) {
    // Abstract-class guard: direct `new Provider()` gets no client assigned.
    // Fixed `==` -> `===`; silent-return behavior preserved for callers.
    if (this.constructor === Provider) {
      return;
    }
    this._client = client;
  }

  /**
   * Logs a cyan-tagged `[AgentLLM - <model>]` message to the console.
   * NOTE(review): `this.model` is assumed to be set by subclasses — confirm.
   * @param {string} text - Message to log.
   * @param {...any} args - Extra values forwarded to console.log.
   */
  providerLog(text, ...args) {
    console.log(
      `\x1b[36m[AgentLLM${this?.model ? ` - ${this.model}` : ""}]\x1b[0m ${text}`,
      ...args
    );
  }

  /** @returns {object} The wrapped chat client. */
  get client() {
    return this._client;
  }

  /**
   * Builds a LangChain chat-model instance for the given provider.
   * Unknown providers fall back to OpenAI (explicit case fallthrough
   * replaces the previously duplicated "openai"/default branches).
   * @param {string} [provider="openai"] - "openai" | "anthropic" | anything else.
   * @param {object} [config={}] - Extra constructor options; may override apiKey.
   * @returns {ChatOpenAI|ChatAnthropic}
   */
  static LangChainChatModel(provider = "openai", config = {}) {
    switch (provider) {
      case "anthropic":
        return new ChatAnthropic({
          apiKey: process.env.ANTHROPIC_API_KEY,
          ...config,
        });
      case "openai":
      default:
        return new ChatOpenAI({
          apiKey: process.env.OPEN_AI_KEY,
          ...config,
        });
    }
  }

  /**
   * Context window (in tokens) assumed for a provider.
   * Unknown providers share the OpenAI limit via case fallthrough.
   * @param {string} [provider="openai"]
   * @returns {number}
   */
  static contextLimit(provider = "openai") {
    switch (provider) {
      case "anthropic":
        return 100_000;
      case "openai":
      default:
        return 8_000;
    }
  }

  /**
   * System prompt tailored to the provider; defaults to the shared
   * workspace prompt.
   * @param {string|null} [provider=null]
   * @returns {string}
   */
  static systemPrompt(provider = null) {
    switch (provider) {
      case "lmstudio":
        return "You are a helpful ai assistant who can assist the user and use tools available to help answer the users prompts and questions. Tools will be handled by another assistant and you will simply receive their responses to help answer the user prompt - always try to answer the user's prompt the best you can with the context available to you and your general knowledge.";
      default:
        return DEFAULT_WORKSPACE_PROMPT;
    }
  }
}
module . exports = Provider ;