mirror of https://github.com/Mintplex-Labs/anything-llm.git
synced 2024-11-16 03:10:31 +01:00
commit 9366e69d88: add AWS bedrock support for LLM + agents
381 lines | 11 KiB | JavaScript
const AIbitat = require("./aibitat");
|
|
const AgentPlugins = require("./aibitat/plugins");
|
|
const {
|
|
WorkspaceAgentInvocation,
|
|
} = require("../../models/workspaceAgentInvocation");
|
|
const { WorkspaceChats } = require("../../models/workspaceChats");
|
|
const { safeJsonParse } = require("../http");
|
|
const { USER_AGENT, WORKSPACE_AGENT } = require("./defaults");
|
|
|
|
/**
 * Orchestrates a single workspace agent invocation: validates the invocation,
 * resolves the LLM provider + model, verifies the required environment
 * configuration, hydrates chat history, attaches plugins, and starts the
 * AIbitat agent cluster.
 */
class AgentHandler {
  #invocationUUID;
  #funcsToLoad = [];
  // Providers that can serve many models and so have no reliable default -
  // the model name is read from the mapped env var instead. A null value
  // means the provider does not use a `model` param at all.
  #noProviderModelDefault = {
    azure: "OPEN_MODEL_PREF",
    lmstudio: "LMSTUDIO_MODEL_PREF",
    textgenwebui: null, // does not even use `model` in API req
    "generic-openai": "GENERIC_OPEN_AI_MODEL_PREF",
  };
  invocation = null; // hydrated WorkspaceAgentInvocation record (with workspace)
  aibitat = null; // AIbitat agent-cluster instance, built in createAIbitat()
  channel = null;
  provider = null; // resolved LLM provider slug (e.g. "openai", "bedrock")
  model = null; // resolved model name used in API calls

  constructor({ uuid }) {
    this.#invocationUUID = uuid;
  }

  // Namespaced console logger for this handler.
  log(text, ...args) {
    console.log(`\x1b[36m[AgentHandler]\x1b[0m ${text}`, ...args);
  }

  // Announce the end of this invocation's lifecycle.
  closeAlert() {
    this.log(`End ${this.#invocationUUID}::${this.provider}:${this.model}`);
  }

  /**
   * Load the most recent chat logs for this invocation's workspace/thread and
   * convert each into a user-agent -> workspace-agent exchange pair.
   * @param {number} limit - maximum number of chat logs to load
   * @returns {Promise<object[]>} oldest-first agent history ([] on error)
   */
  async #chatHistory(limit = 10) {
    try {
      // Query newest-first, then reverse so history reads oldest -> newest.
      const rawHistory = (
        await WorkspaceChats.where(
          {
            workspaceId: this.invocation.workspace_id,
            user_id: this.invocation.user_id || null,
            thread_id: this.invocation.thread_id || null,
            include: true,
          },
          limit,
          { id: "desc" }
        )
      ).reverse();

      const agentHistory = [];
      rawHistory.forEach((chatLog) => {
        agentHistory.push(
          {
            from: USER_AGENT.name,
            to: WORKSPACE_AGENT.name,
            content: chatLog.prompt,
            state: "success",
          },
          {
            from: WORKSPACE_AGENT.name,
            to: USER_AGENT.name,
            content: safeJsonParse(chatLog.response)?.text || "",
            state: "success",
          }
        );
      });
      return agentHistory;
    } catch (e) {
      this.log("Error loading chat history", e.message);
      return [];
    }
  }

  /**
   * Verify that the env vars required by the resolved provider are present.
   * @throws {Error} with a descriptive message when configuration is missing
   * or when no provider is set on the workspace.
   */
  #checkSetup() {
    switch (this.provider) {
      case "openai":
        if (!process.env.OPEN_AI_KEY)
          throw new Error("OpenAI API key must be provided to use agents.");
        break;
      case "anthropic":
        if (!process.env.ANTHROPIC_API_KEY)
          throw new Error("Anthropic API key must be provided to use agents.");
        break;
      case "lmstudio":
        if (!process.env.LMSTUDIO_BASE_PATH)
          throw new Error("LMStudio base path must be provided to use agents.");
        break;
      case "ollama":
        if (!process.env.OLLAMA_BASE_PATH)
          throw new Error("Ollama base path must be provided to use agents.");
        break;
      case "groq":
        if (!process.env.GROQ_API_KEY)
          throw new Error("Groq API key must be provided to use agents.");
        break;
      case "togetherai":
        if (!process.env.TOGETHER_AI_API_KEY)
          throw new Error("TogetherAI API key must be provided to use agents.");
        break;
      case "azure":
        if (!process.env.AZURE_OPENAI_ENDPOINT || !process.env.AZURE_OPENAI_KEY)
          throw new Error(
            "Azure OpenAI API endpoint and key must be provided to use agents."
          );
        break;
      case "koboldcpp":
        if (!process.env.KOBOLD_CPP_BASE_PATH)
          throw new Error(
            "KoboldCPP must have a valid base path to use for the api."
          );
        break;
      case "localai":
        if (!process.env.LOCAL_AI_BASE_PATH)
          throw new Error(
            "LocalAI must have a valid base path to use for the api."
          );
        break;
      case "gemini":
        if (!process.env.GEMINI_API_KEY)
          throw new Error("Gemini API key must be provided to use agents.");
        break;
      case "openrouter":
        if (!process.env.OPENROUTER_API_KEY)
          throw new Error("OpenRouter API key must be provided to use agents.");
        break;
      case "mistral":
        if (!process.env.MISTRAL_API_KEY)
          throw new Error("Mistral API key must be provided to use agents.");
        break;
      case "generic-openai":
        if (!process.env.GENERIC_OPEN_AI_BASE_PATH)
          throw new Error("API base path must be provided to use agents.");
        break;
      case "perplexity":
        if (!process.env.PERPLEXITY_API_KEY)
          throw new Error("Perplexity API key must be provided to use agents.");
        break;
      case "textgenwebui":
        if (!process.env.TEXT_GEN_WEB_UI_BASE_PATH)
          throw new Error(
            "TextWebGenUI API base path must be provided to use agents."
          );
        break;
      case "bedrock":
        if (
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID ||
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY ||
          !process.env.AWS_BEDROCK_LLM_REGION ||
          !process.env.AWS_BEDROCK_LLM_MODEL_PREFERENCE
        )
          throw new Error(
            "AWS Bedrock Access Keys, model and region must be provided to use agents."
          );
        break;

      default:
        throw new Error(
          "No workspace agent provider set. Please set your agent provider in the workspace's settings"
        );
    }
  }

  /**
   * Reasonable fallback model name per provider.
   * @returns {string|null} default model, null when the provider has no
   * usable default, or "unknown" for an unrecognized provider.
   */
  #providerDefault() {
    switch (this.provider) {
      case "openai":
        return "gpt-4o";
      case "anthropic":
        return "claude-3-sonnet-20240229";
      case "lmstudio":
        return "server-default";
      case "ollama":
        return "llama3:latest";
      case "groq":
        return "llama3-70b-8192";
      case "togetherai":
        return "mistralai/Mixtral-8x7B-Instruct-v0.1";
      case "azure":
        return "gpt-3.5-turbo";
      case "koboldcpp":
        return null;
      case "gemini":
        return "gemini-pro";
      case "localai":
        return null;
      case "openrouter":
        return "openrouter/auto";
      case "mistral":
        return "mistral-medium";
      case "generic-openai":
        return null;
      case "perplexity":
        return "sonar-small-online";
      case "textgenwebui":
        return null;
      case "bedrock":
        return null;
      default:
        return "unknown";
    }
  }

  /**
   * Finds or assumes the model preference value to use for API calls.
   * If multi-model loading is supported, we use their agent model selection of the workspace
   * If not supported, we attempt to fallback to the system provider value for the LLM preference
   * and if that fails - we assume a reasonable base model to exist.
   * @returns {string} the model preference value to use in API calls
   */
  #fetchModel() {
    if (!Object.keys(this.#noProviderModelDefault).includes(this.provider))
      return this.invocation.workspace.agentModel || this.#providerDefault();

    // Provider has no reliable default (cant load many models) - so we need to look at system
    // for the model param.
    const sysModelKey = this.#noProviderModelDefault[this.provider];
    if (!!sysModelKey)
      return process.env[sysModelKey] ?? this.#providerDefault();

    // If all else fails - look at the provider default list
    return this.#providerDefault();
  }

  // Resolve provider + model from the invocation's workspace and validate
  // that the environment is configured for that provider.
  #providerSetupAndCheck() {
    this.provider = this.invocation.workspace.agentProvider;
    this.model = this.#fetchModel();
    this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
    this.#checkSetup();
  }

  /**
   * Hydrate this.invocation from the database by UUID.
   * @throws {Error} when the invocation exists but is already closed.
   */
  async #validInvocation() {
    const invocation = await WorkspaceAgentInvocation.getWithWorkspace({
      uuid: String(this.#invocationUUID),
    });
    if (invocation?.closed)
      throw new Error("This agent invocation is already closed");
    this.invocation = invocation ?? null;
  }

  /**
   * Build the startup options for a plugin from its startupConfig params.
   * Missing required params are logged (the plugin may misbehave) and
   * skipped; optional params fall back to their declared default or null.
   * @param {object} args - raw args passed to #attachPlugins
   * @param {object} config - the plugin's startupConfig.params definition
   * @param {string} pluginName - plugin name used in warning logs
   * @returns {object} resolved call options for the plugin factory
   */
  #parseCallOptions(args, config = {}, pluginName) {
    const callOpts = {};
    for (const [param, definition] of Object.entries(config)) {
      if (
        definition.required &&
        (!args.hasOwnProperty(param) || args[param] === null)
      ) {
        this.log(
          `'${param}' required parameter for '${pluginName}' plugin is missing. Plugin may not function or crash agent.`
        );
        continue;
      }
      callOpts[param] = args.hasOwnProperty(param)
        ? args[param]
        : definition.default || null;
    }
    return callOpts;
  }

  /**
   * Attach every function plugin collected in #funcsToLoad to the cluster.
   * Names containing "#" address a child plugin ("parent#child"); invalid
   * names are logged and skipped rather than crashing the agent.
   * @param {object} args - raw startup args forwarded to #parseCallOptions
   */
  #attachPlugins(args) {
    for (const name of this.#funcsToLoad) {
      // Load child plugin
      if (name.includes("#")) {
        const [parent, childPluginName] = name.split("#");
        if (!AgentPlugins.hasOwnProperty(parent)) {
          this.log(
            `${parent} is not a valid plugin. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const childPlugin = AgentPlugins[parent].plugin.find(
          (child) => child.name === childPluginName
        );
        if (!childPlugin) {
          this.log(
            `${parent} does not have child plugin named ${childPluginName}. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const callOpts = this.#parseCallOptions(
          args,
          childPlugin?.startupConfig?.params,
          name
        );
        this.aibitat.use(childPlugin.plugin(callOpts));
        this.log(
          `Attached ${parent}:${childPluginName} plugin to Agent cluster`
        );
        continue;
      }

      // Load single-stage plugin.
      if (!AgentPlugins.hasOwnProperty(name)) {
        this.log(
          `${name} is not a valid plugin. Skipping inclusion to agent cluster.`
        );
        continue;
      }

      // Fix: pass `name` so missing-required-param warnings report the real
      // plugin name instead of 'undefined' (matches the child-plugin path).
      const callOpts = this.#parseCallOptions(
        args,
        AgentPlugins[name].startupConfig.params,
        name
      );
      const AIbitatPlugin = AgentPlugins[name];
      this.aibitat.use(AIbitatPlugin.plugin(callOpts));
      this.log(`Attached ${name} plugin to Agent cluster`);
    }
  }

  // Register the default user + workspace agents on the cluster and collect
  // the union of their function names for later plugin attachment.
  async #loadAgents() {
    // Default User agent and workspace agent
    this.log(`Attaching user and default agent to Agent cluster.`);
    this.aibitat.agent(USER_AGENT.name, await USER_AGENT.getDefinition());
    this.aibitat.agent(
      WORKSPACE_AGENT.name,
      await WORKSPACE_AGENT.getDefinition(this.provider)
    );

    this.#funcsToLoad = [
      ...((await USER_AGENT.getDefinition())?.functions || []),
      ...((await WORKSPACE_AGENT.getDefinition())?.functions || []),
    ];
  }

  /**
   * Validate the invocation and resolve provider/model configuration.
   * @returns {Promise<AgentHandler>} this, for chaining
   */
  async init() {
    await this.#validInvocation();
    this.#providerSetupAndCheck();
    return this;
  }

  /**
   * Build the AIbitat cluster: seed it with chat history, attach the
   * websocket + chat-history plugins, load agents, and attach their plugins.
   * NOTE(review): the `{ socket }` default references an undeclared `socket`
   * and would throw if createAIbitat were ever called with no args - callers
   * appear to always pass args; kept as-is to preserve behavior.
   * @param {object} args - must include `socket` for frontend communication
   */
  async createAIbitat(
    args = {
      socket,
    }
  ) {
    this.aibitat = new AIbitat({
      provider: this.provider ?? "openai",
      model: this.model ?? "gpt-4o",
      chats: await this.#chatHistory(20),
      handlerProps: {
        invocation: this.invocation,
        log: this.log,
      },
    });

    // Attach standard websocket plugin for frontend communication.
    this.log(`Attached ${AgentPlugins.websocket.name} plugin to Agent cluster`);
    this.aibitat.use(
      AgentPlugins.websocket.plugin({
        socket: args.socket,
        muteUserReply: true,
        introspection: true,
      })
    );

    // Attach standard chat-history plugin for message storage.
    this.log(
      `Attached ${AgentPlugins.chatHistory.name} plugin to Agent cluster`
    );
    this.aibitat.use(AgentPlugins.chatHistory.plugin());

    // Load required agents (Default + custom)
    await this.#loadAgents();

    // Attach all required plugins for functions to operate.
    this.#attachPlugins(args);
  }

  /**
   * Kick off the agent conversation with the invocation's prompt.
   * @returns {Promise<unknown>} whatever aibitat.start resolves to
   */
  startAgentCluster() {
    return this.aibitat.start({
      from: USER_AGENT.name,
      to: this.channel ?? WORKSPACE_AGENT.name,
      content: this.invocation.prompt,
    });
  }
}
|
|
|
|
// Named export so consumers can destructure: const { AgentHandler } = require(...)
module.exports.AgentHandler = AgentHandler;
|