const AIbitat = require("./aibitat");
const AgentPlugins = require("./aibitat/plugins");
const {
  WorkspaceAgentInvocation,
} = require("../../models/workspaceAgentInvocation");
const { WorkspaceChats } = require("../../models/workspaceChats");
const { safeJsonParse } = require("../http");
const { USER_AGENT, WORKSPACE_AGENT } = require("./defaults");
const ImportedPlugin = require("./imported");

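/**
 * Orchestrates a single workspace agent invocation: resolves the provider and
 * model to use, validates the required ENV configuration, loads the default
 * and custom agents with their plugins, and runs the AIbitat agent cluster.
 */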
class AgentHandler {
  #invocationUUID;
  #funcsToLoad = [];
  invocation = null;
  aibitat = null;
  channel = null;
  provider = null;
  model = null;

  constructor({ uuid }) {
    this.#invocationUUID = uuid;
  }

  log(text, ...args) {
    console.log(`\x1b[36m[AgentHandler]\x1b[0m ${text}`, ...args);
  }

  closeAlert() {
    this.log(`End ${this.#invocationUUID}::${this.provider}:${this.model}`);
  }

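  /**
   * Loads the most recent workspace chats for this invocation (scoped to the
   * workspace, user, and thread) and converts them into the from/to message
   * format expected by AIbitat.
   * @param {number} limit - Maximum number of chats to load.
   * @returns {Promise<object[]>} Chat history as from/to/content/state messages.
   */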
  async #chatHistory(limit = 10) {
    try {
      const rawHistory = (
        await WorkspaceChats.where(
          {
            workspaceId: this.invocation.workspace_id,
            user_id: this.invocation.user_id || null,
            thread_id: this.invocation.thread_id || null,
            api_session_id: null,
            include: true,
          },
          limit,
          { id: "desc" }
        )
      ).reverse();

      const agentHistory = [];
      rawHistory.forEach((chatLog) => {
        agentHistory.push(
          {
            from: USER_AGENT.name,
            to: WORKSPACE_AGENT.name,
            content: chatLog.prompt,
            state: "success",
          },
          {
            from: WORKSPACE_AGENT.name,
            to: USER_AGENT.name,
            content: safeJsonParse(chatLog.response)?.text || "",
            state: "success",
          }
        );
      });
      return agentHistory;
    } catch (e) {
      this.log("Error loading chat history", e.message);
      return [];
    }
  }

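  /**
   * Verifies that the ENV variables required by the selected provider are set
   * before any agent calls are made. Throws a descriptive error when the
   * provider is missing its API key or base path, or when no provider is set.
   */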
  checkSetup() {
    switch (this.provider) {
      case "openai":
        if (!process.env.OPEN_AI_KEY)
          throw new Error("OpenAI API key must be provided to use agents.");
        break;
      case "anthropic":
        if (!process.env.ANTHROPIC_API_KEY)
          throw new Error("Anthropic API key must be provided to use agents.");
        break;
      case "lmstudio":
        if (!process.env.LMSTUDIO_BASE_PATH)
          throw new Error("LMStudio base path must be provided to use agents.");
        break;
      case "ollama":
        if (!process.env.OLLAMA_BASE_PATH)
          throw new Error("Ollama base path must be provided to use agents.");
        break;
      case "groq":
        if (!process.env.GROQ_API_KEY)
          throw new Error("Groq API key must be provided to use agents.");
        break;
      case "togetherai":
        if (!process.env.TOGETHER_AI_API_KEY)
          throw new Error("TogetherAI API key must be provided to use agents.");
        break;
      case "azure":
        if (!process.env.AZURE_OPENAI_ENDPOINT || !process.env.AZURE_OPENAI_KEY)
          throw new Error(
            "Azure OpenAI API endpoint and key must be provided to use agents."
          );
        break;
      case "koboldcpp":
        if (!process.env.KOBOLD_CPP_BASE_PATH)
          throw new Error(
            "KoboldCPP must have a valid base path to use for the api."
          );
        break;
      case "localai":
        if (!process.env.LOCAL_AI_BASE_PATH)
          throw new Error(
            "LocalAI must have a valid base path to use for the api."
          );
        break;
      case "gemini":
        if (!process.env.GEMINI_API_KEY)
          throw new Error("Gemini API key must be provided to use agents.");
        break;
      case "openrouter":
        if (!process.env.OPENROUTER_API_KEY)
          throw new Error("OpenRouter API key must be provided to use agents.");
        break;
      case "mistral":
        if (!process.env.MISTRAL_API_KEY)
          throw new Error("Mistral API key must be provided to use agents.");
        break;
      case "generic-openai":
        if (!process.env.GENERIC_OPEN_AI_BASE_PATH)
          throw new Error("API base path must be provided to use agents.");
        break;
      case "perplexity":
        if (!process.env.PERPLEXITY_API_KEY)
          throw new Error("Perplexity API key must be provided to use agents.");
        break;
case "textgenwebui":
|
|
|
|
if (!process.env.TEXT_GEN_WEB_UI_BASE_PATH)
|
|
|
|
throw new Error(
|
|
|
|
"TextWebGenUI API base path must be provided to use agents."
|
|
|
|
);
|
|
|
|
break;
|
      case "bedrock":
        if (
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID ||
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY ||
          !process.env.AWS_BEDROCK_LLM_REGION
        )
          throw new Error(
            "AWS Bedrock Access Keys and region must be provided to use agents."
          );
        break;
      case "fireworksai":
        if (!process.env.FIREWORKS_AI_LLM_API_KEY)
          throw new Error(
            "FireworksAI API Key must be provided to use agents."
          );
        break;
      case "deepseek":
        if (!process.env.DEEPSEEK_API_KEY)
          throw new Error("DeepSeek API Key must be provided to use agents.");
        break;
      case "litellm":
        if (!process.env.LITE_LLM_BASE_PATH)
          throw new Error(
            "LiteLLM API base path and key must be provided to use agents."
          );
        break;
      case "apipie":
        if (!process.env.APIPIE_LLM_API_KEY)
          throw new Error("ApiPie API Key must be provided to use agents.");
        break;
      case "xai":
        if (!process.env.XAI_LLM_API_KEY)
          throw new Error("xAI API Key must be provided to use agents.");
        break;

      default:
        throw new Error(
          "No workspace agent provider set. Please set your agent provider in the workspace's settings"
        );
    }
  }

  /**
   * Finds the default model for a given provider. If no default model is set
   * via its associated ENV variable, a reasonable base model for the provider
   * is returned, if one exists.
   * @param {string} provider - The provider to find the default model for.
   * @returns {string|null} The default model for the provider.
   */
  providerDefault(provider = this.provider) {
    switch (provider) {
      case "openai":
        return process.env.OPEN_MODEL_PREF ?? "gpt-4o";
      case "anthropic":
        return process.env.ANTHROPIC_MODEL_PREF ?? "claude-3-sonnet-20240229";
      case "lmstudio":
        return process.env.LMSTUDIO_MODEL_PREF ?? "server-default";
      case "ollama":
        return process.env.OLLAMA_MODEL_PREF ?? "llama3:latest";
      case "groq":
        return process.env.GROQ_MODEL_PREF ?? "llama3-70b-8192";
      case "togetherai":
        return (
          process.env.TOGETHER_AI_MODEL_PREF ??
          "mistralai/Mixtral-8x7B-Instruct-v0.1"
        );
      case "azure":
        return null;
      case "koboldcpp":
        return process.env.KOBOLD_CPP_MODEL_PREF ?? null;
      case "gemini":
        return process.env.GEMINI_MODEL_PREF ?? "gemini-pro";
      case "localai":
        return process.env.LOCAL_AI_MODEL_PREF ?? null;
      case "openrouter":
        return process.env.OPENROUTER_MODEL_PREF ?? "openrouter/auto";
      case "mistral":
        return process.env.MISTRAL_MODEL_PREF ?? "mistral-medium";
      case "generic-openai":
        return process.env.GENERIC_OPEN_AI_MODEL_PREF ?? null;
      case "perplexity":
        return process.env.PERPLEXITY_MODEL_PREF ?? "sonar-small-online";
      case "textgenwebui":
        return null;
      case "bedrock":
        return process.env.AWS_BEDROCK_LLM_MODEL_PREFERENCE ?? null;
      case "fireworksai":
        return process.env.FIREWORKS_AI_LLM_MODEL_PREF ?? null;
      case "deepseek":
        return process.env.DEEPSEEK_MODEL_PREF ?? "deepseek-chat";
      case "litellm":
        return process.env.LITE_LLM_MODEL_PREF ?? null;
      case "apipie":
        return process.env.APIPIE_LLM_MODEL_PREF ?? null;
      case "xai":
        return process.env.XAI_LLM_MODEL_PREF ?? "grok-beta";
      default:
        return null;
    }
  }

  /**
   * Attempts to find a fallback provider and model to use if the workspace
   * does not have an explicit `agentProvider` and `agentModel` set.
   * 1. Fall back to the workspace `chatProvider` and `chatModel` if they exist.
   * 2. Fall back to the system `LLM_PROVIDER` and try to load the associated default model via ENV params or a base available model.
   * 3. Otherwise, return null - will likely throw an error the user can act on.
   * @returns {object|null} - An object with provider and model keys.
   */
  #getFallbackProvider() {
    // First, fall back to the workspace chat provider and model if they exist
    if (
      this.invocation.workspace.chatProvider &&
      this.invocation.workspace.chatModel
    ) {
      return {
        provider: this.invocation.workspace.chatProvider,
        model: this.invocation.workspace.chatModel,
      };
    }

    // If the workspace does not have a chat provider and model, fall back
    // to the system provider and try to load the provider's default model
    const systemProvider = process.env.LLM_PROVIDER;
    const systemModel = this.providerDefault(systemProvider);
    if (systemProvider && systemModel) {
      return {
        provider: systemProvider,
        model: systemModel,
      };
    }

    return null;
  }

  /**
   * Finds or assumes the model preference value to use for API calls.
   * If multi-model loading is supported, we use the workspace's agent model selection.
   * If not supported, we attempt to fall back to the system provider value for the LLM preference
   * and if that fails - we assume a reasonable base model to exist.
   * @returns {string|null} the model preference value to use in API calls
   */
  #fetchModel() {
    // Provider was not explicitly set for the workspace, so run the fallback logic
    // that will set a provider and model for us to use.
    if (!this.provider) {
      const fallback = this.#getFallbackProvider();
      if (!fallback) throw new Error("No valid provider found for the agent.");
      this.provider = fallback.provider; // re-set the provider to the fallback provider so it is not null.
      return fallback.model; // set its defined model based on fallback logic.
    }

    // The provider was explicitly set, so check if the workspace has an agent model set.
    if (this.invocation.workspace.agentModel)
      return this.invocation.workspace.agentModel;

    // Otherwise, we have no model to use - so guess a default model via the provider
    // and its system ENV params; if that fails, we return either a base model or null.
    return this.providerDefault();
  }

  #providerSetupAndCheck() {
    this.provider = this.invocation.workspace.agentProvider ?? null; // set provider to workspace agent provider if it exists
    this.model = this.#fetchModel();

    if (!this.provider)
      throw new Error("No valid provider found for the agent.");
    this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
    this.checkSetup();
  }

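  /**
   * Fetches the invocation record (with its workspace) for this handler's UUID
   * and stores it on the instance. Throws if the invocation is already closed.
   */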
  async #validInvocation() {
    const invocation = await WorkspaceAgentInvocation.getWithWorkspace({
      uuid: String(this.#invocationUUID),
    });
    if (invocation?.closed)
      throw new Error("This agent invocation is already closed");
    this.invocation = invocation ?? null;
  }

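  /**
   * Builds the startup options for a plugin from the invocation args and the
   * plugin's startup config definition. Missing required parameters are logged
   * and skipped; optional parameters fall back to their defined defaults.
   * @param {object} args - Arguments passed with the agent invocation.
   * @param {object} config - The plugin's startupConfig.params definition.
   * @param {string} pluginName - Plugin name used in log messages.
   * @returns {object} The resolved call options for the plugin.
   */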
  parseCallOptions(args, config = {}, pluginName) {
    const callOpts = {};
    for (const [param, definition] of Object.entries(config)) {
      if (
        definition.required &&
        (!args.hasOwnProperty(param) || args[param] === null)
      ) {
        this.log(
          `'${param}' required parameter for '${pluginName}' plugin is missing. Plugin may not function or crash agent.`
        );
        continue;
      }
      callOpts[param] = args.hasOwnProperty(param)
        ? args[param]
        : definition.default || null;
    }
    return callOpts;
  }

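  /**
   * Attaches every function listed in #funcsToLoad to the AIbitat instance.
   * Supports child plugins ("parent#child"), imported community plugins
   * ("@@hubId"), and standard single-stage plugins; invalid entries are
   * logged and skipped.
   * @param {object} args - Arguments forwarded to each plugin's call options.
   */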
  #attachPlugins(args) {
    for (const name of this.#funcsToLoad) {
      // Load child plugin
      if (name.includes("#")) {
        const [parent, childPluginName] = name.split("#");
        if (!AgentPlugins.hasOwnProperty(parent)) {
          this.log(
            `${parent} is not a valid plugin. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const childPlugin = AgentPlugins[parent].plugin.find(
          (child) => child.name === childPluginName
        );
        if (!childPlugin) {
          this.log(
            `${parent} does not have child plugin named ${childPluginName}. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const callOpts = this.parseCallOptions(
          args,
          childPlugin?.startupConfig?.params,
          name
        );
        this.aibitat.use(childPlugin.plugin(callOpts));
        this.log(
          `Attached ${parent}:${childPluginName} plugin to Agent cluster`
        );
        continue;
      }

      // Load an imported plugin. These are marked by a `@@` prefix in the list
      // of functions to load; the remainder of the name is the plugin's hubId.
      if (name.startsWith("@@")) {
        const hubId = name.replace("@@", "");
        const valid = ImportedPlugin.validateImportedPluginHandler(hubId);
        if (!valid) {
          this.log(
            `Imported plugin by hubId ${hubId} not found in plugin directory. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const plugin = ImportedPlugin.loadPluginByHubId(hubId);
        const callOpts = plugin.parseCallOptions();
        this.aibitat.use(plugin.plugin(callOpts));
        this.log(
          `Attached ${plugin.name} (${hubId}) imported plugin to Agent cluster`
        );
        continue;
      }

      // Load single-stage plugin.
      if (!AgentPlugins.hasOwnProperty(name)) {
        this.log(
          `${name} is not a valid plugin. Skipping inclusion to agent cluster.`
        );
        continue;
      }

      const callOpts = this.parseCallOptions(
        args,
        AgentPlugins[name].startupConfig.params
      );
      const AIbitatPlugin = AgentPlugins[name];
      this.aibitat.use(AIbitatPlugin.plugin(callOpts));
      this.log(`Attached ${name} plugin to Agent cluster`);
    }
  }

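  /**
   * Registers the default USER and WORKSPACE agents on the AIbitat instance
   * and collects the functions they declare into #funcsToLoad.
   */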
  async #loadAgents() {
    // Default User agent and workspace agent
    this.log(`Attaching user and default agent to Agent cluster.`);
    this.aibitat.agent(USER_AGENT.name, await USER_AGENT.getDefinition());
    this.aibitat.agent(
      WORKSPACE_AGENT.name,
      await WORKSPACE_AGENT.getDefinition(this.provider)
    );

    this.#funcsToLoad = [
      ...((await USER_AGENT.getDefinition())?.functions || []),
      ...((await WORKSPACE_AGENT.getDefinition())?.functions || []),
    ];
  }

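  /**
   * Validates the invocation and resolves the provider/model configuration.
   * Call this before createAIbitat, which relies on both being set.
   * @returns {Promise<AgentHandler>} This handler, ready to build the cluster.
   */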
  async init() {
    await this.#validInvocation();
    this.#providerSetupAndCheck();
    return this;
  }

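  /**
   * Builds the AIbitat instance for this invocation: wires up the websocket
   * and chat-history plugins, loads the default agents, and attaches every
   * plugin required by their declared functions.
   * @param {object} args - Must include the frontend websocket as `socket`.
   */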
  async createAIbitat(
    args = {
      socket,
    }
  ) {
    this.aibitat = new AIbitat({
      provider: this.provider ?? "openai",
      model: this.model ?? "gpt-4o",
      chats: await this.#chatHistory(20),
      handlerProps: {
        invocation: this.invocation,
        log: this.log,
      },
    });

    // Attach standard websocket plugin for frontend communication.
    this.log(`Attached ${AgentPlugins.websocket.name} plugin to Agent cluster`);
    this.aibitat.use(
      AgentPlugins.websocket.plugin({
        socket: args.socket,
        muteUserReply: true,
        introspection: true,
      })
    );

    // Attach standard chat-history plugin for message storage.
    this.log(
      `Attached ${AgentPlugins.chatHistory.name} plugin to Agent cluster`
    );
    this.aibitat.use(AgentPlugins.chatHistory.plugin());

    // Load required agents (Default + custom)
    await this.#loadAgents();

    // Attach all required plugins for functions to operate.
    this.#attachPlugins(args);
  }

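  /**
   * Kicks off the agent conversation by sending the invocation prompt from the
   * user agent to the workspace agent (or the configured channel).
   */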
  startAgentCluster() {
    return this.aibitat.start({
      from: USER_AGENT.name,
      to: this.channel ?? WORKSPACE_AGENT.name,
      content: this.invocation.prompt,
    });
  }
}

module.exports.AgentHandler = AgentHandler;
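
// Illustrative usage sketch (not executed here). Assumes the caller has
// already created a WorkspaceAgentInvocation record whose `uuid` is passed
// in, and holds the frontend websocket connection as `socket`; both come
// from outside this module.
//
//   const agentHandler = await new AgentHandler({ uuid }).init();
//   await agentHandler.createAIbitat({ socket });
//   agentHandler.startAgentCluster();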