Mirror of https://github.com/Mintplex-Labs/anything-llm.git (synced 2024-11-19 12:40:09 +01:00)
LiteLLM agent support (#2460)

* litellm agent support
* lint
* add LiteLLM provider config

Co-authored-by: Timothy Carambat <rambat1010@gmail.com>

commit cb4b0a878e (parent bce7988683)
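The provider reads its connection settings from the environment. A minimal sketch of the variables this commit introduces (the variable names are from the diff below; the values are assumptions, since the LiteLLM proxy commonly listens on port 4000 and accepts user-defined keys):

// Hedged example setup; the values are placeholders, not part of the commit.
process.env.LITE_LLM_BASE_PATH ??= "http://127.0.0.1:4000"; // assumed local LiteLLM proxy URL
process.env.LITE_LLM_API_KEY ??= "sk-anything";             // assumed; LiteLLM keys are user-defined
process.env.LITE_LLM_MODEL_PREF ??= "gpt-4o";               // assumed; any model the proxy routes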
@@ -24,6 +24,7 @@ const ENABLED_PROVIDERS = [
   "bedrock",
   "fireworksai",
   "deepseek",
+  "litellm",
   "apipie",
   // TODO: More agent support.
   // "cohere", // Has tool calling and will need to build explicit support
@@ -785,6 +785,8 @@ ${this.getHistory({ to: route.to })
         return new Providers.FireworksAIProvider({ model: config.model });
       case "deepseek":
         return new Providers.DeepSeekProvider({ model: config.model });
+      case "litellm":
+        return new Providers.LiteLLMProvider({ model: config.model });
       case "apipie":
         return new Providers.ApiPieProvider({ model: config.model });
@@ -130,6 +130,22 @@ class Provider {
           apiKey: process.env.FIREWORKS_AI_LLM_API_KEY,
           ...config,
         });
+      case "apipie":
+        return new ChatOpenAI({
+          configuration: {
+            baseURL: "https://apipie.ai/v1",
+          },
+          apiKey: process.env.APIPIE_LLM_API_KEY ?? null,
+          ...config,
+        });
+      case "deepseek":
+        return new ChatOpenAI({
+          configuration: {
+            baseURL: "https://api.deepseek.com/v1",
+          },
+          apiKey: process.env.DEEPSEEK_API_KEY ?? null,
+          ...config,
+        });

       // OSS Model Runners
       // case "anythingllm_ollama":
@@ -174,22 +190,15 @@ class Provider {
           apiKey: process.env.TEXT_GEN_WEB_UI_API_KEY ?? "not-used",
           ...config,
         });
-      case "deepseek":
+      case "litellm":
         return new ChatOpenAI({
           configuration: {
-            baseURL: "https://api.deepseek.com/v1",
+            baseURL: process.env.LITE_LLM_BASE_PATH,
           },
-          apiKey: process.env.DEEPSEEK_API_KEY ?? null,
-          ...config,
-        });
-      case "apipie":
-        return new ChatOpenAI({
-          configuration: {
-            baseURL: "https://apipie.ai/v1",
-          },
-          apiKey: process.env.APIPIE_LLM_API_KEY ?? null,
+          apiKey: process.env.LITE_LLM_API_KEY ?? null,
           ...config,
         });

       default:
         throw new Error(`Unsupported provider ${provider} for this task.`);
     }
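Both hunks above use the same pattern: any OpenAI-compatible endpoint can be driven through LangChain's ChatOpenAI by overriding the client's base URL. A minimal standalone sketch, assuming the @langchain/openai package and an illustrative model name:

// Not part of the commit; shows the configuration.baseURL override in isolation.
const { ChatOpenAI } = require("@langchain/openai");

const chat = new ChatOpenAI({
  configuration: { baseURL: process.env.LITE_LLM_BASE_PATH }, // any OpenAI-compatible proxy
  apiKey: process.env.LITE_LLM_API_KEY ?? null,
  model: "gpt-4o", // assumed; whichever model the proxy routes
});
// const reply = await chat.invoke("ping"); // usage, inside an async context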
@@ -15,6 +15,7 @@ const TextWebGenUiProvider = require("./textgenwebui.js");
 const AWSBedrockProvider = require("./bedrock.js");
 const FireworksAIProvider = require("./fireworksai.js");
 const DeepSeekProvider = require("./deepseek.js");
+const LiteLLMProvider = require("./litellm.js");
 const ApiPieProvider = require("./apipie.js");

 module.exports = {
@@ -35,5 +36,6 @@ module.exports = {
   TextWebGenUiProvider,
   AWSBedrockProvider,
   FireworksAIProvider,
+  LiteLLMProvider,
   ApiPieProvider,
 };
server/utils/agents/aibitat/providers/litellm.js (new file, 110 lines)

@@ -0,0 +1,110 @@
const OpenAI = require("openai");
|
||||
const Provider = require("./ai-provider.js");
|
||||
const InheritMultiple = require("./helpers/classes.js");
|
||||
const UnTooled = require("./helpers/untooled.js");
|
||||
|
||||
/**
|
||||
* The agent provider for LiteLLM.
|
||||
*/
|
||||
class LiteLLMProvider extends InheritMultiple([Provider, UnTooled]) {
|
||||
model;
|
||||
|
||||
constructor(config = {}) {
|
||||
super();
|
||||
const { model = null } = config;
|
||||
const client = new OpenAI({
|
||||
baseURL: process.env.LITE_LLM_BASE_PATH,
|
||||
apiKey: process.env.LITE_LLM_API_KEY ?? null,
|
||||
maxRetries: 3,
|
||||
});
|
||||
|
||||
this._client = client;
|
||||
this.model = model || process.env.LITE_LLM_MODEL_PREF;
|
||||
this.verbose = true;
|
||||
}
|
||||
|
||||
get client() {
|
||||
return this._client;
|
||||
}
|
||||
|
||||
async #handleFunctionCallChat({ messages = [] }) {
|
||||
return await this.client.chat.completions
|
||||
.create({
|
||||
model: this.model,
|
||||
temperature: 0,
|
||||
messages,
|
||||
})
|
||||
.then((result) => {
|
||||
if (!result.hasOwnProperty("choices"))
|
||||
throw new Error("LiteLLM chat: No results!");
|
||||
if (result.choices.length === 0)
|
||||
throw new Error("LiteLLM chat: No results length!");
|
||||
return result.choices[0].message.content;
|
||||
})
|
||||
.catch((_) => {
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a completion based on the received messages.
|
||||
*
|
||||
* @param messages A list of messages to send to the API.
|
||||
* @param functions
|
||||
* @returns The completion.
|
||||
*/
|
||||
  // Default functions to an empty array so the length check below cannot throw
  // when the caller passes no tool definitions.
  async complete(messages, functions = []) {
    try {
      let completion;
      if (functions.length > 0) {
        const { toolCall, text } = await this.functionCall(
          messages,
          functions,
          this.#handleFunctionCallChat.bind(this)
        );

        if (toolCall !== null) {
          this.providerLog(`Valid tool call found - running ${toolCall.name}.`);
          this.deduplicator.trackRun(toolCall.name, toolCall.arguments);
          return {
            result: null,
            functionCall: {
              name: toolCall.name,
              arguments: toolCall.arguments,
            },
            cost: 0,
          };
        }
        completion = { content: text };
      }

      if (!completion?.content) {
        this.providerLog(
          "Will assume chat completion without tool call inputs."
        );
        const response = await this.client.chat.completions.create({
          model: this.model,
          messages: this.cleanMsgs(messages),
        });
        completion = response.choices[0].message;
      }

      // The UnTooled class inherited Deduplicator is mostly useful to prevent the agent
      // from calling the exact same function over and over in a loop within a single chat exchange
      // _but_ we should enable it to call previously used tools in a new chat interaction.
      this.deduplicator.reset("runs");
      return {
        result: completion.content,
        cost: 0,
      };
    } catch (error) {
      throw error;
    }
  }

  getCost(_usage) {
    return 0;
  }
}

module.exports = LiteLLMProvider;
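A hypothetical usage sketch of the new provider (the model name and message are illustrative, and LITE_LLM_BASE_PATH / LITE_LLM_API_KEY are assumed to be set as above):

// Not part of the commit.
const LiteLLMProvider = require("./server/utils/agents/aibitat/providers/litellm.js");

(async () => {
  const provider = new LiteLLMProvider({ model: "gpt-4o" }); // assumed model
  // An empty tool list falls through to a plain chat completion.
  const { result, cost } = await provider.complete(
    [{ role: "user", content: "Say hello." }],
    []
  );
  console.log(result, cost); // cost is always 0; getCost() above returns 0
})();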
@@ -166,6 +166,12 @@ class AgentHandler {
         if (!process.env.DEEPSEEK_API_KEY)
           throw new Error("DeepSeek API Key must be provided to use agents.");
         break;
+      case "litellm":
+        if (!process.env.LITE_LLM_BASE_PATH)
+          throw new Error(
+            "LiteLLM API base path and key must be provided to use agents."
+          );
+        break;
       case "apipie":
         if (!process.env.APIPIE_LLM_API_KEY)
           throw new Error("ApiPie API Key must be provided to use agents.");
@@ -216,6 +222,8 @@ class AgentHandler {
         return null;
       case "deepseek":
         return "deepseek-chat";
+      case "litellm":
+        return null;
       case "apipie":
         return null;
       default:
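Note that, unlike "deepseek" (which falls back to "deepseek-chat"), "litellm" gets no default model here, so agents only start if the workspace model or LITE_LLM_MODEL_PREF is set. The resolution order, restated from the provider constructor above with a hypothetical guard:

// Sketch; the thrown error is illustrative and not in the commit.
const model = config.model || process.env.LITE_LLM_MODEL_PREF;
if (!model) throw new Error("No LiteLLM model configured for agents.");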