Added vertex

This commit is contained in:
Karl Stoney 2024-11-03 17:30:59 +00:00
parent 0bb47619dc
commit 2fccfcdd2b
15 changed files with 541 additions and 5 deletions

View File

@ -96,6 +96,7 @@ AnythingLLM divides your documents into objects called `workspaces`. A Workspace
- [Text Generation Web UI](https://github.com/oobabooga/text-generation-webui) - [Text Generation Web UI](https://github.com/oobabooga/text-generation-webui)
- [Apipie](https://apipie.ai/) - [Apipie](https://apipie.ai/)
- [xAI](https://x.ai/) - [xAI](https://x.ai/)
- [Google Vertex](https://cloud.google.com/vertex-ai)
**Embedder models:** **Embedder models:**

View File

@ -0,0 +1,80 @@
export default function VertexLLMOptions({ settings }) {
return (
<div className="w-full flex flex-col">
<div className="w-full flex items-center gap-[36px] mt-1.5">
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
GCP Project Name
</label>
<input
type="text"
name="VertexProjectName"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
placeholder="your-project-name"
defaultValue={settings?.VertexProjectName ?? ""}
required={true}
autoComplete="off"
spellCheck={false}
/>
</div>
{!settings?.credentialsOnly && (
<VertexLLMModelSelection settings={settings} />
)}
</div>
</div>
);
function VertexLLMModelSelection({ settings }) {
console.log(settings);
return (
<>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Chat Model Selection
</label>
<select
name="VertexLLMModelPref"
defaultValue={settings?.VertexLLMModelPref || "gemini-1.5-flash"}
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<optgroup label="Stable Models">
{[
"gemini-1.5-flash",
"gemini-1.5-pro",
"gemini-1.5-flash-001",
"gemini-1.5-pro-001",
"gemini-1.5-flash-002",
"gemini-1.5-pro-002",
].map((model) => {
return (
<option key={model} value={model}>
{model}
</option>
);
})}
</optgroup>
</select>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-3">
Safety Setting
</label>
<select
name="VertexSafetySetting"
defaultValue={
settings?.VertexSafetySetting || "BLOCK_MEDIUM_AND_ABOVE"
}
required={true}
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
>
<option value="BLOCK_NONE">None</option>
<option value="BLOCK_ONLY_HIGH">Block few</option>
<option value="BLOCK_MEDIUM_AND_ABOVE">Block some (default)</option>
<option value="BLOCK_LOW_AND_ABOVE">Block most</option>
</select>
</div>
</>
);
}
}

View File

@ -11,6 +11,14 @@ export const DISABLED_PROVIDERS = [
]; ];
const PROVIDER_DEFAULT_MODELS = { const PROVIDER_DEFAULT_MODELS = {
openai: [], openai: [],
vertex: [
"gemini-1.5-flash",
"gemini-1.5-pro",
"gemini-1.5-flash-001",
"gemini-1.5-pro-001",
"gemini-1.5-flash-002",
"gemini-1.5-pro-002",
],
gemini: [ gemini: [
"gemini-pro", "gemini-pro",
"gemini-1.0-pro", "gemini-1.0-pro",

Binary file not shown.

After

Width:  |  Height:  |  Size: 69 KiB

View File

@ -10,6 +10,7 @@ import GenericOpenAiLogo from "@/media/llmprovider/generic-openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png"; import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import AnthropicLogo from "@/media/llmprovider/anthropic.png"; import AnthropicLogo from "@/media/llmprovider/anthropic.png";
import GeminiLogo from "@/media/llmprovider/gemini.png"; import GeminiLogo from "@/media/llmprovider/gemini.png";
import VertexLogo from "@/media/llmprovider/vertex.png";
import OllamaLogo from "@/media/llmprovider/ollama.png"; import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png"; import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import LocalAiLogo from "@/media/llmprovider/localai.png"; import LocalAiLogo from "@/media/llmprovider/localai.png";
@ -38,6 +39,7 @@ import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions"; import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions"; import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions"; import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import VertexLLMOptions from "@/components/LLMSelection/VertexLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions"; import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions"; import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
import FireworksAiOptions from "@/components/LLMSelection/FireworksAiOptions"; import FireworksAiOptions from "@/components/LLMSelection/FireworksAiOptions";
@ -92,6 +94,14 @@ export const AVAILABLE_LLM_PROVIDERS = [
description: "Google's largest and most capable AI model", description: "Google's largest and most capable AI model",
requiredConfig: ["GeminiLLMApiKey"], requiredConfig: ["GeminiLLMApiKey"],
}, },
{
name: "Vertex",
value: "vertex",
logo: VertexLogo,
options: (settings) => <VertexLLMOptions settings={settings} />,
    description: "Google's Generative AI model platform",
requiredConfig: ["VertexProjectName"],
},
{ {
name: "HuggingFace", name: "HuggingFace",
value: "huggingface", value: "huggingface",

View File

@ -6,6 +6,7 @@ import GenericOpenAiLogo from "@/media/llmprovider/generic-openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png"; import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import AnthropicLogo from "@/media/llmprovider/anthropic.png"; import AnthropicLogo from "@/media/llmprovider/anthropic.png";
import GeminiLogo from "@/media/llmprovider/gemini.png"; import GeminiLogo from "@/media/llmprovider/gemini.png";
import VertexLogo from "@/media/llmprovider/vertex.png";
import OllamaLogo from "@/media/llmprovider/ollama.png"; import OllamaLogo from "@/media/llmprovider/ollama.png";
import TogetherAILogo from "@/media/llmprovider/togetherai.png"; import TogetherAILogo from "@/media/llmprovider/togetherai.png";
import FireworksAILogo from "@/media/llmprovider/fireworksai.jpeg"; import FireworksAILogo from "@/media/llmprovider/fireworksai.jpeg";
@ -75,6 +76,14 @@ export const LLM_SELECTION_PRIVACY = {
], ],
logo: GeminiLogo, logo: GeminiLogo,
}, },
vertex: {
name: "Vertex",
description: [
"Your chats will not be used for training",
"Your prompts and document text used in response creation are visible to Google",
],
logo: VertexLogo,
},
lmstudio: { lmstudio: {
name: "LMStudio", name: "LMStudio",
description: [ description: [

View File

@ -5,6 +5,7 @@ import GenericOpenAiLogo from "@/media/llmprovider/generic-openai.png";
import AzureOpenAiLogo from "@/media/llmprovider/azure.png"; import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
import AnthropicLogo from "@/media/llmprovider/anthropic.png"; import AnthropicLogo from "@/media/llmprovider/anthropic.png";
import GeminiLogo from "@/media/llmprovider/gemini.png"; import GeminiLogo from "@/media/llmprovider/gemini.png";
import VertexLogo from "@/media/llmprovider/vertex.png";
import OllamaLogo from "@/media/llmprovider/ollama.png"; import OllamaLogo from "@/media/llmprovider/ollama.png";
import LMStudioLogo from "@/media/llmprovider/lmstudio.png"; import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
import LocalAiLogo from "@/media/llmprovider/localai.png"; import LocalAiLogo from "@/media/llmprovider/localai.png";
@ -33,6 +34,7 @@ import LMStudioOptions from "@/components/LLMSelection/LMStudioOptions";
import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions"; import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions";
import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions"; import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions";
import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions"; import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
import VertexLLMOptions from "@/components/LLMSelection/VertexLLMOptions";
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions"; import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
import MistralOptions from "@/components/LLMSelection/MistralOptions"; import MistralOptions from "@/components/LLMSelection/MistralOptions";
import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions"; import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
@ -89,6 +91,13 @@ const LLMS = [
options: (settings) => <GeminiLLMOptions settings={settings} />, options: (settings) => <GeminiLLMOptions settings={settings} />,
description: "Google's largest and most capable AI model", description: "Google's largest and most capable AI model",
}, },
{
name: "Vertex",
value: "vertex",
logo: VertexLogo,
options: (settings) => <VertexLLMOptions settings={settings} />,
    description: "Google's Generative AI model platform",
},
{ {
name: "HuggingFace", name: "HuggingFace",
value: "huggingface", value: "huggingface",

View File

@ -430,6 +430,13 @@ const SystemSettings = {
GeminiSafetySetting: GeminiSafetySetting:
process.env.GEMINI_SAFETY_SETTING || "BLOCK_MEDIUM_AND_ABOVE", process.env.GEMINI_SAFETY_SETTING || "BLOCK_MEDIUM_AND_ABOVE",
// Vertex
VertexLLMModelPref:
process.env.VERTEX_LLM_MODEL_PREF || "gemini-1.5-flash",
VertexSafetySetting:
process.env.VERTEX_SAFETY_SETTING || "BLOCK_MEDIUM_AND_ABOVE",
VertexProjectName: process.env.VERTEX_PROJECT_NAME,
// LMStudio Keys // LMStudio Keys
LMStudioBasePath: process.env.LMSTUDIO_BASE_PATH, LMStudioBasePath: process.env.LMSTUDIO_BASE_PATH,
LMStudioTokenLimit: process.env.LMSTUDIO_MODEL_TOKEN_LIMIT, LMStudioTokenLimit: process.env.LMSTUDIO_MODEL_TOKEN_LIMIT,

View File

@ -22,6 +22,7 @@
"@anthropic-ai/sdk": "^0.16.1", "@anthropic-ai/sdk": "^0.16.1",
"@azure/openai": "1.0.0-beta.10", "@azure/openai": "1.0.0-beta.10",
"@datastax/astra-db-ts": "^0.1.3", "@datastax/astra-db-ts": "^0.1.3",
"@google-cloud/vertexai": "^1.9.0",
"@google/generative-ai": "^0.7.1", "@google/generative-ai": "^0.7.1",
"@ladjs/graceful": "^3.2.2", "@ladjs/graceful": "^3.2.2",
"@lancedb/lancedb": "0.5.2", "@lancedb/lancedb": "0.5.2",
@ -92,8 +93,8 @@
"flow-remove-types": "^2.217.1", "flow-remove-types": "^2.217.1",
"globals": "^13.21.0", "globals": "^13.21.0",
"hermes-eslint": "^0.15.0", "hermes-eslint": "^0.15.0",
"nodemon": "^2.0.22",
"node-html-markdown": "^1.3.0", "node-html-markdown": "^1.3.0",
"nodemon": "^2.0.22",
"prettier": "^3.0.3" "prettier": "^3.0.3"
} }
} }

View File

@ -30,6 +30,14 @@ const MODEL_MAP = {
"gemini-1.5-flash-exp-0827": 1_048_576, "gemini-1.5-flash-exp-0827": 1_048_576,
"gemini-1.5-flash-8b-exp-0827": 1_048_576, "gemini-1.5-flash-8b-exp-0827": 1_048_576,
}, },
vertex: {
"gemini-1.5-flash": 1_048_576,
"gemini-1.5-pro": 2_097_152,
"gemini-1.5-flash-001": 1_048_576,
"gemini-1.5-pro-001": 2_097_152,
"gemini-1.5-flash-002": 1_048_576,
"gemini-1.5-pro-002": 2_097_152,
},
groq: { groq: {
"gemma2-9b-it": 8192, "gemma2-9b-it": 8192,
"gemma-7b-it": 8192, "gemma-7b-it": 8192,

View File

@ -0,0 +1,296 @@
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
const {
writeResponseChunk,
clientAbortedHandler,
} = require("../../helpers/chat/responses");
const { MODEL_MAP } = require("../modelMap");
class VertexLLM {
constructor(embedder = null, modelPreference = null) {
const { VertexAI } = require("@google-cloud/vertexai");
const vertexAI = new VertexAI({
project: process.env.VERTEX_PROJECT_NAME,
});
this.model =
modelPreference ||
process.env.VERTEX_LLM_MODEL_PREF ||
"gemini-1.5-flash";
this.vertex = vertexAI.getGenerativeModel({
model: this.model,
});
this.limits = {
history: this.promptWindowLimit() * 0.15,
system: this.promptWindowLimit() * 0.15,
user: this.promptWindowLimit() * 0.7,
};
this.embedder = embedder ?? new NativeEmbedder();
this.defaultTemp = 0.7;
this.safetyThreshold = this.#fetchSafetyThreshold();
}
#appendContext(contextTexts = []) {
if (!contextTexts || !contextTexts.length) return "";
return (
"\nContext:\n" +
contextTexts
.map((text, i) => {
return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
})
.join("")
);
}
// BLOCK_NONE can be a special candidate for some fields
// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-attributes#how_to_remove_automated_response_blocking_for_select_safety_attributes
// so if you are wondering why BLOCK_NONE still failed, the link above will explain why.
#fetchSafetyThreshold() {
const threshold =
process.env.GEMINI_SAFETY_SETTING ?? "BLOCK_MEDIUM_AND_ABOVE";
const safetyThresholds = [
"BLOCK_NONE",
"BLOCK_ONLY_HIGH",
"BLOCK_MEDIUM_AND_ABOVE",
"BLOCK_LOW_AND_ABOVE",
];
return safetyThresholds.includes(threshold)
? threshold
: "BLOCK_MEDIUM_AND_ABOVE";
}
#safetySettings() {
return [
{
category: "HARM_CATEGORY_HATE_SPEECH",
threshold: this.safetyThreshold,
},
{
category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
threshold: this.safetyThreshold,
},
{ category: "HARM_CATEGORY_HARASSMENT", threshold: this.safetyThreshold },
{
category: "HARM_CATEGORY_DANGEROUS_CONTENT",
threshold: this.safetyThreshold,
},
];
}
streamingEnabled() {
return "streamGetChatCompletion" in this;
}
static promptWindowLimit(modelName) {
return MODEL_MAP.vertex[modelName] ?? 30_720;
}
promptWindowLimit() {
return MODEL_MAP.vertex[this.model] ?? 30_720;
}
isValidChatCompletionModel(modelName = "") {
const validModels = [
"gemini-1.5-flash",
"gemini-1.5-pro",
"gemini-1.5-flash-001",
"gemini-1.5-pro-001",
"gemini-1.5-flash-002",
"gemini-1.5-pro-002",
];
return validModels.includes(modelName);
}
/**
* Generates appropriate content array for a message + attachments.
* @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}}
* @returns {string|object[]}
*/
#generateContent({ userPrompt, attachments = [] }) {
if (!attachments.length) {
return userPrompt;
}
const content = [{ text: userPrompt }];
for (let attachment of attachments) {
content.push({
inlineData: {
data: attachment.contentString.split("base64,")[1],
mimeType: attachment.mime,
},
});
}
return content.flat();
}
constructPrompt({
systemPrompt = "",
contextTexts = [],
chatHistory = [],
userPrompt = "",
attachments = [],
}) {
const prompt = {
role: "system",
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
};
return [
prompt,
{ role: "assistant", content: "Okay." },
...chatHistory,
{
role: "USER_PROMPT",
content: this.#generateContent({ userPrompt, attachments }),
},
];
}
// This will take an OpenAi format message array and only pluck valid roles from it.
formatMessages(messages = []) {
// Gemini roles are either user || model.
// and all "content" is relabeled to "parts"
const allMessages = messages
.map((message) => {
if (message.role === "system")
return { role: "user", parts: [{ text: message.content }] };
if (message.role === "user")
return { role: "user", parts: [{ text: message.content }] };
if (message.role === "assistant")
return { role: "model", parts: [{ text: message.content }] };
return null;
})
.filter((msg) => !!msg);
// Specifically, Google cannot have the last sent message be from a user with no assistant reply
// otherwise it will crash. So if the last item is from the user, it was not completed so pop it off
// the history.
if (
allMessages.length > 0 &&
allMessages[allMessages.length - 1].role === "user"
)
allMessages.pop();
// Validate that after every user message, there is a model message
// sometimes when using gemini we try to compress messages in order to retain as
// much context as possible but this may mess up the order of the messages that the gemini model expects
// we do this check to work around the edge case where 2 user prompts may be next to each other, in the message array
for (let i = 0; i < allMessages.length; i++) {
if (
allMessages[i].role === "user" &&
i < allMessages.length - 1 &&
allMessages[i + 1].role !== "model"
) {
allMessages.splice(i + 1, 0, {
role: "model",
parts: [{ text: "Okay." }],
});
}
}
return allMessages;
}
async getChatCompletion(messages = [], _opts = {}) {
if (!this.isValidChatCompletionModel(this.model))
throw new Error(
`Gemini chat: ${this.model} is not valid for chat completion!`
);
const prompt = messages.find(
(chat) => chat.role === "USER_PROMPT"
)?.content;
const chatThread = this.vertex.startChat({
history: this.formatMessages(messages),
safetySettings: this.#safetySettings(),
});
const result = await chatThread.sendMessage(prompt);
const response = result.response;
const responseText = response.text();
if (!responseText) throw new Error("Gemini: No response could be parsed.");
return responseText;
}
async streamGetChatCompletion(messages = [], _opts = {}) {
if (!this.isValidChatCompletionModel(this.model))
throw new Error(
`Gemini chat: ${this.model} is not valid for chat completion!`
);
const prompt = messages.find(
(chat) => chat.role === "USER_PROMPT"
)?.content;
const chatThread = this.vertex.startChat({
history: this.formatMessages(messages),
safetySettings: this.#safetySettings(),
});
const responseStream = await chatThread.sendMessageStream(prompt);
if (!responseStream.stream)
throw new Error("Could not stream response stream from Gemini.");
return responseStream.stream;
}
async compressMessages(promptArgs = {}, rawHistory = []) {
const { messageArrayCompressor } = require("../../helpers/chat");
const messageArray = this.constructPrompt(promptArgs);
return await messageArrayCompressor(this, messageArray, rawHistory);
}
async handleStream(response, stream, responseProps) {
const { uuid = uuidv4(), sources = [] } = responseProps;
return new Promise(async (resolve) => {
let fullText = "";
// Establish listener to early-abort a streaming response
// in case things go sideways or the user does not like the response.
// We preserve the generated text but continue as if chat was completed
// to preserve previously generated content.
const handleAbort = () => clientAbortedHandler(resolve, fullText);
response.on("close", handleAbort);
for await (const chunk of stream) {
try {
for (const part of chunk.candidates[0].content.parts) {
fullText += part.text;
writeResponseChunk(response, {
uuid,
sources: [],
type: "textResponseChunk",
textResponse: part.text,
close: false,
error: false,
});
}
} catch (e) {
writeResponseChunk(response, {
uuid,
sources: [],
type: "abort",
textResponse: null,
close: true,
error: e.message,
});
resolve(e.message);
return;
}
}
response.removeListener("close", handleAbort);
resolve(fullText);
});
}
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
async embedTextInput(textInput) {
return await this.embedder.embedTextInput(textInput);
}
async embedChunks(textChunks = []) {
return await this.embedder.embedChunks(textChunks);
}
}
module.exports = {
VertexLLM,
};

View File

@ -210,6 +210,8 @@ class AgentHandler {
return process.env.KOBOLD_CPP_MODEL_PREF ?? null; return process.env.KOBOLD_CPP_MODEL_PREF ?? null;
case "gemini": case "gemini":
return process.env.GEMINI_MODEL_PREF ?? "gemini-pro"; return process.env.GEMINI_MODEL_PREF ?? "gemini-pro";
case "vertex":
        return process.env.VERTEX_LLM_MODEL_PREF ?? "gemini-1.5-pro";
case "localai": case "localai":
return process.env.LOCAL_AI_MODEL_PREF ?? null; return process.env.LOCAL_AI_MODEL_PREF ?? null;
case "openrouter": case "openrouter":

View File

@ -108,6 +108,9 @@ function getLLMProvider({ provider = null, model = null } = {}) {
case "gemini": case "gemini":
const { GeminiLLM } = require("../AiProviders/gemini"); const { GeminiLLM } = require("../AiProviders/gemini");
return new GeminiLLM(embedder, model); return new GeminiLLM(embedder, model);
case "vertex":
const { VertexLLM } = require("../AiProviders/vertex");
return new VertexLLM(embedder, model);
case "lmstudio": case "lmstudio":
const { LMStudioLLM } = require("../AiProviders/lmStudio"); const { LMStudioLLM } = require("../AiProviders/lmStudio");
return new LMStudioLLM(embedder, model); return new LMStudioLLM(embedder, model);
@ -240,6 +243,9 @@ function getLLMProviderClass({ provider = null } = {}) {
case "gemini": case "gemini":
const { GeminiLLM } = require("../AiProviders/gemini"); const { GeminiLLM } = require("../AiProviders/gemini");
return GeminiLLM; return GeminiLLM;
case "vertex":
const { VertexLLM } = require("../AiProviders/vertex");
return VertexLLM;
case "lmstudio": case "lmstudio":
const { LMStudioLLM } = require("../AiProviders/lmStudio"); const { LMStudioLLM } = require("../AiProviders/lmStudio");
return LMStudioLLM; return LMStudioLLM;

View File

@ -57,6 +57,19 @@ const KEY_MAPPING = {
checks: [validGeminiSafetySetting], checks: [validGeminiSafetySetting],
}, },
VertexLLMModelPref: {
envKey: "VERTEX_LLM_MODEL_PREF",
checks: [isNotEmpty, validVertexModel],
},
VertexProjectName: {
envKey: "VERTEX_PROJECT_NAME",
checks: [isNotEmpty],
},
VertexSafetySetting: {
envKey: "VERTEX_SAFETY_SETTING",
checks: [validGeminiSafetySetting],
},
// LMStudio Settings // LMStudio Settings
LMStudioBasePath: { LMStudioBasePath: {
envKey: "LMSTUDIO_BASE_PATH", envKey: "LMSTUDIO_BASE_PATH",
@ -645,6 +658,7 @@ function supportedLLM(input = "") {
"azure", "azure",
"anthropic", "anthropic",
"gemini", "gemini",
"vertex",
"lmstudio", "lmstudio",
"localai", "localai",
"ollama", "ollama",
@ -692,6 +706,20 @@ function validGeminiModel(input = "") {
: `Invalid Model type. Must be one of ${validModels.join(", ")}.`; : `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
} }
function validVertexModel(input = "") {
const validModels = [
"gemini-1.5-flash",
"gemini-1.5-pro",
"gemini-1.5-flash-001",
"gemini-1.5-pro-001",
"gemini-1.5-flash-002",
"gemini-1.5-pro-002",
];
return validModels.includes(input)
? null
: `Invalid Model type. Must be one of ${validModels.join(", ")}.`;
}
function validGeminiSafetySetting(input = "") { function validGeminiSafetySetting(input = "") {
const validModes = [ const validModes = [
"BLOCK_NONE", "BLOCK_NONE",

View File

@ -874,6 +874,13 @@
resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d"
integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==
"@google-cloud/vertexai@^1.9.0":
version "1.9.0"
resolved "https://registry.yarnpkg.com/@google-cloud/vertexai/-/vertexai-1.9.0.tgz#30941faa920e1218231604285c56aa4ae172b358"
integrity sha512-8brlcJwFXI4fPuBtsDNQqCdWZmz8gV9jeEKOU0vc5H2SjehCQpXK/NwuSEr916zbhlBHtg/sU37qQQdgvh5BRA==
dependencies:
google-auth-library "^9.1.0"
"@google/generative-ai@^0.7.1": "@google/generative-ai@^0.7.1":
version "0.7.1" version "0.7.1"
resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.7.1.tgz#eb187c75080c0706245699dbc06816c830d8c6a7" resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.7.1.tgz#eb187c75080c0706245699dbc06816c830d8c6a7"
@ -2588,7 +2595,7 @@ base-64@^0.1.0:
resolved "https://registry.yarnpkg.com/base-64/-/base-64-0.1.0.tgz#780a99c84e7d600260361511c4877613bf24f6bb" resolved "https://registry.yarnpkg.com/base-64/-/base-64-0.1.0.tgz#780a99c84e7d600260361511c4877613bf24f6bb"
integrity sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA== integrity sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==
base64-js@^1.3.1, base64-js@^1.5.1: base64-js@^1.3.0, base64-js@^1.3.1, base64-js@^1.5.1:
version "1.5.1" version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
@ -2606,6 +2613,11 @@ before-after-hook@^2.2.0:
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c"
integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==
bignumber.js@^9.0.0:
version "9.1.2"
resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.2.tgz#b7c4242259c008903b13707983b5f4bbd31eda0c"
integrity sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==
binary-extensions@^2.0.0, binary-extensions@^2.2.0: binary-extensions@^2.0.0, binary-extensions@^2.2.0:
version "2.3.0" version "2.3.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522"
@ -3339,7 +3351,7 @@ eastasianwidth@^0.2.0:
resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb"
integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==
ecdsa-sig-formatter@1.0.11: ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11:
version "1.0.11" version "1.0.11"
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
@ -3784,6 +3796,11 @@ express@^4.18.2:
utils-merge "1.0.1" utils-merge "1.0.1"
vary "~1.1.2" vary "~1.1.2"
extend@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
external-editor@^3.1.0: external-editor@^3.1.0:
version "3.1.0" version "3.1.0"
resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495"
@ -4077,6 +4094,25 @@ gauge@^4.0.3:
strip-ansi "^6.0.1" strip-ansi "^6.0.1"
wide-align "^1.1.5" wide-align "^1.1.5"
gaxios@^6.0.0, gaxios@^6.1.1:
version "6.7.1"
resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.7.1.tgz#ebd9f7093ede3ba502685e73390248bb5b7f71fb"
integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==
dependencies:
extend "^3.0.2"
https-proxy-agent "^7.0.1"
is-stream "^2.0.0"
node-fetch "^2.6.9"
uuid "^9.0.1"
gcp-metadata@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-6.1.0.tgz#9b0dd2b2445258e7597f2024332d20611cbd6b8c"
integrity sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==
dependencies:
gaxios "^6.0.0"
json-bigint "^1.0.0"
generate-function@^2.3.1: generate-function@^2.3.1:
version "2.3.1" version "2.3.1"
resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f" resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f"
@ -4165,6 +4201,18 @@ globalthis@^1.0.3:
define-properties "^1.2.1" define-properties "^1.2.1"
gopd "^1.0.1" gopd "^1.0.1"
google-auth-library@^9.1.0:
version "9.14.2"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-9.14.2.tgz#92a53ba32b3a9ff9ced8ed34129edb5a7fa7fb52"
integrity sha512-R+FRIfk1GBo3RdlRYWPdwk8nmtVUOn6+BkDomAC46KoU8kzXzE1HLmOasSCbWUByMMAGkknVF0G5kQ69Vj7dlA==
dependencies:
base64-js "^1.3.0"
ecdsa-sig-formatter "^1.0.11"
gaxios "^6.1.1"
gcp-metadata "^6.1.0"
gtoken "^7.0.0"
jws "^4.0.0"
gopd@^1.0.1: gopd@^1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
@ -4197,6 +4245,14 @@ graphql@^16.7.1:
resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f" resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f"
integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw== integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==
gtoken@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-7.1.0.tgz#d61b4ebd10132222817f7222b1e6064bd463fc26"
integrity sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==
dependencies:
gaxios "^6.0.0"
jws "^4.0.0"
guid-typescript@^1.0.9: guid-typescript@^1.0.9:
version "1.0.9" version "1.0.9"
resolved "https://registry.yarnpkg.com/guid-typescript/-/guid-typescript-1.0.9.tgz#e35f77003535b0297ea08548f5ace6adb1480ddc" resolved "https://registry.yarnpkg.com/guid-typescript/-/guid-typescript-1.0.9.tgz#e35f77003535b0297ea08548f5ace6adb1480ddc"
@ -4326,6 +4382,14 @@ https-proxy-agent@^7.0.0:
agent-base "^7.0.2" agent-base "^7.0.2"
debug "4" debug "4"
https-proxy-agent@^7.0.1:
version "7.0.5"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2"
integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==
dependencies:
agent-base "^7.0.2"
debug "4"
human-interval@^2.0.1: human-interval@^2.0.1:
version "2.0.1" version "2.0.1"
resolved "https://registry.yarnpkg.com/human-interval/-/human-interval-2.0.1.tgz#655baf606c7067bb26042dcae14ec777b099af15" resolved "https://registry.yarnpkg.com/human-interval/-/human-interval-2.0.1.tgz#655baf606c7067bb26042dcae14ec777b099af15"
@ -4787,6 +4851,13 @@ jsbi@^4.3.0:
resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-4.3.0.tgz#b54ee074fb6fcbc00619559305c8f7e912b04741" resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-4.3.0.tgz#b54ee074fb6fcbc00619559305c8f7e912b04741"
integrity sha512-SnZNcinB4RIcnEyZqFPdGPVgrg2AcnykiBy0sHVJQKHYeaLUvi3Exj+iaPpLnFVkDPZIV4U0yvgC9/R4uEAZ9g== integrity sha512-SnZNcinB4RIcnEyZqFPdGPVgrg2AcnykiBy0sHVJQKHYeaLUvi3Exj+iaPpLnFVkDPZIV4U0yvgC9/R4uEAZ9g==
json-bigint@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1"
integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==
dependencies:
bignumber.js "^9.0.0"
json-bignum@^0.0.3: json-bignum@^0.0.3:
version "0.0.3" version "0.0.3"
resolved "https://registry.yarnpkg.com/json-bignum/-/json-bignum-0.0.3.tgz#41163b50436c773d82424dbc20ed70db7604b8d7" resolved "https://registry.yarnpkg.com/json-bignum/-/json-bignum-0.0.3.tgz#41163b50436c773d82424dbc20ed70db7604b8d7"
@ -5452,7 +5523,7 @@ node-domexception@1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@2.7.0, node-fetch@^2.6.1, node-fetch@^2.6.12, node-fetch@^2.6.7: node-fetch@2.7.0, node-fetch@^2.6.1, node-fetch@^2.6.12, node-fetch@^2.6.7, node-fetch@^2.6.9:
version "2.7.0" version "2.7.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==