// anything-llm/server/utils/helpers/index.js
/**
 * Resolve the configured vector database provider class.
 * Reads VECTOR_DB from the environment, falling back to "pinecone" when unset.
 * Providers are required lazily so only the selected backend's module is loaded.
 * @returns {object} The provider class/namespace (not an instance) for the selected vector DB.
 * @throws {Error} When VECTOR_DB is set to an unsupported value.
 */
function getVectorDbClass() {
  const vectorSelection = process.env.VECTOR_DB || "pinecone";
  switch (vectorSelection) {
    case "pinecone": {
      const { Pinecone } = require("../vectorDbProviders/pinecone");
      return Pinecone;
    }
    case "chroma": {
      const { Chroma } = require("../vectorDbProviders/chroma");
      return Chroma;
    }
    case "lancedb": {
      const { LanceDb } = require("../vectorDbProviders/lance");
      return LanceDb;
    }
    case "weaviate": {
      const { Weaviate } = require("../vectorDbProviders/weaviate");
      return Weaviate;
    }
    case "qdrant": {
      const { QDrant } = require("../vectorDbProviders/qdrant");
      return QDrant;
    }
    default:
      // Only reachable when VECTOR_DB holds a value we do not recognize
      // (an unset VECTOR_DB falls back to "pinecone" above), so say so.
      throw new Error(`ENV: Unsupported VECTOR_DB value "${vectorSelection}"!`);
  }
}
/**
 * Resolve the configured LLM provider and return a fresh instance of it.
 * Reads LLM_PROVIDER from the environment, falling back to "openai" when unset.
 * Providers are required lazily so only the selected provider's module is loaded.
 * @returns {object} A new instance of the selected LLM provider.
 * @throws {Error} When LLM_PROVIDER is set to an unsupported value.
 */
function getLLMProvider() {
  // Renamed from `vectorSelection` — this selects an LLM, not a vector DB
  // (copy-paste leftover from getVectorDbClass).
  const llmSelection = process.env.LLM_PROVIDER || "openai";
  switch (llmSelection) {
    case "openai": {
      const { OpenAi } = require("../AiProviders/openAi");
      return new OpenAi();
    }
    case "azure": {
      const { AzureOpenAi } = require("../AiProviders/azureOpenAi");
      return new AzureOpenAi();
    }
    default:
      // Only reachable when LLM_PROVIDER holds a value we do not recognize
      // (an unset LLM_PROVIDER falls back to "openai" above), so say so.
      throw new Error(`ENV: Unsupported LLM_PROVIDER value "${llmSelection}"!`);
  }
}
/**
 * Split an array into consecutive chunks of at most `size` elements.
 * The final chunk may be shorter when `arr.length` is not a multiple of `size`.
 * @param {Array} arr - The array to partition (not mutated).
 * @param {number} size - Maximum number of elements per chunk.
 * @returns {Array<Array>} The chunks, in original order; empty for an empty input.
 */
function toChunks(arr, size) {
  const chunks = [];
  for (let start = 0; start < arr.length; start += size) {
    chunks.push(arr.slice(start, start + size));
  }
  return chunks;
}
2023-06-08 06:31:35 +02:00
module.exports = {
getVectorDbClass,
getLLMProvider,
toChunks,
2023-06-08 06:31:35 +02:00
};