// Resolves the vector database provider selected via the VECTOR_DB env var
// (defaults to "pinecone") and returns that provider's implementation.
function getVectorDbClass() {
  const vectorSelection = process.env.VECTOR_DB || "pinecone";
  switch (vectorSelection) {
    case "pinecone":
      const { Pinecone } = require("../vectorDbProviders/pinecone");
      return Pinecone;
    case "chroma":
      const { Chroma } = require("../vectorDbProviders/chroma");
      return Chroma;
    case "lancedb":
      const { LanceDb } = require("../vectorDbProviders/lance");
      return LanceDb;
    case "weaviate":
      const { Weaviate } = require("../vectorDbProviders/weaviate");
      return Weaviate;
    case "qdrant":
      const { QDrant } = require("../vectorDbProviders/qdrant");
      return QDrant;
    default:
      throw new Error("ENV: No VECTOR_DB value found in environment!");
  }
}
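
// Illustrative usage only (not part of this module): the returned value is
// whatever the matching provider module exports; the shared interface of those
// providers is assumed here, not defined in this file.
//
//   // VECTOR_DB="lancedb" in the environment
//   const VectorDb = getVectorDbClass();
//   // VectorDb is the LanceDb export from ../vectorDbProviders/lance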

// Resolves the chat/completion LLM connector selected via the LLM_PROVIDER
// env var (defaults to "openai") and constructs it with the configured
// embedding engine.
function getLLMProvider() {
  const providerSelection = process.env.LLM_PROVIDER || "openai";
  const embedder = getEmbeddingEngineSelection();
  switch (providerSelection) {
    case "openai":
      const { OpenAiLLM } = require("../AiProviders/openAi");
      return new OpenAiLLM(embedder);
    case "azure":
      const { AzureOpenAiLLM } = require("../AiProviders/azureOpenAi");
      return new AzureOpenAiLLM(embedder);
    case "anthropic":
      const { AnthropicLLM } = require("../AiProviders/anthropic");
      return new AnthropicLLM(embedder);
    case "lmstudio":
      const { LMStudioLLM } = require("../AiProviders/lmStudio");
      return new LMStudioLLM(embedder);
    case "localai":
      const { LocalAiLLM } = require("../AiProviders/localAi");
      return new LocalAiLLM(embedder);
    default:
      throw new Error("ENV: No LLM_PROVIDER value found in environment!");
  }
}
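
// Illustrative usage only: the connector is constructed here, but any
// provider-specific credentials or endpoints are read by each connector's own
// constructor from the environment, not by this helper.
//
//   // LLM_PROVIDER="openai" in the environment
//   const llm = getLLMProvider(); // a new OpenAiLLM carrying the selected embedder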

// Resolves the embedding engine selected via the EMBEDDING_ENGINE env var.
// Unlike the other selectors, this returns null when no engine is configured.
function getEmbeddingEngineSelection() {
  const engineSelection = process.env.EMBEDDING_ENGINE;
  switch (engineSelection) {
    case "openai":
      const { OpenAiEmbedder } = require("../EmbeddingEngines/openAi");
      return new OpenAiEmbedder();
    case "azure":
      const {
        AzureOpenAiEmbedder,
      } = require("../EmbeddingEngines/azureOpenAi");
      return new AzureOpenAiEmbedder();
    case "localai":
      const { LocalAiEmbedder } = require("../EmbeddingEngines/localAi");
      return new LocalAiEmbedder();
    case "native":
      const { NativeEmbedder } = require("../EmbeddingEngines/native");
      return new NativeEmbedder();
    default:
      return null;
  }
}
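
// Illustrative usage only: callers must tolerate a null return when
// EMBEDDING_ENGINE is unset or set to an unrecognized value.
//
//   // EMBEDDING_ENGINE="native" in the environment
//   const embedder = getEmbeddingEngineSelection(); // NativeEmbedder instance, or null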

// Splits an array into consecutive chunks of at most `size` elements; the
// final chunk holds any remainder.
function toChunks(arr, size) {
  return Array.from({ length: Math.ceil(arr.length / size) }, (_v, i) =>
    arr.slice(i * size, i * size + size)
  );
}
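
// Example:
//   toChunks([1, 2, 3, 4, 5], 2); // => [[1, 2], [3, 4], [5]]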

module.exports = {
  getEmbeddingEngineSelection,
  getVectorDbClass,
  getLLMProvider,
  toChunks,
};
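
// Consumer-side sketch (hypothetical caller; the require path depends on where
// this helpers file lives relative to the caller):
//
//   const { getVectorDbClass, getLLMProvider, toChunks } = require("../helpers");
//   const VectorDb = getVectorDbClass();
//   const llm = getLLMProvider();
//   for (const batch of toChunks(items, 50)) {
//     // process each batch with VectorDb / llm as needed
//   }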