process.env.NODE_ENV === "development"
  ? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
  : require("dotenv").config();
const prisma = require("../utils/prisma");
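
// SystemSettings wraps the system_settings table (via Prisma) and exposes a
// client-safe view of environment-driven configuration.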
const SystemSettings = {
  supportedFields: [
    "multi_user_mode",
    "users_can_delete_workspaces",
    "limit_user_messages",
    "message_limit",
    "logo_filename",
    "telemetry_id",
  ],
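
  // Builds the settings payload returned to the frontend. Secret values are
  // reported only as booleans (e.g. !!process.env.OPEN_AI_KEY) so the keys
  // themselves never leave the server.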
  currentSettings: async function () {
    const llmProvider = process.env.LLM_PROVIDER;
    const vectorDB = process.env.VECTOR_DB;
    return {
      RequiresAuth: !!process.env.AUTH_TOKEN,
      AuthToken: !!process.env.AUTH_TOKEN,
      JWTSecret: !!process.env.JWT_SECRET,
      StorageDir: process.env.STORAGE_DIR,
      MultiUserMode: await this.isMultiUserMode(),
      VectorDB: vectorDB,
      HasExistingEmbeddings: await this.hasEmbeddings(),
      EmbeddingEngine: process.env.EMBEDDING_ENGINE,
      EmbeddingBasePath: process.env.EMBEDDING_BASE_PATH,
      EmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
      EmbeddingModelMaxChunkLength:
        process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
      LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,
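
      // Vector DB credentials and endpoints, included only for the provider
      // selected via VECTOR_DB.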
      ...(vectorDB === "pinecone"
        ? {
            PineConeKey: !!process.env.PINECONE_API_KEY,
            PineConeIndex: process.env.PINECONE_INDEX,
          }
        : {}),
      ...(vectorDB === "chroma"
        ? {
            ChromaEndpoint: process.env.CHROMA_ENDPOINT,
            ChromaApiHeader: process.env.CHROMA_API_HEADER,
            ChromaApiKey: !!process.env.CHROMA_API_KEY,
          }
        : {}),
      ...(vectorDB === "weaviate"
        ? {
            WeaviateEndpoint: process.env.WEAVIATE_ENDPOINT,
            WeaviateApiKey: process.env.WEAVIATE_API_KEY,
          }
        : {}),
      ...(vectorDB === "qdrant"
        ? {
            QdrantEndpoint: process.env.QDRANT_ENDPOINT,
            QdrantApiKey: process.env.QDRANT_API_KEY,
          }
        : {}),
      ...(vectorDB === "milvus"
        ? {
            MilvusAddress: process.env.MILVUS_ADDRESS,
            MilvusUsername: process.env.MILVUS_USERNAME,
            MilvusPassword: !!process.env.MILVUS_PASSWORD,
          }
        : {}),
      ...(vectorDB === "zilliz"
        ? {
            ZillizEndpoint: process.env.ZILLIZ_ENDPOINT,
            ZillizApiToken: process.env.ZILLIZ_API_TOKEN,
          }
        : {}),
      ...(vectorDB === "astra"
        ? {
            AstraDBApplicationToken: process?.env?.ASTRA_DB_APPLICATION_TOKEN,
            AstraDBEndpoint: process?.env?.ASTRA_DB_ENDPOINT,
          }
        : {}),
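
      // LLM provider settings, included only for the provider selected via
      // LLM_PROVIDER. Most providers also return the OpenAI/Azure embedding
      // credentials used when that provider is selected.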
      LLMProvider: llmProvider,
      ...(llmProvider === "openai"
        ? {
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            OpenAiModelPref: process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo",
          }
        : {}),
      ...(llmProvider === "azure"
        ? {
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
            AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
          }
        : {}),
      ...(llmProvider === "anthropic"
        ? {
            AnthropicApiKey: !!process.env.ANTHROPIC_API_KEY,
            AnthropicModelPref: process.env.ANTHROPIC_MODEL_PREF || "claude-2",

            // For embedding credentials when Anthropic is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "gemini"
        ? {
            GeminiLLMApiKey: !!process.env.GEMINI_API_KEY,
            GeminiLLMModelPref:
              process.env.GEMINI_LLM_MODEL_PREF || "gemini-pro",

            // For embedding credentials when Gemini is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "lmstudio"
        ? {
            LMStudioBasePath: process.env.LMSTUDIO_BASE_PATH,
            LMStudioTokenLimit: process.env.LMSTUDIO_MODEL_TOKEN_LIMIT,

            // For embedding credentials when lmstudio is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "localai"
        ? {
            LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
            LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
            LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,

            // For embedding credentials when localai is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "ollama"
        ? {
            OllamaLLMBasePath: process.env.OLLAMA_BASE_PATH,
            OllamaLLMModelPref: process.env.OLLAMA_MODEL_PREF,
            OllamaLLMTokenLimit: process.env.OLLAMA_MODEL_TOKEN_LIMIT,

            // For embedding credentials when ollama is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "togetherai"
        ? {
            TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
            TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,

            // For embedding credentials when togetherai is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "mistral"
        ? {
            MistralApiKey: !!process.env.MISTRAL_API_KEY,
            MistralModelPref: process.env.MISTRAL_MODEL_PREF,

            // For embedding credentials when mistral is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "native"
        ? {
            NativeLLMModelPref: process.env.NATIVE_LLM_MODEL_PREF,
            NativeLLMTokenLimit: process.env.NATIVE_LLM_MODEL_TOKEN_LIMIT,

            // For embedding credentials when native is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
      ...(llmProvider === "huggingface"
        ? {
            HuggingFaceLLMEndpoint: process.env.HUGGING_FACE_LLM_ENDPOINT,
            HuggingFaceLLMAccessToken: !!process.env.HUGGING_FACE_LLM_API_KEY,
            HuggingFaceLLMTokenLimit: process.env.HUGGING_FACE_LLM_TOKEN_LIMIT,

            // For embedding credentials when huggingface is selected.
            OpenAiKey: !!process.env.OPEN_AI_KEY,
            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
          }
        : {}),
    };
  },
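
  // Fetches the first system_settings row matching the clause, or null.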
  get: async function (clause = {}) {
    try {
      const setting = await prisma.system_settings.findFirst({ where: clause });
      return setting || null;
    } catch (error) {
      console.error(error.message);
      return null;
    }
  },
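
  // Fetches all rows matching the clause, optionally capped at `limit`.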
  where: async function (clause = {}, limit) {
    try {
      const settings = await prisma.system_settings.findMany({
        where: clause,
        take: limit || undefined,
      });
      return settings;
    } catch (error) {
      console.error(error.message);
      return [];
    }
  },
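
  // Upserts each supported key as a string value; keys outside
  // supportedFields are silently ignored.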
  updateSettings: async function (updates = {}) {
    try {
      const updatePromises = Object.keys(updates)
        .filter((key) => this.supportedFields.includes(key))
        .map((key) => {
          return prisma.system_settings.upsert({
            where: { label: key },
            update: {
              value: updates[key] === null ? null : String(updates[key]),
            },
            create: {
              label: key,
              value: updates[key] === null ? null : String(updates[key]),
            },
          });
        });

      await Promise.all(updatePromises);
      return { success: true, error: null };
    } catch (error) {
      console.error("FAILED TO UPDATE SYSTEM SETTINGS", error.message);
      return { success: false, error: error.message };
    }
  },
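
  // Convenience readers for individual flags stored in system_settings.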
  isMultiUserMode: async function () {
    try {
      const setting = await this.get({ label: "multi_user_mode" });
      return setting?.value === "true";
    } catch (error) {
      console.error(error.message);
      return false;
    }
  },
  currentLogoFilename: async function () {
    try {
      const setting = await this.get({ label: "logo_filename" });
      return setting?.value || null;
    } catch (error) {
      console.error(error.message);
      return null;
    }
  },
  canDeleteWorkspaces: async function () {
    try {
      const setting = await this.get({ label: "users_can_delete_workspaces" });
      return setting?.value === "true";
    } catch (error) {
      console.error(error.message);
      return false;
    }
  },
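
  // True once at least one document row exists, i.e. something has been
  // embedded into a workspace.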
  hasEmbeddings: async function () {
    try {
      const { Document } = require("./documents");
      const count = await Document.count({}, 1);
      return count > 0;
    } catch (error) {
      console.error(error.message);
      return false;
    }
  },
};

module.exports.SystemSettings = SystemSettings;
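
// Minimal usage sketch (illustrative only — the Express route and require
// path below are hypothetical, not part of this module):
//
//   const { SystemSettings } = require("../models/systemSettings");
//
//   app.get("/system/settings", async (_, response) => {
//     const settings = await SystemSettings.currentSettings();
//     response.status(200).json({ settings });
//   });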