Make LanceDB the default vector database provider in the backend. This prevents the case where the key is somehow never set by the user, which previously caused a Pinecone error even though the user never chose Pinecone as their vector DB.

This commit is contained in:
timothycarambat 2024-05-13 12:22:53 -07:00
parent 790fd58b97
commit a87978d1d9
9 changed files with 13 additions and 13 deletions

View File

@@ -64,7 +64,7 @@ export default function PrivacyAndDataHandling() {
function ThirdParty({ settings }) {
const llmChoice = settings?.LLMProvider || "openai";
const embeddingEngine = settings?.EmbeddingEngine || "openai";
-const vectorDb = settings?.VectorDB || "pinecone";
+const vectorDb = settings?.VectorDB || "lancedb";
return (
<div className="py-8 w-full flex items-start justify-center flex-col gap-y-6 border-b-2 border-white/10">

View File

@@ -299,7 +299,7 @@ export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
async function fetchKeys() {
const _settings = await System.keys();
setLLMChoice(_settings?.LLMProvider || "openai");
-setVectorDb(_settings?.VectorDB || "pinecone");
+setVectorDb(_settings?.VectorDB || "lancedb");
setEmbeddingEngine(_settings?.EmbeddingEngine || "openai");
setLoading(false);

View File

@@ -72,7 +72,7 @@ function apiWorkspaceEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent("api_workspace_created", {
workspaceName: workspace?.name || "Unknown Workspace",
@@ -525,7 +525,7 @@ function apiWorkspaceEndpoints(app) {
await Telemetry.sendTelemetry("sent_chat", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent("api_sent_chat", {
workspaceName: workspace?.name,
@@ -647,7 +647,7 @@ function apiWorkspaceEndpoints(app) {
await Telemetry.sendTelemetry("sent_chat", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent("api_sent_chat", {
workspaceName: workspace?.name,

View File

@@ -92,7 +92,7 @@ function chatEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent(
@@ -200,7 +200,7 @@ function chatEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent(

View File

@@ -46,7 +46,7 @@ function embeddedEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
response.end();
} catch (e) {

View File

@@ -34,7 +34,7 @@ function workspaceThreadEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
},
user?.id
);

View File

@@ -45,7 +45,7 @@ function workspaceEndpoints(app) {
multiUserMode: multiUserMode(response),
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
},
user?.id
);

View File

@@ -114,7 +114,7 @@ const Document = {
await Telemetry.sendTelemetry("documents_embedded_in_workspace", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent(
"workspace_documents_added",
@@ -157,7 +157,7 @@ const Document = {
await Telemetry.sendTelemetry("documents_removed_in_workspace", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
-VectorDbSelection: process.env.VECTOR_DB || "pinecone",
+VectorDbSelection: process.env.VECTOR_DB || "lancedb",
});
await EventLogs.logEvent(
"workspace_documents_removed",

View File

@@ -1,5 +1,5 @@
function getVectorDbClass() {
-const vectorSelection = process.env.VECTOR_DB || "pinecone";
+const vectorSelection = process.env.VECTOR_DB || "lancedb";
switch (vectorSelection) {
case "pinecone":
const { Pinecone } = require("../vectorDbProviders/pinecone");