From bf435b28612a06a572551f9d5eb2510febebff31 Mon Sep 17 00:00:00 2001
From: Timothy Carambat
Date: Tue, 30 Apr 2024 10:11:56 -0700
Subject: [PATCH] Adjust how text is split depending on input type (#1238)

resolves #1230
---
 server/utils/EmbeddingEngines/azureOpenAi/index.js | 4 +++-
 server/utils/EmbeddingEngines/lmstudio/index.js    | 4 +++-
 server/utils/EmbeddingEngines/localAi/index.js     | 4 +++-
 server/utils/EmbeddingEngines/native/index.js      | 4 +++-
 server/utils/EmbeddingEngines/ollama/index.js      | 4 +++-
 server/utils/EmbeddingEngines/openAi/index.js      | 4 +++-
 6 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/server/utils/EmbeddingEngines/azureOpenAi/index.js b/server/utils/EmbeddingEngines/azureOpenAi/index.js
index 1f9362c9..62f69660 100644
--- a/server/utils/EmbeddingEngines/azureOpenAi/index.js
+++ b/server/utils/EmbeddingEngines/azureOpenAi/index.js
@@ -23,7 +23,9 @@ class AzureOpenAiEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks(textInput);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
 
diff --git a/server/utils/EmbeddingEngines/lmstudio/index.js b/server/utils/EmbeddingEngines/lmstudio/index.js
index b19ea262..6874b4b3 100644
--- a/server/utils/EmbeddingEngines/lmstudio/index.js
+++ b/server/utils/EmbeddingEngines/lmstudio/index.js
@@ -31,7 +31,9 @@ class LMStudioEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks(textInput);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
 
diff --git a/server/utils/EmbeddingEngines/localAi/index.js b/server/utils/EmbeddingEngines/localAi/index.js
index 2c9db2c7..0f4b79d8 100644
--- a/server/utils/EmbeddingEngines/localAi/index.js
+++ b/server/utils/EmbeddingEngines/localAi/index.js
@@ -23,7 +23,9 @@ class LocalAiEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks(textInput);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
 
diff --git a/server/utils/EmbeddingEngines/native/index.js b/server/utils/EmbeddingEngines/native/index.js
index 04b754e0..ae73c489 100644
--- a/server/utils/EmbeddingEngines/native/index.js
+++ b/server/utils/EmbeddingEngines/native/index.js
@@ -119,7 +119,9 @@ class NativeEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks(textInput);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
 
diff --git a/server/utils/EmbeddingEngines/ollama/index.js b/server/utils/EmbeddingEngines/ollama/index.js
index f6b33376..2d1cea7a 100644
--- a/server/utils/EmbeddingEngines/ollama/index.js
+++ b/server/utils/EmbeddingEngines/ollama/index.js
@@ -30,7 +30,9 @@ class OllamaEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks([textInput]);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
 
diff --git a/server/utils/EmbeddingEngines/openAi/index.js b/server/utils/EmbeddingEngines/openAi/index.js
index 49841343..a0d8a4f0 100644
--- a/server/utils/EmbeddingEngines/openAi/index.js
+++ b/server/utils/EmbeddingEngines/openAi/index.js
@@ -19,7 +19,9 @@ class OpenAiEmbedder {
   }
 
   async embedTextInput(textInput) {
-    const result = await this.embedChunks(textInput);
+    const result = await this.embedChunks(
+      Array.isArray(textInput) ? textInput : [textInput]
+    );
     return result?.[0] || [];
   }
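Note: the hunks above all apply the same pattern, sketched below for reference. embedTextInput now normalizes its argument so that a bare string and an array of strings are both forwarded to embedChunks as an array, while callers of embedTextInput still get back a single vector. ExampleEmbedder and its embedChunks body are illustrative stand-ins, not AnythingLLM code; only the embedTextInput wrapper mirrors the diff.

// Minimal sketch of the normalization applied in each embedder (assumed names).
class ExampleEmbedder {
  async embedChunks(textChunks = []) {
    // Stand-in for a real embedding call: returns one "vector" per chunk.
    return textChunks.map((chunk) => [chunk.length]);
  }

  async embedTextInput(textInput) {
    // Wrap a bare string so embedChunks always receives an array.
    const result = await this.embedChunks(
      Array.isArray(textInput) ? textInput : [textInput]
    );
    // embedTextInput callers expect a single vector, so return the first one.
    return result?.[0] || [];
  }
}

// Usage: both call forms resolve to the same shape.
// await new ExampleEmbedder().embedTextInput("hello");            // -> [5]
// await new ExampleEmbedder().embedTextInput(["hello", "world"]); // -> [5]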