mirror of
https://github.com/Mintplex-Labs/anything-llm.git
synced 2024-11-19 20:50:09 +01:00
merge with master
This commit is contained in:
commit
2b6e1db79b
@ -71,6 +71,8 @@ Some cool features of AnythingLLM
|
||||
- [LM Studio (all models)](https://lmstudio.ai)
|
||||
- [LocalAi (all models)](https://localai.io/)
|
||||
- [Together AI (chat models)](https://www.together.ai/)
|
||||
- [Perplexity (chat models)](https://www.perplexity.ai/)
|
||||
- [OpenRouter (chat models)](https://openrouter.ai/)
|
||||
- [Mistral](https://mistral.ai/)
|
||||
|
||||
**Supported Embedding models:**
|
||||
@ -80,6 +82,7 @@ Some cool features of AnythingLLM
|
||||
- [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service)
|
||||
- [LM Studio (all)](https://lmstudio.ai)
|
||||
- [LocalAi (all)](https://localai.io/)
|
||||
- [Ollama (all)](https://ollama.ai/)
|
||||
|
||||
**Supported Vector Databases:**
|
||||
|
||||
@ -109,7 +112,7 @@ Mintplex Labs & the community maintain a number of deployment methods, scripts,
|
||||
| [![Deploy on Docker][docker-btn]][docker-deploy] | [![Deploy on AWS][aws-btn]][aws-deploy] | [![Deploy on GCP][gcp-btn]][gcp-deploy] | [![Deploy on DigitalOcean][do-btn]][aws-deploy] | [![Deploy on Render.com][render-btn]][render-deploy] |
|
||||
|
||||
| Railway |
|
||||
|----------------------------------------|
|
||||
| --------------------------------------------------- |
|
||||
| [![Deploy on Railway][railway-btn]][railway-deploy] |
|
||||
|
||||
[or set up a production AnythingLLM instance without Docker →](./BARE_METAL.md)
|
||||
|
@ -4,7 +4,7 @@ const {
|
||||
WATCH_DIRECTORY,
|
||||
SUPPORTED_FILETYPE_CONVERTERS,
|
||||
} = require("../utils/constants");
|
||||
const { trashFile } = require("../utils/files");
|
||||
const { trashFile, isTextType } = require("../utils/files");
|
||||
const RESERVED_FILES = ["__HOTDIR__.md"];
|
||||
|
||||
async function processSingleFile(targetFilename) {
|
||||
@ -31,17 +31,25 @@ async function processSingleFile(targetFilename) {
|
||||
};
|
||||
}
|
||||
|
||||
if (!Object.keys(SUPPORTED_FILETYPE_CONVERTERS).includes(fileExtension)) {
|
||||
let processFileAs = fileExtension;
|
||||
if (!SUPPORTED_FILETYPE_CONVERTERS.hasOwnProperty(fileExtension)) {
|
||||
if (isTextType(fullFilePath)) {
|
||||
console.log(
|
||||
`\x1b[33m[Collector]\x1b[0m The provided filetype of ${fileExtension} does not have a preset and will be processed as .txt.`
|
||||
);
|
||||
processFileAs = ".txt";
|
||||
} else {
|
||||
trashFile(fullFilePath);
|
||||
return {
|
||||
success: false,
|
||||
reason: `File extension ${fileExtension} not supported for parsing.`,
|
||||
reason: `File extension ${fileExtension} not supported for parsing and cannot be assumed as text file type.`,
|
||||
documents: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const FileTypeProcessor = require(SUPPORTED_FILETYPE_CONVERTERS[
|
||||
fileExtension
|
||||
processFileAs
|
||||
]);
|
||||
return await FileTypeProcessor({
|
||||
fullFilePath,
|
||||
|
@ -1,10 +1,38 @@
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const { getType } = require("mime");
|
||||
const documentsFolder =
|
||||
process.env.NODE_ENV === "production"
|
||||
? path.resolve("/storage/documents") // hardcoded to Render storage mount.
|
||||
: path.resolve(__dirname, "../../../server/storage/documents");
|
||||
|
||||
function isTextType(filepath) {
|
||||
if (!fs.existsSync(filepath)) return false;
|
||||
// These are types of mime primary classes that for sure
|
||||
// cannot also for forced into a text type.
|
||||
const nonTextTypes = ["multipart", "image", "model", "audio", "video"];
|
||||
// These are full-mimes we for sure cannot parse or interpret as text
|
||||
// documents
|
||||
const BAD_MIMES = [
|
||||
"application/octet-stream",
|
||||
"application/zip",
|
||||
"application/pkcs8",
|
||||
"application/vnd.microsoft.portable-executable",
|
||||
"application/x-msdownload",
|
||||
];
|
||||
|
||||
try {
|
||||
const mime = getType(filepath);
|
||||
if (BAD_MIMES.includes(mime)) return false;
|
||||
|
||||
const type = mime.split("/")[0];
|
||||
if (nonTextTypes.includes(type)) return false;
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function trashFile(filepath) {
|
||||
if (!fs.existsSync(filepath)) return;
|
||||
|
||||
@ -97,6 +125,7 @@ async function wipeCollectorStorage() {
|
||||
module.exports = {
|
||||
documentsFolder,
|
||||
trashFile,
|
||||
isTextType,
|
||||
createdDate,
|
||||
writeToServerDocuments,
|
||||
wipeCollectorStorage,
|
||||
|
@ -48,6 +48,14 @@ GID='1000'
|
||||
# MISTRAL_API_KEY='example-mistral-ai-api-key'
|
||||
# MISTRAL_MODEL_PREF='mistral-tiny'
|
||||
|
||||
# LLM_PROVIDER='perplexity'
|
||||
# PERPLEXITY_API_KEY='my-perplexity-key'
|
||||
# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
|
||||
|
||||
# LLM_PROVIDER='openrouter'
|
||||
# OPENROUTER_API_KEY='my-openrouter-key'
|
||||
# OPENROUTER_MODEL_PREF='openrouter/auto'
|
||||
|
||||
# LLM_PROVIDER='huggingface'
|
||||
# HUGGING_FACE_LLM_ENDPOINT=https://uuid-here.us-east-1.aws.endpoints.huggingface.cloud
|
||||
# HUGGING_FACE_LLM_API_KEY=hf_xxxxxx
|
||||
@ -71,6 +79,11 @@ GID='1000'
|
||||
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
|
||||
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=1000 # The max chunk size in chars a string to embed can be
|
||||
|
||||
# EMBEDDING_ENGINE='ollama'
|
||||
# EMBEDDING_BASE_PATH='http://127.0.0.1:11434'
|
||||
# EMBEDDING_MODEL_PREF='nomic-embed-text:latest'
|
||||
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
|
||||
|
||||
###########################################
|
||||
######## Vector Database Selection ########
|
||||
###########################################
|
||||
|
@ -9,7 +9,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="AzureOpenAiEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://my-azure.openai.azure.com"
|
||||
defaultValue={settings?.AzureOpenAiEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="AzureOpenAiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Azure OpenAI API Key"
|
||||
defaultValue={settings?.AzureOpenAiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -41,7 +41,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="AzureOpenAiEmbeddingModelPref"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Azure OpenAI embedding model deployment name"
|
||||
defaultValue={settings?.AzureOpenAiEmbeddingModelPref}
|
||||
required={true}
|
||||
|
@ -27,11 +27,9 @@ export default function EmbedderItem({
|
||||
alt={`${name} logo`}
|
||||
className="w-10 h-10 rounded-md"
|
||||
/>
|
||||
<div className="flex flex-col gap-y-1">
|
||||
<div className="flex flex-col">
|
||||
<div className="text-sm font-semibold">{name}</div>
|
||||
<div className="mt-2 text-xs text-white tracking-wide">
|
||||
{description}
|
||||
</div>
|
||||
<div className="mt-1 text-xs text-white/60">{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -19,7 +19,7 @@ export default function LocalAiOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="EmbeddingBasePath"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:8080/v1"
|
||||
defaultValue={settings?.EmbeddingBasePath}
|
||||
onChange={(e) => setBasePathValue(e.target.value)}
|
||||
@ -41,7 +41,7 @@ export default function LocalAiOptions({ settings }) {
|
||||
<input
|
||||
type="number"
|
||||
name="EmbeddingModelMaxChunkLength"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="1000"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
@ -62,7 +62,7 @@ export default function LocalAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="LocalAiApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="sk-mysecretkey"
|
||||
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
|
||||
autoComplete="off"
|
||||
@ -108,7 +108,7 @@ function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
|
||||
<select
|
||||
name="EmbeddingModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
{basePath?.includes("/v1")
|
||||
@ -128,7 +128,7 @@ function LocalAIModelSelection({ settings, apiKey = null, basePath = null }) {
|
||||
<select
|
||||
name="EmbeddingModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Your loaded models">
|
||||
|
@ -0,0 +1,120 @@
|
||||
import React, { useEffect, useState } from "react";
|
||||
import System from "@/models/system";
|
||||
|
||||
export default function OllamaEmbeddingOptions({ settings }) {
|
||||
const [basePathValue, setBasePathValue] = useState(
|
||||
settings?.EmbeddingBasePath
|
||||
);
|
||||
const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
|
||||
|
||||
return (
|
||||
<div className="w-full flex flex-col gap-y-4">
|
||||
<div className="w-full flex items-center gap-4">
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
LocalAI Base URL
|
||||
</label>
|
||||
<input
|
||||
type="url"
|
||||
name="EmbeddingBasePath"
|
||||
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://127.0.0.1:11434"
|
||||
defaultValue={settings?.EmbeddingBasePath}
|
||||
onChange={(e) => setBasePathValue(e.target.value)}
|
||||
onBlur={() => setBasePath(basePathValue)}
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
spellCheck={false}
|
||||
/>
|
||||
</div>
|
||||
<OllamaLLMModelSelection settings={settings} basePath={basePath} />
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Max embedding chunk length
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
name="EmbeddingModelMaxChunkLength"
|
||||
className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="8192"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
defaultValue={settings?.EmbeddingModelMaxChunkLength}
|
||||
required={false}
|
||||
autoComplete="off"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function OllamaLLMModelSelection({ settings, basePath = null }) {
|
||||
const [customModels, setCustomModels] = useState([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
async function findCustomModels() {
|
||||
if (!basePath) {
|
||||
setCustomModels([]);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
setLoading(true);
|
||||
const { models } = await System.customModels("ollama", null, basePath);
|
||||
setCustomModels(models || []);
|
||||
setLoading(false);
|
||||
}
|
||||
findCustomModels();
|
||||
}, [basePath]);
|
||||
|
||||
if (loading || customModels.length == 0) {
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Embedding Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="EmbeddingModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
{!!basePath
|
||||
? "-- loading available models --"
|
||||
: "-- waiting for URL --"}
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Embedding Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="EmbeddingModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Your loaded models">
|
||||
{customModels.map((model) => {
|
||||
return (
|
||||
<option
|
||||
key={model.id}
|
||||
value={model.id}
|
||||
selected={settings.EmbeddingModelPref === model.id}
|
||||
>
|
||||
{model.id}
|
||||
</option>
|
||||
);
|
||||
})}
|
||||
</optgroup>
|
||||
)}
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
@ -9,7 +9,7 @@ export default function OpenAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="OpenAiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="OpenAI API Key"
|
||||
defaultValue={settings?.OpenAiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -24,7 +24,7 @@ export default function OpenAiOptions({ settings }) {
|
||||
<select
|
||||
name="EmbeddingModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<optgroup label="Available embedding models">
|
||||
{[
|
||||
|
@ -29,7 +29,7 @@ export default function AnthropicAiOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="password"
|
||||
name="AnthropicApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Anthropic Claude-2 API Key"
|
||||
defaultValue={settings?.AnthropicApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -46,7 +46,7 @@ export default function AnthropicAiOptions({ settings, showAlert = false }) {
|
||||
name="AnthropicModelPref"
|
||||
defaultValue={settings?.AnthropicModelPref || "claude-2"}
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{["claude-2", "claude-instant-1"].map((model) => {
|
||||
return (
|
||||
|
@ -9,7 +9,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="AzureOpenAiEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://my-azure.openai.azure.com"
|
||||
defaultValue={settings?.AzureOpenAiEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="AzureOpenAiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Azure OpenAI API Key"
|
||||
defaultValue={settings?.AzureOpenAiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -41,7 +41,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="AzureOpenAiModelPref"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Azure OpenAI chat model deployment name"
|
||||
defaultValue={settings?.AzureOpenAiModelPref}
|
||||
required={true}
|
||||
@ -59,7 +59,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<select
|
||||
name="AzureOpenAiTokenLimit"
|
||||
defaultValue={settings?.AzureOpenAiTokenLimit || 4096}
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
required={true}
|
||||
>
|
||||
<option value={4096}>4,096 (gpt-3.5-turbo)</option>
|
||||
@ -77,7 +77,7 @@ export default function AzureAiOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="AzureOpenAiEmbeddingModelPref"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Azure OpenAI embedding model deployment name"
|
||||
defaultValue={settings?.AzureOpenAiEmbeddingModelPref}
|
||||
required={true}
|
||||
|
@ -9,7 +9,7 @@ export default function GeminiLLMOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="GeminiLLMApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Google Gemini API Key"
|
||||
defaultValue={settings?.GeminiLLMApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -26,7 +26,7 @@ export default function GeminiLLMOptions({ settings }) {
|
||||
name="GeminiLLMModelPref"
|
||||
defaultValue={settings?.GeminiLLMModelPref || "gemini-pro"}
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{["gemini-pro"].map((model) => {
|
||||
return (
|
||||
|
@ -9,7 +9,7 @@ export default function HuggingFaceOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="HuggingFaceLLMEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://example.endpoints.huggingface.cloud"
|
||||
defaultValue={settings?.HuggingFaceLLMEndpoint}
|
||||
required={true}
|
||||
@ -24,7 +24,7 @@ export default function HuggingFaceOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="HuggingFaceLLMAccessToken"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="HuggingFace Access Token"
|
||||
defaultValue={
|
||||
settings?.HuggingFaceLLMAccessToken ? "*".repeat(20) : ""
|
||||
@ -41,7 +41,7 @@ export default function HuggingFaceOptions({ settings }) {
|
||||
<input
|
||||
type="number"
|
||||
name="HuggingFaceLLMTokenLimit"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="4096"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
|
@ -27,11 +27,9 @@ export default function LLMItem({
|
||||
alt={`${name} logo`}
|
||||
className="w-10 h-10 rounded-md"
|
||||
/>
|
||||
<div className="flex flex-col gap-y-1">
|
||||
<div className="flex flex-col">
|
||||
<div className="text-sm font-semibold">{name}</div>
|
||||
<div className="mt-2 text-xs text-white tracking-wide">
|
||||
{description}
|
||||
</div>
|
||||
<div className="mt-1 text-xs text-white/60">{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -29,7 +29,7 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="url"
|
||||
name="LMStudioBasePath"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:1234/v1"
|
||||
defaultValue={settings?.LMStudioBasePath}
|
||||
required={true}
|
||||
@ -44,7 +44,7 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="number"
|
||||
name="LMStudioTokenLimit"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="4096"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
|
@ -36,7 +36,7 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="url"
|
||||
name="LocalAiBasePath"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:1234/v1"
|
||||
defaultValue={settings?.LocalAiBasePath}
|
||||
required={true}
|
||||
@ -58,7 +58,7 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="number"
|
||||
name="LocalAiTokenLimit"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="4096"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
@ -80,7 +80,7 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
|
||||
<input
|
||||
type="password"
|
||||
name="LocalAiApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="sk-mysecretkey"
|
||||
defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
|
||||
autoComplete="off"
|
||||
@ -126,7 +126,7 @@ function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
|
||||
<select
|
||||
name="LocalAiModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
{basePath?.includes("/v1")
|
||||
@ -146,7 +146,7 @@ function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
|
||||
<select
|
||||
name="LocalAiModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Your loaded models">
|
||||
|
@ -14,7 +14,7 @@ export default function MistralOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="MistralApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Mistral API Key"
|
||||
defaultValue={settings?.MistralApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -60,7 +60,7 @@ function MistralModelSelection({ apiKey, settings }) {
|
||||
<select
|
||||
name="MistralModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
{!!apiKey
|
||||
@ -80,7 +80,7 @@ function MistralModelSelection({ apiKey, settings }) {
|
||||
<select
|
||||
name="MistralModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Available Mistral Models">
|
||||
|
@ -43,7 +43,7 @@ function NativeModelSelection({ settings }) {
|
||||
<select
|
||||
name="NativeLLMModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- waiting for models --
|
||||
@ -62,7 +62,7 @@ function NativeModelSelection({ settings }) {
|
||||
<select
|
||||
name="NativeLLMModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Your loaded models">
|
||||
@ -88,7 +88,7 @@ function NativeModelSelection({ settings }) {
|
||||
<input
|
||||
type="number"
|
||||
name="NativeLLMTokenLimit"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="4096"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
|
@ -17,7 +17,7 @@ export default function OllamaLLMOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="OllamaLLMBasePath"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://127.0.0.1:11434"
|
||||
defaultValue={settings?.OllamaLLMBasePath}
|
||||
required={true}
|
||||
@ -35,7 +35,7 @@ export default function OllamaLLMOptions({ settings }) {
|
||||
<input
|
||||
type="number"
|
||||
name="OllamaLLMTokenLimit"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="4096"
|
||||
min={1}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
@ -77,7 +77,7 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
|
||||
<select
|
||||
name="OllamaLLMModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
{!!basePath
|
||||
@ -97,7 +97,7 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
|
||||
<select
|
||||
name="OllamaLLMModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Your loaded models">
|
||||
|
@ -14,7 +14,7 @@ export default function OpenAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="OpenAiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="OpenAI API Key"
|
||||
defaultValue={settings?.OpenAiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -60,7 +60,7 @@ function OpenAIModelSelection({ apiKey, settings }) {
|
||||
<select
|
||||
name="OpenAiModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- loading available models --
|
||||
@ -78,7 +78,7 @@ function OpenAIModelSelection({ apiKey, settings }) {
|
||||
<select
|
||||
name="OpenAiModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<optgroup label="General LLM models">
|
||||
{[
|
||||
|
@ -0,0 +1,97 @@
|
||||
import System from "@/models/system";
|
||||
import { useState, useEffect } from "react";
|
||||
|
||||
export default function OpenRouterOptions({ settings }) {
|
||||
return (
|
||||
<div className="flex gap-x-4">
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
OpenRouter API Key
|
||||
</label>
|
||||
<input
|
||||
type="password"
|
||||
name="OpenRouterApiKey"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="OpenRouter API Key"
|
||||
defaultValue={settings?.OpenRouterApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
spellCheck={false}
|
||||
/>
|
||||
</div>
|
||||
<OpenRouterModelSelection settings={settings} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function OpenRouterModelSelection({ settings }) {
|
||||
const [groupedModels, setGroupedModels] = useState({});
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
async function findCustomModels() {
|
||||
setLoading(true);
|
||||
const { models } = await System.customModels("openrouter");
|
||||
if (models?.length > 0) {
|
||||
const modelsByOrganization = models.reduce((acc, model) => {
|
||||
acc[model.organization] = acc[model.organization] || [];
|
||||
acc[model.organization].push(model);
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
setGroupedModels(modelsByOrganization);
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
}
|
||||
findCustomModels();
|
||||
}, []);
|
||||
|
||||
if (loading || Object.keys(groupedModels).length === 0) {
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Chat Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="OpenRouterModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- loading available models --
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Chat Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="OpenRouterModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{Object.keys(groupedModels)
|
||||
.sort()
|
||||
.map((organization) => (
|
||||
<optgroup key={organization} label={organization}>
|
||||
{groupedModels[organization].map((model) => (
|
||||
<option
|
||||
key={model.id}
|
||||
value={model.id}
|
||||
selected={settings.OpenRouterModelPref === model.id}
|
||||
>
|
||||
{model.name}
|
||||
</option>
|
||||
))}
|
||||
</optgroup>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
@ -0,0 +1,88 @@
|
||||
import System from "@/models/system";
|
||||
import { useState, useEffect } from "react";
|
||||
|
||||
export default function PerplexityOptions({ settings }) {
|
||||
return (
|
||||
<div className="flex gap-x-4">
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Perplexity API Key
|
||||
</label>
|
||||
<input
|
||||
type="password"
|
||||
name="PerplexityApiKey"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Perplexity API Key"
|
||||
defaultValue={settings?.PerplexityApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
spellCheck={false}
|
||||
/>
|
||||
</div>
|
||||
<PerplexityModelSelection settings={settings} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function PerplexityModelSelection({ settings }) {
|
||||
const [customModels, setCustomModels] = useState([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
async function findCustomModels() {
|
||||
setLoading(true);
|
||||
const { models } = await System.customModels("perplexity");
|
||||
setCustomModels(models || []);
|
||||
setLoading(false);
|
||||
}
|
||||
findCustomModels();
|
||||
}, []);
|
||||
|
||||
if (loading || customModels.length == 0) {
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Chat Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="PerplexityModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- loading available models --
|
||||
</option>
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col w-60">
|
||||
<label className="text-white text-sm font-semibold block mb-4">
|
||||
Chat Model Selection
|
||||
</label>
|
||||
<select
|
||||
name="PerplexityModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{customModels.length > 0 && (
|
||||
<optgroup label="Available Perplexity Models">
|
||||
{customModels.map((model) => {
|
||||
return (
|
||||
<option
|
||||
key={model.id}
|
||||
value={model.id}
|
||||
selected={settings?.PerplexityModelPref === model.id}
|
||||
>
|
||||
{model.id}
|
||||
</option>
|
||||
);
|
||||
})}
|
||||
</optgroup>
|
||||
)}
|
||||
</select>
|
||||
</div>
|
||||
);
|
||||
}
|
@ -11,7 +11,7 @@ export default function TogetherAiOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="TogetherAiApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Together AI API Key"
|
||||
defaultValue={settings?.TogetherAiApiKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -56,7 +56,7 @@ function TogetherAiModelSelection({ settings }) {
|
||||
<select
|
||||
name="TogetherAiModelPref"
|
||||
disabled={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- loading available models --
|
||||
@ -74,15 +74,17 @@ function TogetherAiModelSelection({ settings }) {
|
||||
<select
|
||||
name="TogetherAiModelPref"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{Object.entries(groupedModels).map(([organization, models]) => (
|
||||
{Object.keys(groupedModels)
|
||||
.sort()
|
||||
.map((organization) => (
|
||||
<optgroup key={organization} label={organization}>
|
||||
{models.map((model) => (
|
||||
{groupedModels[organization].map((model) => (
|
||||
<option
|
||||
key={model.id}
|
||||
value={model.id}
|
||||
selected={settings.TogetherAiModelPref === model.id}
|
||||
selected={settings.OpenRouterModelPref === model.id}
|
||||
>
|
||||
{model.name}
|
||||
</option>
|
||||
|
@ -3,12 +3,12 @@ import PreLoader from "@/components/Preloader";
|
||||
import { memo, useEffect, useState } from "react";
|
||||
import FolderRow from "./FolderRow";
|
||||
import pluralize from "pluralize";
|
||||
import System from "@/models/system";
|
||||
|
||||
function Directory({
|
||||
files,
|
||||
loading,
|
||||
setLoading,
|
||||
fileTypes,
|
||||
workspace,
|
||||
fetchKeys,
|
||||
selectedItems,
|
||||
@ -20,6 +20,40 @@ function Directory({
|
||||
}) {
|
||||
const [amountSelected, setAmountSelected] = useState(0);
|
||||
|
||||
const deleteFiles = async (event) => {
|
||||
event.stopPropagation();
|
||||
if (
|
||||
!window.confirm(
|
||||
"Are you sure you want to delete these files?\nThis will remove the files from the system and remove them from any existing workspaces automatically.\nThis action is not reversible."
|
||||
)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const toRemove = [];
|
||||
for (const itemId of Object.keys(selectedItems)) {
|
||||
for (const folder of files.items) {
|
||||
const foundItem = folder.items.find((file) => file.id === itemId);
|
||||
if (foundItem) {
|
||||
toRemove.push(`${folder.name}/${foundItem.name}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
setLoading(true);
|
||||
setLoadingMessage(`Removing ${toRemove.length} documents. Please wait.`);
|
||||
await System.deleteDocuments(toRemove);
|
||||
await fetchKeys(true);
|
||||
setSelectedItems({});
|
||||
} catch (error) {
|
||||
console.error("Failed to delete the document:", error);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
setSelectedItems({});
|
||||
}
|
||||
};
|
||||
|
||||
const toggleSelection = (item) => {
|
||||
setSelectedItems((prevSelectedItems) => {
|
||||
const newSelectedItems = { ...prevSelectedItems };
|
||||
@ -120,24 +154,28 @@ function Directory({
|
||||
</div>
|
||||
|
||||
{amountSelected !== 0 && (
|
||||
<div className="absolute bottom-0 left-0 w-full flex justify-center items-center h-9 bg-white rounded-b-2xl">
|
||||
<div className="flex gap-x-5">
|
||||
<div
|
||||
<div className="absolute bottom-0 left-0 w-full flex justify-between items-center h-9 bg-white rounded-b-2xl">
|
||||
<div className="flex gap-x-5 w-[80%] justify-center">
|
||||
<button
|
||||
onMouseEnter={() => setHighlightWorkspace(true)}
|
||||
onMouseLeave={() => setHighlightWorkspace(false)}
|
||||
onClick={moveToWorkspace}
|
||||
className="text-sm font-semibold h-7 px-2.5 rounded-lg transition-all duration-300 hover:text-white hover:bg-neutral-800/80 cursor-pointer flex items-center"
|
||||
className="border-none text-sm font-semibold h-7 px-2.5 rounded-lg hover:text-white hover:bg-neutral-800/80 flex items-center"
|
||||
>
|
||||
Move {amountSelected} {pluralize("file", amountSelected)} to
|
||||
workspace
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={deleteFiles}
|
||||
className="border-none text-red-500/50 text-sm font-semibold h-7 px-2.5 rounded-lg hover:text-red-500/80 flex items-center"
|
||||
>
|
||||
Delete
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<UploadFile
|
||||
fileTypes={fileTypes}
|
||||
workspace={workspace}
|
||||
fetchKeys={fetchKeys}
|
||||
setLoading={setLoading}
|
||||
|
@ -61,7 +61,7 @@ function FileUploadProgressComponent({
|
||||
if (status === "failed") {
|
||||
return (
|
||||
<div className="h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40 overflow-y-auto">
|
||||
<div className="w-6 h-6">
|
||||
<div className="w-6 h-6 flex-shrink-0">
|
||||
<XCircle className="w-6 h-6 stroke-white bg-red-500 rounded-full p-1 w-full h-full" />
|
||||
</div>
|
||||
<div className="flex flex-col">
|
||||
@ -76,7 +76,7 @@ function FileUploadProgressComponent({
|
||||
|
||||
return (
|
||||
<div className="h-14 px-2 py-2 flex items-center gap-x-4 rounded-lg bg-white/5 border border-white/40">
|
||||
<div className="w-6 h-6">
|
||||
<div className="w-6 h-6 flex-shrink-0">
|
||||
{status !== "complete" ? (
|
||||
<div className="flex items-center justify-center">
|
||||
<PreLoader size="6" />
|
||||
|
@ -7,12 +7,7 @@ import { v4 } from "uuid";
|
||||
import FileUploadProgress from "./FileUploadProgress";
|
||||
import Workspace from "../../../../../models/workspace";
|
||||
|
||||
export default function UploadFile({
|
||||
workspace,
|
||||
fileTypes,
|
||||
fetchKeys,
|
||||
setLoading,
|
||||
}) {
|
||||
export default function UploadFile({ workspace, fetchKeys, setLoading }) {
|
||||
const [ready, setReady] = useState(false);
|
||||
const [files, setFiles] = useState([]);
|
||||
const [fetchingUrl, setFetchingUrl] = useState(false);
|
||||
@ -40,7 +35,7 @@ export default function UploadFile({
|
||||
|
||||
const handleUploadSuccess = () => {
|
||||
fetchKeys(true);
|
||||
showToast("File uploaded successfully", "success");
|
||||
showToast("File uploaded successfully", "success", { clear: true });
|
||||
};
|
||||
|
||||
const handleUploadError = (message) => {
|
||||
@ -76,9 +71,6 @@ export default function UploadFile({
|
||||
|
||||
const { getRootProps, getInputProps } = useDropzone({
|
||||
onDrop,
|
||||
accept: {
|
||||
...fileTypes,
|
||||
},
|
||||
disabled: !ready,
|
||||
});
|
||||
|
||||
@ -109,9 +101,7 @@ export default function UploadFile({
|
||||
Click to upload or drag and drop
|
||||
</div>
|
||||
<div className="text-white text-opacity-60 text-xs font-medium py-1">
|
||||
{Object.values(fileTypes ?? [])
|
||||
.flat()
|
||||
.join(" ")}
|
||||
supports text files, csv's, spreadsheets, audio files, and more!
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
@ -138,7 +128,7 @@ export default function UploadFile({
|
||||
disabled={fetchingUrl}
|
||||
name="link"
|
||||
type="url"
|
||||
className="disabled:bg-zinc-600 disabled:text-slate-300 bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-3/4 p-2.5"
|
||||
className="disabled:bg-zinc-600 disabled:text-slate-300 bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-3/4 p-2.5"
|
||||
placeholder={"https://example.com"}
|
||||
autoComplete="off"
|
||||
/>
|
||||
|
@ -4,12 +4,7 @@ import {
|
||||
getFileExtension,
|
||||
middleTruncate,
|
||||
} from "@/utils/directories";
|
||||
import {
|
||||
ArrowUUpLeft,
|
||||
File,
|
||||
PushPin,
|
||||
PushPinSlash,
|
||||
} from "@phosphor-icons/react";
|
||||
import { ArrowUUpLeft, File, PushPin } from "@phosphor-icons/react";
|
||||
import Workspace from "@/models/workspace";
|
||||
import debounce from "lodash.debounce";
|
||||
import { Tooltip } from "react-tooltip";
|
||||
@ -144,28 +139,27 @@ const PinItemToWorkspace = memo(({ workspace, docPath, item }) => {
|
||||
|
||||
if (!item) return <div />;
|
||||
|
||||
const PinIcon = pinned ? PushPinSlash : PushPin;
|
||||
return (
|
||||
<div
|
||||
onMouseEnter={() => setHover(true)}
|
||||
onMouseLeave={() => setHover(false)}
|
||||
className="flex gap-x-2 items-center hover:bg-main-gradient p-[2px] rounded ml-2"
|
||||
>
|
||||
<PinIcon
|
||||
<PushPin
|
||||
data-tooltip-id={`pin-${item.id}`}
|
||||
data-tooltip-content={
|
||||
pinned ? "Unpin document from workspace" : "Pin document to workspace"
|
||||
pinned ? "Un-Pin from workspace" : "Pin to workspace"
|
||||
}
|
||||
size={16}
|
||||
onClick={updatePinStatus}
|
||||
weight={hover ? "fill" : "regular"}
|
||||
className={`outline-none text-base font-bold w-4 h-4 ml-2 flex-shrink-0 cursor-pointer ${
|
||||
pinned ? "hover:text-red-300" : ""
|
||||
}`}
|
||||
weight={hover || pinned ? "fill" : "regular"}
|
||||
className="outline-none text-base font-bold flex-shrink-0 cursor-pointer"
|
||||
/>
|
||||
<Tooltip
|
||||
id={`pin-${item.id}`}
|
||||
place="bottom"
|
||||
delayShow={300}
|
||||
className="tooltip !text-xs"
|
||||
className="tooltip invert !text-xs"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@ -184,7 +178,7 @@ const RemoveItemFromWorkspace = ({ item, onClick }) => {
|
||||
id={`remove-${item.id}`}
|
||||
place="bottom"
|
||||
delayShow={300}
|
||||
className="tooltip !text-xs"
|
||||
className="tooltip invert !text-xs"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
@ -15,11 +15,7 @@ const MODEL_COSTS = {
|
||||
"text-embedding-3-large": 0.00000013, // $0.00013 / 1K tokens
|
||||
};
|
||||
|
||||
export default function DocumentSettings({
|
||||
workspace,
|
||||
fileTypes,
|
||||
systemSettings,
|
||||
}) {
|
||||
export default function DocumentSettings({ workspace, systemSettings }) {
|
||||
const [highlightWorkspace, setHighlightWorkspace] = useState(false);
|
||||
const [availableDocs, setAvailableDocs] = useState([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
@ -201,7 +197,6 @@ export default function DocumentSettings({
|
||||
loading={loading}
|
||||
loadingMessage={loadingMessage}
|
||||
setLoading={setLoading}
|
||||
fileTypes={fileTypes}
|
||||
workspace={workspace}
|
||||
fetchKeys={fetchKeys}
|
||||
selectedItems={selectedItems}
|
||||
|
@ -11,17 +11,14 @@ const noop = () => {};
|
||||
const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
|
||||
const { slug } = useParams();
|
||||
const [workspace, setWorkspace] = useState(null);
|
||||
const [fileTypes, setFileTypes] = useState(null);
|
||||
const [settings, setSettings] = useState({});
|
||||
|
||||
useEffect(() => {
|
||||
async function checkSupportedFiletypes() {
|
||||
const acceptedTypes = await System.acceptedDocumentTypes();
|
||||
async function getSettings() {
|
||||
const _settings = await System.keys();
|
||||
setFileTypes(acceptedTypes ?? {});
|
||||
setSettings(_settings ?? {});
|
||||
}
|
||||
checkSupportedFiletypes();
|
||||
getSettings();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
@ -78,11 +75,7 @@ const ManageWorkspace = ({ hideModal = noop, providedSlug = null }) => {
|
||||
<X className="text-gray-300 text-lg" />
|
||||
</button>
|
||||
</div>
|
||||
<DocumentSettings
|
||||
workspace={workspace}
|
||||
fileTypes={fileTypes}
|
||||
systemSettings={settings}
|
||||
/>
|
||||
<DocumentSettings workspace={workspace} systemSettings={settings} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -52,7 +52,7 @@ export default function NewWorkspaceModal({ hideModal = noop }) {
|
||||
name="name"
|
||||
type="text"
|
||||
id="name"
|
||||
className="bg-zinc-900 w-full text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 w-full text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="My Workspace"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -125,7 +125,7 @@ export default function AccountModal({ user, hideModal }) {
|
||||
<input
|
||||
name="username"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="User's username"
|
||||
minLength={2}
|
||||
defaultValue={user.username}
|
||||
@ -143,7 +143,7 @@ export default function AccountModal({ user, hideModal }) {
|
||||
<input
|
||||
name="password"
|
||||
type="password"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder={`${user.username}'s new password`}
|
||||
/>
|
||||
</div>
|
||||
|
@ -9,7 +9,7 @@ export default function AstraDBOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="AstraDBEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Astra DB API endpoint"
|
||||
defaultValue={settings?.AstraDBEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function AstraDBOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="AstraDBApplicationToken"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="AstraCS:..."
|
||||
defaultValue={
|
||||
settings?.AstraDBApplicationToken ? "*".repeat(20) : ""
|
||||
|
@ -9,7 +9,7 @@ export default function ChromaDBOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="ChromaEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:8000"
|
||||
defaultValue={settings?.ChromaEndpoint}
|
||||
required={true}
|
||||
@ -27,7 +27,7 @@ export default function ChromaDBOptions({ settings }) {
|
||||
autoComplete="off"
|
||||
type="text"
|
||||
defaultValue={settings?.ChromaApiHeader}
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="X-Api-Key"
|
||||
/>
|
||||
</div>
|
||||
@ -41,7 +41,7 @@ export default function ChromaDBOptions({ settings }) {
|
||||
autoComplete="off"
|
||||
type="password"
|
||||
defaultValue={settings?.ChromaApiKey ? "*".repeat(20) : ""}
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="sk-myApiKeyToAccessMyChromaInstance"
|
||||
/>
|
||||
</div>
|
||||
|
@ -9,7 +9,7 @@ export default function MilvusDBOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="MilvusAddress"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:19530"
|
||||
defaultValue={settings?.MilvusAddress}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function MilvusDBOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="MilvusUsername"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="username"
|
||||
defaultValue={settings?.MilvusUsername}
|
||||
autoComplete="off"
|
||||
@ -39,7 +39,7 @@ export default function MilvusDBOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="MilvusPassword"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="password"
|
||||
defaultValue={settings?.MilvusPassword ? "*".repeat(20) : ""}
|
||||
autoComplete="off"
|
||||
|
@ -9,7 +9,7 @@ export default function PineconeDBOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="PineConeKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Pinecone API Key"
|
||||
defaultValue={settings?.PineConeKey ? "*".repeat(20) : ""}
|
||||
required={true}
|
||||
@ -24,7 +24,7 @@ export default function PineconeDBOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="PineConeIndex"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="my-index"
|
||||
defaultValue={settings?.PineConeIndex}
|
||||
required={true}
|
||||
|
@ -9,7 +9,7 @@ export default function QDrantDBOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="QdrantEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:6633"
|
||||
defaultValue={settings?.QdrantEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function QDrantDBOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="QdrantApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="wOeqxsYP4....1244sba"
|
||||
defaultValue={settings?.QdrantApiKey}
|
||||
autoComplete="off"
|
||||
|
@ -27,9 +27,9 @@ export default function VectorDBItem({
|
||||
alt={`${name} logo`}
|
||||
className="w-10 h-10 rounded-md"
|
||||
/>
|
||||
<div className="flex flex-col gap-y-1">
|
||||
<div className="flex flex-col">
|
||||
<div className="text-sm font-semibold">{name}</div>
|
||||
<div className="text-xs text-white tracking-wide">{description}</div>
|
||||
<div className="mt-1 text-xs text-white/60">{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -9,7 +9,7 @@ export default function WeaviateDBOptions({ settings }) {
|
||||
<input
|
||||
type="url"
|
||||
name="WeaviateEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="http://localhost:8080"
|
||||
defaultValue={settings?.WeaviateEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function WeaviateDBOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="WeaviateApiKey"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="sk-123Abcweaviate"
|
||||
defaultValue={settings?.WeaviateApiKey}
|
||||
autoComplete="off"
|
||||
|
@ -9,7 +9,7 @@ export default function ZillizCloudOptions({ settings }) {
|
||||
<input
|
||||
type="text"
|
||||
name="ZillizEndpoint"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://sample.api.gcp-us-west1.zillizcloud.com"
|
||||
defaultValue={settings?.ZillizEndpoint}
|
||||
required={true}
|
||||
@ -25,7 +25,7 @@ export default function ZillizCloudOptions({ settings }) {
|
||||
<input
|
||||
type="password"
|
||||
name="ZillizApiToken"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="Zilliz cluster API Token"
|
||||
defaultValue={settings?.ZillizApiToken ? "*".repeat(20) : ""}
|
||||
autoComplete="off"
|
||||
|
@ -1,31 +1,37 @@
|
||||
import { memo, useState } from "react";
|
||||
import { v4 } from "uuid";
|
||||
import { decode as HTMLDecode } from "he";
|
||||
import { CaretRight, FileText } from "@phosphor-icons/react";
|
||||
import truncate from "truncate";
|
||||
import ModalWrapper from "@/components/ModalWrapper";
|
||||
import { middleTruncate } from "@/utils/directories";
|
||||
import {
|
||||
CaretRight,
|
||||
FileText,
|
||||
Info,
|
||||
ArrowSquareOut,
|
||||
GithubLogo,
|
||||
Link,
|
||||
X,
|
||||
YoutubeLogo,
|
||||
} from "@phosphor-icons/react";
|
||||
import { Tooltip } from "react-tooltip";
|
||||
import { toPercentString } from "@/utils/numbers";
|
||||
|
||||
function combineLikeSources(sources) {
|
||||
const combined = {};
|
||||
sources.forEach((source) => {
|
||||
const { id, title, text, chunkSource = "" } = source;
|
||||
const { id, title, text, chunkSource = "", score = null } = source;
|
||||
if (combined.hasOwnProperty(title)) {
|
||||
combined[title].text += `\n\n ---- Chunk ${id || ""} ---- \n\n${text}`;
|
||||
combined[title].chunks.push({ id, text, chunkSource, score });
|
||||
combined[title].references += 1;
|
||||
combined[title].chunkSource = chunkSource;
|
||||
} else {
|
||||
combined[title] = { title, text, chunkSource, references: 1 };
|
||||
combined[title] = {
|
||||
title,
|
||||
chunks: [{ id, text, chunkSource, score }],
|
||||
references: 1,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
return Object.values(combined);
|
||||
}
|
||||
|
||||
@ -109,7 +115,7 @@ function SkeletonLine() {
|
||||
}
|
||||
|
||||
function CitationDetailModal({ source, onClose }) {
|
||||
const { references, title, text } = source;
|
||||
const { references, title, chunks } = source;
|
||||
const { isUrl, text: webpageUrl, href: linkTo } = parseChunkSource(source);
|
||||
|
||||
return (
|
||||
@ -156,12 +162,39 @@ function CitationDetailModal({ source, onClose }) {
|
||||
{[...Array(3)].map((_, idx) => (
|
||||
<SkeletonLine key={idx} />
|
||||
))}
|
||||
<p className="text-white whitespace-pre-line">{HTMLDecode(text)}</p>
|
||||
<div className="mb-6">
|
||||
{chunks.map(({ text, score }, idx) => (
|
||||
<div key={idx} className="pt-6 text-white">
|
||||
<div className="flex flex-col w-full justify-start pb-6 gap-y-1">
|
||||
<p className="text-white whitespace-pre-line">
|
||||
{HTMLDecode(text)}
|
||||
</p>
|
||||
|
||||
{!!score && (
|
||||
<>
|
||||
<div className="w-full flex items-center text-xs text-white/60 gap-x-2 cursor-default">
|
||||
<div
|
||||
data-tooltip-id="similarity-score"
|
||||
data-tooltip-content={`This is the semantic similarity score of this chunk of text compared to your query calculated by the vector database.`}
|
||||
className="flex items-center gap-x-1"
|
||||
>
|
||||
<Info size={14} />
|
||||
<p>{toPercentString(score)} match</p>
|
||||
</div>
|
||||
</div>
|
||||
<Tooltip
|
||||
id="similarity-score"
|
||||
place="top"
|
||||
delayShow={100}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
{[...Array(3)].map((_, idx) => (
|
||||
<SkeletonLine key={idx} />
|
||||
))}
|
||||
</div>
|
||||
))}
|
||||
<div className="mb-6"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@ -180,7 +213,7 @@ const ICONS = {
|
||||
// which contain valid outbound links that can be clicked by the
|
||||
// user when viewing a citation. Optionally allows various icons
|
||||
// to show distinct types of sources.
|
||||
function parseChunkSource({ title = "", chunkSource = "" }) {
|
||||
function parseChunkSource({ title = "", chunks = [] }) {
|
||||
const nullResponse = {
|
||||
isUrl: false,
|
||||
text: null,
|
||||
@ -188,9 +221,10 @@ function parseChunkSource({ title = "", chunkSource = "" }) {
|
||||
icon: "file",
|
||||
};
|
||||
|
||||
if (!chunkSource.startsWith("link://")) return nullResponse;
|
||||
if (!chunks.length || !chunks[0].chunkSource.startsWith("link://"))
|
||||
return nullResponse;
|
||||
try {
|
||||
const url = new URL(chunkSource.split("link://")[1]);
|
||||
const url = new URL(chunks[0].chunkSource.split("link://")[1]);
|
||||
let text = url.host + url.pathname;
|
||||
let icon = "link";
|
||||
|
||||
|
@ -52,7 +52,7 @@ const HistoricalMessage = ({
|
||||
</div>
|
||||
) : (
|
||||
<span
|
||||
className={`whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
|
||||
className={`flex flex-col gap-y-1 mt-2`}
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: DOMPurify.sanitize(renderMarkdown(message)),
|
||||
}}
|
||||
|
@ -70,7 +70,7 @@ const PromptReply = ({
|
||||
<div className="flex gap-x-5">
|
||||
<Jazzicon size={36} user={{ uid: workspace.slug }} role="assistant" />
|
||||
<span
|
||||
className={`reply whitespace-pre-line text-white font-normal text-sm md:text-sm flex flex-col gap-y-1 mt-2`}
|
||||
className={`reply flex flex-col gap-y-1 mt-2`}
|
||||
dangerouslySetInnerHTML={{ __html: renderMarkdown(reply) }}
|
||||
/>
|
||||
</div>
|
||||
|
@ -83,7 +83,7 @@ export default function ChatHistory({ history = [], workspace, sendCommand }) {
|
||||
|
||||
return (
|
||||
<div
|
||||
className="h-full md:h-[83%] pb-[100px] pt-6 md:pt-0 md:pb-20 md:mx-0 overflow-y-scroll flex flex-col justify-start no-scroll"
|
||||
className="markdown text-white/80 font-light text-sm h-full md:h-[83%] pb-[100px] pt-6 md:pt-0 md:pb-20 md:mx-0 overflow-y-scroll flex flex-col justify-start no-scroll"
|
||||
id="chat-history"
|
||||
ref={chatHistoryRef}
|
||||
>
|
||||
|
@ -443,3 +443,157 @@ dialog::backdrop {
|
||||
.input-label {
|
||||
@apply text-[14px] font-bold text-white;
|
||||
}
|
||||
|
||||
/**
|
||||
* ==============================================
|
||||
* Markdown Styles
|
||||
* ==============================================
|
||||
*/
|
||||
.markdown,
|
||||
.markdown > * {
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.markdown h1 {
|
||||
font-size: xx-large;
|
||||
line-height: 1.7;
|
||||
padding-left: 0.3rem;
|
||||
}
|
||||
|
||||
.markdown h2 {
|
||||
line-height: 1.5;
|
||||
font-size: x-large;
|
||||
padding-left: 0.3rem;
|
||||
}
|
||||
|
||||
.markdown h3 {
|
||||
line-height: 1.4;
|
||||
font-size: large;
|
||||
padding-left: 0.3rem;
|
||||
}
|
||||
|
||||
/* Table Styles */
|
||||
|
||||
.markdown table {
|
||||
border-collapse: separate;
|
||||
}
|
||||
|
||||
.markdown th {
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
.markdown td:first-child,
|
||||
.markdown th:first-child {
|
||||
border-left: none;
|
||||
}
|
||||
|
||||
.markdown table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
color: #bdbdbe;
|
||||
font-size: 13px;
|
||||
margin: 30px 0px;
|
||||
border-radius: 10px;
|
||||
overflow: hidden;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.markdown table thead {
|
||||
color: #fff;
|
||||
text-transform: uppercase;
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
.markdown hr {
|
||||
border: 0;
|
||||
border-top: 1px solid #cdcdcd40;
|
||||
margin: 1rem 0;
|
||||
}
|
||||
|
||||
.markdown table th,
|
||||
.markdown table td {
|
||||
padding: 8px 15px;
|
||||
border-bottom: 1px solid #cdcdcd2e;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.markdown table th {
|
||||
padding: 14px 15px;
|
||||
}
|
||||
|
||||
@media (max-width: 600px) {
|
||||
.markdown table th,
|
||||
.markdown table td {
|
||||
padding: 10px;
|
||||
}
|
||||
}
|
||||
|
||||
/* List Styles */
|
||||
.markdown ol {
|
||||
list-style: decimal-leading-zero;
|
||||
padding-left: 0px;
|
||||
padding-top: 10px;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.markdown ol li {
|
||||
margin-left: 20px;
|
||||
padding-left: 10px;
|
||||
position: relative;
|
||||
transition: all 0.3s ease;
|
||||
line-height: 1.4rem;
|
||||
}
|
||||
|
||||
.markdown ol li::marker {
|
||||
padding-top: 10px;
|
||||
}
|
||||
|
||||
.markdown ol li p {
|
||||
margin: 0.5rem;
|
||||
padding-top: 10px;
|
||||
}
|
||||
|
||||
.markdown ul {
|
||||
list-style: revert-layer;
|
||||
/* color: #cfcfcfcf; */
|
||||
padding-left: 0px;
|
||||
padding-top: 10px;
|
||||
padding-bottom: 10px;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.markdown ul li::marker {
|
||||
color: #d0d0d0cf;
|
||||
padding-top: 10px;
|
||||
}
|
||||
|
||||
.markdownul li {
|
||||
margin-left: 20px;
|
||||
|
||||
padding-left: 10px;
|
||||
transition: all 0.3s ease;
|
||||
line-height: 1.4rem;
|
||||
}
|
||||
|
||||
.markdown ul li > ul {
|
||||
padding-left: 20px;
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
.markdown p {
|
||||
font-weight: 400;
|
||||
margin: 0.35rem;
|
||||
}
|
||||
|
||||
.markdown {
|
||||
text-wrap: wrap;
|
||||
}
|
||||
|
||||
.markdown pre {
|
||||
margin: 20px 0;
|
||||
}
|
||||
|
||||
.markdown strong {
|
||||
font-weight: 600;
|
||||
color: #fff;
|
||||
}
|
||||
|
BIN
frontend/src/media/llmprovider/openrouter.jpeg
Normal file
BIN
frontend/src/media/llmprovider/openrouter.jpeg
Normal file
Binary file not shown.
After Width: | Height: | Size: 6.2 KiB |
BIN
frontend/src/media/llmprovider/perplexity.png
Normal file
BIN
frontend/src/media/llmprovider/perplexity.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 16 KiB |
@ -152,6 +152,18 @@ const System = {
|
||||
return false;
|
||||
});
|
||||
},
|
||||
deleteDocuments: async (names = []) => {
|
||||
return await fetch(`${API_BASE}/system/remove-documents`, {
|
||||
method: "DELETE",
|
||||
headers: baseHeaders(),
|
||||
body: JSON.stringify({ names }),
|
||||
})
|
||||
.then((res) => res.ok)
|
||||
.catch((e) => {
|
||||
console.error(e);
|
||||
return false;
|
||||
});
|
||||
},
|
||||
deleteFolder: async (name) => {
|
||||
return await fetch(`${API_BASE}/system/remove-folder`, {
|
||||
method: "DELETE",
|
||||
|
@ -49,7 +49,7 @@ export default function NewUserModal({ closeModal }) {
|
||||
<input
|
||||
name="username"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="User's username"
|
||||
minLength={2}
|
||||
required={true}
|
||||
@ -66,7 +66,7 @@ export default function NewUserModal({ closeModal }) {
|
||||
<input
|
||||
name="password"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="User's initial password"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
@ -84,7 +84,7 @@ export default function NewUserModal({ closeModal }) {
|
||||
required={true}
|
||||
defaultValue={"default"}
|
||||
onChange={(e) => setRole(e.target.value)}
|
||||
className="rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white border border-gray-500 focus:ring-blue-500 focus:border-blue-500"
|
||||
className="rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white border-gray-500 focus:ring-blue-500 focus:border-blue-500"
|
||||
>
|
||||
<option value="default">Default</option>
|
||||
<option value="manager">Manager </option>
|
||||
|
@ -50,7 +50,7 @@ export default function EditUserModal({ currentUser, user, closeModal }) {
|
||||
<input
|
||||
name="username"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="User's username"
|
||||
minLength={2}
|
||||
defaultValue={user.username}
|
||||
@ -68,7 +68,7 @@ export default function EditUserModal({ currentUser, user, closeModal }) {
|
||||
<input
|
||||
name="password"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder={`${user.username}'s new password`}
|
||||
autoComplete="off"
|
||||
/>
|
||||
@ -85,7 +85,7 @@ export default function EditUserModal({ currentUser, user, closeModal }) {
|
||||
required={true}
|
||||
defaultValue={user.role}
|
||||
onChange={(e) => setRole(e.target.value)}
|
||||
className="rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white border border-gray-500 focus:ring-blue-500 focus:border-blue-500"
|
||||
className="rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white border-gray-500 focus:ring-blue-500 focus:border-blue-500"
|
||||
>
|
||||
<option value="default">Default</option>
|
||||
<option value="manager">Manager</option>
|
||||
|
@ -42,7 +42,7 @@ export default function NewWorkspaceModal({ closeModal }) {
|
||||
<input
|
||||
name="name"
|
||||
type="text"
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 placeholder:text-white/20 border-gray-500 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="My workspace"
|
||||
minLength={4}
|
||||
required={true}
|
||||
|
@ -107,12 +107,14 @@ export default function CustomLogo() {
|
||||
</div>
|
||||
</div>
|
||||
</label>
|
||||
{!isDefaultLogo && (
|
||||
<button
|
||||
onClick={handleRemoveLogo}
|
||||
className="text-white text-base font-medium hover:text-opacity-60"
|
||||
>
|
||||
Delete
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -73,7 +73,7 @@ export default function NewIconForm({ handleSubmit, showing }) {
|
||||
name="url"
|
||||
required={true}
|
||||
placeholder="https://example.com"
|
||||
className="bg-sidebar text-white placeholder-white/60 rounded-md p-2"
|
||||
className="bg-sidebar text-white placeholder:text-white/20 rounded-md p-2"
|
||||
/>
|
||||
</div>
|
||||
{selectedIcon !== "" && (
|
||||
|
@ -64,7 +64,7 @@ export default function SupportEmail() {
|
||||
<input
|
||||
name="supportEmail"
|
||||
type="email"
|
||||
className="bg-zinc-900 mt-4 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 max-w-[275px]"
|
||||
className="bg-zinc-900 mt-4 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 max-w-[275px]"
|
||||
placeholder="support@mycompany.com"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -132,7 +132,7 @@ export default function GithubConnectorSetup() {
|
||||
<input
|
||||
type="url"
|
||||
name="repo"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://github.com/Mintplex-Labs/anything-llm"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
@ -156,7 +156,7 @@ export default function GithubConnectorSetup() {
|
||||
<input
|
||||
type="text"
|
||||
name="accessToken"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="github_pat_1234_abcdefg"
|
||||
required={false}
|
||||
autoComplete="off"
|
||||
@ -189,7 +189,7 @@ export default function GithubConnectorSetup() {
|
||||
classNames={{
|
||||
tag: "bg-blue-300/10 text-zinc-800 m-1",
|
||||
input:
|
||||
"flex bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white p-2.5",
|
||||
"flex bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white p-2.5",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
@ -257,7 +257,7 @@ function GitHubBranchSelection({ repo, accessToken }) {
|
||||
<select
|
||||
name="branch"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
<option disabled={true} selected={true}>
|
||||
-- loading available models --
|
||||
@ -278,7 +278,7 @@ function GitHubBranchSelection({ repo, accessToken }) {
|
||||
<select
|
||||
name="branch"
|
||||
required={true}
|
||||
className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
>
|
||||
{allBranches.map((branch) => {
|
||||
return (
|
||||
|
@ -79,7 +79,7 @@ export default function YouTubeTranscriptConnectorSetup() {
|
||||
<input
|
||||
type="url"
|
||||
name="url"
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
|
||||
placeholder="https://youtube.com/watch?v=abc123"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -98,7 +98,7 @@ const ScriptTag = ({ embed }) => {
|
||||
<button
|
||||
disabled={copied}
|
||||
onClick={handleClick}
|
||||
className="disabled:border disabled:border-green-300 border border-transparent relative w-full font-mono flex bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white p-2.5"
|
||||
className="disabled:border disabled:border-green-300 border border-transparent relative w-full font-mono flex bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white p-2.5"
|
||||
>
|
||||
<div
|
||||
className="flex w-full text-left flex-col gap-y-1 pr-6 pl-4 whitespace-pre-line"
|
||||
|
@ -144,7 +144,7 @@ export const WorkspaceSelection = ({ defaultValue = null }) => {
|
||||
name="workspace_id"
|
||||
required={true}
|
||||
defaultValue={defaultValue}
|
||||
className="min-w-[15rem] rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white border border-gray-500 focus:ring-blue-500 focus:border-blue-500"
|
||||
className="min-w-[15rem] rounded-lg bg-zinc-900 px-4 py-2 text-sm text-white focus:ring-blue-500 focus:border-blue-500"
|
||||
>
|
||||
{workspaces.map((workspace) => {
|
||||
return (
|
||||
@ -274,7 +274,7 @@ export const PermittedDomains = ({ defaultValue = [] }) => {
|
||||
classNames={{
|
||||
tag: "bg-blue-300/10 text-zinc-800 m-1",
|
||||
input:
|
||||
"flex bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white p-2.5",
|
||||
"flex bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white p-2.5",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
@ -293,7 +293,7 @@ export const NumberInput = ({ name, title, hint, defaultValue = 0 }) => {
|
||||
<input
|
||||
type="number"
|
||||
name={name}
|
||||
className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-[15rem] p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-[15rem] p-2.5"
|
||||
min={0}
|
||||
defaultValue={defaultValue}
|
||||
onScroll={(e) => e.target.blur()}
|
||||
|
@ -7,12 +7,14 @@ import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
|
||||
import OpenAiLogo from "@/media/llmprovider/openai.png";
|
||||
import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
|
||||
import LocalAiLogo from "@/media/llmprovider/localai.png";
|
||||
import OllamaLogo from "@/media/llmprovider/ollama.png";
|
||||
import PreLoader from "@/components/Preloader";
|
||||
import ChangeWarningModal from "@/components/ChangeWarning";
|
||||
import OpenAiOptions from "@/components/EmbeddingSelection/OpenAiOptions";
|
||||
import AzureAiOptions from "@/components/EmbeddingSelection/AzureAiOptions";
|
||||
import LocalAiOptions from "@/components/EmbeddingSelection/LocalAiOptions";
|
||||
import NativeEmbeddingOptions from "@/components/EmbeddingSelection/NativeEmbeddingOptions";
|
||||
import OllamaEmbeddingOptions from "@/components/EmbeddingSelection/OllamaOptions";
|
||||
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
|
||||
import { MagnifyingGlass } from "@phosphor-icons/react";
|
||||
import { useModal } from "@/hooks/useModal";
|
||||
@ -108,6 +110,13 @@ export default function GeneralEmbeddingPreference() {
|
||||
options: <LocalAiOptions settings={settings} />,
|
||||
description: "Run embedding models locally on your own machine.",
|
||||
},
|
||||
{
|
||||
name: "Ollama",
|
||||
value: "ollama",
|
||||
logo: OllamaLogo,
|
||||
options: <OllamaEmbeddingOptions settings={settings} />,
|
||||
description: "Run embedding models locally on your own machine.",
|
||||
},
|
||||
];
|
||||
|
||||
useEffect(() => {
|
||||
@ -189,7 +198,7 @@ export default function GeneralEmbeddingPreference() {
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search Embedding providers"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
autoComplete="off"
|
||||
onKeyDown={(e) => {
|
||||
|
@ -13,6 +13,8 @@ import LocalAiLogo from "@/media/llmprovider/localai.png";
|
||||
import TogetherAILogo from "@/media/llmprovider/togetherai.png";
|
||||
import MistralLogo from "@/media/llmprovider/mistral.jpeg";
|
||||
import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
|
||||
import PerplexityLogo from "@/media/llmprovider/perplexity.png";
|
||||
import OpenRouterLogo from "@/media/llmprovider/openrouter.jpeg";
|
||||
import PreLoader from "@/components/Preloader";
|
||||
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
|
||||
import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
|
||||
@ -24,8 +26,11 @@ import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
|
||||
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
|
||||
import MistralOptions from "@/components/LLMSelection/MistralOptions";
|
||||
import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
|
||||
|
||||
import LLMItem from "@/components/LLMSelection/LLMItem";
|
||||
import { MagnifyingGlass } from "@phosphor-icons/react";
|
||||
import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
|
||||
import OpenRouterOptions from "@/components/LLMSelection/OpenRouterOptions";
|
||||
|
||||
export default function GeneralLLMPreference() {
|
||||
const [saving, setSaving] = useState(false);
|
||||
@ -151,6 +156,21 @@ export default function GeneralLLMPreference() {
|
||||
options: <MistralOptions settings={settings} />,
|
||||
description: "Run open source models from Mistral AI.",
|
||||
},
|
||||
{
|
||||
name: "Perplexity AI",
|
||||
value: "perplexity",
|
||||
logo: PerplexityLogo,
|
||||
options: <PerplexityOptions settings={settings} />,
|
||||
description:
|
||||
"Run powerful and internet-connected models hosted by Perplexity AI.",
|
||||
},
|
||||
{
|
||||
name: "OpenRouter",
|
||||
value: "openrouter",
|
||||
logo: OpenRouterLogo,
|
||||
options: <OpenRouterOptions settings={settings} />,
|
||||
description: "A unified interface for LLMs.",
|
||||
},
|
||||
];
|
||||
|
||||
return (
|
||||
@ -209,7 +229,7 @@ export default function GeneralLLMPreference() {
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search LLM providers"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
autoComplete="off"
|
||||
onKeyDown={(e) => {
|
||||
@ -220,7 +240,6 @@ export default function GeneralLLMPreference() {
|
||||
</div>
|
||||
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll pb-4">
|
||||
{filteredLLMs.map((llm) => {
|
||||
if (llm.value === "native" && isHosted) return null;
|
||||
return (
|
||||
<LLMItem
|
||||
key={llm.name}
|
||||
|
@ -141,7 +141,7 @@ function MultiUserMode() {
|
||||
<input
|
||||
name="username"
|
||||
type="text"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder-white placeholder-opacity-60 focus:ring-blue-500"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder:text-white/20 focus:ring-blue-500"
|
||||
placeholder="Your admin username"
|
||||
minLength={2}
|
||||
required={true}
|
||||
@ -160,7 +160,7 @@ function MultiUserMode() {
|
||||
<input
|
||||
name="password"
|
||||
type="text"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder-white placeholder-opacity-60 focus:ring-blue-500"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder:text-white/20 focus:ring-blue-500"
|
||||
placeholder="Your admin password"
|
||||
minLength={8}
|
||||
required={true}
|
||||
@ -303,7 +303,7 @@ function PasswordProtection() {
|
||||
<input
|
||||
name="password"
|
||||
type="text"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder-white placeholder-opacity-60 focus:ring-blue-500"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:border-blue-500 block w-full p-2.5 placeholder:text-white/20 focus:ring-blue-500"
|
||||
placeholder="Your Instance Password"
|
||||
minLength={8}
|
||||
required={true}
|
||||
|
@ -218,7 +218,7 @@ export default function GeneralVectorDatabase() {
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search vector databases"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => {
|
||||
e.preventDefault();
|
||||
setSearchQuery(e.target.value);
|
||||
|
@ -76,7 +76,7 @@ export default function CreateWorkspace({
|
||||
<input
|
||||
name="name"
|
||||
type="text"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg block w-full p-2.5"
|
||||
placeholder="My Workspace"
|
||||
minLength={4}
|
||||
required={true}
|
||||
|
@ -58,8 +58,6 @@ export default function CustomLogo({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
|
||||
const logoURL = await System.fetchLogo();
|
||||
_setLogo(logoURL);
|
||||
|
||||
showToast("Image uploaded successfully.", "success", { clear: true });
|
||||
setIsDefaultLogo(false);
|
||||
};
|
||||
|
||||
@ -79,8 +77,6 @@ export default function CustomLogo({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
|
||||
const logoURL = await System.fetchLogo();
|
||||
_setLogo(logoURL);
|
||||
|
||||
showToast("Image successfully removed.", "success", { clear: true });
|
||||
};
|
||||
|
||||
return (
|
||||
@ -123,13 +119,21 @@ export default function CustomLogo({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isDefaultLogo ? (
|
||||
<button
|
||||
onClick={handleRemoveLogo}
|
||||
className="text-white text-base font-medium hover:text-opacity-60 mt-8"
|
||||
>
|
||||
Remove logo
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
onClick={handleForward}
|
||||
className="text-white text-base font-medium hover:text-opacity-60 mt-8"
|
||||
>
|
||||
Skip
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
@ -11,6 +11,8 @@ import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
|
||||
import LocalAiLogo from "@/media/llmprovider/localai.png";
|
||||
import MistralLogo from "@/media/llmprovider/mistral.jpeg";
|
||||
import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
|
||||
import PerplexityLogo from "@/media/llmprovider/perplexity.png";
|
||||
import OpenRouterLogo from "@/media/llmprovider/openrouter.jpeg";
|
||||
import ZillizLogo from "@/media/vectordbs/zilliz.png";
|
||||
import AstraDBLogo from "@/media/vectordbs/astraDB.png";
|
||||
import ChromaLogo from "@/media/vectordbs/chroma.png";
|
||||
@ -109,6 +111,22 @@ const LLM_SELECTION_PRIVACY = {
|
||||
],
|
||||
logo: HuggingFaceLogo,
|
||||
},
|
||||
perplexity: {
|
||||
name: "Perplexity AI",
|
||||
description: [
|
||||
"Your chats will not be used for training",
|
||||
"Your prompts and document text used in response creation are visible to Perplexity AI",
|
||||
],
|
||||
logo: PerplexityLogo,
|
||||
},
|
||||
openrouter: {
|
||||
name: "OpenRouter",
|
||||
description: [
|
||||
"Your chats will not be used for training",
|
||||
"Your prompts and document text used in response creation are visible to OpenRouter",
|
||||
],
|
||||
logo: OpenRouterLogo,
|
||||
},
|
||||
};
|
||||
|
||||
const VECTOR_DB_PRIVACY = {
|
||||
@ -203,6 +221,13 @@ const EMBEDDING_ENGINE_PRIVACY = {
|
||||
],
|
||||
logo: LocalAiLogo,
|
||||
},
|
||||
ollama: {
|
||||
name: "Ollama",
|
||||
description: [
|
||||
"Your document text is embedded privately on the server running Ollama",
|
||||
],
|
||||
logo: OllamaLogo,
|
||||
},
|
||||
};
|
||||
|
||||
export default function DataHandling({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
|
@ -4,10 +4,12 @@ import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
|
||||
import OpenAiLogo from "@/media/llmprovider/openai.png";
|
||||
import AzureOpenAiLogo from "@/media/llmprovider/azure.png";
|
||||
import LocalAiLogo from "@/media/llmprovider/localai.png";
|
||||
import OllamaLogo from "@/media/llmprovider/ollama.png";
|
||||
import NativeEmbeddingOptions from "@/components/EmbeddingSelection/NativeEmbeddingOptions";
|
||||
import OpenAiOptions from "@/components/EmbeddingSelection/OpenAiOptions";
|
||||
import AzureAiOptions from "@/components/EmbeddingSelection/AzureAiOptions";
|
||||
import LocalAiOptions from "@/components/EmbeddingSelection/LocalAiOptions";
|
||||
import OllamaEmbeddingOptions from "@/components/EmbeddingSelection/OllamaOptions";
|
||||
import EmbedderItem from "@/components/EmbeddingSelection/EmbedderItem";
|
||||
import System from "@/models/system";
|
||||
import paths from "@/utils/paths";
|
||||
@ -70,6 +72,13 @@ export default function EmbeddingPreference({
|
||||
options: <LocalAiOptions settings={settings} />,
|
||||
description: "Run embedding models locally on your own machine.",
|
||||
},
|
||||
{
|
||||
name: "Ollama",
|
||||
value: "ollama",
|
||||
logo: OllamaLogo,
|
||||
options: <OllamaEmbeddingOptions settings={settings} />,
|
||||
description: "Run embedding models locally on your own machine.",
|
||||
},
|
||||
];
|
||||
|
||||
function handleForward() {
|
||||
@ -95,9 +104,6 @@ export default function EmbeddingPreference({
|
||||
showToast(`Failed to save embedding settings: ${error}`, "error");
|
||||
return;
|
||||
}
|
||||
showToast("Embedder settings saved successfully.", "success", {
|
||||
clear: true,
|
||||
});
|
||||
navigate(paths.onboarding.vectorDatabase());
|
||||
};
|
||||
|
||||
@ -128,7 +134,7 @@ export default function EmbeddingPreference({
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search Embedding providers"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
autoComplete="off"
|
||||
onKeyDown={(e) => {
|
||||
|
@ -10,6 +10,8 @@ import LocalAiLogo from "@/media/llmprovider/localai.png";
|
||||
import TogetherAILogo from "@/media/llmprovider/togetherai.png";
|
||||
import MistralLogo from "@/media/llmprovider/mistral.jpeg";
|
||||
import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
|
||||
import PerplexityLogo from "@/media/llmprovider/perplexity.png";
|
||||
import OpenRouterLogo from "@/media/llmprovider/openrouter.jpeg";
|
||||
import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
|
||||
import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
|
||||
import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
|
||||
@ -19,12 +21,14 @@ import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
|
||||
import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
|
||||
import MistralOptions from "@/components/LLMSelection/MistralOptions";
|
||||
import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
|
||||
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
|
||||
import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
|
||||
import LLMItem from "@/components/LLMSelection/LLMItem";
|
||||
import System from "@/models/system";
|
||||
import paths from "@/utils/paths";
|
||||
import showToast from "@/utils/toast";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
|
||||
import OpenRouterOptions from "@/components/LLMSelection/OpenRouterOptions";
|
||||
|
||||
const TITLE = "LLM Preference";
|
||||
const DESCRIPTION =
|
||||
@ -126,6 +130,21 @@ export default function LLMPreference({
|
||||
options: <MistralOptions settings={settings} />,
|
||||
description: "Run open source models from Mistral AI.",
|
||||
},
|
||||
{
|
||||
name: "Perplexity AI",
|
||||
value: "perplexity",
|
||||
logo: PerplexityLogo,
|
||||
options: <PerplexityOptions settings={settings} />,
|
||||
description:
|
||||
"Run powerful and internet-connected models hosted by Perplexity AI.",
|
||||
},
|
||||
{
|
||||
name: "OpenRouter",
|
||||
value: "openrouter",
|
||||
logo: OpenRouterLogo,
|
||||
options: <OpenRouterOptions settings={settings} />,
|
||||
description: "A unified interface for LLMs.",
|
||||
},
|
||||
];
|
||||
|
||||
function handleForward() {
|
||||
@ -151,7 +170,6 @@ export default function LLMPreference({
|
||||
showToast(`Failed to save LLM settings: ${error}`, "error");
|
||||
return;
|
||||
}
|
||||
showToast("LLM settings saved successfully.", "success", { clear: true });
|
||||
navigate(paths.onboarding.embeddingPreference());
|
||||
};
|
||||
|
||||
@ -182,7 +200,7 @@ export default function LLMPreference({
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search LLM providers"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
autoComplete="off"
|
||||
onKeyDown={(e) => {
|
||||
@ -193,7 +211,6 @@ export default function LLMPreference({
|
||||
</div>
|
||||
<div className="px-4 pt-[70px] flex flex-col gap-y-1 max-h-[390px] overflow-y-auto no-scroll pb-4">
|
||||
{filteredLLMs.map((llm) => {
|
||||
if (llm.value === "native" && isHosted) return null;
|
||||
return (
|
||||
<LLMItem
|
||||
key={llm.name}
|
||||
|
@ -102,7 +102,7 @@ export default function Survey({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
type="email"
|
||||
placeholder="you@gmail.com"
|
||||
required={true}
|
||||
className="mt-2 bg-zinc-900 text-white text-sm font-medium font-['Plus Jakarta Sans'] leading-tight w-full h-11 p-2.5 bg-zinc-900 rounded-lg"
|
||||
className="mt-2 bg-zinc-900 text-white placeholder:text-white/20 text-sm font-medium font-['Plus Jakarta Sans'] leading-tight w-full h-11 p-2.5 bg-zinc-900 rounded-lg"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@ -269,7 +269,7 @@ export default function Survey({ setHeader, setForwardBtn, setBackBtn }) {
|
||||
<textarea
|
||||
name="comment"
|
||||
rows={5}
|
||||
className="mt-2 bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="mt-2 bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="If you have any questions or comments right now, you can leave them here and we will get back to you. You can also email team@mintplexlabs.com"
|
||||
wrap="soft"
|
||||
autoComplete="off"
|
||||
|
@ -124,8 +124,6 @@ const JustMe = ({
|
||||
return;
|
||||
}
|
||||
|
||||
showToast("Password set successfully!", "success", { clear: true });
|
||||
|
||||
// Auto-request token with password that was just set so they
|
||||
// are not redirected to login after completion.
|
||||
const { token } = await System.requestToken({
|
||||
@ -245,9 +243,7 @@ const MyTeam = ({ setMultiUserLoginValid, myTeamSubmitRef, navigate }) => {
|
||||
return;
|
||||
}
|
||||
|
||||
showToast("Multi-user login enabled.", "success", { clear: true });
|
||||
navigate(paths.onboarding.dataHandling());
|
||||
|
||||
// Auto-request token with credentials that was just set so they
|
||||
// are not redirected to login after completion.
|
||||
const { user, token } = await System.requestToken(data);
|
||||
|
@ -133,9 +133,6 @@ export default function VectorDatabaseConnection({
|
||||
showToast(`Failed to save Vector Database settings: ${error}`, "error");
|
||||
return;
|
||||
}
|
||||
showToast("Vector Database settings saved successfully.", "success", {
|
||||
clear: true,
|
||||
});
|
||||
navigate(paths.onboarding.customLogo());
|
||||
};
|
||||
|
||||
@ -166,7 +163,7 @@ export default function VectorDatabaseConnection({
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search vector databases"
|
||||
className="bg-zinc-600 z-20 pl-10 rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
className="bg-zinc-600 z-20 pl-10 h-[38px] rounded-full w-full px-4 py-1 text-sm border-2 border-slate-300/40 outline-none focus:border-white text-white"
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
autoComplete="off"
|
||||
onKeyDown={(e) => {
|
||||
|
@ -21,7 +21,7 @@ export default function ChatHistorySettings({ workspace, setHasChanges }) {
|
||||
step={1}
|
||||
onWheel={(e) => e.target.blur()}
|
||||
defaultValue={workspace?.openAiHistory ?? 20}
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="20"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -18,7 +18,7 @@ export default function ChatPromptSettings({ workspace, setHasChanges }) {
|
||||
name="openAiPrompt"
|
||||
rows={5}
|
||||
defaultValue={chatPrompt(workspace)}
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 mt-2"
|
||||
className="bg-zinc-900 placeholder:text-white/20 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 mt-2"
|
||||
placeholder="Given the following conversation, relevant context, and a follow up question, reply with an answer to the current question the user is asking. Return only your response to the question given the above information following the users instructions as needed."
|
||||
required={true}
|
||||
wrap="soft"
|
||||
|
@ -36,7 +36,7 @@ export default function ChatTemperatureSettings({
|
||||
step={0.1}
|
||||
onWheel={(e) => e.target.blur()}
|
||||
defaultValue={workspace?.openAiTemp ?? defaults.temp}
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="0.7"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -143,7 +143,7 @@ export default function SuggestedChatMessages({ slug }) {
|
||||
</label>
|
||||
<input
|
||||
placeholder="Message heading"
|
||||
className=" bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 w-full"
|
||||
className=" bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 w-full"
|
||||
value={newMessage.heading}
|
||||
name="heading"
|
||||
onChange={onEditChange}
|
||||
@ -155,7 +155,7 @@ export default function SuggestedChatMessages({ slug }) {
|
||||
</label>
|
||||
<input
|
||||
placeholder="Message"
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 w-full"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 w-full"
|
||||
value={newMessage.message}
|
||||
name="message"
|
||||
onChange={onEditChange}
|
||||
|
@ -15,7 +15,7 @@ export default function WorkspaceName({ workspace, setHasChanges }) {
|
||||
minLength={2}
|
||||
maxLength={80}
|
||||
defaultValue={workspace?.name}
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
|
||||
placeholder="My Workspace"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -20,7 +20,7 @@ export default function MaxContextSnippets({ workspace, setHasChanges }) {
|
||||
step={1}
|
||||
onWheel={(e) => e.target.blur()}
|
||||
defaultValue={workspace?.topN ?? 4}
|
||||
className="bg-zinc-900 text-white text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 mt-2"
|
||||
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 mt-2"
|
||||
placeholder="4"
|
||||
required={true}
|
||||
autoComplete="off"
|
||||
|
@ -43,9 +43,7 @@ const markdown = markdownIt({
|
||||
"</pre></div>"
|
||||
);
|
||||
},
|
||||
})
|
||||
// Enable <ol> and <ul> items to not assume an HTML structure so we can keep numbering from responses.
|
||||
.disable("list");
|
||||
});
|
||||
|
||||
export default function renderMarkdown(text = "") {
|
||||
return markdown.render(text);
|
||||
|
@ -15,6 +15,14 @@ export function dollarFormat(input) {
|
||||
}).format(input);
|
||||
}
|
||||
|
||||
export function toPercentString(input = null, decimals = 0) {
|
||||
if (isNaN(input) || input === null) return "";
|
||||
const percentage = Math.round(input * 100);
|
||||
return (
|
||||
(decimals > 0 ? percentage.toFixed(decimals) : percentage.toString()) + "%"
|
||||
);
|
||||
}
|
||||
|
||||
export function humanFileSize(bytes, si = false, dp = 1) {
|
||||
const thresh = si ? 1000 : 1024;
|
||||
|
||||
|
@ -41,6 +41,14 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
|
||||
# TOGETHER_AI_API_KEY='my-together-ai-key'
|
||||
# TOGETHER_AI_MODEL_PREF='mistralai/Mixtral-8x7B-Instruct-v0.1'
|
||||
|
||||
# LLM_PROVIDER='perplexity'
|
||||
# PERPLEXITY_API_KEY='my-perplexity-key'
|
||||
# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
|
||||
|
||||
# LLM_PROVIDER='openrouter'
|
||||
# OPENROUTER_API_KEY='my-openrouter-key'
|
||||
# OPENROUTER_MODEL_PREF='openrouter/auto'
|
||||
|
||||
# LLM_PROVIDER='mistral'
|
||||
# MISTRAL_API_KEY='example-mistral-ai-api-key'
|
||||
# MISTRAL_MODEL_PREF='mistral-tiny'
|
||||
@ -68,6 +76,11 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
|
||||
# EMBEDDING_MODEL_PREF='text-embedding-ada-002'
|
||||
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=1000 # The max chunk size in chars a string to embed can be
|
||||
|
||||
# EMBEDDING_ENGINE='ollama'
|
||||
# EMBEDDING_BASE_PATH='http://127.0.0.1:11434'
|
||||
# EMBEDDING_MODEL_PREF='nomic-embed-text:latest'
|
||||
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
|
||||
|
||||
###########################################
|
||||
######## Vector Database Selection ########
|
||||
###########################################
|
||||
|
@ -1,5 +1,10 @@
|
||||
const { EventLogs } = require("../../../models/eventLogs");
|
||||
const { SystemSettings } = require("../../../models/systemSettings");
|
||||
const { getVectorDbClass } = require("../../../utils/helpers");
|
||||
const {
|
||||
prepareWorkspaceChatsForExport,
|
||||
exportChatsAsType,
|
||||
} = require("../../../utils/helpers/chat/convertTo");
|
||||
const { dumpENV, updateENV } = require("../../../utils/helpers/updateENV");
|
||||
const { reqBody } = require("../../../utils/http");
|
||||
const { validApiKey } = require("../../../utils/middleware/validApiKey");
|
||||
@ -147,6 +152,60 @@ function apiSystemEndpoints(app) {
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
app.get(
|
||||
"/v1/system/export-chats",
|
||||
[validApiKey],
|
||||
async (request, response) => {
|
||||
/*
|
||||
#swagger.tags = ['System Settings']
|
||||
#swagger.description = 'Export all of the chats from the system in a known format. Output depends on the type sent. Will be send with the correct header for the output.'
|
||||
#swagger.parameters['type'] = {
|
||||
in: 'query',
|
||||
description: "Export format jsonl, json, csv, jsonAlpaca",
|
||||
required: false,
|
||||
type: 'string'
|
||||
}
|
||||
#swagger.responses[200] = {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: {
|
||||
type: 'object',
|
||||
example: [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is AnythinglLM?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "AnythingLLM is a knowledge graph and vector database management system built using NodeJS express server. It provides an interface for handling all interactions, including vectorDB management and LLM (Language Model) interactions."
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#swagger.responses[403] = {
|
||||
schema: {
|
||||
"$ref": "#/definitions/InvalidAPIKey"
|
||||
}
|
||||
}
|
||||
*/
|
||||
try {
|
||||
const { type = "jsonl" } = request.query;
|
||||
const chats = await prepareWorkspaceChatsForExport(type);
|
||||
const { contentType, data } = await exportChatsAsType(chats, type);
|
||||
await EventLogs.logEvent("exported_chats", {
|
||||
type,
|
||||
});
|
||||
response.setHeader("Content-Type", contentType);
|
||||
response.status(200).send(data);
|
||||
} catch (e) {
|
||||
console.log(e.message, e);
|
||||
response.sendStatus(500).end();
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = { apiSystemEndpoints };
|
||||
|
@ -265,6 +265,21 @@ function systemEndpoints(app) {
|
||||
}
|
||||
);
|
||||
|
||||
app.delete(
|
||||
"/system/remove-documents",
|
||||
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
|
||||
async (request, response) => {
|
||||
try {
|
||||
const { names } = reqBody(request);
|
||||
for await (const name of names) await purgeDocument(name);
|
||||
response.sendStatus(200).end();
|
||||
} catch (e) {
|
||||
console.log(e.message, e);
|
||||
response.sendStatus(500).end();
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
app.delete(
|
||||
"/system/remove-folder",
|
||||
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
|
||||
|
@ -180,6 +180,30 @@ const SystemSettings = {
|
||||
TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
|
||||
TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,
|
||||
|
||||
// For embedding credentials when ollama is selected.
|
||||
OpenAiKey: !!process.env.OPEN_AI_KEY,
|
||||
AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
|
||||
AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
|
||||
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
|
||||
}
|
||||
: {}),
|
||||
...(llmProvider === "perplexity"
|
||||
? {
|
||||
PerplexityApiKey: !!process.env.PERPLEXITY_API_KEY,
|
||||
PerplexityModelPref: process.env.PERPLEXITY_MODEL_PREF,
|
||||
|
||||
// For embedding credentials when ollama is selected.
|
||||
OpenAiKey: !!process.env.OPEN_AI_KEY,
|
||||
AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
|
||||
AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
|
||||
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
|
||||
}
|
||||
: {}),
|
||||
...(llmProvider === "openrouter"
|
||||
? {
|
||||
OpenRouterApiKey: !!process.env.OPENROUTER_API_KEY,
|
||||
OpenRouterModelPref: process.env.OPENROUTER_MODEL_PREF,
|
||||
|
||||
// For embedding credentials when ollama is selected.
|
||||
OpenAiKey: !!process.env.OPEN_AI_KEY,
|
||||
AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
|
||||
|
@ -2232,6 +2232,72 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/system/export-chats": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"System Settings"
|
||||
],
|
||||
"description": "Export all of the chats from the system in a known format. Output depends on the type sent. Will be send with the correct header for the output.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "Authorization",
|
||||
"in": "header",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "type",
|
||||
"in": "query",
|
||||
"description": "Export format jsonl, json, csv, jsonAlpaca",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"example": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is AnythinglLM?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "AnythingLLM is a knowledge graph and vector database management system built using NodeJS express server. It provides an interface for handling all interactions, including vectorDB management and LLM (Language Model) interactions."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"403": {
|
||||
"description": "Forbidden",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/InvalidAPIKey"
|
||||
}
|
||||
},
|
||||
"application/xml": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/InvalidAPIKey"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal Server Error"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
|
334
server/utils/AiProviders/openRouter/index.js
Normal file
334
server/utils/AiProviders/openRouter/index.js
Normal file
@ -0,0 +1,334 @@
|
||||
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
|
||||
const { chatPrompt } = require("../../chats");
|
||||
const { v4: uuidv4 } = require("uuid");
|
||||
const { writeResponseChunk } = require("../../helpers/chat/responses");
|
||||
|
||||
function openRouterModels() {
|
||||
const { MODELS } = require("./models.js");
|
||||
return MODELS || {};
|
||||
}
|
||||
|
||||
class OpenRouterLLM {
|
||||
constructor(embedder = null, modelPreference = null) {
|
||||
const { Configuration, OpenAIApi } = require("openai");
|
||||
if (!process.env.OPENROUTER_API_KEY)
|
||||
throw new Error("No OpenRouter API key was set.");
|
||||
|
||||
const config = new Configuration({
|
||||
basePath: "https://openrouter.ai/api/v1",
|
||||
apiKey: process.env.OPENROUTER_API_KEY,
|
||||
baseOptions: {
|
||||
headers: {
|
||||
"HTTP-Referer": "https://useanything.com",
|
||||
"X-Title": "AnythingLLM",
|
||||
},
|
||||
},
|
||||
});
|
||||
this.openai = new OpenAIApi(config);
|
||||
this.model =
|
||||
modelPreference || process.env.OPENROUTER_MODEL_PREF || "openrouter/auto";
|
||||
this.limits = {
|
||||
history: this.promptWindowLimit() * 0.15,
|
||||
system: this.promptWindowLimit() * 0.15,
|
||||
user: this.promptWindowLimit() * 0.7,
|
||||
};
|
||||
|
||||
this.embedder = !embedder ? new NativeEmbedder() : embedder;
|
||||
this.defaultTemp = 0.7;
|
||||
}
|
||||
|
||||
#appendContext(contextTexts = []) {
|
||||
if (!contextTexts || !contextTexts.length) return "";
|
||||
return (
|
||||
"\nContext:\n" +
|
||||
contextTexts
|
||||
.map((text, i) => {
|
||||
return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
|
||||
})
|
||||
.join("")
|
||||
);
|
||||
}
|
||||
|
||||
allModelInformation() {
|
||||
return openRouterModels();
|
||||
}
|
||||
|
||||
streamingEnabled() {
|
||||
return "streamChat" in this && "streamGetChatCompletion" in this;
|
||||
}
|
||||
|
||||
promptWindowLimit() {
|
||||
const availableModels = this.allModelInformation();
|
||||
return availableModels[this.model]?.maxLength || 4096;
|
||||
}
|
||||
|
||||
async isValidChatCompletionModel(model = "") {
|
||||
const availableModels = this.allModelInformation();
|
||||
return availableModels.hasOwnProperty(model);
|
||||
}
|
||||
|
||||
constructPrompt({
|
||||
systemPrompt = "",
|
||||
contextTexts = [],
|
||||
chatHistory = [],
|
||||
userPrompt = "",
|
||||
}) {
|
||||
const prompt = {
|
||||
role: "system",
|
||||
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
|
||||
};
|
||||
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
|
||||
}
|
||||
|
||||
async isSafe(_input = "") {
|
||||
// Not implemented so must be stubbed
|
||||
return { safe: true, reasons: [] };
|
||||
}
|
||||
|
||||
async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`OpenRouter chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const textResponse = await this.openai
|
||||
.createChatCompletion({
|
||||
model: this.model,
|
||||
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
|
||||
n: 1,
|
||||
messages: await this.compressMessages(
|
||||
{
|
||||
systemPrompt: chatPrompt(workspace),
|
||||
userPrompt: prompt,
|
||||
chatHistory,
|
||||
},
|
||||
rawHistory
|
||||
),
|
||||
})
|
||||
.then((json) => {
|
||||
const res = json.data;
|
||||
if (!res.hasOwnProperty("choices"))
|
||||
throw new Error("OpenRouter chat: No results!");
|
||||
if (res.choices.length === 0)
|
||||
throw new Error("OpenRouter chat: No results length!");
|
||||
return res.choices[0].message.content;
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new Error(
|
||||
`OpenRouter::createChatCompletion failed with: ${error.message}`
|
||||
);
|
||||
});
|
||||
|
||||
return textResponse;
|
||||
}
|
||||
|
||||
async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`OpenRouter chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const streamRequest = await this.openai.createChatCompletion(
|
||||
{
|
||||
model: this.model,
|
||||
stream: true,
|
||||
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
|
||||
n: 1,
|
||||
messages: await this.compressMessages(
|
||||
{
|
||||
systemPrompt: chatPrompt(workspace),
|
||||
userPrompt: prompt,
|
||||
chatHistory,
|
||||
},
|
||||
rawHistory
|
||||
),
|
||||
},
|
||||
{ responseType: "stream" }
|
||||
);
|
||||
return streamRequest;
|
||||
}
|
||||
|
||||
async getChatCompletion(messages = null, { temperature = 0.7 }) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`OpenRouter chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const { data } = await this.openai
|
||||
.createChatCompletion({
|
||||
model: this.model,
|
||||
messages,
|
||||
temperature,
|
||||
})
|
||||
.catch((e) => {
|
||||
throw new Error(e.response.data.error.message);
|
||||
});
|
||||
|
||||
if (!data.hasOwnProperty("choices")) return null;
|
||||
return data.choices[0].message.content;
|
||||
}
|
||||
|
||||
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`OpenRouter chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const streamRequest = await this.openai.createChatCompletion(
|
||||
{
|
||||
model: this.model,
|
||||
stream: true,
|
||||
messages,
|
||||
temperature,
|
||||
},
|
||||
{ responseType: "stream" }
|
||||
);
|
||||
return streamRequest;
|
||||
}
|
||||
|
||||
handleStream(response, stream, responseProps) {
|
||||
const timeoutThresholdMs = 500;
|
||||
const { uuid = uuidv4(), sources = [] } = responseProps;
|
||||
|
||||
return new Promise((resolve) => {
|
||||
let fullText = "";
|
||||
let chunk = "";
|
||||
let lastChunkTime = null; // null when first token is still not received.
|
||||
|
||||
// NOTICE: Not all OpenRouter models will return a stop reason
|
||||
// which keeps the connection open and so the model never finalizes the stream
|
||||
// like the traditional OpenAI response schema does. So in the case the response stream
|
||||
// never reaches a formal close state we maintain an interval timer that if we go >=timeoutThresholdMs with
|
||||
// no new chunks then we kill the stream and assume it to be complete. OpenRouter is quite fast
|
||||
// so this threshold should permit most responses, but we can adjust `timeoutThresholdMs` if
|
||||
// we find it is too aggressive.
|
||||
const timeoutCheck = setInterval(() => {
|
||||
if (lastChunkTime === null) return;
|
||||
|
||||
const now = Number(new Date());
|
||||
const diffMs = now - lastChunkTime;
|
||||
if (diffMs >= timeoutThresholdMs) {
|
||||
console.log(
|
||||
`OpenRouter stream did not self-close and has been stale for >${timeoutThresholdMs}ms. Closing response stream.`
|
||||
);
|
||||
writeResponseChunk(response, {
|
||||
uuid,
|
||||
sources,
|
||||
type: "textResponseChunk",
|
||||
textResponse: "",
|
||||
close: true,
|
||||
error: false,
|
||||
});
|
||||
clearInterval(timeoutCheck);
|
||||
resolve(fullText);
|
||||
}
|
||||
}, 500);
|
||||
|
||||
stream.data.on("data", (data) => {
|
||||
const lines = data
|
||||
?.toString()
|
||||
?.split("\n")
|
||||
.filter((line) => line.trim() !== "");
|
||||
|
||||
for (const line of lines) {
|
||||
let validJSON = false;
|
||||
const message = chunk + line.replace(/^data: /, "");
|
||||
|
||||
// JSON chunk is incomplete and has not ended yet
|
||||
// so we need to stitch it together. You would think JSON
|
||||
// chunks would only come complete - but they don't!
|
||||
try {
|
||||
JSON.parse(message);
|
||||
validJSON = true;
|
||||
} catch {}
|
||||
|
||||
if (!validJSON) {
|
||||
// It can be possible that the chunk decoding is running away
|
||||
// and the message chunk fails to append due to string length.
|
||||
// In this case abort the chunk and reset so we can continue.
|
||||
// ref: https://github.com/Mintplex-Labs/anything-llm/issues/416
|
||||
try {
|
||||
chunk += message;
|
||||
} catch (e) {
|
||||
console.error(`Chunk appending error`, e);
|
||||
chunk = "";
|
||||
}
|
||||
continue;
|
||||
} else {
|
||||
chunk = "";
|
||||
}
|
||||
|
||||
if (message == "[DONE]") {
|
||||
lastChunkTime = Number(new Date());
|
||||
writeResponseChunk(response, {
|
||||
uuid,
|
||||
sources,
|
||||
type: "textResponseChunk",
|
||||
textResponse: "",
|
||||
close: true,
|
||||
error: false,
|
||||
});
|
||||
clearInterval(timeoutCheck);
|
||||
resolve(fullText);
|
||||
} else {
|
||||
let finishReason = null;
|
||||
let token = "";
|
||||
try {
|
||||
const json = JSON.parse(message);
|
||||
token = json?.choices?.[0]?.delta?.content;
|
||||
finishReason = json?.choices?.[0]?.finish_reason || null;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token) {
|
||||
fullText += token;
|
||||
lastChunkTime = Number(new Date());
|
||||
writeResponseChunk(response, {
|
||||
uuid,
|
||||
sources: [],
|
||||
type: "textResponseChunk",
|
||||
textResponse: token,
|
||||
close: false,
|
||||
error: false,
|
||||
});
|
||||
}
|
||||
|
||||
if (finishReason !== null) {
|
||||
lastChunkTime = Number(new Date());
|
||||
writeResponseChunk(response, {
|
||||
uuid,
|
||||
sources,
|
||||
type: "textResponseChunk",
|
||||
textResponse: "",
|
||||
close: true,
|
||||
error: false,
|
||||
});
|
||||
clearInterval(timeoutCheck);
|
||||
resolve(fullText);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
|
||||
async embedTextInput(textInput) {
|
||||
return await this.embedder.embedTextInput(textInput);
|
||||
}
|
||||
async embedChunks(textChunks = []) {
|
||||
return await this.embedder.embedChunks(textChunks);
|
||||
}
|
||||
|
||||
async compressMessages(promptArgs = {}, rawHistory = []) {
|
||||
const { messageArrayCompressor } = require("../../helpers/chat");
|
||||
const messageArray = this.constructPrompt(promptArgs);
|
||||
return await messageArrayCompressor(this, messageArray, rawHistory);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
OpenRouterLLM,
|
||||
openRouterModels,
|
||||
};
|
622
server/utils/AiProviders/openRouter/models.js
Normal file
622
server/utils/AiProviders/openRouter/models.js
Normal file
@ -0,0 +1,622 @@
|
||||
const MODELS = {
|
||||
"nousresearch/nous-capybara-34b": {
|
||||
id: "nousresearch/nous-capybara-34b",
|
||||
name: "Nous: Capybara 34B",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"openrouter/auto": {
|
||||
id: "openrouter/auto",
|
||||
name: "Auto (best for prompt)",
|
||||
organization: "Openrouter",
|
||||
maxLength: 128000,
|
||||
},
|
||||
"nousresearch/nous-capybara-7b:free": {
|
||||
id: "nousresearch/nous-capybara-7b:free",
|
||||
name: "Nous: Capybara 7B (free)",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"mistralai/mistral-7b-instruct:free": {
|
||||
id: "mistralai/mistral-7b-instruct:free",
|
||||
name: "Mistral 7B Instruct (free)",
|
||||
organization: "Mistralai",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"gryphe/mythomist-7b:free": {
|
||||
id: "gryphe/mythomist-7b:free",
|
||||
name: "MythoMist 7B (free)",
|
||||
organization: "Gryphe",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"undi95/toppy-m-7b:free": {
|
||||
id: "undi95/toppy-m-7b:free",
|
||||
name: "Toppy M 7B (free)",
|
||||
organization: "Undi95",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"openrouter/cinematika-7b:free": {
|
||||
id: "openrouter/cinematika-7b:free",
|
||||
name: "Cinematika 7B (alpha) (free)",
|
||||
organization: "Openrouter",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"google/gemma-7b-it:free": {
|
||||
id: "google/gemma-7b-it:free",
|
||||
name: "Google: Gemma 7B (free)",
|
||||
organization: "Google",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"jondurbin/bagel-34b": {
|
||||
id: "jondurbin/bagel-34b",
|
||||
name: "Bagel 34B v0.2",
|
||||
organization: "Jondurbin",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"jebcarter/psyfighter-13b": {
|
||||
id: "jebcarter/psyfighter-13b",
|
||||
name: "Psyfighter 13B",
|
||||
organization: "Jebcarter",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"koboldai/psyfighter-13b-2": {
|
||||
id: "koboldai/psyfighter-13b-2",
|
||||
name: "Psyfighter v2 13B",
|
||||
organization: "Koboldai",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"neversleep/noromaid-mixtral-8x7b-instruct": {
|
||||
id: "neversleep/noromaid-mixtral-8x7b-instruct",
|
||||
name: "Noromaid Mixtral 8x7B Instruct",
|
||||
organization: "Neversleep",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"nousresearch/nous-hermes-llama2-13b": {
|
||||
id: "nousresearch/nous-hermes-llama2-13b",
|
||||
name: "Nous: Hermes 13B",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"meta-llama/codellama-34b-instruct": {
|
||||
id: "meta-llama/codellama-34b-instruct",
|
||||
name: "Meta: CodeLlama 34B Instruct",
|
||||
organization: "Meta-llama",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"phind/phind-codellama-34b": {
|
||||
id: "phind/phind-codellama-34b",
|
||||
name: "Phind: CodeLlama 34B v2",
|
||||
organization: "Phind",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"intel/neural-chat-7b": {
|
||||
id: "intel/neural-chat-7b",
|
||||
name: "Neural Chat 7B v3.1",
|
||||
organization: "Intel",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"mistralai/mixtral-8x7b-instruct": {
|
||||
id: "mistralai/mixtral-8x7b-instruct",
|
||||
name: "Mistral: Mixtral 8x7B Instruct",
|
||||
organization: "Mistralai",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"nousresearch/nous-hermes-2-mixtral-8x7b-dpo": {
|
||||
id: "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
|
||||
name: "Nous: Hermes 2 Mixtral 8x7B DPO",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 32000,
|
||||
},
|
||||
"nousresearch/nous-hermes-2-mixtral-8x7b-sft": {
|
||||
id: "nousresearch/nous-hermes-2-mixtral-8x7b-sft",
|
||||
name: "Nous: Hermes 2 Mixtral 8x7B SFT",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 32000,
|
||||
},
|
||||
"haotian-liu/llava-13b": {
|
||||
id: "haotian-liu/llava-13b",
|
||||
name: "Llava 13B",
|
||||
organization: "Haotian-liu",
|
||||
maxLength: 2048,
|
||||
},
|
||||
"nousresearch/nous-hermes-2-vision-7b": {
|
||||
id: "nousresearch/nous-hermes-2-vision-7b",
|
||||
name: "Nous: Hermes 2 Vision 7B (alpha)",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"meta-llama/llama-2-13b-chat": {
|
||||
id: "meta-llama/llama-2-13b-chat",
|
||||
name: "Meta: Llama v2 13B Chat",
|
||||
organization: "Meta-llama",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"migtissera/synthia-70b": {
|
||||
id: "migtissera/synthia-70b",
|
||||
name: "Synthia 70B",
|
||||
organization: "Migtissera",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"pygmalionai/mythalion-13b": {
|
||||
id: "pygmalionai/mythalion-13b",
|
||||
name: "Pygmalion: Mythalion 13B",
|
||||
organization: "Pygmalionai",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"undi95/remm-slerp-l2-13b-6k": {
|
||||
id: "undi95/remm-slerp-l2-13b-6k",
|
||||
name: "ReMM SLERP 13B 6k",
|
||||
organization: "Undi95",
|
||||
maxLength: 6144,
|
||||
},
|
||||
"gryphe/mythomax-l2-13b": {
|
||||
id: "gryphe/mythomax-l2-13b",
|
||||
name: "MythoMax 13B",
|
||||
organization: "Gryphe",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"xwin-lm/xwin-lm-70b": {
|
||||
id: "xwin-lm/xwin-lm-70b",
|
||||
name: "Xwin 70B",
|
||||
organization: "Xwin-lm",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"gryphe/mythomax-l2-13b-8k": {
|
||||
id: "gryphe/mythomax-l2-13b-8k",
|
||||
name: "MythoMax 13B 8k",
|
||||
organization: "Gryphe",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"alpindale/goliath-120b": {
|
||||
id: "alpindale/goliath-120b",
|
||||
name: "Goliath 120B",
|
||||
organization: "Alpindale",
|
||||
maxLength: 6144,
|
||||
},
|
||||
"neversleep/noromaid-20b": {
|
||||
id: "neversleep/noromaid-20b",
|
||||
name: "Noromaid 20B",
|
||||
organization: "Neversleep",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"gryphe/mythomist-7b": {
|
||||
id: "gryphe/mythomist-7b",
|
||||
name: "MythoMist 7B",
|
||||
organization: "Gryphe",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"mancer/weaver": {
|
||||
id: "mancer/weaver",
|
||||
name: "Mancer: Weaver (alpha)",
|
||||
organization: "Mancer",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"nousresearch/nous-hermes-llama2-70b": {
|
||||
id: "nousresearch/nous-hermes-llama2-70b",
|
||||
name: "Nous: Hermes 70B",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"nousresearch/nous-capybara-7b": {
|
||||
id: "nousresearch/nous-capybara-7b",
|
||||
name: "Nous: Capybara 7B",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"codellama/codellama-70b-instruct": {
|
||||
id: "codellama/codellama-70b-instruct",
|
||||
name: "Meta: CodeLlama 70B Instruct",
|
||||
organization: "Codellama",
|
||||
maxLength: 2048,
|
||||
},
|
||||
"teknium/openhermes-2-mistral-7b": {
|
||||
id: "teknium/openhermes-2-mistral-7b",
|
||||
name: "OpenHermes 2 Mistral 7B",
|
||||
organization: "Teknium",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"teknium/openhermes-2.5-mistral-7b": {
|
||||
id: "teknium/openhermes-2.5-mistral-7b",
|
||||
name: "OpenHermes 2.5 Mistral 7B",
|
||||
organization: "Teknium",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"undi95/remm-slerp-l2-13b": {
|
||||
id: "undi95/remm-slerp-l2-13b",
|
||||
name: "ReMM SLERP 13B",
|
||||
organization: "Undi95",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"undi95/toppy-m-7b": {
|
||||
id: "undi95/toppy-m-7b",
|
||||
name: "Toppy M 7B",
|
||||
organization: "Undi95",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"openrouter/cinematika-7b": {
|
||||
id: "openrouter/cinematika-7b",
|
||||
name: "Cinematika 7B (alpha)",
|
||||
organization: "Openrouter",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"01-ai/yi-34b-chat": {
|
||||
id: "01-ai/yi-34b-chat",
|
||||
name: "Yi 34B Chat",
|
||||
organization: "01-ai",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"01-ai/yi-34b": {
|
||||
id: "01-ai/yi-34b",
|
||||
name: "Yi 34B (base)",
|
||||
organization: "01-ai",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"01-ai/yi-6b": {
|
||||
id: "01-ai/yi-6b",
|
||||
name: "Yi 6B (base)",
|
||||
organization: "01-ai",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"togethercomputer/stripedhyena-nous-7b": {
|
||||
id: "togethercomputer/stripedhyena-nous-7b",
|
||||
name: "StripedHyena Nous 7B",
|
||||
organization: "Togethercomputer",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"togethercomputer/stripedhyena-hessian-7b": {
|
||||
id: "togethercomputer/stripedhyena-hessian-7b",
|
||||
name: "StripedHyena Hessian 7B (base)",
|
||||
organization: "Togethercomputer",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"mistralai/mixtral-8x7b": {
|
||||
id: "mistralai/mixtral-8x7b",
|
||||
name: "Mistral: Mixtral 8x7B (base)",
|
||||
organization: "Mistralai",
|
||||
maxLength: 32768,
|
||||
},
|
||||
"nousresearch/nous-hermes-yi-34b": {
|
||||
id: "nousresearch/nous-hermes-yi-34b",
|
||||
name: "Nous: Hermes 2 Yi 34B",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"nousresearch/nous-hermes-2-mistral-7b-dpo": {
|
||||
id: "nousresearch/nous-hermes-2-mistral-7b-dpo",
|
||||
name: "Nous: Hermes 2 Mistral 7B DPO",
|
||||
organization: "Nousresearch",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"open-orca/mistral-7b-openorca": {
|
||||
id: "open-orca/mistral-7b-openorca",
|
||||
name: "Mistral OpenOrca 7B",
|
||||
organization: "Open-orca",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"huggingfaceh4/zephyr-7b-beta": {
|
||||
id: "huggingfaceh4/zephyr-7b-beta",
|
||||
name: "Hugging Face: Zephyr 7B",
|
||||
organization: "Huggingfaceh4",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"openai/gpt-3.5-turbo": {
|
||||
id: "openai/gpt-3.5-turbo",
|
||||
name: "OpenAI: GPT-3.5 Turbo",
|
||||
organization: "Openai",
|
||||
maxLength: 4095,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-0125": {
|
||||
id: "openai/gpt-3.5-turbo-0125",
|
||||
name: "OpenAI: GPT-3.5 Turbo 16k",
|
||||
organization: "Openai",
|
||||
maxLength: 16385,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-1106": {
|
||||
id: "openai/gpt-3.5-turbo-1106",
|
||||
name: "OpenAI: GPT-3.5 Turbo 16k (older v1106)",
|
||||
organization: "Openai",
|
||||
maxLength: 16385,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-0613": {
|
||||
id: "openai/gpt-3.5-turbo-0613",
|
||||
name: "OpenAI: GPT-3.5 Turbo (older v0613)",
|
||||
organization: "Openai",
|
||||
maxLength: 4095,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-0301": {
|
||||
id: "openai/gpt-3.5-turbo-0301",
|
||||
name: "OpenAI: GPT-3.5 Turbo (older v0301)",
|
||||
organization: "Openai",
|
||||
maxLength: 4095,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-16k": {
|
||||
id: "openai/gpt-3.5-turbo-16k",
|
||||
name: "OpenAI: GPT-3.5 Turbo 16k",
|
||||
organization: "Openai",
|
||||
maxLength: 16385,
|
||||
},
|
||||
"openai/gpt-4-turbo-preview": {
|
||||
id: "openai/gpt-4-turbo-preview",
|
||||
name: "OpenAI: GPT-4 Turbo (preview)",
|
||||
organization: "Openai",
|
||||
maxLength: 128000,
|
||||
},
|
||||
"openai/gpt-4-1106-preview": {
|
||||
id: "openai/gpt-4-1106-preview",
|
||||
name: "OpenAI: GPT-4 Turbo (older v1106)",
|
||||
organization: "Openai",
|
||||
maxLength: 128000,
|
||||
},
|
||||
"openai/gpt-4": {
|
||||
id: "openai/gpt-4",
|
||||
name: "OpenAI: GPT-4",
|
||||
organization: "Openai",
|
||||
maxLength: 8191,
|
||||
},
|
||||
"openai/gpt-4-0314": {
|
||||
id: "openai/gpt-4-0314",
|
||||
name: "OpenAI: GPT-4 (older v0314)",
|
||||
organization: "Openai",
|
||||
maxLength: 8191,
|
||||
},
|
||||
"openai/gpt-4-32k": {
|
||||
id: "openai/gpt-4-32k",
|
||||
name: "OpenAI: GPT-4 32k",
|
||||
organization: "Openai",
|
||||
maxLength: 32767,
|
||||
},
|
||||
"openai/gpt-4-32k-0314": {
|
||||
id: "openai/gpt-4-32k-0314",
|
||||
name: "OpenAI: GPT-4 32k (older v0314)",
|
||||
organization: "Openai",
|
||||
maxLength: 32767,
|
||||
},
|
||||
"openai/gpt-4-vision-preview": {
|
||||
id: "openai/gpt-4-vision-preview",
|
||||
name: "OpenAI: GPT-4 Vision (preview)",
|
||||
organization: "Openai",
|
||||
maxLength: 128000,
|
||||
},
|
||||
"openai/gpt-3.5-turbo-instruct": {
|
||||
id: "openai/gpt-3.5-turbo-instruct",
|
||||
name: "OpenAI: GPT-3.5 Turbo Instruct",
|
||||
organization: "Openai",
|
||||
maxLength: 4095,
|
||||
},
|
||||
"google/palm-2-chat-bison": {
|
||||
id: "google/palm-2-chat-bison",
|
||||
name: "Google: PaLM 2 Chat",
|
||||
organization: "Google",
|
||||
maxLength: 36864,
|
||||
},
|
||||
"google/palm-2-codechat-bison": {
|
||||
id: "google/palm-2-codechat-bison",
|
||||
name: "Google: PaLM 2 Code Chat",
|
||||
organization: "Google",
|
||||
maxLength: 28672,
|
||||
},
|
||||
"google/palm-2-chat-bison-32k": {
|
||||
id: "google/palm-2-chat-bison-32k",
|
||||
name: "Google: PaLM 2 Chat 32k",
|
||||
organization: "Google",
|
||||
maxLength: 131072,
|
||||
},
|
||||
"google/palm-2-codechat-bison-32k": {
|
||||
id: "google/palm-2-codechat-bison-32k",
|
||||
name: "Google: PaLM 2 Code Chat 32k",
|
||||
organization: "Google",
|
||||
maxLength: 131072,
|
||||
},
|
||||
"google/gemini-pro": {
|
||||
id: "google/gemini-pro",
|
||||
name: "Google: Gemini Pro (preview)",
|
||||
organization: "Google",
|
||||
maxLength: 131040,
|
||||
},
|
||||
"google/gemini-pro-vision": {
|
||||
id: "google/gemini-pro-vision",
|
||||
name: "Google: Gemini Pro Vision (preview)",
|
||||
organization: "Google",
|
||||
maxLength: 65536,
|
||||
},
|
||||
"perplexity/pplx-70b-online": {
|
||||
id: "perplexity/pplx-70b-online",
|
||||
name: "Perplexity: PPLX 70B Online",
|
||||
organization: "Perplexity",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"perplexity/pplx-7b-online": {
|
||||
id: "perplexity/pplx-7b-online",
|
||||
name: "Perplexity: PPLX 7B Online",
|
||||
organization: "Perplexity",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"perplexity/pplx-7b-chat": {
|
||||
id: "perplexity/pplx-7b-chat",
|
||||
name: "Perplexity: PPLX 7B Chat",
|
||||
organization: "Perplexity",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"perplexity/pplx-70b-chat": {
|
||||
id: "perplexity/pplx-70b-chat",
|
||||
name: "Perplexity: PPLX 70B Chat",
|
||||
organization: "Perplexity",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"meta-llama/llama-2-70b-chat": {
|
||||
id: "meta-llama/llama-2-70b-chat",
|
||||
name: "Meta: Llama v2 70B Chat",
|
||||
organization: "Meta-llama",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"jondurbin/airoboros-l2-70b": {
|
||||
id: "jondurbin/airoboros-l2-70b",
|
||||
name: "Airoboros 70B",
|
||||
organization: "Jondurbin",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"austism/chronos-hermes-13b": {
|
||||
id: "austism/chronos-hermes-13b",
|
||||
name: "Chronos Hermes 13B v2",
|
||||
organization: "Austism",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"mistralai/mistral-7b-instruct": {
|
||||
id: "mistralai/mistral-7b-instruct",
|
||||
name: "Mistral 7B Instruct",
|
||||
organization: "Mistralai",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"openchat/openchat-7b": {
|
||||
id: "openchat/openchat-7b",
|
||||
name: "OpenChat 3.5",
|
||||
organization: "Openchat",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"lizpreciatior/lzlv-70b-fp16-hf": {
|
||||
id: "lizpreciatior/lzlv-70b-fp16-hf",
|
||||
name: "lzlv 70B",
|
||||
organization: "Lizpreciatior",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"cognitivecomputations/dolphin-mixtral-8x7b": {
|
||||
id: "cognitivecomputations/dolphin-mixtral-8x7b",
|
||||
name: "Dolphin 2.6 Mixtral 8x7B 🐬",
|
||||
organization: "Cognitivecomputations",
|
||||
maxLength: 32000,
|
||||
},
|
||||
"rwkv/rwkv-5-world-3b": {
|
||||
id: "rwkv/rwkv-5-world-3b",
|
||||
name: "RWKV v5 World 3B",
|
||||
organization: "Rwkv",
|
||||
maxLength: 10000,
|
||||
},
|
||||
"recursal/rwkv-5-3b-ai-town": {
|
||||
id: "recursal/rwkv-5-3b-ai-town",
|
||||
name: "RWKV v5 3B AI Town",
|
||||
organization: "Recursal",
|
||||
maxLength: 10000,
|
||||
},
|
||||
"recursal/eagle-7b": {
|
||||
id: "recursal/eagle-7b",
|
||||
name: "RWKV v5: Eagle 7B",
|
||||
organization: "Recursal",
|
||||
maxLength: 10000,
|
||||
},
|
||||
"google/gemma-7b-it": {
|
||||
id: "google/gemma-7b-it",
|
||||
name: "Google: Gemma 7B",
|
||||
organization: "Google",
|
||||
maxLength: 8000,
|
||||
},
|
||||
"anthropic/claude-2": {
|
||||
id: "anthropic/claude-2",
|
||||
name: "Anthropic: Claude v2",
|
||||
organization: "Anthropic",
|
||||
maxLength: 200000,
|
||||
},
|
||||
"anthropic/claude-2.1": {
|
||||
id: "anthropic/claude-2.1",
|
||||
name: "Anthropic: Claude v2.1",
|
||||
organization: "Anthropic",
|
||||
maxLength: 200000,
|
||||
},
|
||||
"anthropic/claude-2.0": {
|
||||
id: "anthropic/claude-2.0",
|
||||
name: "Anthropic: Claude v2.0",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-instant-1": {
|
||||
id: "anthropic/claude-instant-1",
|
||||
name: "Anthropic: Claude Instant v1",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-instant-1.2": {
|
||||
id: "anthropic/claude-instant-1.2",
|
||||
name: "Anthropic: Claude Instant v1.2",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-1": {
|
||||
id: "anthropic/claude-1",
|
||||
name: "Anthropic: Claude v1",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-1.2": {
|
||||
id: "anthropic/claude-1.2",
|
||||
name: "Anthropic: Claude (older v1)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-instant-1.0": {
|
||||
id: "anthropic/claude-instant-1.0",
|
||||
name: "Anthropic: Claude Instant (older v1)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-instant-1.1": {
|
||||
id: "anthropic/claude-instant-1.1",
|
||||
name: "Anthropic: Claude Instant (older v1.1)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-2:beta": {
|
||||
id: "anthropic/claude-2:beta",
|
||||
name: "Anthropic: Claude v2 (experimental)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 200000,
|
||||
},
|
||||
"anthropic/claude-2.1:beta": {
|
||||
id: "anthropic/claude-2.1:beta",
|
||||
name: "Anthropic: Claude v2.1 (experimental)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 200000,
|
||||
},
|
||||
"anthropic/claude-2.0:beta": {
|
||||
id: "anthropic/claude-2.0:beta",
|
||||
name: "Anthropic: Claude v2.0 (experimental)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"anthropic/claude-instant-1:beta": {
|
||||
id: "anthropic/claude-instant-1:beta",
|
||||
name: "Anthropic: Claude Instant v1 (experimental)",
|
||||
organization: "Anthropic",
|
||||
maxLength: 100000,
|
||||
},
|
||||
"huggingfaceh4/zephyr-7b-beta:free": {
|
||||
id: "huggingfaceh4/zephyr-7b-beta:free",
|
||||
name: "Hugging Face: Zephyr 7B (free)",
|
||||
organization: "Huggingfaceh4",
|
||||
maxLength: 4096,
|
||||
},
|
||||
"openchat/openchat-7b:free": {
|
||||
id: "openchat/openchat-7b:free",
|
||||
name: "OpenChat 3.5 (free)",
|
||||
organization: "Openchat",
|
||||
maxLength: 8192,
|
||||
},
|
||||
"mistralai/mistral-tiny": {
|
||||
id: "mistralai/mistral-tiny",
|
||||
name: "Mistral: Tiny",
|
||||
organization: "Mistralai",
|
||||
maxLength: 32000,
|
||||
},
|
||||
"mistralai/mistral-small": {
|
||||
id: "mistralai/mistral-small",
|
||||
name: "Mistral: Small",
|
||||
organization: "Mistralai",
|
||||
maxLength: 32000,
|
||||
},
|
||||
"mistralai/mistral-medium": {
|
||||
id: "mistralai/mistral-medium",
|
||||
name: "Mistral: Medium",
|
||||
organization: "Mistralai",
|
||||
maxLength: 32000,
|
||||
},
|
||||
};
|
||||
|
||||
module.exports.MODELS = MODELS;
|
1
server/utils/AiProviders/openRouter/scripts/.gitignore
vendored
Normal file
1
server/utils/AiProviders/openRouter/scripts/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
*.json
|
37
server/utils/AiProviders/openRouter/scripts/parse.mjs
Normal file
37
server/utils/AiProviders/openRouter/scripts/parse.mjs
Normal file
@ -0,0 +1,37 @@
|
||||
// OpenRouter has lots of models we can use so we use this script
// to cache all the models. We can see the list of all the models
// here: https://openrouter.ai/docs#models

// To run, cd into this directory and run `node parse.mjs`
// copy outputs into the export in ../models.js

// Update the date below if you run this again because OpenRouter added new models.
// Last Collected: Feb 23, 2024

import fs from "fs";

// Fetch OpenRouter's public model list and cache it as chat_models.json
// in the shape expected by ../models.js ({ id, name, organization, maxLength }).
async function parseChatModels() {
  const response = await fetch("https://openrouter.ai/api/v1/models");
  const { data } = await response.json();

  const models = {};
  for (const model of data) {
    const vendor = model.id.split("/")[0];
    models[model.id] = {
      id: model.id,
      name: model.name,
      // capitalize first letter
      organization: vendor.charAt(0).toUpperCase() + vendor.slice(1),
      maxLength: model.context_length,
    };
  }

  fs.writeFileSync(
    "chat_models.json",
    JSON.stringify(models, null, 2),
    "utf-8"
  );
  return models;
}

parseChatModels();
|
204
server/utils/AiProviders/perplexity/index.js
Normal file
204
server/utils/AiProviders/perplexity/index.js
Normal file
@ -0,0 +1,204 @@
|
||||
const { NativeEmbedder } = require("../../EmbeddingEngines/native");
|
||||
const { chatPrompt } = require("../../chats");
|
||||
const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
|
||||
|
||||
// Lazily load the static Perplexity model catalog; falls back to an
// empty object when the module exports nothing.
function perplexityModels() {
  const models = require("./models.js").MODELS;
  return models || {};
}
|
||||
|
||||
class PerplexityLLM {
|
||||
constructor(embedder = null, modelPreference = null) {
|
||||
const { Configuration, OpenAIApi } = require("openai");
|
||||
if (!process.env.PERPLEXITY_API_KEY)
|
||||
throw new Error("No Perplexity API key was set.");
|
||||
|
||||
const config = new Configuration({
|
||||
basePath: "https://api.perplexity.ai",
|
||||
apiKey: process.env.PERPLEXITY_API_KEY,
|
||||
});
|
||||
this.openai = new OpenAIApi(config);
|
||||
this.model =
|
||||
modelPreference || process.env.PERPLEXITY_MODEL_PREF || "pplx-7b-online"; // Give at least a unique model to the provider as last fallback.
|
||||
this.limits = {
|
||||
history: this.promptWindowLimit() * 0.15,
|
||||
system: this.promptWindowLimit() * 0.15,
|
||||
user: this.promptWindowLimit() * 0.7,
|
||||
};
|
||||
|
||||
this.embedder = !embedder ? new NativeEmbedder() : embedder;
|
||||
this.defaultTemp = 0.7;
|
||||
}
|
||||
|
||||
#appendContext(contextTexts = []) {
|
||||
if (!contextTexts || !contextTexts.length) return "";
|
||||
return (
|
||||
"\nContext:\n" +
|
||||
contextTexts
|
||||
.map((text, i) => {
|
||||
return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
|
||||
})
|
||||
.join("")
|
||||
);
|
||||
}
|
||||
|
||||
allModelInformation() {
|
||||
return perplexityModels();
|
||||
}
|
||||
|
||||
streamingEnabled() {
|
||||
return "streamChat" in this && "streamGetChatCompletion" in this;
|
||||
}
|
||||
|
||||
promptWindowLimit() {
|
||||
const availableModels = this.allModelInformation();
|
||||
return availableModels[this.model]?.maxLength || 4096;
|
||||
}
|
||||
|
||||
async isValidChatCompletionModel(model = "") {
|
||||
const availableModels = this.allModelInformation();
|
||||
return availableModels.hasOwnProperty(model);
|
||||
}
|
||||
|
||||
constructPrompt({
|
||||
systemPrompt = "",
|
||||
contextTexts = [],
|
||||
chatHistory = [],
|
||||
userPrompt = "",
|
||||
}) {
|
||||
const prompt = {
|
||||
role: "system",
|
||||
content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
|
||||
};
|
||||
return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
|
||||
}
|
||||
|
||||
async isSafe(_input = "") {
|
||||
// Not implemented so must be stubbed
|
||||
return { safe: true, reasons: [] };
|
||||
}
|
||||
|
||||
async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`Perplexity chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const textResponse = await this.openai
|
||||
.createChatCompletion({
|
||||
model: this.model,
|
||||
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
|
||||
n: 1,
|
||||
messages: await this.compressMessages(
|
||||
{
|
||||
systemPrompt: chatPrompt(workspace),
|
||||
userPrompt: prompt,
|
||||
chatHistory,
|
||||
},
|
||||
rawHistory
|
||||
),
|
||||
})
|
||||
.then((json) => {
|
||||
const res = json.data;
|
||||
if (!res.hasOwnProperty("choices"))
|
||||
throw new Error("Perplexity chat: No results!");
|
||||
if (res.choices.length === 0)
|
||||
throw new Error("Perplexity chat: No results length!");
|
||||
return res.choices[0].message.content;
|
||||
})
|
||||
.catch((error) => {
|
||||
throw new Error(
|
||||
`Perplexity::createChatCompletion failed with: ${error.message}`
|
||||
);
|
||||
});
|
||||
|
||||
return textResponse;
|
||||
}
|
||||
|
||||
async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`Perplexity chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const streamRequest = await this.openai.createChatCompletion(
|
||||
{
|
||||
model: this.model,
|
||||
stream: true,
|
||||
temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
|
||||
n: 1,
|
||||
messages: await this.compressMessages(
|
||||
{
|
||||
systemPrompt: chatPrompt(workspace),
|
||||
userPrompt: prompt,
|
||||
chatHistory,
|
||||
},
|
||||
rawHistory
|
||||
),
|
||||
},
|
||||
{ responseType: "stream" }
|
||||
);
|
||||
return streamRequest;
|
||||
}
|
||||
|
||||
async getChatCompletion(messages = null, { temperature = 0.7 }) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`Perplexity chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const { data } = await this.openai
|
||||
.createChatCompletion({
|
||||
model: this.model,
|
||||
messages,
|
||||
temperature,
|
||||
})
|
||||
.catch((e) => {
|
||||
throw new Error(e.response.data.error.message);
|
||||
});
|
||||
|
||||
if (!data.hasOwnProperty("choices")) return null;
|
||||
return data.choices[0].message.content;
|
||||
}
|
||||
|
||||
async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
|
||||
if (!(await this.isValidChatCompletionModel(this.model)))
|
||||
throw new Error(
|
||||
`Perplexity chat: ${this.model} is not valid for chat completion!`
|
||||
);
|
||||
|
||||
const streamRequest = await this.openai.createChatCompletion(
|
||||
{
|
||||
model: this.model,
|
||||
stream: true,
|
||||
messages,
|
||||
temperature,
|
||||
},
|
||||
{ responseType: "stream" }
|
||||
);
|
||||
return streamRequest;
|
||||
}
|
||||
|
||||
handleStream(response, stream, responseProps) {
|
||||
return handleDefaultStreamResponse(response, stream, responseProps);
|
||||
}
|
||||
|
||||
// Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
|
||||
async embedTextInput(textInput) {
|
||||
return await this.embedder.embedTextInput(textInput);
|
||||
}
|
||||
async embedChunks(textChunks = []) {
|
||||
return await this.embedder.embedChunks(textChunks);
|
||||
}
|
||||
|
||||
async compressMessages(promptArgs = {}, rawHistory = []) {
|
||||
const { messageArrayCompressor } = require("../../helpers/chat");
|
||||
const messageArray = this.constructPrompt(promptArgs);
|
||||
return await messageArrayCompressor(this, messageArray, rawHistory);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
PerplexityLLM,
|
||||
perplexityModels,
|
||||
};
|
69
server/utils/AiProviders/perplexity/models.js
Normal file
69
server/utils/AiProviders/perplexity/models.js
Normal file
@ -0,0 +1,69 @@
|
||||
// Static catalog of Perplexity chat models: id/name are Perplexity's model
// names and maxLength is each model's context window (tokens), taken from
// the table in scripts/chat_models.txt (see scripts/parse.mjs, which is the
// intended way to regenerate this list when Perplexity adds models).
const MODELS = {
  "sonar-small-chat": {
    id: "sonar-small-chat",
    name: "sonar-small-chat",
    maxLength: 16384,
  },
  "sonar-small-online": {
    id: "sonar-small-online",
    name: "sonar-small-online",
    maxLength: 12000,
  },
  "sonar-medium-chat": {
    id: "sonar-medium-chat",
    name: "sonar-medium-chat",
    maxLength: 16384,
  },
  "sonar-medium-online": {
    id: "sonar-medium-online",
    name: "sonar-medium-online",
    maxLength: 12000,
  },
  "codellama-34b-instruct": {
    id: "codellama-34b-instruct",
    name: "codellama-34b-instruct",
    maxLength: 16384,
  },
  "codellama-70b-instruct": {
    id: "codellama-70b-instruct",
    name: "codellama-70b-instruct",
    maxLength: 16384,
  },
  "llama-2-70b-chat": {
    id: "llama-2-70b-chat",
    name: "llama-2-70b-chat",
    maxLength: 4096,
  },
  "mistral-7b-instruct": {
    id: "mistral-7b-instruct",
    name: "mistral-7b-instruct",
    maxLength: 16384,
  },
  "mixtral-8x7b-instruct": {
    id: "mixtral-8x7b-instruct",
    name: "mixtral-8x7b-instruct",
    maxLength: 16384,
  },
  "pplx-7b-chat": {
    id: "pplx-7b-chat",
    name: "pplx-7b-chat",
    maxLength: 16384,
  },
  "pplx-7b-online": {
    id: "pplx-7b-online",
    name: "pplx-7b-online",
    maxLength: 12000,
  },
  "pplx-70b-chat": {
    id: "pplx-70b-chat",
    name: "pplx-70b-chat",
    maxLength: 8192,
  },
  "pplx-70b-online": {
    id: "pplx-70b-online",
    name: "pplx-70b-online",
    maxLength: 4000,
  },
};

module.exports.MODELS = MODELS;
|
1
server/utils/AiProviders/perplexity/scripts/.gitignore
vendored
Normal file
1
server/utils/AiProviders/perplexity/scripts/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
*.json
|
15
server/utils/AiProviders/perplexity/scripts/chat_models.txt
Normal file
15
server/utils/AiProviders/perplexity/scripts/chat_models.txt
Normal file
@ -0,0 +1,15 @@
|
||||
| Model | Parameter Count | Context Length | Model Type |
|
||||
| :-------------------------- | :-------------- | :------------- | :-------------- |
|
||||
| `sonar-small-chat` | 7B | 16384 | Chat Completion |
|
||||
| `sonar-small-online` | 7B | 12000 | Chat Completion |
|
||||
| `sonar-medium-chat` | 8x7B | 16384 | Chat Completion |
|
||||
| `sonar-medium-online` | 8x7B | 12000 | Chat Completion |
|
||||
| `codellama-34b-instruct`[3] | 34B | 16384 | Chat Completion |
|
||||
| `codellama-70b-instruct` | 70B | 16384 | Chat Completion |
|
||||
| `llama-2-70b-chat`[3] | 70B | 4096 | Chat Completion |
|
||||
| `mistral-7b-instruct` [1] | 7B | 16384 | Chat Completion |
|
||||
| `mixtral-8x7b-instruct` | 8x7B | 16384 | Chat Completion |
|
||||
| `pplx-7b-chat`[2] [3] | 7B | 16384 | Chat Completion |
|
||||
| `pplx-7b-online`[2] [3] | 7B | 12000 | Chat Completion |
|
||||
| `pplx-70b-chat`[3] | 70B | 8192 | Chat Completion |
|
||||
| `pplx-70b-online`[3] | 70B | 4000 | Chat Completion |
|
44
server/utils/AiProviders/perplexity/scripts/parse.mjs
Normal file
44
server/utils/AiProviders/perplexity/scripts/parse.mjs
Normal file
@ -0,0 +1,44 @@
|
||||
// Perplexity does not provide a simple REST API to get models,
// so we have a table which we copy from their documentation
// https://docs.perplexity.ai/edit/model-cards that we can
// then parse and get all models from in a format that makes sense
// Why this does not exist is so bizarre, but whatever.

// To run, cd into this directory and run `node parse.mjs`
// copy outputs into the export in ../models.js

// Update the date below if you run this again because Perplexity added new models.
// Last Collected: Feb 23, 2024

import fs from "fs";

// Parse the copied markdown table in chat_models.txt into the
// { id, name, maxLength } map used by ../models.js and write it
// to chat_models.json.
function parseChatModels() {
  const models = {};
  const tableString = fs.readFileSync("chat_models.txt", { encoding: "utf-8" });
  // Drop the markdown header + separator rows; each remaining row is a model.
  const rows = tableString.split("\n").slice(2);

  rows.forEach((row) => {
    // Guard against blank rows (e.g. a trailing newline in the file),
    // which previously crashed on `model.replace` with model undefined.
    if (!row.trim()) return;
    let [model, _, contextLength] = row
      .split("|")
      .slice(1, -1)
      .map((text) => text.trim());
    // Strip backticks and footnote markers like "[3]" from the cells.
    model = model.replace(/`|\s*\[\d+\]\s*/g, "");
    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
    if (model && maxLength) {
      models[model] = {
        id: model,
        name: model,
        maxLength: maxLength,
      };
    }
  });

  fs.writeFileSync(
    "chat_models.json",
    JSON.stringify(models, null, 2),
    "utf-8"
  );
  return models;
}

parseChatModels();
|
90
server/utils/EmbeddingEngines/ollama/index.js
Normal file
90
server/utils/EmbeddingEngines/ollama/index.js
Normal file
@ -0,0 +1,90 @@
|
||||
const { maximumChunkLength } = require("../../helpers");
|
||||
|
||||
class OllamaEmbedder {
|
||||
constructor() {
|
||||
if (!process.env.EMBEDDING_BASE_PATH)
|
||||
throw new Error("No embedding base path was set.");
|
||||
if (!process.env.EMBEDDING_MODEL_PREF)
|
||||
throw new Error("No embedding model was set.");
|
||||
|
||||
this.basePath = `${process.env.EMBEDDING_BASE_PATH}/api/embeddings`;
|
||||
this.model = process.env.EMBEDDING_MODEL_PREF;
|
||||
// Limit of how many strings we can process in a single pass to stay with resource or network limits
|
||||
this.maxConcurrentChunks = 1;
|
||||
this.embeddingMaxChunkLength = maximumChunkLength();
|
||||
}
|
||||
|
||||
log(text, ...args) {
|
||||
console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args);
|
||||
}
|
||||
|
||||
async embedTextInput(textInput) {
|
||||
const result = await this.embedChunks([textInput]);
|
||||
return result?.[0] || [];
|
||||
}
|
||||
|
||||
async embedChunks(textChunks = []) {
|
||||
const embeddingRequests = [];
|
||||
this.log(
|
||||
`Embedding ${textChunks.length} chunks of text with ${this.model}.`
|
||||
);
|
||||
|
||||
for (const chunk of textChunks) {
|
||||
embeddingRequests.push(
|
||||
new Promise((resolve) => {
|
||||
fetch(this.basePath, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
model: this.model,
|
||||
prompt: chunk,
|
||||
}),
|
||||
})
|
||||
.then((res) => res.json())
|
||||
.then(({ embedding }) => {
|
||||
resolve({ data: embedding, error: null });
|
||||
return;
|
||||
})
|
||||
.catch((error) => {
|
||||
resolve({ data: [], error: error.message });
|
||||
return;
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const { data = [], error = null } = await Promise.all(
|
||||
embeddingRequests
|
||||
).then((results) => {
|
||||
// If any errors were returned from Ollama abort the entire sequence because the embeddings
|
||||
// will be incomplete.
|
||||
|
||||
const errors = results
|
||||
.filter((res) => !!res.error)
|
||||
.map((res) => res.error)
|
||||
.flat();
|
||||
if (errors.length > 0) {
|
||||
let uniqueErrors = new Set();
|
||||
errors.map((error) =>
|
||||
uniqueErrors.add(`[${error.type}]: ${error.message}`)
|
||||
);
|
||||
|
||||
return {
|
||||
data: [],
|
||||
error: Array.from(uniqueErrors).join(", "),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
data: results.map((res) => res?.data || []),
|
||||
error: null,
|
||||
};
|
||||
});
|
||||
|
||||
if (!!error) throw new Error(`Ollama Failed to embed: ${error}`);
|
||||
return data.length > 0 ? data : null;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
OllamaEmbedder,
|
||||
};
|
@ -1,3 +1,5 @@
|
||||
const { openRouterModels } = require("../AiProviders/openRouter");
|
||||
const { perplexityModels } = require("../AiProviders/perplexity");
|
||||
const { togetherAiModels } = require("../AiProviders/togetherAi");
|
||||
const SUPPORT_CUSTOM_MODELS = [
|
||||
"openai",
|
||||
@ -6,6 +8,8 @@ const SUPPORT_CUSTOM_MODELS = [
|
||||
"native-llm",
|
||||
"togetherai",
|
||||
"mistral",
|
||||
"perplexity",
|
||||
"openrouter",
|
||||
];
|
||||
|
||||
async function getCustomModels(provider = "", apiKey = null, basePath = null) {
|
||||
@ -25,6 +29,10 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
|
||||
return await getMistralModels(apiKey);
|
||||
case "native-llm":
|
||||
return nativeLLMModels();
|
||||
case "perplexity":
|
||||
return await getPerplexityModels();
|
||||
case "openrouter":
|
||||
return await getOpenRouterModels();
|
||||
default:
|
||||
return { models: [], error: "Invalid provider for custom models" };
|
||||
}
|
||||
@ -120,6 +128,35 @@ async function getTogetherAiModels() {
|
||||
return { models, error: null };
|
||||
}
|
||||
|
||||
/**
 * List Perplexity chat models from the static catalog in the
 * { models, error } shape used by the custom-models endpoint.
 * @returns {Promise<{models: Array<{id: string, name: string}>, error: null}>}
 */
async function getPerplexityModels() {
  const knownModels = perplexityModels();
  // Empty-catalog early return. The previous check
  // `!Object.keys(knownModels).length === 0` compared a boolean to 0 and
  // was always false, so it never fired.
  if (Object.keys(knownModels).length === 0)
    return { models: [], error: null };

  const models = Object.values(knownModels).map((model) => {
    return {
      id: model.id,
      name: model.name,
    };
  });
  return { models, error: null };
}
|
||||
|
||||
/**
 * List OpenRouter chat models from the cached catalog in the
 * { models, error } shape used by the custom-models endpoint.
 * @returns {Promise<{models: Array<{id: string, organization: string, name: string}>, error: null}>}
 */
async function getOpenRouterModels() {
  const knownModels = await openRouterModels();
  // Empty-catalog early return. The previous check
  // `!Object.keys(knownModels).length === 0` compared a boolean to 0 and
  // was always false, so it never fired.
  if (Object.keys(knownModels).length === 0)
    return { models: [], error: null };

  const models = Object.values(knownModels).map((model) => {
    return {
      id: model.id,
      organization: model.organization,
      name: model.name,
    };
  });
  return { models, error: null };
}
|
||||
|
||||
async function getMistralModels(apiKey = null) {
|
||||
const { Configuration, OpenAIApi } = require("openai");
|
||||
const config = new Configuration({
|
||||
|
@ -58,6 +58,12 @@ function getLLMProvider(modelPreference = null) {
|
||||
case "togetherai":
|
||||
const { TogetherAiLLM } = require("../AiProviders/togetherAi");
|
||||
return new TogetherAiLLM(embedder, modelPreference);
|
||||
case "perplexity":
|
||||
const { PerplexityLLM } = require("../AiProviders/perplexity");
|
||||
return new PerplexityLLM(embedder, modelPreference);
|
||||
case "openrouter":
|
||||
const { OpenRouterLLM } = require("../AiProviders/openRouter");
|
||||
return new OpenRouterLLM(embedder, modelPreference);
|
||||
case "mistral":
|
||||
const { MistralLLM } = require("../AiProviders/mistral");
|
||||
return new MistralLLM(embedder, modelPreference);
|
||||
@ -86,6 +92,9 @@ function getEmbeddingEngineSelection() {
|
||||
case "localai":
|
||||
const { LocalAiEmbedder } = require("../EmbeddingEngines/localAi");
|
||||
return new LocalAiEmbedder();
|
||||
case "ollama":
|
||||
const { OllamaEmbedder } = require("../EmbeddingEngines/ollama");
|
||||
return new OllamaEmbedder();
|
||||
case "native":
|
||||
const { NativeEmbedder } = require("../EmbeddingEngines/native");
|
||||
console.log("\x1b[34m[INFO]\x1b[0m Using Native Embedder");
|
||||
|
@ -135,7 +135,7 @@ const KEY_MAPPING = {
|
||||
},
|
||||
EmbeddingBasePath: {
|
||||
envKey: "EMBEDDING_BASE_PATH",
|
||||
checks: [isNotEmpty, validLLMExternalBasePath, validDockerizedUrl],
|
||||
checks: [isNotEmpty, validDockerizedUrl],
|
||||
},
|
||||
EmbeddingModelPref: {
|
||||
envKey: "EMBEDDING_MODEL_PREF",
|
||||
@ -239,6 +239,26 @@ const KEY_MAPPING = {
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
|
||||
// Perplexity Options
|
||||
PerplexityApiKey: {
|
||||
envKey: "PERPLEXITY_API_KEY",
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
PerplexityModelPref: {
|
||||
envKey: "PERPLEXITY_MODEL_PREF",
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
|
||||
// OpenRouter Options
|
||||
OpenRouterApiKey: {
|
||||
envKey: "OPENROUTER_API_KEY",
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
OpenRouterModelPref: {
|
||||
envKey: "OPENROUTER_MODEL_PREF",
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
|
||||
// System Settings
|
||||
AuthToken: {
|
||||
envKey: "AUTH_TOKEN",
|
||||
@ -314,6 +334,8 @@ function supportedLLM(input = "") {
|
||||
"togetherai",
|
||||
"mistral",
|
||||
"huggingface",
|
||||
"perplexity",
|
||||
"openrouter",
|
||||
].includes(input);
|
||||
return validSelection ? null : `${input} is not a valid LLM provider.`;
|
||||
}
|
||||
@ -333,7 +355,7 @@ function validAnthropicModel(input = "") {
|
||||
}
|
||||
|
||||
function supportedEmbeddingModel(input = "") {
|
||||
const supported = ["openai", "azure", "localai", "native"];
|
||||
const supported = ["openai", "azure", "localai", "native", "ollama"];
|
||||
return supported.includes(input)
|
||||
? null
|
||||
: `Invalid Embedding model type. Must be one of ${supported.join(", ")}.`;
|
||||
|
Loading…
Reference in New Issue
Block a user