mirror of https://github.com/Mintplex-Labs/anything-llm.git
synced 2024-11-04 22:10:12 +01:00
Add API key option to LocalAI (#407)
* Add API key option to LocalAI
* add api key for model dropdown selector
This commit is contained in:
parent 203f5964d5
commit 6fa8b0ce93
@@ -27,6 +27,7 @@ CACHE_VECTORS="true"
 # LOCAL_AI_BASE_PATH='http://host.docker.internal:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"

 ###########################################
 ######## Embedding API SElECTION ##########
@@ -6,12 +6,11 @@ import System from "../../../models/system";
 export default function LocalAiOptions({ settings, showAlert = false }) {
   const [basePathValue, setBasePathValue] = useState(settings?.LocalAiBasePath);
   const [basePath, setBasePath] = useState(settings?.LocalAiBasePath);
-  function updateBasePath() {
-    setBasePath(basePathValue);
-  }
+  const [apiKeyValue, setApiKeyValue] = useState(settings?.LocalAiApiKey);
+  const [apiKey, setApiKey] = useState(settings?.LocalAiApiKey);

   return (
     <div className="w-full flex flex-col">
       <div className="w-full flex flex-col gap-y-4">
         {showAlert && (
           <div className="flex flex-col md:flex-row md:items-center gap-x-2 text-white mb-6 bg-blue-800/30 w-fit rounded-lg px-4 py-2">
             <div className="gap-x-2 flex items-center">
@@ -44,10 +43,14 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             autoComplete="off"
             spellCheck={false}
             onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={updateBasePath}
+            onBlur={() => setBasePath(basePathValue)}
           />
         </div>
-        <LocalAIModelSelection settings={settings} basePath={basePath} />
+        <LocalAIModelSelection
+          settings={settings}
+          basePath={basePath}
+          apiKey={apiKey}
+        />
         <div className="flex flex-col w-60">
           <label className="text-white text-sm font-semibold block mb-4">
             Token context window
@@ -65,11 +68,35 @@ export default function LocalAiOptions({ settings, showAlert = false }) {
             />
           </div>
         </div>
+        <div className="w-full flex items-center gap-4">
+          <div className="flex flex-col w-60">
+            <div className="flex flex-col gap-y-1 mb-4">
+              <label className="text-white text-sm font-semibold block">
+                Local AI API Key
+              </label>
+              <p className="text-xs italic text-white/60">
+                optional API key to use if running LocalAI with API keys.
+              </p>
+            </div>
+
+            <input
+              type="password"
+              name="LocalAiApiKey"
+              className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="sk-mysecretkey"
+              defaultValue={settings?.LocalAiApiKey ? "*".repeat(20) : ""}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={(e) => setApiKeyValue(e.target.value)}
+              onBlur={() => setApiKey(apiKeyValue)}
+            />
+          </div>
+        </div>
       </div>
     </div>
   );
 }

-function LocalAIModelSelection({ settings, basePath = null }) {
+function LocalAIModelSelection({ settings, basePath = null, apiKey = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);

@@ -81,12 +108,12 @@ function LocalAIModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("localai", null, basePath);
+      const { models } = await System.customModels("localai", apiKey, basePath);
       setCustomModels(models || []);
       setLoading(false);
     }
     findCustomModels();
-  }, [basePath]);
+  }, [basePath, apiKey]);

   if (loading || customModels.length == 0) {
     return (
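A note on the paired state above: apiKeyValue/apiKey mirrors the existing basePathValue/basePath split. The draft value changes on every keystroke, while the committed value only updates on blur, so the model-list fetch in LocalAIModelSelection (keyed on [basePath, apiKey]) appears to run once per edit rather than on every keystroke. Below is a minimal standalone sketch of that commit-on-blur pattern; the component and prop names are hypothetical, not project code.

// commit-on-blur.jsx, illustrative only; names are hypothetical.
import { useEffect, useState } from "react";

export default function CommitOnBlurInput({ onCommit }) {
  const [draft, setDraft] = useState("");         // like apiKeyValue
  const [committed, setCommitted] = useState(""); // like apiKey

  useEffect(() => {
    // Fires only when the committed value changes (i.e. after blur),
    // which is where an expensive refetch would live.
    onCommit?.(committed);
  }, [committed]);

  return (
    <input
      value={draft}
      onChange={(e) => setDraft(e.target.value)}
      onBlur={() => setCommitted(draft)}
    />
  );
}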
@@ -27,6 +27,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # LOCAL_AI_BASE_PATH='http://localhost:8080/v1'
 # LOCAL_AI_MODEL_PREF='luna-ai-llama2'
 # LOCAL_AI_MODEL_TOKEN_LIMIT=4096
+# LOCAL_AI_API_KEY="sk-123abc"

 ###########################################
 ######## Embedding API SElECTION ##########
@@ -8,7 +8,7 @@ const {
   acceptedFileTypes,
 } = require("../utils/files/documentProcessor");
 const { purgeDocument } = require("../utils/files/purgeDocument");
-const { getVectorDbClass, getLLMProvider } = require("../utils/helpers");
+const { getVectorDbClass } = require("../utils/helpers");
 const { updateENV, dumpENV } = require("../utils/helpers/updateENV");
 const {
   reqBody,
@@ -103,6 +103,7 @@ const SystemSettings = {
             LocalAiBasePath: process.env.LOCAL_AI_BASE_PATH,
             LocalAiModelPref: process.env.LOCAL_AI_MODEL_PREF,
             LocalAiTokenLimit: process.env.LOCAL_AI_MODEL_TOKEN_LIMIT,
+            LocalAiApiKey: !!process.env.LOCAL_AI_API_KEY,

             // For embedding credentials when localai is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
@@ -8,6 +8,11 @@ class LocalAiLLM {
     const { Configuration, OpenAIApi } = require("openai");
     const config = new Configuration({
       basePath: process.env.LOCAL_AI_BASE_PATH,
+      ...(!!process.env.LOCAL_AI_API_KEY
+        ? {
+            apiKey: process.env.LOCAL_AI_API_KEY,
+          }
+        : {}),
     });
     this.openai = new OpenAIApi(config);
     this.model = process.env.LOCAL_AI_MODEL_PREF;
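The spread in the constructor above only adds an apiKey field when LOCAL_AI_API_KEY is set, leaving the Configuration unchanged otherwise. A minimal Node.js illustration of the conditional-spread idiom, with made-up values (not project code):

// Illustration of the conditional-spread idiom; values are illustrative.
const LOCAL_AI_API_KEY = process.env.LOCAL_AI_API_KEY; // e.g. "sk-123abc" or undefined

const config = {
  basePath: "http://localhost:8080/v1",
  // When the key is unset, an empty object is spread and `apiKey` is
  // omitted entirely instead of appearing with an undefined value.
  ...(!!LOCAL_AI_API_KEY ? { apiKey: LOCAL_AI_API_KEY } : {}),
};

console.log(config);
// unset:       { basePath: 'http://localhost:8080/v1' }
// "sk-123abc": { basePath: 'http://localhost:8080/v1', apiKey: 'sk-123abc' }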
@@ -35,10 +35,11 @@ async function openAiModels(apiKey = null) {
   return { models, error: null };
 }

-async function localAIModels(basePath = null) {
+async function localAIModels(basePath = null, apiKey = null) {
   const { Configuration, OpenAIApi } = require("openai");
   const config = new Configuration({
     basePath,
+    ...(!!apiKey ? { apiKey } : {}),
   });
   const openai = new OpenAIApi(config);
   const models = await openai
@@ -67,6 +67,10 @@ const KEY_MAPPING = {
     envKey: "LOCAL_AI_MODEL_TOKEN_LIMIT",
     checks: [nonZero],
   },
+  LocalAiApiKey: {
+    envKey: "LOCAL_AI_API_KEY",
+    checks: [],
+  },

   EmbeddingEngine: {
     envKey: "EMBEDDING_ENGINE",
@@ -52,6 +52,13 @@ function multiUserMode(response) {
   return response?.locals?.multiUserMode;
 }

+function parseAuthHeader(headerValue = null, apiKey = null) {
+  if (headerValue === null || apiKey === null) return {};
+  if (headerValue === "Authorization")
+    return { Authorization: `Bearer ${apiKey}` };
+  return { [headerValue]: apiKey };
+}
+
 module.exports = {
   reqBody,
   multiUserMode,
@@ -59,4 +66,5 @@ module.exports = {
   makeJWT,
   decodeJWT,
   userFromSession,
+  parseAuthHeader,
 };
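The new parseAuthHeader helper builds a single-entry headers object and special-cases the standard Authorization header as a Bearer token. Expected results for a few calls, assuming the implementation added above is in scope (the key values are illustrative):

// Behavior of parseAuthHeader for a few inputs (key values are illustrative).
console.log(parseAuthHeader("Authorization", "sk-123abc"));
// { Authorization: 'Bearer sk-123abc' }

console.log(parseAuthHeader("X-Api-Key", "sk-123abc"));
// { 'X-Api-Key': 'sk-123abc' }

console.log(parseAuthHeader(null, "sk-123abc"));
// {}  (no header name supplied, so no auth header is produced)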
@@ -15,10 +15,10 @@ const Chroma = {
       ...(!!process.env.CHROMA_API_HEADER && !!process.env.CHROMA_API_KEY
         ? {
             fetchOptions: {
-              headers: {
-                [process.env.CHROMA_API_HEADER || "X-Api-Key"]:
-                  process.env.CHROMA_API_KEY,
-              },
+              headers: parseAuthHeader(
+                process.env.CHROMA_API_HEADER || "X-Api-Key",
+                process.env.CHROMA_API_KEY
+              ),
             },
           }
         : {}),
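For the Chroma client above, the helper produces the same headers as the old inline object when a custom header name such as X-Api-Key is used; the one visible behavioral difference is that an Authorization header now gets a Bearer prefix. A small self-contained comparison with illustrative values (parseAuthHeader copied from the hunk above):

// Comparison of the old and new Chroma header construction; values are illustrative.
function parseAuthHeader(headerValue = null, apiKey = null) {
  if (headerValue === null || apiKey === null) return {};
  if (headerValue === "Authorization")
    return { Authorization: `Bearer ${apiKey}` };
  return { [headerValue]: apiKey };
}

const CHROMA_API_HEADER = undefined;     // not set in the environment
const CHROMA_API_KEY = "my-chroma-key";

// Old form:
console.log({ [CHROMA_API_HEADER || "X-Api-Key"]: CHROMA_API_KEY });
// { 'X-Api-Key': 'my-chroma-key' }

// New form:
console.log(parseAuthHeader(CHROMA_API_HEADER || "X-Api-Key", CHROMA_API_KEY));
// { 'X-Api-Key': 'my-chroma-key' }

// Only differs when the configured header name is "Authorization":
console.log(parseAuthHeader("Authorization", CHROMA_API_KEY));
// { Authorization: 'Bearer my-chroma-key' }  (the old form had no "Bearer " prefix)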