[FIX] Add max tokens field to generic OpenAI LLM connector (#1345)

* add max tokens field to generic openai llm connector

* add max_tokens property to generic openai agent provider
This commit is contained in:
Sean Hatfield 2024-05-10 14:49:02 -07:00 committed by GitHub
parent 734c5a9e96
commit 0a6a9e40c1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 25 additions and 0 deletions

View File

@@ -61,6 +61,21 @@ export default function GenericOpenAiOptions({ settings }) {
autoComplete="off"
/>
</div>
<div className="flex flex-col w-60">
<label className="text-white text-sm font-semibold block mb-4">
Max Tokens
</label>
<input
type="number"
name="GenericOpenAiMaxTokens"
className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
placeholder="Max tokens per request (eg: 1024)"
min={1}
defaultValue={settings?.GenericOpenAiMaxTokens || 1024}
required={true}
autoComplete="off"
/>
</div>
</div>
);
}

View File

@@ -373,6 +373,7 @@ const SystemSettings = {
GenericOpenAiModelPref: process.env.GENERIC_OPEN_AI_MODEL_PREF,
GenericOpenAiTokenLimit: process.env.GENERIC_OPEN_AI_MODEL_TOKEN_LIMIT,
GenericOpenAiKey: !!process.env.GENERIC_OPEN_AI_API_KEY,
GenericOpenAiMaxTokens: process.env.GENERIC_OPEN_AI_MAX_TOKENS,
// Cohere API Keys
CohereApiKey: !!process.env.COHERE_API_KEY,

View File

@@ -18,6 +18,7 @@ class GenericOpenAiLLM {
});
this.model =
modelPreference ?? process.env.GENERIC_OPEN_AI_MODEL_PREF ?? null;
this.maxTokens = process.env.GENERIC_OPEN_AI_MAX_TOKENS ?? 1024;
if (!this.model)
throw new Error("GenericOpenAI must have a valid model set.");
this.limits = {
@@ -94,6 +95,7 @@ class GenericOpenAiLLM {
model: this.model,
messages,
temperature,
max_tokens: this.maxTokens,
})
.catch((e) => {
throw new Error(e.response.data.error.message);
@@ -110,6 +112,7 @@ class GenericOpenAiLLM {
stream: true,
messages,
temperature,
max_tokens: this.maxTokens,
});
return streamRequest;
}

View File

@@ -24,6 +24,7 @@ class GenericOpenAiProvider extends InheritMultiple([Provider, UnTooled]) {
this._client = client;
this.model = model;
this.verbose = true;
this.maxTokens = process.env.GENERIC_OPEN_AI_MAX_TOKENS ?? 1024;
}
get client() {
@@ -36,6 +37,7 @@ class GenericOpenAiProvider extends InheritMultiple([Provider, UnTooled]) {
model: this.model,
temperature: 0,
messages,
max_tokens: this.maxTokens,
})
.then((result) => {
if (!result.hasOwnProperty("choices"))

View File

@@ -173,6 +173,10 @@ const KEY_MAPPING = {
envKey: "GENERIC_OPEN_AI_API_KEY",
checks: [],
},
GenericOpenAiMaxTokens: {
envKey: "GENERIC_OPEN_AI_MAX_TOKENS",
checks: [nonZero],
},
EmbeddingEngine: {
envKey: "EMBEDDING_ENGINE",