Set gpt-4o as default for OpenAI (#1391)

Timothy Carambat 2024-05-13 14:31:49 -07:00 committed by GitHub
parent 2fabc84578
commit 64b62290d7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 24 additions and 12 deletions


@@ -9,7 +9,7 @@ GID='1000'
 ###########################################
 # LLM_PROVIDER='openai'
 # OPEN_AI_KEY=
-# OPEN_MODEL_PREF='gpt-3.5-turbo'
+# OPEN_MODEL_PREF='gpt-4o'
 # LLM_PROVIDER='gemini'
 # GEMINI_API_KEY=


@@ -6,7 +6,7 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 ###########################################
 # LLM_PROVIDER='openai'
 # OPEN_AI_KEY=
-# OPEN_MODEL_PREF='gpt-3.5-turbo'
+# OPEN_MODEL_PREF='gpt-4o'
 # LLM_PROVIDER='gemini'
 # GEMINI_API_KEY=


@@ -297,7 +297,7 @@ const SystemSettings = {
     return {
       // OpenAI Keys
      OpenAiKey: !!process.env.OPEN_AI_KEY,
-      OpenAiModelPref: process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo",
+      OpenAiModelPref: process.env.OPEN_MODEL_PREF || "gpt-4o",
      // Azure + OpenAI Keys
      AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
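
Combined with the two .env examples above, the preference now resolves to gpt-4o whenever OPEN_MODEL_PREF is left unset or commented out. A minimal sketch of that fallback, runnable with plain Node:

    // Sketch of the preference fallback shown in this hunk; not the SystemSettings module itself.
    // OPEN_MODEL_PREF is optional in both .env examples, so the right-hand default wins when it is absent.
    const openAiModelPref = process.env.OPEN_MODEL_PREF || "gpt-4o";
    console.log(openAiModelPref); // "gpt-4o" unless OPEN_MODEL_PREF is exported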


@@ -11,8 +11,7 @@ class OpenAiLLM {
     this.openai = new OpenAIApi({
       apiKey: process.env.OPEN_AI_KEY,
     });
-    this.model =
-      modelPreference || process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
+    this.model = modelPreference || process.env.OPEN_MODEL_PREF || "gpt-4o";
     this.limits = {
       history: this.promptWindowLimit() * 0.15,
       system: this.promptWindowLimit() * 0.15,
@@ -48,6 +47,7 @@ class OpenAiLLM {
       case "gpt-3.5-turbo":
       case "gpt-3.5-turbo-1106":
         return 16_385;
+      case "gpt-4o":
       case "gpt-4-turbo":
       case "gpt-4-1106-preview":
       case "gpt-4-turbo-preview":


@@ -11,7 +11,7 @@ const Agent = {
   const aibitat = new AIbitat({
     provider: "openai",
-    model: "gpt-3.5-turbo",
+    model: "gpt-4o",
   })
     .use(cli.plugin())
     .function({


@@ -8,7 +8,7 @@ const {
 require("dotenv").config({ path: `../../../../.env.development` });
 const aibitat = new AIbitat({
-  model: "gpt-3.5-turbo",
+  model: "gpt-4o",
 })
   .use(cli.plugin())
   .use(fileHistory.plugin())


@@ -69,7 +69,7 @@ async function runAIbitat(socket) {
   console.log(chalk.blue("Booting AIbitat class & starting agent(s)"));
   const aibitat = new AIbitat({
     provider: "openai",
-    model: "gpt-3.5-turbo",
+    model: "gpt-4o",
   })
     .use(websocket.plugin({ socket }))
     .use(webBrowsing.plugin())


@@ -13,6 +13,10 @@ class OpenAIProvider extends Provider {
       input: 0.03,
       output: 0.06,
     },
+    "gpt-4o": {
+      input: 0.005,
+      output: 0.015,
+    },
     "gpt-4-32k": {
       input: 0.06,
       output: 0.12,
@@ -33,7 +37,7 @@ class OpenAIProvider extends Provider {
         apiKey: process.env.OPEN_AI_KEY,
         maxRetries: 3,
       },
-      model = "gpt-3.5-turbo",
+      model = "gpt-4o",
     } = config;
     const client = new OpenAI(options);
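
The new pricing entry makes gpt-4o noticeably cheaper than the neighboring rows shown here. A quick sanity check, assuming the table is USD per 1,000 tokens (the unit is not stated in the lines shown, but the figures line up with OpenAI's published rates); this is not the provider's own costing code:

    // Back-of-the-envelope comparison using only the entries visible in the hunk above.
    const COST_PER_1K = {
      "gpt-4o": { input: 0.005, output: 0.015 },
      "gpt-4-32k": { input: 0.06, output: 0.12 },
    };

    function estimateCost(model, promptTokens, completionTokens) {
      const rate = COST_PER_1K[model];
      return (promptTokens / 1000) * rate.input + (completionTokens / 1000) * rate.output;
    }

    console.log(estimateCost("gpt-4o", 2000, 500));    // ≈ 0.0175 USD
    console.log(estimateCost("gpt-4-32k", 2000, 500)); // ≈ 0.18 USD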


@@ -12,7 +12,7 @@ const Provider = require("../providers/ai-provider");
 const SUMMARY_MODEL = {
   anthropic: "claude-3-opus-20240229", // 200,000 tokens
-  openai: "gpt-3.5-turbo-1106", // 16,385 tokens
+  openai: "gpt-4o", // 128,000 tokens
 };
 async function summarizeContent(
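
Because the OpenAI summary model is now gpt-4o, the per-pass context budget grows from 16,385 to 128,000 tokens (per the inline comments above). A trivial sketch of the per-provider lookup, with the provider key assumed to come from the caller:

    // Minimal sketch of picking a summary model per provider, using only the map shown above.
    const SUMMARY_MODEL = {
      anthropic: "claude-3-opus-20240229", // 200,000 tokens
      openai: "gpt-4o", // 128,000 tokens
    };

    const provider = "openai"; // assumed caller-supplied value
    console.log(SUMMARY_MODEL[provider]); // "gpt-4o"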


@@ -146,7 +146,7 @@ class AgentHandler {
   #providerDefault() {
     switch (this.provider) {
       case "openai":
-        return "gpt-3.5-turbo";
+        return "gpt-4o";
       case "anthropic":
         return "claude-3-sonnet-20240229";
       case "lmstudio":
@@ -258,7 +258,7 @@ class AgentHandler {
   ) {
     this.aibitat = new AIbitat({
       provider: this.provider ?? "openai",
-      model: this.model ?? "gpt-3.5-turbo",
+      model: this.model ?? "gpt-4o",
       chats: await this.#chatHistory(20),
       handlerProps: {
         invocation: this.invocation,
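
Both agent hunks fall back to gpt-4o: #providerDefault() for the openai case, and the ?? default when the handler builds its AIbitat instance. A condensed sketch of that resolution order, with plain functions standing in for the class's private methods rather than reproducing its actual wiring:

    // Condensed sketch of the fallback order visible in the two hunks above.
    function providerDefault(provider) {
      switch (provider) {
        case "openai":
          return "gpt-4o";
        case "anthropic":
          return "claude-3-sonnet-20240229";
        default:
          return null; // other providers omitted from this sketch
      }
    }

    function resolveAgentModel(provider = "openai", model = null) {
      // An explicitly configured model wins; otherwise fall back to the provider default.
      return model ?? providerDefault(provider);
    }

    console.log(resolveAgentModel());                        // "gpt-4o"
    console.log(resolveAgentModel("openai", "gpt-4-turbo")); // "gpt-4-turbo"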


@@ -66,6 +66,14 @@ async function openAiModels(apiKey = null) {
       owned_by: "openai",
       organization: "OpenAi",
     },
+    {
+      name: "gpt-4o",
+      id: "gpt-4o",
+      object: "model",
+      created: 1677610602,
+      owned_by: "openai",
+      organization: "OpenAi",
+    },
     {
       name: "gpt-4",
       id: "gpt-4",