anything-llm/server/endpoints/chat.js

const { v4: uuidv4 } = require("uuid");
const { reqBody, userFromSession, multiUserMode } = require("../utils/http");
const { validatedRequest } = require("../utils/middleware/validatedRequest");
const { Telemetry } = require("../models/telemetry");
const { streamChatWithWorkspace } = require("../utils/chats/stream");
const {
  ROLES,
  flexUserRoleValid,
} = require("../utils/middleware/multiUserProtected");
const { EventLogs } = require("../models/eventLogs");
const {
  validWorkspaceAndThreadSlug,
  validWorkspaceSlug,
} = require("../utils/middleware/validWorkspace");
const { writeResponseChunk } = require("../utils/helpers/chat/responses");
const { WorkspaceThread } = require("../models/workspaceThread");
const { User } = require("../models/user");
const truncate = require("truncate");

function chatEndpoints(app) {
  if (!app) return;
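
  // Streams an LLM reply for a workspace-level chat over server-sent events (SSE).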
  app.post(
    "/workspace/:slug/stream-chat",
    [validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
    async (request, response) => {
      try {
        const user = await userFromSession(request, response);
        const { message, attachments = [] } = reqBody(request);
        const workspace = response.locals.workspace;

        if (!message?.length) {
          response.status(400).json({
            id: uuidv4(),
            type: "abort",
            textResponse: null,
            sources: [],
            close: true,
            error: "Message is empty.",
          });
          return;
        }
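
        // Open the SSE stream before writing any chunks.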
        response.setHeader("Cache-Control", "no-cache");
        response.setHeader("Content-Type", "text/event-stream");
        response.setHeader("Access-Control-Allow-Origin", "*");
        response.setHeader("Connection", "keep-alive");
        response.flushHeaders();
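
        // Enforce the per-user daily message limit in multi-user mode.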
        if (multiUserMode(response) && !(await User.canSendChat(user))) {
          writeResponseChunk(response, {
            id: uuidv4(),
            type: "abort",
            textResponse: null,
            sources: [],
            close: true,
            error: `You have met your maximum 24 hour chat quota of ${user.dailyMessageLimit} chats. Try again later.`,
          });
          return;
        }

        await streamChatWithWorkspace(
          response,
          workspace,
          message,
          workspace?.chatMode,
          user,
          null, // no thread for workspace-level chats
          attachments
        );

        await Telemetry.sendTelemetry("sent_chat", {
          multiUserMode: multiUserMode(response),
          LLMSelection: process.env.LLM_PROVIDER || "openai",
          Embedder: process.env.EMBEDDING_ENGINE || "inherit",
          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
          TTSSelection: process.env.TTS_PROVIDER || "native",
        });

        await EventLogs.logEvent(
          "sent_chat",
          {
            workspaceName: workspace?.name,
            chatModel: workspace?.chatModel || "System Default",
          },
          user?.id
        );
        response.end();
      } catch (e) {
        console.error(e);
        writeResponseChunk(response, {
          id: uuidv4(),
          type: "abort",
          textResponse: null,
          sources: [],
          close: true,
          error: e.message,
        });
        response.end();
      }
    }
  );
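
  // Same streaming flow, scoped to a specific thread within the workspace.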
  app.post(
    "/workspace/:slug/thread/:threadSlug/stream-chat",
    [
      validatedRequest,
      flexUserRoleValid([ROLES.all]),
      validWorkspaceAndThreadSlug,
    ],
    async (request, response) => {
      try {
        const user = await userFromSession(request, response);
        const { message, attachments = [] } = reqBody(request);
        const workspace = response.locals.workspace;
        const thread = response.locals.thread;

        if (!message?.length) {
          response.status(400).json({
            id: uuidv4(),
            type: "abort",
            textResponse: null,
            sources: [],
            close: true,
            error: "Message is empty.",
          });
          return;
        }

        response.setHeader("Cache-Control", "no-cache");
        response.setHeader("Content-Type", "text/event-stream");
        response.setHeader("Access-Control-Allow-Origin", "*");
        response.setHeader("Connection", "keep-alive");
        response.flushHeaders();

        if (multiUserMode(response) && !(await User.canSendChat(user))) {
          writeResponseChunk(response, {
            id: uuidv4(),
            type: "abort",
            textResponse: null,
            sources: [],
            close: true,
            error: `You have met your maximum 24 hour chat quota of ${user.dailyMessageLimit} chats. Try again later.`,
          });
          return;
        }

        await streamChatWithWorkspace(
          response,
          workspace,
          message,
          workspace?.chatMode,
          user,
          thread,
          attachments
        );

        // If the thread was renamed, emit an event to the frontend via a special `action` response.
        await WorkspaceThread.autoRenameThread({
          thread,
          workspace,
          user,
          newName: truncate(message, 22),
          onRename: (thread) => {
            writeResponseChunk(response, {
              action: "rename_thread",
              thread: {
                slug: thread.slug,
                name: thread.name,
              },
            });
          },
        });

        await Telemetry.sendTelemetry("sent_chat", {
          multiUserMode: multiUserMode(response),
          LLMSelection: process.env.LLM_PROVIDER || "openai",
          Embedder: process.env.EMBEDDING_ENGINE || "inherit",
          VectorDbSelection: process.env.VECTOR_DB || "lancedb",
          multiModal: Array.isArray(attachments) && attachments?.length !== 0,
          TTSSelection: process.env.TTS_PROVIDER || "native",
        });

        await EventLogs.logEvent(
          "sent_chat",
          {
            workspaceName: workspace.name,
            thread: thread.name,
            chatModel: workspace?.chatModel || "System Default",
          },
          user?.id
        );
        response.end();
      } catch (e) {
        console.error(e);
        writeResponseChunk(response, {
          id: uuidv4(),
          type: "abort",
          textResponse: null,
          sources: [],
          close: true,
          error: e.message,
        });
        response.end();
      }
    }
  );
}

module.exports = { chatEndpoints };
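
// Usage sketch (an assumption, not verbatim from this repo): during server boot
// these endpoints are registered against the Express app or router, roughly:
//
//   const express = require("express");
//   const { chatEndpoints } = require("./endpoints/chat");
//   const app = express();
//   chatEndpoints(app);
//   app.listen(3001); // port is illustrative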