diff --git a/frontend/src/models/system.js b/frontend/src/models/system.js
index 596348ed..0ffbae69 100644
--- a/frontend/src/models/system.js
+++ b/frontend/src/models/system.js
@@ -389,6 +389,29 @@ const System = {
return [];
});
},
+ eventLogs: async (offset = 0) => {
+ return await fetch(`${API_BASE}/system/event-logs`, {
+ method: "POST",
+ headers: baseHeaders(),
+ body: JSON.stringify({ offset }),
+ })
+ .then((res) => res.json())
+ .catch((e) => {
+ console.error(e);
+ return [];
+ });
+ },
+ clearEventLogs: async () => {
+ return await fetch(`${API_BASE}/system/event-logs`, {
+ method: "DELETE",
+ headers: baseHeaders(),
+ })
+ .then((res) => res.json())
+ .catch((e) => {
+ console.error(e);
+ return { success: false, error: e.message };
+ });
+ },
deleteChat: async (chatId) => {
return await fetch(`${API_BASE}/system/workspace-chats/${chatId}`, {
method: "DELETE",
diff --git a/frontend/src/pages/Admin/Logging/LogRow/index.jsx b/frontend/src/pages/Admin/Logging/LogRow/index.jsx
new file mode 100644
index 00000000..0ce89b35
--- /dev/null
+++ b/frontend/src/pages/Admin/Logging/LogRow/index.jsx
@@ -0,0 +1,105 @@
+import { CaretDown, CaretUp } from "@phosphor-icons/react";
+import { useEffect, useState } from "react";
+
+export default function LogRow({ log }) {
+ const [expanded, setExpanded] = useState(false);
+ const [metadata, setMetadata] = useState(null);
+ const [hasMetadata, setHasMetadata] = useState(false);
+
+ useEffect(() => {
+ function parseAndSetMetadata() {
+ try {
+ let data = JSON.parse(log.metadata);
+ setHasMetadata(Object.keys(data)?.length > 0);
+ setMetadata(data);
+ } catch {}
+ }
+ parseAndSetMetadata();
+ }, [log.metadata]);
+
+ const handleRowClick = () => {
+ if (log.metadata !== "{}") {
+ setExpanded(!expanded);
+ }
+ };
+
+ return (
+ <>
+
+
+
+ {log.user.username}
+ |
+
+ {log.occurredAt}
+ |
+ {hasMetadata && (
+ <>
+ {expanded ? (
+
+
+ hide
+ |
+ ) : (
+
+
+ show
+ |
+ )}
+ >
+ )}
+
+
+ >
+ );
+}
+
+const EventMetadata = ({ metadata, expanded = false }) => {
+ if (!metadata || !expanded) return null;
+ return (
+
+
+ Event Metadata
+ |
+
+
+
+ {JSON.stringify(metadata, null, 2)}
+
+
+ |
+
+ );
+};
+
+const EventBadge = ({ event }) => {
+ let colorTheme = { bg: "bg-sky-600/20", text: "text-sky-400 " };
+ if (event.includes("update"))
+ colorTheme = { bg: "bg-yellow-600/20", text: "text-yellow-400 " };
+ if (event.includes("failed_") || event.includes("deleted"))
+ colorTheme = { bg: "bg-red-600/20", text: "text-red-400 " };
+ if (event === "login_event")
+ colorTheme = { bg: "bg-green-600/20", text: "text-green-400 " };
+
+ return (
+
+
+ {event}
+
+ |
+ );
+};
diff --git a/frontend/src/pages/Admin/Logging/index.jsx b/frontend/src/pages/Admin/Logging/index.jsx
new file mode 100644
index 00000000..0219b54a
--- /dev/null
+++ b/frontend/src/pages/Admin/Logging/index.jsx
@@ -0,0 +1,138 @@
+import Sidebar, { SidebarMobileHeader } from "@/components/SettingsSidebar";
+import useQuery from "@/hooks/useQuery";
+import System from "@/models/system";
+import { useEffect, useState } from "react";
+import { isMobile } from "react-device-detect";
+import * as Skeleton from "react-loading-skeleton";
+import LogRow from "./LogRow";
+import showToast from "@/utils/toast";
+
+export default function AdminLogs() {
+ const handleResetLogs = async () => {
+ if (
+ !window.confirm(
+ "Are you sure you want to clear all event logs? This action is irreversible."
+ )
+ )
+ return;
+ const { success, error } = await System.clearEventLogs();
+ if (success) {
+ showToast("Event logs cleared successfully.", "success");
+ setTimeout(() => {
+ window.location.reload();
+ }, 1000);
+ } else {
+ showToast(`Failed to clear logs: ${error}`, "error");
+ }
+ };
+ return (
+
+ {!isMobile &&
}
+
+ {isMobile &&
}
+
+
+
+
Event Logs
+
+
+
+ View all actions and events happening on this instance for
+ monitoring.
+
+
+
+
+
+
+ );
+}
+
+function LogsContainer() {
+ const query = useQuery();
+ const [loading, setLoading] = useState(true);
+ const [logs, setLogs] = useState([]);
+ const [offset, setOffset] = useState(Number(query.get("offset") || 0));
+ const [canNext, setCanNext] = useState(false);
+
+ const handlePrevious = () => {
+ setOffset(Math.max(offset - 1, 0));
+ };
+ const handleNext = () => {
+ setOffset(offset + 1);
+ };
+
+ useEffect(() => {
+ async function fetchLogs() {
+ const { logs: _logs, hasPages = false } = await System.eventLogs(offset);
+ setLogs(_logs);
+ setCanNext(hasPages);
+ setLoading(false);
+ }
+ fetchLogs();
+ }, [offset]);
+
+ if (loading) {
+ return (
+
+ );
+ }
+
+ return (
+ <>
+
+
+
+
+ Event Type
+ |
+
+ User
+ |
+
+ Occurred At
+ |
+
+ {" "}
+ |
+
+
+
+ {!!logs && logs.map((log) => )}
+
+
+
+
+
+
+ >
+ );
+}
diff --git a/frontend/src/utils/paths.js b/frontend/src/utils/paths.js
index 8fbaacec..06428c60 100644
--- a/frontend/src/utils/paths.js
+++ b/frontend/src/utils/paths.js
@@ -96,6 +96,9 @@ export default {
apiKeys: () => {
return "/settings/api-keys";
},
+ logs: () => {
+ return "/settings/event-logs";
+ },
embedSetup: () => {
return `/settings/embed-config`;
},
diff --git a/server/endpoints/admin.js b/server/endpoints/admin.js
index b107a11b..d9e1f9a0 100644
--- a/server/endpoints/admin.js
+++ b/server/endpoints/admin.js
@@ -1,7 +1,9 @@
const { ApiKey } = require("../models/apiKeys");
const { Document } = require("../models/documents");
+const { EventLogs } = require("../models/eventLogs");
const { Invite } = require("../models/invite");
const { SystemSettings } = require("../models/systemSettings");
+const { Telemetry } = require("../models/telemetry");
const { User } = require("../models/user");
const { DocumentVectors } = require("../models/vectors");
const { Workspace } = require("../models/workspace");
@@ -56,6 +58,14 @@ function adminEndpoints(app) {
}
const { user: newUser, error } = await User.create(newUserParams);
+ await EventLogs.logEvent(
+ "user_created",
+ {
+ userName: newUser.username,
+ createdBy: currUser.username,
+ },
+ currUser.id
+ );
response.status(200).json({ user: newUser, error });
} catch (e) {
console.error(e);
@@ -121,6 +131,14 @@ function adminEndpoints(app) {
}
await User.delete({ id: Number(id) });
+ await EventLogs.logEvent(
+ "user_deleted",
+ {
+ userName: user.username,
+ deletedBy: currUser.username,
+ },
+ currUser.id
+ );
response.status(200).json({ success: true, error: null });
} catch (e) {
console.error(e);
@@ -150,6 +168,14 @@ function adminEndpoints(app) {
try {
const user = await userFromSession(request, response);
const { invite, error } = await Invite.create(user.id);
+ await EventLogs.logEvent(
+ "invite_created",
+ {
+ inviteCode: invite.code,
+ createdBy: response.locals?.user?.username,
+ },
+ response.locals?.user?.id
+ );
response.status(200).json({ invite, error });
} catch (e) {
console.error(e);
@@ -165,6 +191,11 @@ function adminEndpoints(app) {
try {
const { id } = request.params;
const { success, error } = await Invite.deactivate(id);
+ await EventLogs.logEvent(
+ "invite_deleted",
+ { deletedBy: response.locals?.user?.username },
+ response.locals?.user?.id
+ );
response.status(200).json({ success, error });
} catch (e) {
console.error(e);
@@ -323,6 +354,13 @@ function adminEndpoints(app) {
try {
const user = await userFromSession(request, response);
const { apiKey, error } = await ApiKey.create(user.id);
+
+ await Telemetry.sendTelemetry("api_key_created");
+ await EventLogs.logEvent(
+ "api_key_created",
+ { createdBy: user?.username },
+ user?.id
+ );
return response.status(200).json({
apiKey,
error,
@@ -341,6 +379,12 @@ function adminEndpoints(app) {
try {
const { id } = request.params;
await ApiKey.delete({ id: Number(id) });
+
+ await EventLogs.logEvent(
+ "api_key_deleted",
+ { deletedBy: response.locals?.user?.username },
+ response?.locals?.user?.id
+ );
return response.status(200).end();
} catch (e) {
console.error(e);
diff --git a/server/endpoints/api/admin/index.js b/server/endpoints/api/admin/index.js
index 1f2a5bae..e91672e0 100644
--- a/server/endpoints/api/admin/index.js
+++ b/server/endpoints/api/admin/index.js
@@ -1,3 +1,4 @@
+const { EventLogs } = require("../../../models/eventLogs");
const { Invite } = require("../../../models/invite");
const { SystemSettings } = require("../../../models/systemSettings");
const { User } = require("../../../models/user");
@@ -259,7 +260,11 @@ function apiAdminEndpoints(app) {
}
const { id } = request.params;
- await User.delete({ id });
+ const user = await User.get({ id: Number(id) });
+ await User.delete({ id: user.id });
+ await EventLogs.logEvent("api_user_deleted", {
+ userName: user.username,
+ });
response.status(200).json({ success: true, error: null });
} catch (e) {
console.error(e);
diff --git a/server/endpoints/api/document/index.js b/server/endpoints/api/document/index.js
index 81704352..b72debbd 100644
--- a/server/endpoints/api/document/index.js
+++ b/server/endpoints/api/document/index.js
@@ -12,6 +12,7 @@ const {
findDocumentInDocuments,
} = require("../../../utils/files");
const { reqBody } = require("../../../utils/http");
+const { EventLogs } = require("../../../models/eventLogs");
const { handleUploads } = setupMulter();
function apiDocumentEndpoints(app) {
@@ -22,7 +23,7 @@ function apiDocumentEndpoints(app) {
[validApiKey],
handleUploads.single("file"),
async (request, response) => {
- /*
+ /*
#swagger.tags = ['Documents']
#swagger.description = 'Upload a new file to AnythingLLM to be parsed and prepared for embedding.'
#swagger.requestBody = {
@@ -68,9 +69,9 @@ function apiDocumentEndpoints(app) {
]
}
}
- }
+ }
}
- }
+ }
#swagger.responses[403] = {
schema: {
"$ref": "#/definitions/InvalidAPIKey"
@@ -105,6 +106,9 @@ function apiDocumentEndpoints(app) {
`Document ${originalname} uploaded processed and successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("document_uploaded");
+ await EventLogs.logEvent("api_document_uploaded", {
+ documentName: originalname,
+ });
response.status(200).json({ success: true, error: null, documents });
} catch (e) {
console.log(e.message, e);
@@ -117,7 +121,7 @@ function apiDocumentEndpoints(app) {
"/v1/document/upload-link",
[validApiKey],
async (request, response) => {
- /*
+ /*
#swagger.tags = ['Documents']
#swagger.description = 'Upload a valid URL for AnythingLLM to scrape and prepare for embedding.'
#swagger.requestBody = {
@@ -132,7 +136,7 @@ function apiDocumentEndpoints(app) {
"link": "https://useanything.com"
}
}
- }
+ }
}
}
#swagger.responses[200] = {
@@ -161,9 +165,9 @@ function apiDocumentEndpoints(app) {
]
}
}
- }
+ }
}
- }
+ }
#swagger.responses[403] = {
schema: {
"$ref": "#/definitions/InvalidAPIKey"
@@ -196,7 +200,10 @@ function apiDocumentEndpoints(app) {
console.log(
`Link ${link} uploaded processed and successfully. It is now available in documents.`
);
- await Telemetry.sendTelemetry("document_uploaded");
+ await Telemetry.sendTelemetry("link_uploaded");
+ await EventLogs.logEvent("api_link_uploaded", {
+ link,
+ });
response.status(200).json({ success: true, error: null, documents });
} catch (e) {
console.log(e.message, e);
@@ -206,7 +213,7 @@ function apiDocumentEndpoints(app) {
);
app.get("/v1/documents", [validApiKey], async (_, response) => {
- /*
+ /*
#swagger.tags = ['Documents']
#swagger.description = 'List of all locally-stored documents in instance'
#swagger.responses[200] = {
@@ -231,9 +238,9 @@ function apiDocumentEndpoints(app) {
}
}
}
- }
+ }
}
- }
+ }
#swagger.responses[403] = {
schema: {
"$ref": "#/definitions/InvalidAPIKey"
@@ -250,7 +257,7 @@ function apiDocumentEndpoints(app) {
});
app.get("/v1/document/:docName", [validApiKey], async (request, response) => {
- /*
+ /*
#swagger.tags = ['Documents']
#swagger.description = 'Get a single document by its unique AnythingLLM document name'
#swagger.parameters['docName'] = {
@@ -281,9 +288,9 @@ function apiDocumentEndpoints(app) {
}
}
}
- }
+ }
}
- }
+ }
#swagger.responses[403] = {
schema: {
"$ref": "#/definitions/InvalidAPIKey"
@@ -308,7 +315,7 @@ function apiDocumentEndpoints(app) {
"/v1/document/accepted-file-types",
[validApiKey],
async (_, response) => {
- /*
+ /*
#swagger.tags = ['Documents']
#swagger.description = 'Check available filetypes and MIMEs that can be uploaded.'
#swagger.responses[200] = {
@@ -337,9 +344,9 @@ function apiDocumentEndpoints(app) {
}
}
}
- }
+ }
}
- }
+ }
#swagger.responses[403] = {
schema: {
"$ref": "#/definitions/InvalidAPIKey"
diff --git a/server/endpoints/api/workspace/index.js b/server/endpoints/api/workspace/index.js
index c1642ce4..885d0f1a 100644
--- a/server/endpoints/api/workspace/index.js
+++ b/server/endpoints/api/workspace/index.js
@@ -16,6 +16,7 @@ const {
writeResponseChunk,
VALID_CHAT_MODE,
} = require("../../../utils/chats/stream");
+const { EventLogs } = require("../../../models/eventLogs");
function apiWorkspaceEndpoints(app) {
if (!app) return;
@@ -73,6 +74,9 @@ function apiWorkspaceEndpoints(app) {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+ await EventLogs.logEvent("api_workspace_created", {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ });
response.status(200).json({ workspace, message });
} catch (e) {
console.log(e.message, e);
@@ -206,6 +210,10 @@ function apiWorkspaceEndpoints(app) {
await DocumentVectors.deleteForWorkspace(workspaceId);
await Document.delete({ workspaceId: workspaceId });
await Workspace.delete({ id: workspaceId });
+
+ await EventLogs.logEvent("api_workspace_deleted", {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ });
try {
await VectorDb["delete-namespace"]({ namespace: slug });
} catch (e) {
@@ -519,6 +527,10 @@ function apiWorkspaceEndpoints(app) {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+ await EventLogs.logEvent("api_sent_chat", {
+ workspaceName: workspace?.name,
+ chatModel: workspace?.chatModel || "System Default",
+ });
response.status(200).json({ ...result });
} catch (e) {
response.status(500).json({
@@ -637,6 +649,10 @@ function apiWorkspaceEndpoints(app) {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+ await EventLogs.logEvent("api_sent_chat", {
+ workspaceName: workspace?.name,
+ chatModel: workspace?.chatModel || "System Default",
+ });
response.end();
} catch (e) {
console.error(e);
diff --git a/server/endpoints/chat.js b/server/endpoints/chat.js
index 23739084..848a7a36 100644
--- a/server/endpoints/chat.js
+++ b/server/endpoints/chat.js
@@ -14,6 +14,7 @@ const {
ROLES,
flexUserRoleValid,
} = require("../utils/middleware/multiUserProtected");
+const { EventLogs } = require("../models/eventLogs");
function chatEndpoints(app) {
if (!app) return;
@@ -98,6 +99,15 @@ function chatEndpoints(app) {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+
+ await EventLogs.logEvent(
+ "sent_chat",
+ {
+ workspaceName: workspace?.name,
+ chatModel: workspace?.chatModel || "System Default",
+ },
+ user?.id
+ );
response.end();
} catch (e) {
console.error(e);
diff --git a/server/endpoints/embedManagement.js b/server/endpoints/embedManagement.js
index c3a27ce4..7ebab23e 100644
--- a/server/endpoints/embedManagement.js
+++ b/server/endpoints/embedManagement.js
@@ -1,5 +1,7 @@
const { EmbedChats } = require("../models/embedChats");
const { EmbedConfig } = require("../models/embedConfig");
+const { EventLogs } = require("../models/eventLogs");
+const { Workspace } = require("../models/workspace");
const { reqBody, userFromSession } = require("../utils/http");
const { validEmbedConfigId } = require("../utils/middleware/embedMiddleware");
const {
@@ -32,9 +34,14 @@ function embedManagementEndpoints(app) {
[validatedRequest, flexUserRoleValid([ROLES.admin])],
async (request, response) => {
try {
- const user = userFromSession(request, response);
+ const user = await userFromSession(request, response);
const data = reqBody(request);
const { embed, message: error } = await EmbedConfig.new(data, user?.id);
+ await EventLogs.logEvent(
+ "embed_created",
+ { embedId: embed.id },
+ user?.id
+ );
response.status(200).json({ embed, error });
} catch (e) {
console.error(e);
@@ -48,9 +55,11 @@ function embedManagementEndpoints(app) {
[validatedRequest, flexUserRoleValid([ROLES.admin]), validEmbedConfigId],
async (request, response) => {
try {
+ const user = await userFromSession(request, response);
const { embedId } = request.params;
const updates = reqBody(request);
const { success, error } = await EmbedConfig.update(embedId, updates);
+ await EventLogs.logEvent("embed_updated", { embedId }, user?.id);
response.status(200).json({ success, error });
} catch (e) {
console.error(e);
@@ -66,6 +75,11 @@ function embedManagementEndpoints(app) {
try {
const { embedId } = request.params;
await EmbedConfig.delete({ id: Number(embedId) });
+ await EventLogs.logEvent(
+ "embed_deleted",
+ { embedId },
+ response?.locals?.user?.id
+ );
response.status(200).json({ success: true, error: null });
} catch (e) {
console.error(e);
diff --git a/server/endpoints/invite.js b/server/endpoints/invite.js
index 4fd8d154..38eb71de 100644
--- a/server/endpoints/invite.js
+++ b/server/endpoints/invite.js
@@ -1,3 +1,4 @@
+const { EventLogs } = require("../models/eventLogs");
const { Invite } = require("../models/invite");
const { User } = require("../models/user");
const { reqBody } = require("../utils/http");
@@ -56,6 +57,14 @@ function inviteEndpoints(app) {
}
await Invite.markClaimed(invite.id, user);
+ await EventLogs.logEvent(
+ "invite_accepted",
+ {
+ username: user.username,
+ },
+ user.id
+ );
+
response.status(200).json({ success: true, error: null });
} catch (e) {
console.error(e);
diff --git a/server/endpoints/system.js b/server/endpoints/system.js
index 100d0a4b..823de7f1 100644
--- a/server/endpoints/system.js
+++ b/server/endpoints/system.js
@@ -48,6 +48,7 @@ const {
prepareWorkspaceChatsForExport,
exportChatsAsType,
} = require("../utils/helpers/chat/convertTo");
+const { EventLogs } = require("../models/eventLogs");
function systemEndpoints(app) {
if (!app) return;
@@ -114,6 +115,14 @@ function systemEndpoints(app) {
const existingUser = await User.get({ username });
if (!existingUser) {
+ await EventLogs.logEvent(
+ "failed_login_invalid_username",
+ {
+ ip: request.ip || "Unknown IP",
+ username: username || "Unknown user",
+ },
+ existingUser?.id
+ );
response.status(200).json({
user: null,
valid: false,
@@ -124,6 +133,14 @@ function systemEndpoints(app) {
}
if (!bcrypt.compareSync(password, existingUser.password)) {
+ await EventLogs.logEvent(
+ "failed_login_invalid_password",
+ {
+ ip: request.ip || "Unknown IP",
+ username: username || "Unknown user",
+ },
+ existingUser?.id
+ );
response.status(200).json({
user: null,
valid: false,
@@ -134,6 +151,14 @@ function systemEndpoints(app) {
}
if (existingUser.suspended) {
+ await EventLogs.logEvent(
+ "failed_login_account_suspended",
+ {
+ ip: request.ip || "Unknown IP",
+ username: username || "Unknown user",
+ },
+ existingUser?.id
+ );
response.status(200).json({
user: null,
valid: false,
@@ -148,6 +173,16 @@ function systemEndpoints(app) {
{ multiUserMode: false },
existingUser?.id
);
+
+ await EventLogs.logEvent(
+ "login_event",
+ {
+ ip: request.ip || "Unknown IP",
+ username: existingUser.username || "Unknown user",
+ },
+ existingUser?.id
+ );
+
response.status(200).json({
valid: true,
user: existingUser,
@@ -166,6 +201,10 @@ function systemEndpoints(app) {
bcrypt.hashSync(process.env.AUTH_TOKEN, 10)
)
) {
+ await EventLogs.logEvent("failed_login_invalid_password", {
+ ip: request.ip || "Unknown IP",
+ multiUserMode: false,
+ });
response.status(401).json({
valid: false,
token: null,
@@ -175,6 +214,10 @@ function systemEndpoints(app) {
}
await Telemetry.sendTelemetry("login_event", { multiUserMode: false });
+ await EventLogs.logEvent("login_event", {
+ ip: request.ip || "Unknown IP",
+ multiUserMode: false,
+ });
response.status(200).json({
valid: true,
token: makeJWT({ p: password }, "30d"),
@@ -288,7 +331,11 @@ function systemEndpoints(app) {
async (request, response) => {
try {
const body = reqBody(request);
- const { newValues, error } = await updateENV(body);
+ const { newValues, error } = await updateENV(
+ body,
+ false,
+ response?.locals?.user?.id
+ );
if (process.env.NODE_ENV === "production") await dumpENV();
response.status(200).json({ newValues, error });
} catch (e) {
@@ -364,6 +411,7 @@ function systemEndpoints(app) {
await Telemetry.sendTelemetry("enabled_multi_user_mode", {
multiUserMode: true,
});
+ await EventLogs.logEvent("multi_user_mode_enabled", {}, user?.id);
response.status(200).json({ success: !!user, error });
} catch (e) {
await User.delete({});
@@ -694,6 +742,12 @@ function systemEndpoints(app) {
}
const { apiKey, error } = await ApiKey.create();
+ await Telemetry.sendTelemetry("api_key_created");
+ await EventLogs.logEvent(
+ "api_key_created",
+ {},
+ response?.locals?.user?.id
+ );
return response.status(200).json({
apiKey,
error,
@@ -715,6 +769,11 @@ function systemEndpoints(app) {
}
await ApiKey.delete();
+ await EventLogs.logEvent(
+ "api_key_deleted",
+ { deletedBy: response.locals?.user?.username },
+ response?.locals?.user?.id
+ );
return response.status(200).end();
} catch (error) {
console.error(error);
@@ -744,6 +803,45 @@ function systemEndpoints(app) {
}
);
+ app.post(
+ "/system/event-logs",
+ [validatedRequest, flexUserRoleValid([ROLES.admin])],
+ async (request, response) => {
+ try {
+ const { offset = 0, limit = 20 } = reqBody(request);
+ const logs = await EventLogs.whereWithData({}, limit, offset * limit, {
+ id: "desc",
+ });
+ const totalLogs = await EventLogs.count();
+ const hasPages = totalLogs > (offset + 1) * limit;
+
+ response.status(200).json({ logs: logs, hasPages, totalLogs });
+ } catch (e) {
+ console.error(e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
+ app.delete(
+ "/system/event-logs",
+ [validatedRequest, flexUserRoleValid([ROLES.admin])],
+ async (_, response) => {
+ try {
+ await EventLogs.delete();
+ await EventLogs.logEvent(
+ "event_logs_cleared",
+ {},
+ response?.locals?.user?.id
+ );
+ response.json({ success: true });
+ } catch (e) {
+ console.error(e);
+ response.sendStatus(500).end();
+ }
+ }
+ );
+
app.post(
"/system/workspace-chats",
[validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],
@@ -790,6 +888,13 @@ function systemEndpoints(app) {
const { type = "jsonl" } = request.query;
const chats = await prepareWorkspaceChatsForExport();
const { contentType, data } = await exportChatsAsType(chats, type);
+ await EventLogs.logEvent(
+ "exported_chats",
+ {
+ type,
+ },
+ response.locals.user?.id
+ );
response.setHeader("Content-Type", contentType);
response.status(200).send(data);
} catch (e) {
diff --git a/server/endpoints/workspaces.js b/server/endpoints/workspaces.js
index b04d2337..57418062 100644
--- a/server/endpoints/workspaces.js
+++ b/server/endpoints/workspaces.js
@@ -17,6 +17,7 @@ const {
flexUserRoleValid,
ROLES,
} = require("../utils/middleware/multiUserProtected");
+const { EventLogs } = require("../models/eventLogs");
const {
WorkspaceSuggestedMessages,
} = require("../models/workspacesSuggestedMessages");
@@ -43,6 +44,14 @@ function workspaceEndpoints(app) {
},
user?.id
);
+
+ await EventLogs.logEvent(
+ "workspace_created",
+ {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ },
+ user?.id
+ );
if (onboardingComplete === true)
await Telemetry.sendTelemetry("onboarding_complete");
@@ -112,6 +121,13 @@ function workspaceEndpoints(app) {
`Document ${originalname} uploaded processed and successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("document_uploaded");
+ await EventLogs.logEvent(
+ "document_uploaded",
+ {
+ documentName: originalname,
+ },
+ response.locals?.user?.id
+ );
response.status(200).json({ success: true, error: null });
}
);
@@ -144,6 +160,11 @@ function workspaceEndpoints(app) {
`Link ${link} uploaded processed and successfully. It is now available in documents.`
);
await Telemetry.sendTelemetry("link_uploaded");
+ await EventLogs.logEvent(
+ "link_uploaded",
+ { link },
+ response.locals?.user?.id
+ );
response.status(200).json({ success: true, error: null });
}
);
@@ -165,10 +186,15 @@ function workspaceEndpoints(app) {
return;
}
- await Document.removeDocuments(currWorkspace, deletes);
+ await Document.removeDocuments(
+ currWorkspace,
+ deletes,
+ response.locals?.user?.id
+ );
const { failedToEmbed = [], errors = [] } = await Document.addDocuments(
currWorkspace,
- adds
+ adds,
+ response.locals?.user?.id
);
const updatedWorkspace = await Workspace.get({ id: currWorkspace.id });
response.status(200).json({
@@ -209,6 +235,14 @@ function workspaceEndpoints(app) {
await Document.delete({ workspaceId: Number(workspace.id) });
await Workspace.delete({ id: Number(workspace.id) });
+ await EventLogs.logEvent(
+ "workspace_deleted",
+ {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ },
+ response.locals?.user?.id
+ );
+
try {
await VectorDb["delete-namespace"]({ namespace: slug });
} catch (e) {
diff --git a/server/models/apiKeys.js b/server/models/apiKeys.js
index b6242397..32727fec 100644
--- a/server/models/apiKeys.js
+++ b/server/models/apiKeys.js
@@ -1,4 +1,3 @@
-const { Telemetry } = require("./telemetry");
const prisma = require("../utils/prisma");
const ApiKey = {
@@ -19,7 +18,6 @@ const ApiKey = {
},
});
- await Telemetry.sendTelemetry("api_key_created");
return { apiKey, error: null };
} catch (error) {
console.error("FAILED TO CREATE API KEY.", error.message);
diff --git a/server/models/documents.js b/server/models/documents.js
index bdb29dc7..9f50aa91 100644
--- a/server/models/documents.js
+++ b/server/models/documents.js
@@ -3,6 +3,7 @@ const { v4: uuidv4 } = require("uuid");
const { getVectorDbClass } = require("../utils/helpers");
const prisma = require("../utils/prisma");
const { Telemetry } = require("./telemetry");
+const { EventLogs } = require("./eventLogs");
const Document = {
forWorkspace: async function (workspaceId = null) {
@@ -34,7 +35,7 @@ const Document = {
}
},
- addDocuments: async function (workspace, additions = []) {
+ addDocuments: async function (workspace, additions = [], userId = null) {
const VectorDb = getVectorDbClass();
if (additions.length === 0) return { failed: [], embedded: [] };
const embedded = [];
@@ -84,10 +85,18 @@ const Document = {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+ await EventLogs.logEvent(
+ "workspace_documents_added",
+ {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ numberOfDocumentsAdded: additions.length,
+ },
+ userId
+ );
return { failedToEmbed, errors: Array.from(errors), embedded };
},
- removeDocuments: async function (workspace, removals = []) {
+ removeDocuments: async function (workspace, removals = [], userId = null) {
const VectorDb = getVectorDbClass();
if (removals.length === 0) return;
@@ -119,6 +128,14 @@ const Document = {
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
VectorDbSelection: process.env.VECTOR_DB || "pinecone",
});
+ await EventLogs.logEvent(
+ "workspace_documents_removed",
+ {
+ workspaceName: workspace?.name || "Unknown Workspace",
+ numberOfDocuments: removals.length,
+ },
+ userId
+ );
return true;
},
diff --git a/server/models/eventLogs.js b/server/models/eventLogs.js
new file mode 100644
index 00000000..51240431
--- /dev/null
+++ b/server/models/eventLogs.js
@@ -0,0 +1,129 @@
+const prisma = require("../utils/prisma");
+
+const EventLogs = {
+ logEvent: async function (event, metadata = {}, userId = null) {
+ try {
+ const eventLog = await prisma.event_logs.create({
+ data: {
+ event,
+ metadata: metadata ? JSON.stringify(metadata) : null,
+ userId: userId ? Number(userId) : null,
+ occurredAt: new Date(),
+ },
+ });
+ console.log(`\x1b[32m[Event Logged]\x1b[0m - ${event}`);
+ return { eventLog, message: null };
+ } catch (error) {
+ console.error(
+ `\x1b[31m[Event Logging Failed]\x1b[0m - ${event}`,
+ error.message
+ );
+ return { eventLog: null, message: error.message };
+ }
+ },
+
+ getByEvent: async function (event, limit = null, orderBy = null) {
+ try {
+ const logs = await prisma.event_logs.findMany({
+ where: { event },
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null
+ ? { orderBy }
+ : { orderBy: { occurredAt: "desc" } }),
+ });
+ return logs;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ getByUserId: async function (userId, limit = null, orderBy = null) {
+ try {
+ const logs = await prisma.event_logs.findMany({
+ where: { userId },
+ ...(limit !== null ? { take: limit } : {}),
+ ...(orderBy !== null
+ ? { orderBy }
+ : { orderBy: { occurredAt: "desc" } }),
+ });
+ return logs;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ where: async function (
+ clause = {},
+ limit = null,
+ orderBy = null,
+ offset = null
+ ) {
+ try {
+ const logs = await prisma.event_logs.findMany({
+ where: clause,
+ ...(limit !== null ? { take: limit } : {}),
+ ...(offset !== null ? { skip: offset } : {}),
+ ...(orderBy !== null
+ ? { orderBy }
+ : { orderBy: { occurredAt: "desc" } }),
+ });
+ return logs;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ whereWithData: async function (
+ clause = {},
+ limit = null,
+ offset = null,
+ orderBy = null
+ ) {
+ const { User } = require("./user");
+
+ try {
+ const results = await this.where(clause, limit, orderBy, offset);
+
+ for (const res of results) {
+ const user = res.userId ? await User.get({ id: res.userId }) : null;
+ res.user = user
+ ? { username: user.username }
+ : { username: "unknown user" };
+ }
+
+ return results;
+ } catch (error) {
+ console.error(error.message);
+ return [];
+ }
+ },
+
+ count: async function (clause = {}) {
+ try {
+ const count = await prisma.event_logs.count({
+ where: clause,
+ });
+ return count;
+ } catch (error) {
+ console.error(error.message);
+ return 0;
+ }
+ },
+
+ delete: async function (clause = {}) {
+ try {
+ await prisma.event_logs.deleteMany({
+ where: clause,
+ });
+ return true;
+ } catch (error) {
+ console.error(error.message);
+ return false;
+ }
+ },
+};
+
+module.exports = { EventLogs };
diff --git a/server/models/user.js b/server/models/user.js
index 269219fc..c447950c 100644
--- a/server/models/user.js
+++ b/server/models/user.js
@@ -1,4 +1,5 @@
const prisma = require("../utils/prisma");
+const { EventLogs } = require("./eventLogs");
const User = {
create: async function ({ username, password, role = "default" }) {
@@ -24,25 +25,52 @@ const User = {
}
},
+ // Log the changes to a user object, but omit sensitive fields
+ // that are not meant to be logged.
+ loggedChanges: function (updates, prev = {}) {
+ const changes = {};
+ const sensitiveFields = ["password"];
+
+ Object.keys(updates).forEach((key) => {
+ if (!sensitiveFields.includes(key) && updates[key] !== prev[key]) {
+ changes[key] = `${prev[key]} => ${updates[key]}`;
+ }
+ });
+
+ return changes;
+ },
+
update: async function (userId, updates = {}) {
try {
- // Rehash new password if it exists as update field
+ const currentUser = await prisma.users.findUnique({
+ where: { id: parseInt(userId) },
+ });
+ if (!currentUser) {
+ return { success: false, error: "User not found" };
+ }
+
if (updates.hasOwnProperty("password")) {
const passwordCheck = this.checkPasswordComplexity(updates.password);
if (!passwordCheck.checkedOK) {
return { success: false, error: passwordCheck.error };
}
-
const bcrypt = require("bcrypt");
updates.password = bcrypt.hashSync(updates.password, 10);
- } else {
- delete updates.password;
}
- await prisma.users.update({
+ const user = await prisma.users.update({
where: { id: parseInt(userId) },
data: updates,
});
+
+ await EventLogs.logEvent(
+ "user_updated",
+ {
+ username: user.username,
+ changes: this.loggedChanges(updates, currentUser),
+ },
+ userId
+ );
return { success: true, error: null };
} catch (error) {
console.error(error.message);
diff --git a/server/prisma/migrations/20240206211916_init/migration.sql b/server/prisma/migrations/20240206211916_init/migration.sql
new file mode 100644
index 00000000..f2e882a0
--- /dev/null
+++ b/server/prisma/migrations/20240206211916_init/migration.sql
@@ -0,0 +1,11 @@
+-- CreateTable
+CREATE TABLE "event_logs" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "event" TEXT NOT NULL,
+ "metadata" TEXT,
+ "userId" INTEGER,
+ "occurredAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+-- CreateIndex
+CREATE INDEX "event_logs_event_idx" ON "event_logs"("event");
diff --git a/server/prisma/schema.prisma b/server/prisma/schema.prisma
index ede8a1fd..1747db32 100644
--- a/server/prisma/schema.prisma
+++ b/server/prisma/schema.prisma
@@ -181,3 +181,13 @@ model embed_chats {
embed_config embed_configs @relation(fields: [embed_id], references: [id], onDelete: Cascade)
users users? @relation(fields: [usersId], references: [id])
}
+
+model event_logs {
+ id Int @id @default(autoincrement())
+ event String
+ metadata String?
+ userId Int?
+ occurredAt DateTime @default(now())
+
+ @@index([event])
+}
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index acd77b2f..f89a193f 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -430,7 +430,7 @@ async function wipeWorkspaceModelPreference(key, prev, next) {
// read from an ENV file as this seems to be a complicating step for many so allowing people to write
// to the process will at least alleviate that issue. It does not perform comprehensive validity checks or sanity checks
// and is simply for debugging when the .env not found issue many come across.
-async function updateENV(newENVs = {}, force = false) {
+async function updateENV(newENVs = {}, force = false, userId = null) {
let error = "";
const validKeys = Object.keys(KEY_MAPPING);
const ENV_KEYS = Object.keys(newENVs).filter(
@@ -458,9 +458,25 @@ async function updateENV(newENVs = {}, force = false) {
await postUpdateFunc(key, prevValue, nextValue);
}
+ await logChangesToEventLog(newValues, userId);
return { newValues, error: error?.length > 0 ? error : false };
}
+// Emit an event-log entry for each notable ENV key that changed during
+// updateENV. Only the keys listed in eventMapping are tracked; every other
+// updated key is applied silently. EventLogs is lazy-required — presumably
+// to avoid a circular import between updateENV and the models; confirm.
+async function logChangesToEventLog(newValues = {}, userId = null) {
+ const { EventLogs } = require("../../models/eventLogs");
+ const eventMapping = {
+ LLMProvider: "update_llm_provider",
+ EmbeddingEngine: "update_embedding_engine",
+ VectorDB: "update_vector_db",
+ };
+
+ for (const [key, eventName] of Object.entries(eventMapping)) {
+ if (!newValues.hasOwnProperty(key)) continue;
+ await EventLogs.logEvent(eventName, {}, userId);
+ }
+ return;
+}
+
async function dumpENV() {
const fs = require("fs");
const path = require("path");