diff --git a/frontend/src/components/Sidebar/ActiveWorkspaces/index.jsx b/frontend/src/components/Sidebar/ActiveWorkspaces/index.jsx
index 9a3cdc58..549d3f34 100644
--- a/frontend/src/components/Sidebar/ActiveWorkspaces/index.jsx
+++ b/frontend/src/components/Sidebar/ActiveWorkspaces/index.jsx
@@ -75,7 +75,7 @@ export default function ActiveWorkspaces() {
);
})}
{showing && !!selectedWs && (
-
+
)}
>
);
diff --git a/frontend/src/models/workspace.js b/frontend/src/models/workspace.js
index 11f97437..cf0a6120 100644
--- a/frontend/src/models/workspace.js
+++ b/frontend/src/models/workspace.js
@@ -15,6 +15,22 @@ const Workspace = {
return { workspace, message };
},
+  // Update writable workspace settings (eg: name, openAiTemp) by slug.
+  update: async function (slug, data = {}) {
+    const response = await fetch(`${API_BASE}/workspace/${slug}/update`, {
+      method: "POST",
+      body: JSON.stringify(data),
+      headers: baseHeaders(),
+    })
+      .then((res) => res.json())
+      .catch((e) => {
+        return { workspace: null, message: e.message };
+      });
+
+    // Only surface the fields callers rely on; workspace is null on failure.
+    const { workspace, message } = response;
+    return { workspace, message };
+  },
modifyEmbeddings: async function (slug, changes = {}) {
const { workspace, message } = await fetch(
`${API_BASE}/workspace/${slug}/update-embeddings`,
diff --git a/frontend/src/utils/chat/markdown.js b/frontend/src/utils/chat/markdown.js
index ea8d794d..7c6ccd6e 100644
--- a/frontend/src/utils/chat/markdown.js
+++ b/frontend/src/utils/chat/markdown.js
@@ -31,6 +31,6 @@ window.copySnippet = function () {
}, 5000);
};
-export default function renderMarkdown(text) {
+export default function renderMarkdown(text = "") {
return markdown.render(text);
}
diff --git a/server/endpoints/workspaces.js b/server/endpoints/workspaces.js
index d37b1ef6..73c9e17a 100644
--- a/server/endpoints/workspaces.js
+++ b/server/endpoints/workspaces.js
@@ -20,6 +20,28 @@ function workspaceEndpoints(app) {
}
});
+  app.post("/workspace/:slug/update", async (request, response) => {
+    try {
+      const { slug = null } = request.params;
+      const data = reqBody(request);
+      // Escape single quotes (SQLite-style doubling): slug is user-controlled URL input interpolated into SQL.
+      const currWorkspace = await Workspace.get(`slug = '${String(slug).replace(/'/g, "''")}'`);
+
+      if (!currWorkspace) {
+        response.sendStatus(400).end();
+        return;
+      }
+
+      const { workspace, message } = await Workspace.update(
+        currWorkspace.id,
+        data
+      );
+      response.status(200).json({ workspace, message });
+    } catch (e) {
+      console.log(e.message, e);
+      response.sendStatus(500).end();
+    }
+  });
+
app.post("/workspace/:slug/update-embeddings", async (request, response) => {
try {
const { slug = null } = request.params;
diff --git a/server/index.js b/server/index.js
index 5a506ae6..e96deda4 100644
--- a/server/index.js
+++ b/server/index.js
@@ -12,6 +12,7 @@ const { systemEndpoints } = require("./endpoints/system");
const { workspaceEndpoints } = require("./endpoints/workspaces");
const { chatEndpoints } = require("./endpoints/chat");
const { getVectorDbClass } = require("./utils/helpers");
+const { validateTablePragmas } = require("./utils/database");
const app = express();
const apiRouter = express.Router();
@@ -25,8 +26,9 @@ app.use(
);
apiRouter.use("/system/*", validatedRequest);
-apiRouter.use("/workspace/*", validatedRequest);
systemEndpoints(apiRouter);
+
+apiRouter.use("/workspace/*", validatedRequest);
workspaceEndpoints(apiRouter);
chatEndpoints(apiRouter);
@@ -75,7 +77,8 @@ app.all("*", function (_, response) {
});
app
- .listen(process.env.SERVER_PORT || 3001, () => {
+ .listen(process.env.SERVER_PORT || 3001, async () => {
+ await validateTablePragmas();
console.log(
`Example app listening on port ${process.env.SERVER_PORT || 3001}`
);
diff --git a/server/models/documents.js b/server/models/documents.js
index 73277119..777bd717 100644
--- a/server/models/documents.js
+++ b/server/models/documents.js
@@ -1,6 +1,7 @@
const { fileData } = require("../utils/files");
const { v4: uuidv4 } = require("uuid");
const { getVectorDbClass } = require("../utils/helpers");
+const { checkForMigrations } = require("../utils/database");
const Document = {
tablename: "workspace_documents",
@@ -14,7 +15,15 @@ const Document = {
createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
`,
- db: async function () {
+ migrateTable: async function () {
+ console.log(`\x1b[34m[MIGRATING]\x1b[0m Checking for Document migrations`);
+ const db = await this.db(false);
+ await checkForMigrations(this, db);
+ },
+ migrations: function () {
+ return [];
+ },
+ db: async function (tracing = true) {
const sqlite3 = require("sqlite3").verbose();
const { open } = require("sqlite");
@@ -28,7 +37,8 @@ const Document = {
await db.exec(
`CREATE TABLE IF NOT EXISTS ${this.tablename} (${this.colsInit})`
);
- db.on("trace", (sql) => console.log(sql));
+
+ if (tracing) db.on("trace", (sql) => console.log(sql));
return db;
},
forWorkspace: async function (workspaceId = null) {
diff --git a/server/models/vectors.js b/server/models/vectors.js
index 776179d0..9e1a8dd4 100644
--- a/server/models/vectors.js
+++ b/server/models/vectors.js
@@ -1,8 +1,8 @@
+const { checkForMigrations } = require("../utils/database");
const { Document } = require("./documents");
// TODO: Do we want to store entire vectorized chunks in here
// so that we can easily spin up temp-namespace clones for threading
-//
const DocumentVectors = {
tablename: "document_vectors",
colsInit: `
@@ -12,7 +12,17 @@ const DocumentVectors = {
createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
`,
- db: async function () {
+ migrateTable: async function () {
+ console.log(
+ `\x1b[34m[MIGRATING]\x1b[0m Checking for DocumentVector migrations`
+ );
+ const db = await this.db(false);
+ await checkForMigrations(this, db);
+ },
+ migrations: function () {
+ return [];
+ },
+ db: async function (tracing = true) {
const sqlite3 = require("sqlite3").verbose();
const { open } = require("sqlite");
@@ -26,7 +36,8 @@ const DocumentVectors = {
await db.exec(
`CREATE TABLE IF NOT EXISTS ${this.tablename} (${this.colsInit})`
);
- db.on("trace", (sql) => console.log(sql));
+
+ if (tracing) db.on("trace", (sql) => console.log(sql));
return db;
},
bulkInsert: async function (vectorRecords = []) {
diff --git a/server/models/workspace.js b/server/models/workspace.js
index 6472f477..09c3712c 100644
--- a/server/models/workspace.js
+++ b/server/models/workspace.js
@@ -1,17 +1,50 @@
const slugify = require("slugify");
const { Document } = require("./documents");
+const { checkForMigrations } = require("../utils/database");
const Workspace = {
tablename: "workspaces",
+ writable: [
+ // Used for generic updates so we can validate keys in request body
+ "name",
+ "slug",
+ "vectorTag",
+ "openAiTemp",
+ "lastUpdatedAt",
+ ],
colsInit: `
id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL UNIQUE,
+ name TEXT NOT NULL,
slug TEXT NOT NULL UNIQUE,
vectorTag TEXT DEFAULT NULL,
createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
+ openAiTemp REAL DEFAULT NULL,
lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
`,
- db: async function () {
+ migrateTable: async function () {
+ console.log(`\x1b[34m[MIGRATING]\x1b[0m Checking for Workspace migrations`);
+ const db = await this.db(false);
+ await checkForMigrations(this, db);
+ },
+ migrations: function () {
+ return [
+ {
+ colName: "openAiTemp",
+ execCmd: `ALTER TABLE ${this.tablename} ADD COLUMN openAiTemp REAL DEFAULT NULL`,
+ doif: false,
+ },
+ {
+ colName: "id",
+ execCmd: `CREATE TRIGGER IF NOT EXISTS Trg_LastUpdated AFTER UPDATE ON ${this.tablename}
+ FOR EACH ROW
+ BEGIN
+ UPDATE ${this.tablename} SET lastUpdatedAt = CURRENT_TIMESTAMP WHERE id = old.id;
+ END`,
+ doif: true,
+ },
+ ];
+ },
+ db: async function (tracing = true) {
const sqlite3 = require("sqlite3").verbose();
const { open } = require("sqlite");
@@ -25,17 +58,25 @@ const Workspace = {
await db.exec(
`CREATE TABLE IF NOT EXISTS ${this.tablename} (${this.colsInit})`
);
- db.on("trace", (sql) => console.log(sql));
+
+ if (tracing) db.on("trace", (sql) => console.log(sql));
return db;
},
new: async function (name = null) {
if (!name) return { result: null, message: "name cannot be null" };
+ var slug = slugify(name, { lower: true });
+
+ const existingBySlug = await this.get(`slug = '${slug}'`);
+ if (existingBySlug !== null) {
+ const slugSeed = Math.floor(10000000 + Math.random() * 90000000);
+ slug = slugify(`${name}-${slugSeed}`, { lower: true });
+ }
const db = await this.db();
const { id, success, message } = await db
.run(`INSERT INTO ${this.tablename} (name, slug) VALUES (?, ?)`, [
name,
- slugify(name, { lower: true }),
+ slug,
])
.then((res) => {
return { id: res.lastID, success: true, message: null };
@@ -43,19 +84,57 @@ const Workspace = {
.catch((error) => {
return { id: null, success: false, message: error.message };
});
- if (!success) return { workspace: null, message };
+
+ if (!success) {
+ db.close();
+ return { workspace: null, message };
+ }
const workspace = await db.get(
`SELECT * FROM ${this.tablename} WHERE id = ${id}`
);
+ db.close();
+
return { workspace, message: null };
},
+  update: async function (id = null, data = {}) {
+    if (!id) throw new Error("No workspace id provided for update");
+
+    const validKeys = Object.keys(data).filter((key) =>
+      this.writable.includes(key)
+    );
+    const values = validKeys.map((key) => data[key]); // bind values in validKeys order so each ? aligns with its column
+    if (validKeys.length === 0)
+      return { workspace: { id }, message: "No valid fields to update!" };
+
+    const template = `UPDATE ${this.tablename} SET ${validKeys.map((key) => {
+      return `${key}=?`;
+    })} WHERE id = ?`;
+    const db = await this.db();
+    const { success, message } = await db
+      .run(template, [...values, id])
+      .then(() => {
+        return { success: true, message: null };
+      })
+      .catch((error) => {
+        return { success: false, message: error.message };
+      });
+
+    db.close();
+    if (!success) {
+      return { workspace: null, message };
+    }
+
+    const updatedWorkspace = await this.get(`id = ${id}`);
+    return { workspace: updatedWorkspace, message: null };
+  },
get: async function (clause = "") {
const db = await this.db();
const result = await db
.get(`SELECT * FROM ${this.tablename} WHERE ${clause}`)
.then((res) => res || null);
if (!result) return null;
+ db.close();
const documents = await Document.forWorkspace(result.id);
return { ...result, documents };
@@ -63,6 +142,8 @@ const Workspace = {
delete: async function (clause = "") {
const db = await this.db();
await db.get(`DELETE FROM ${this.tablename} WHERE ${clause}`);
+ db.close();
+
return true;
},
where: async function (clause = "", limit = null) {
@@ -72,6 +153,8 @@ const Workspace = {
!!limit ? `LIMIT ${limit}` : ""
}`
);
+ db.close();
+
return results;
},
};
diff --git a/server/models/workspaceChats.js b/server/models/workspaceChats.js
index 2ded62b4..3b90cc61 100644
--- a/server/models/workspaceChats.js
+++ b/server/models/workspaceChats.js
@@ -1,3 +1,5 @@
+const { checkForMigrations } = require("../utils/database");
+
const WorkspaceChats = {
tablename: "workspace_chats",
colsInit: `
@@ -9,7 +11,17 @@ const WorkspaceChats = {
createdAt TEXT DEFAULT CURRENT_TIMESTAMP,
lastUpdatedAt TEXT DEFAULT CURRENT_TIMESTAMP
`,
- db: async function () {
+ migrateTable: async function () {
+ console.log(
+ `\x1b[34m[MIGRATING]\x1b[0m Checking for WorkspaceChats migrations`
+ );
+ const db = await this.db(false);
+ await checkForMigrations(this, db);
+ },
+ migrations: function () {
+ return [];
+ },
+ db: async function (tracing = true) {
const sqlite3 = require("sqlite3").verbose();
const { open } = require("sqlite");
@@ -23,7 +35,8 @@ const WorkspaceChats = {
await db.exec(
`CREATE TABLE IF NOT EXISTS ${this.tablename} (${this.colsInit})`
);
- db.on("trace", (sql) => console.log(sql));
+
+ if (tracing) db.on("trace", (sql) => console.log(sql));
return db;
},
new: async function ({ workspaceId, prompt, response = {} }) {
@@ -39,11 +52,16 @@ const WorkspaceChats = {
.catch((error) => {
return { id: null, success: false, message: error.message };
});
- if (!success) return { chat: null, message };
+ if (!success) {
+ db.close();
+ return { chat: null, message };
+ }
const chat = await db.get(
`SELECT * FROM ${this.tablename} WHERE id = ${id}`
);
+ db.close();
+
return { chat, message: null };
},
forWorkspace: async function (workspaceId = null) {
@@ -61,6 +79,8 @@ const WorkspaceChats = {
`UPDATE ${this.tablename} SET include = false WHERE workspaceId = ?`,
[workspaceId]
);
+ db.close();
+
return;
},
get: async function (clause = "") {
@@ -68,12 +88,16 @@ const WorkspaceChats = {
const result = await db
.get(`SELECT * FROM ${this.tablename} WHERE ${clause}`)
.then((res) => res || null);
+ db.close();
+
if (!result) return null;
return result;
},
delete: async function (clause = "") {
const db = await this.db();
await db.get(`DELETE FROM ${this.tablename} WHERE ${clause}`);
+ db.close();
+
return true;
},
where: async function (clause = "", limit = null, order = null) {
@@ -83,6 +107,8 @@ const WorkspaceChats = {
!!limit ? `LIMIT ${limit}` : ""
} ${!!order ? order : ""}`
);
+ db.close();
+
return results;
},
};
diff --git a/server/utils/chats/index.js b/server/utils/chats/index.js
index 7459e37e..9be40b69 100644
--- a/server/utils/chats/index.js
+++ b/server/utils/chats/index.js
@@ -87,7 +87,7 @@ async function chatWithWorkspace(workspace, message, chatMode = "query") {
if (!hasVectorizedSpace) {
const rawHistory = await WorkspaceChats.forWorkspace(workspace.id);
const chatHistory = convertToPromptHistory(rawHistory);
- const response = await openai.sendChat(chatHistory, message);
+ const response = await openai.sendChat(chatHistory, message, workspace);
const data = { text: response, sources: [], type: "chat" };
await WorkspaceChats.new({
@@ -108,7 +108,11 @@ async function chatWithWorkspace(workspace, message, chatMode = "query") {
response,
sources,
message: error,
- } = await VectorDb[chatMode]({ namespace: workspace.slug, input: message });
+ } = await VectorDb[chatMode]({
+ namespace: workspace.slug,
+ input: message,
+ workspace,
+ });
if (!response) {
return {
id: uuid,
diff --git a/server/utils/database/index.js b/server/utils/database/index.js
new file mode 100644
index 00000000..f240d63b
--- /dev/null
+++ b/server/utils/database/index.js
@@ -0,0 +1,54 @@
+function checkColumnTemplate(tablename = null, column = null) {
+  if (!tablename || !column)
+    throw new Error(`Migration Error`, { cause: { tablename, column } });
+  return `SELECT COUNT(*) AS _exists FROM pragma_table_info('${tablename}') WHERE name='${column}'`;
+}
+
+// Note (tcarambat): Since there is no good way to track migrations in Node/SQLite we use this simple system
+// Each model has a `migrations` method that will return an array like...
+// { colName: 'stringColName', execCmd: `SQL Command to run when`, doif: boolean },
+// colName = name of column
+// execCmd = Command to run when doif matches the state of the DB
+// doif = condition to match that determines if execCmd will run.
+// eg: Table workspace has slug column.
+// execCmd: ALTER TABLE DROP COLUMN slug;
+// doif: true
+// => Will drop the slug column if the workspace table has a column named 'slug' otherwise nothing happens.
+// If you are adding a new table column it needs to exist in the Models `colsInit` and as a migration.
+// So both new and existing DBs will get the column when code is pulled in.
+
+async function checkForMigrations(model, db) {
+  if (model.migrations().length === 0) return;
+  const toMigrate = [];
+  for (const { colName, execCmd, doif } of model.migrations()) {
+    const { _exists } = await db.get(
+      checkColumnTemplate(model.tablename, colName)
+    );
+    const colExists = _exists !== 0;
+    if (colExists !== doif) continue;
+
+    toMigrate.push(execCmd);
+  }
+
+  if (toMigrate.length === 0) return;
+
+  console.log(`Running ${toMigrate.length} migrations`, toMigrate);
+  await db.exec(toMigrate.join(";\n"));
+  return;
+}
+
+async function validateTablePragmas() {
+  const { Workspace } = require("../../models/workspace");
+  const { Document } = require("../../models/documents");
+  const { DocumentVectors } = require("../../models/vectors");
+  const { WorkspaceChats } = require("../../models/workspaceChats");
+  await Workspace.migrateTable();
+  await Document.migrateTable();
+  await DocumentVectors.migrateTable();
+  await WorkspaceChats.migrateTable();
+}
+
+module.exports = {
+  checkForMigrations,
+  validateTablePragmas,
+};
diff --git a/server/utils/openAi/index.js b/server/utils/openAi/index.js
index 72742fb7..00ec1326 100644
--- a/server/utils/openAi/index.js
+++ b/server/utils/openAi/index.js
@@ -40,7 +40,7 @@ class OpenAi {
return { safe: false, reasons };
}
- async sendChat(chatHistory = [], prompt) {
+ async sendChat(chatHistory = [], prompt, workspace = {}) {
const model = process.env.OPEN_MODEL_PREF;
if (!this.isValidChatModel(model))
throw new Error(
@@ -50,7 +50,7 @@ class OpenAi {
const textResponse = await this.openai
.createChatCompletion({
model,
- temperature: 0.7,
+ temperature: Number(workspace?.openAiTemp ?? 0.7),
n: 1,
messages: [
{ role: "system", content: "" },
diff --git a/server/utils/vectorDbProviders/chroma/index.js b/server/utils/vectorDbProviders/chroma/index.js
index fd08c1e3..bd1c6058 100644
--- a/server/utils/vectorDbProviders/chroma/index.js
+++ b/server/utils/vectorDbProviders/chroma/index.js
@@ -56,12 +56,12 @@ const Chroma = {
const openai = new OpenAIApi(config);
return openai;
},
- llm: function () {
+ llm: function ({ temperature = 0.7 }) {
const model = process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
return new OpenAI({
openAIApiKey: process.env.OPEN_AI_KEY,
- temperature: 0.7,
modelName: model,
+ temperature,
});
},
embedChunk: async function (openai, textChunk) {
@@ -253,7 +253,7 @@ const Chroma = {
return true;
},
query: async function (reqBody = {}) {
- const { namespace = null, input } = reqBody;
+ const { namespace = null, input, workspace = {} } = reqBody;
if (!namespace || !input) throw new Error("Invalid request body");
const { client } = await this.connect();
@@ -269,7 +269,10 @@ const Chroma = {
this.embedder(),
{ collectionName: namespace, url: process.env.CHROMA_ENDPOINT }
);
- const model = this.llm();
+ const model = this.llm({
+ temperature: workspace?.openAiTemp,
+ });
+
const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
k: 5,
returnSourceDocuments: true,
diff --git a/server/utils/vectorDbProviders/lance/index.js b/server/utils/vectorDbProviders/lance/index.js
index f4cc1898..d6aced15 100644
--- a/server/utils/vectorDbProviders/lance/index.js
+++ b/server/utils/vectorDbProviders/lance/index.js
@@ -69,11 +69,16 @@ const LanceDb = {
? data[0].embedding
: null;
},
- getChatCompletion: async function (openai, messages = []) {
+ getChatCompletion: async function (
+ openai,
+ messages = [],
+ { temperature = 0.7 }
+ ) {
const model = process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
const { data } = await openai.createChatCompletion({
model,
messages,
+ temperature,
});
if (!data.hasOwnProperty("choices")) return null;
@@ -213,7 +218,7 @@ const LanceDb = {
}
},
query: async function (reqBody = {}) {
- const { namespace = null, input } = reqBody;
+ const { namespace = null, input, workspace = {} } = reqBody;
if (!namespace || !input) throw new Error("Invalid request body");
const { client } = await this.connect();
@@ -242,7 +247,9 @@ const LanceDb = {
},
{ role: "user", content: input },
];
- const responseText = await this.getChatCompletion(this.openai(), messages);
+ const responseText = await this.getChatCompletion(this.openai(), messages, {
+ temperature: workspace?.openAiTemp,
+ });
return {
response: responseText,
diff --git a/server/utils/vectorDbProviders/pinecone/index.js b/server/utils/vectorDbProviders/pinecone/index.js
index 9167b790..2dcf2b52 100644
--- a/server/utils/vectorDbProviders/pinecone/index.js
+++ b/server/utils/vectorDbProviders/pinecone/index.js
@@ -1,7 +1,6 @@
const { PineconeClient } = require("@pinecone-database/pinecone");
const { PineconeStore } = require("langchain/vectorstores/pinecone");
const { OpenAI } = require("langchain/llms/openai");
-const { ChatOpenAI } = require("langchain/chat_models/openai");
const { VectorDBQAChain, LLMChain } = require("langchain/chains");
const { OpenAIEmbeddings } = require("langchain/embeddings/openai");
const { VectorStoreRetrieverMemory } = require("langchain/memory");
@@ -50,20 +49,12 @@ const Pinecone = {
? data[0].embedding
: null;
},
- llm: function () {
+ llm: function ({ temperature = 0.7 }) {
const model = process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
return new OpenAI({
openAIApiKey: process.env.OPEN_AI_KEY,
- temperature: 0.7,
- modelName: model,
- });
- },
- chatLLM: function () {
- const model = process.env.OPEN_MODEL_PREF || "gpt-3.5-turbo";
- return new ChatOpenAI({
- openAIApiKey: process.env.OPEN_AI_KEY,
- temperature: 0.7,
modelName: model,
+ temperature,
});
},
totalIndicies: async function () {
@@ -233,7 +224,7 @@ const Pinecone = {
};
},
query: async function (reqBody = {}) {
- const { namespace = null, input } = reqBody;
+ const { namespace = null, input, workspace = {} } = reqBody;
if (!namespace || !input) throw new Error("Invalid request body");
const { pineconeIndex } = await this.connect();
@@ -250,7 +241,9 @@ const Pinecone = {
namespace,
});
- const model = this.llm();
+ const model = this.llm({
+ temperature: workspace?.openAiTemp,
+ });
const chain = VectorDBQAChain.fromLLM(model, vectorStore, {
k: 5,
returnSourceDocuments: true,
@@ -265,7 +258,7 @@ const Pinecone = {
// This implementation of chat also expands the memory of the chat itself
// and adds more tokens to the PineconeDB instance namespace
chat: async function (reqBody = {}) {
- const { namespace = null, input } = reqBody;
+ const { namespace = null, input, workspace = {} } = reqBody;
if (!namespace || !input) throw new Error("Invalid request body");
const { pineconeIndex } = await this.connect();
@@ -284,7 +277,9 @@ const Pinecone = {
memoryKey: "history",
});
- const model = this.llm();
+ const model = this.llm({
+ temperature: workspace?.openAiTemp,
+ });
const prompt =
PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is very casual and talkative and responds with a friendly tone. If the AI does not know the answer to a question, it truthfully says it does not know.
Relevant pieces of previous conversation: