Upload and process documents via UI + document processor in docker image (#65)

* implement drag-and-drop uploader
show file upload progress
write files to hotdir
build simple Flask API to process files one-off

* move document processor calls to a util
build out Dockerfile to run both processes at the same time
update UI to check for the document processor before upload
* disable pragma update on boot
* Dockerfile changes

* add filetype restrictions based on the Python app's supported-filetypes response and show rejected files in the UI

* cleanup

* stub migrations on boot to prevent the exit condition

* update CloudFormation template for AWS deploy
Timothy Carambat 2023-06-16 16:01:27 -07:00 committed by GitHub
parent 537a6a91d2
commit c4eb46ca19
38 changed files with 839 additions and 57 deletions


@ -11,7 +11,7 @@ The output of this cloudformation stack will be:
**Requirements**
- An AWS account with billing information.
- AnythingLLM can run within the free tier using a t2.micro and 10GiB SSD hard disk volume
- AnythingLLM (GUI + document processor) must use a t2.small minimum and 10GiB SSD hard disk volume
- `.env` file that is filled out with your settings and set up in the `docker/` folder
## How to deploy on AWS


@ -5,13 +5,13 @@
"InstanceType": {
"Description": "EC2 instance type",
"Type": "String",
"Default": "t2.micro"
"Default": "t2.small"
},
"InstanceVolume": {
"Description": "Storage size of disk on Instance in GB",
"Type": "Number",
"Default": 10,
"MinValue": 2
"MinValue": 4
}
},
"Resources": {
@ -96,7 +96,6 @@
"!SUB::USER::CONTENT!",
"UID=\"1000\"\n",
"GID=\"1000\"\n",
"CLOUD_BUILD=1\n",
"END\n",
"cd ../frontend\n",
"rm -rf .env.production\n",
@ -105,6 +104,17 @@
"VITE_API_BASE=\"/api\"\n",
"END\n",
"sudo docker-compose -f /home/ec2-user/anything-llm/docker/docker-compose.yml up -d\n",
"echo \"Container ID: $(sudo docker ps --latest --quiet)\"\n",
"sudo docker container exec -u 0 -t $(sudo docker ps --latest --quiet) mkdir -p /app/server/storage /app/server/storage/documents /app/server/storage/vector-cache /app/server/storage/lancedb\n",
"echo \"Placeholder folders in storage created.\"\n",
"sudo docker container exec -u 0 -t $(sudo docker ps --latest --quiet) touch /app/server/storage/anythingllm.db\n",
"echo \"SQLite DB placeholder set.\"\n",
"sudo docker container exec -u 0 -t $(sudo docker ps --latest --quiet) chown -R anythingllm:anythingllm /app/collector /app/server\n",
"echo \"File permissions corrected.\"\n",
"export ONLINE=$(curl -Is http://localhost:3001/api/ping | head -n 1|cut -d$' ' -f2)\n",
"echo \"Health check: $ONLINE\"\n",
"if [ \"$ONLINE\" = 200 ] ; then echo \"Running migrations...\" && curl -Is http://localhost:3001/api/migrate | head -n 1|cut -d$' ' -f2; fi\n",
"echo \"Setup complete! AnythingLLM instance is now online!\"\n",
"\n",
"--//--\n"
]


@ -1,6 +1,8 @@
outputs/*/*.json
hotdir/*
hotdir/processed/*
hotdir/failed/*
!hotdir/__HOTDIR__.md
!hotdir/processed
!hotdir/failed


@ -43,3 +43,9 @@ If collection fails at any point in the process it will pick up where it last ba
- [Enable YouTube Data APIV3](https://console.cloud.google.com/apis/library/youtube.googleapis.com)
- Once enabled generate a Credential key for this API
- Paste your key after `GOOGLE_APIS_KEY=` in your `collector/.env` file.
### Running the document processing API locally
From the `collector` directory, with the `v-env` active, run `flask run --host '0.0.0.0' --port 8888`.
Uploads from the frontend will now be processed as if you had run the `watch.py` script manually.
**Docker**: If you run this application via Docker, the API is already started for you and no additional action is needed.
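For a quick manual check, the two endpoints exposed by `collector/api.py` (shown below) can be exercised with `curl`. This is only a sketch: the port matches the `flask run` command above, and `example.pdf` is a placeholder for a file that already exists in `collector/hotdir`.

```bash
# List the MIME types / extensions the processor accepts
curl http://localhost:8888/accepts

# Ask the processor to convert a document already present in the hotdir
curl -X POST http://localhost:8888/process \
  -H "Content-Type: application/json" \
  -d '{"filename": "example.pdf"}'
```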

collector/api.py Normal file

@ -0,0 +1,21 @@
from flask import Flask, json, request
from scripts.watch.process_single import process_single
from scripts.watch.filetypes import ACCEPTED_MIMES

api = Flask(__name__)
WATCH_DIRECTORY = "hotdir"

@api.route('/process', methods=['POST'])
def process_file():
  content = request.json
  target_filename = content.get('filename')
  print(f"Processing {target_filename}")
  success, reason = process_single(WATCH_DIRECTORY, target_filename)
  return json.dumps({'filename': target_filename, 'success': success, 'reason': reason})

@api.route('/accepts', methods=['GET'])
def get_accepted_filetypes():
  return json.dumps(ACCEPTED_MIMES)

@api.route('/', methods=['GET'])
def root():
  return "<p>Use POST /process with filename key in JSON body in order to process a file. File by that name must exist in hotdir already.</p>"


@ -9,6 +9,7 @@ async-timeout==4.0.2
attrs==23.1.0
backoff==2.2.1
beautifulsoup4==4.12.2
blinker==1.6.2
bs4==0.0.1
certifi==2023.5.7
cffi==1.15.1
@ -24,21 +25,26 @@ docx2txt==0.8
et-xmlfile==1.1.0
exceptiongroup==1.1.1
fake-useragent==1.1.3
Flask==2.3.2
frozenlist==1.3.3
grapheme==0.6.0
greenlet==2.0.2
gunicorn==20.1.0
h11==0.14.0
httpcore==0.16.3
httpx==0.23.3
idna==3.4
InquirerPy==0.3.4
importlib-metadata==6.6.0
importlib-resources==5.12.0
inquirerpy==0.3.4
install==1.3.5
itsdangerous==2.1.2
Jinja2==3.1.2
joblib==1.2.0
langchain==0.0.189
lxml==4.9.2
Markdown==3.4.3
MarkupSafe==2.1.3
marshmallow==3.19.0
marshmallow-enum==1.5.1
monotonic==1.6
@ -55,6 +61,7 @@ packaging==23.1
pandas==1.5.3
parse==1.19.0
pdfminer.six==20221105
pfzy==0.3.4
Pillow==9.5.0
prompt-toolkit==3.0.38
pycparser==2.21
@ -96,6 +103,7 @@ uuid==1.30
w3lib==2.1.1
wcwidth==0.2.6
websockets==10.4
Werkzeug==2.3.6
wrapt==1.14.1
xlrd==2.0.1
XlsxWriter==3.1.2


@ -9,6 +9,7 @@ def as_docx(**kwargs):
parent_dir = kwargs.get('directory', 'hotdir')
filename = kwargs.get('filename')
ext = kwargs.get('ext', '.txt')
remove = kwargs.get('remove_on_complete', False)
fullpath = f"{parent_dir}/{filename}{ext}"
loader = Docx2txtLoader(fullpath)
@ -28,13 +29,14 @@ def as_docx(**kwargs):
}
write_to_server_documents(data, f"{slugify(filename)}-{data.get('id')}")
move_source(parent_dir, f"{filename}{ext}")
move_source(parent_dir, f"{filename}{ext}", remove=remove)
print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")
def as_odt(**kwargs):
parent_dir = kwargs.get('directory', 'hotdir')
filename = kwargs.get('filename')
ext = kwargs.get('ext', '.txt')
remove = kwargs.get('remove_on_complete', False)
fullpath = f"{parent_dir}/{filename}{ext}"
loader = UnstructuredODTLoader(fullpath)
@ -54,5 +56,5 @@ def as_odt(**kwargs):
}
write_to_server_documents(data, f"{slugify(filename)}-{data.get('id')}")
move_source(parent_dir, f"{filename}{ext}")
move_source(parent_dir, f"{filename}{ext}", remove=remove)
print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")


@ -9,6 +9,7 @@ def as_markdown(**kwargs):
parent_dir = kwargs.get('directory', 'hotdir')
filename = kwargs.get('filename')
ext = kwargs.get('ext', '.txt')
remove = kwargs.get('remove_on_complete', False)
fullpath = f"{parent_dir}/{filename}{ext}"
loader = UnstructuredMarkdownLoader(fullpath)
@ -28,5 +29,5 @@ def as_markdown(**kwargs):
}
write_to_server_documents(data, f"{slugify(filename)}-{data.get('id')}")
move_source(parent_dir, f"{filename}{ext}")
move_source(parent_dir, f"{filename}{ext}", remove=remove)
print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")


@ -9,6 +9,7 @@ def as_pdf(**kwargs):
parent_dir = kwargs.get('directory', 'hotdir')
filename = kwargs.get('filename')
ext = kwargs.get('ext', '.txt')
remove = kwargs.get('remove_on_complete', False)
fullpath = f"{parent_dir}/{filename}{ext}"
loader = PyPDFLoader(fullpath)
@ -32,5 +33,5 @@ def as_pdf(**kwargs):
}
write_to_server_documents(data, f"{slugify(filename)}-pg{pg_num}-{data.get('id')}")
move_source(parent_dir, f"{filename}{ext}")
move_source(parent_dir, f"{filename}{ext}", remove=remove)
print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")


@ -8,6 +8,7 @@ def as_text(**kwargs):
parent_dir = kwargs.get('directory', 'hotdir')
filename = kwargs.get('filename')
ext = kwargs.get('ext', '.txt')
remove = kwargs.get('remove_on_complete', False)
fullpath = f"{parent_dir}/{filename}{ext}"
content = open(fullpath).read()
@ -24,5 +25,5 @@ def as_text(**kwargs):
}
write_to_server_documents(data, f"{slugify(filename)}-{data.get('id')}")
move_source(parent_dir, f"{filename}{ext}")
move_source(parent_dir, f"{filename}{ext}", remove=remove)
print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")


@ -9,4 +9,11 @@ FILETYPES = {
'.pdf': as_pdf,
'.docx': as_docx,
'.odt': as_odt,
}
ACCEPTED_MIMES = {
'text/plain': ['.txt', '.md'],
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': ['.docx'],
'application/vnd.oasis.opendocument.text': ['.odt'],
'application/pdf': ['.pdf'],
}


@ -1,5 +1,6 @@
import os
from .filetypes import FILETYPES
from .utils import move_source
RESERVED = ['__HOTDIR__.md']
def watch_for_changes(directory):
@ -10,7 +11,8 @@ def watch_for_changes(directory):
if filename in ['.DS_Store'] or fileext == '': continue
if fileext not in FILETYPES.keys():
print(f"{fileext} not a supported file type for conversion. Please remove from hot directory.")
print(f"{fileext} not a supported file type for conversion. Removing from hot directory.")
move_source(new_destination_filename=raw_doc, failed=True)
continue
FILETYPES[fileext](


@ -0,0 +1,35 @@
import os
from .filetypes import FILETYPES
from .utils import move_source

RESERVED = ['__HOTDIR__.md']

# This script will do a one-off processing of a specific document that exists in hotdir.
# For this function we remove the original source document since there is no need to keep it and it will
# only occupy additional disk space.
def process_single(directory, target_doc):
  if os.path.isdir(f"{directory}/{target_doc}") or target_doc in RESERVED: return (False, "Not a file")
  if os.path.exists(f"{directory}/{target_doc}") is False:
    print(f"{directory}/{target_doc} does not exist.")
    return (False, f"{directory}/{target_doc} does not exist.")

  filename, fileext = os.path.splitext(target_doc)
  if filename in ['.DS_Store'] or fileext == '': return (False, "Invalid filename")
  if fileext == '.lock':
    print(f"{filename} is locked - skipping until unlocked")
    return (False, f"{filename} is locked - skipping until unlocked")

  if fileext not in FILETYPES.keys():
    print(f"{fileext} not a supported file type for conversion. It will not be processed.")
    move_source(new_destination_filename=target_doc, failed=True, remove=True)
    return (False, f"{fileext} not a supported file type for conversion. It will not be processed.")

  FILETYPES[fileext](
    directory=directory,
    filename=filename,
    ext=fileext,
    remove_on_complete=True  # remove source document to save disk space.
  )
  return (True, None)


@ -15,8 +15,13 @@ def file_creation_time(path_to_file):
except AttributeError:
return datetime.today().strftime('%Y-%m-%d %H:%M:%S')
def move_source(working_dir='hotdir', new_destination_filename= ''):
destination = f"{working_dir}/processed"
def move_source(working_dir='hotdir', new_destination_filename='', failed=False, remove=False):
if remove and os.path.exists(f"{working_dir}/{new_destination_filename}"):
print(f"{new_destination_filename} deleted from filesystem")
os.remove(f"{working_dir}/{new_destination_filename}")
return
destination = f"{working_dir}/processed" if not failed else f"{working_dir}/failed"
if os.path.exists(destination) == False:
os.mkdir(destination)

collector/wsgi.py Normal file

@ -0,0 +1,4 @@
from api import api

if __name__ == '__main__':
  api.run(debug=False)


@ -4,7 +4,6 @@ FROM ubuntu:jammy-20230522 AS base
# Build arguments
ARG ARG_UID
ARG ARG_GID
ARG ARG_CLOUD_BUILD=0 # Default to local docker build
# Install system dependencies
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
@ -32,13 +31,15 @@ RUN groupadd -g $ARG_GID anythingllm && \
useradd -u $ARG_UID -m -d /app -s /bin/bash -g anythingllm anythingllm && \
mkdir -p /app/frontend/ /app/server/ /app/collector/ && chown -R anythingllm:anythingllm /app
# Copy the docker entrypoint and healthcheck scripts
# Copy docker helper scripts
COPY ./docker/docker-entrypoint.sh /usr/local/bin/
COPY ./docker/docker-healthcheck.sh /usr/local/bin/
COPY ./docker/dual_boot.sh /usr/local/bin/
# Ensure the scripts are executable
RUN chmod +x /usr/local/bin/docker-entrypoint.sh && \
chmod +x /usr/local/bin/docker-healthcheck.sh
chmod +x /usr/local/bin/docker-healthcheck.sh && \
chmod 777 /usr/local/bin/dual_boot.sh
USER anythingllm
@ -89,18 +90,7 @@ EXPOSE 3001
HEALTHCHECK --interval=1m --timeout=10s --start-period=1m \
CMD /bin/bash /usr/local/bin/docker-healthcheck.sh || exit 1
# Docker will still install deps as root so need to force chown
# or else
USER root
RUN if [ "$ARG_CLOUD_BUILD" = 1 ] ; then \
echo "Reowning all files as user!" && \
mkdir -p app/server/storage app/server/storage/documents app/server/storage/vector-cache app/server/storage/lancedb && \
touch anythingllm.db && \
chown -R anythingllm:anythingllm /app/collector /app/server; \
fi
USER anythingllm
# Run the server
ENTRYPOINT ["docker-entrypoint.sh"]
CMD ["node", "/app/server/index.js"]
CMD /bin/bash /usr/local/bin/dual_boot.sh


@ -3,8 +3,6 @@ version: '3.9'
networks:
anything-llm:
driver: bridge
# chroma_net:
# external: true
services:
anything-llm:
@ -17,7 +15,6 @@ services:
args:
ARG_UID: ${UID}
ARG_GID: ${GID}
ARG_CLOUD_BUILD: ${CLOUD_BUILD}
volumes:
- "../server/storage:/app/server/storage"
- "../collector/hotdir/:/app/collector/hotdir"
@ -29,4 +26,3 @@ services:
- .env
networks:
- anything-llm
# - chroma_net

docker/dual_boot.sh Normal file

@ -0,0 +1,5 @@
#!/bin/bash
node /app/server/index.js &
{ cd collector && FLASK_ENV=production FLASK_APP=wsgi.py gunicorn --workers 4 --bind 0.0.0.0:8888 wsgi:api; } &
wait -n
exit $?


@ -20,7 +20,9 @@
"react": "^18.2.0",
"react-device-detect": "^2.2.2",
"react-dom": "^18.2.0",
"react-dropzone": "^14.2.3",
"react-feather": "^2.0.10",
"react-loading-icons": "^1.1.0",
"react-loading-skeleton": "^3.1.0",
"react-router-dom": "^6.3.0",
"text-case": "^1.0.9",


@ -135,10 +135,10 @@ export default function Directory({
</div>
{showDetails && (
<div className="ml-[20px] flex flex-col gap-y-1 my-1 p-2 rounded-md bg-slate-200 font-mono text-sm overflow-x-scroll">
{Object.entries(meta).map(([key, value]) => {
{Object.entries(meta).map(([key, value], i) => {
if (key === "cached") return null;
return (
<p className="whitespace-pre">
<p key={i} className="whitespace-pre">
{key}: {value}
</p>
);


@ -6,6 +6,7 @@ import { useParams } from "react-router-dom";
import Directory from "./Directory";
import ConfirmationModal from "./ConfirmationModal";
import CannotRemoveModal from "./CannotRemoveModal";
import { AlertTriangle } from "react-feather";
export default function DocumentSettings({ workspace }) {
const { slug } = useParams();
@ -17,16 +18,21 @@ export default function DocumentSettings({ workspace }) {
const [selectedFiles, setSelectFiles] = useState([]);
const [vectordb, setVectorDB] = useState(null);
const [showingNoRemovalModal, setShowingNoRemovalModal] = useState(false);
const [hasFiles, setHasFiles] = useState(true);
useEffect(() => {
async function fetchKeys() {
const localFiles = await System.localFiles();
const settings = await System.keys();
const originalDocs = workspace.documents.map((doc) => doc.docpath) || [];
const hasAnyFiles = localFiles.items.some(
(folder) => folder?.items?.length > 0
);
setDirectories(localFiles);
setOriginalDocuments([...originalDocs]);
setSelectFiles([...originalDocs]);
setVectorDB(settings?.VectorDB);
setHasFiles(hasAnyFiles);
setLoading(false);
}
fetchKeys();
@ -162,6 +168,16 @@ export default function DocumentSettings({ workspace }) {
)}
<div className="p-6 flex h-full w-full max-h-[80vh] overflow-y-scroll">
<div className="flex flex-col gap-y-1 w-full">
{!hasFiles && (
<div className="mb-4 w-full gap-x-2 rounded-lg h-10 border bg-orange-200 border-orange-800 dark:bg-orange-300 text-orange-800 flex items-center justify-center">
<AlertTriangle className="h-6 w-6" />
<p className="text-sm">
You don't have any files uploaded. Upload a file via the "Upload
Docs" tab.
</p>
</div>
)}
<div className="flex flex-col mb-2">
<p className="text-gray-800 dark:text-stone-200 text-base ">
Select folders to add or remove from workspace.


@ -0,0 +1,73 @@
import React, { useState, useEffect, memo } from "react";
import Workspace from "../../../../../models/workspace";
import truncate from "truncate";
import { humanFileSize, milliToHms } from "../../../../../utils/numbers";
import { CheckCircle, XCircle } from "react-feather";
import { Grid } from "react-loading-icons";
function FileUploadProgressComponent({
slug,
file,
rejected = false,
reason = null,
}) {
const [timerMs, setTimerMs] = useState(10);
const [status, setStatus] = useState(file?.rejected ? "failed" : "uploading");
useEffect(() => {
async function uploadFile() {
const start = Number(new Date());
const formData = new FormData();
formData.append("file", file, file.name);
const timer = setInterval(() => {
setTimerMs(Number(new Date()) - start);
}, 100);
// Chunk streaming is not working in production, so we just sit and wait
await Workspace.uploadFile(slug, formData);
setStatus("complete");
clearInterval(timer);
}
!!file && !rejected && uploadFile();
}, []);
if (rejected) {
return (
<div className="w-fit px-2 py-2 flex items-center gap-x-4 rounded-lg bg-blue-100 border-blue-600 dark:bg-stone-800 bg-opacity-50 border dark:border-stone-600">
<div className="w-6 h-6">
<XCircle className="w-6 h-6 stroke-white bg-red-500 rounded-full p-1 w-full h-full" />
</div>
<div className="flex flex-col">
<p className="text-black dark:text-stone-200 text-sm font-mono overflow-x-scroll">
{truncate(file.name, 30)}
</p>
<p className="text-red-700 dark:text-red-400 text-xs font-mono">
{reason}
</p>
</div>
</div>
);
}
return (
<div className="w-fit px-2 py-2 flex items-center gap-x-4 rounded-lg bg-blue-100 border-blue-600 dark:bg-stone-800 bg-opacity-50 border dark:border-stone-600">
<div className="w-6 h-6">
{status !== "complete" ? (
<Grid className="w-6 h-6 grid-loader" />
) : (
<CheckCircle className="w-6 h-6 stroke-white bg-green-500 rounded-full p-1 w-full h-full" />
)}
</div>
<div className="flex flex-col">
<p className="text-black dark:text-stone-200 text-sm font-mono overflow-x-scroll">
{truncate(file.name, 30)}
</p>
<p className="text-gray-700 dark:text-stone-400 text-xs font-mono">
{humanFileSize(file.size)} | {milliToHms(timerMs)}
</p>
</div>
</div>
);
}
export default memo(FileUploadProgressComponent);


@ -0,0 +1,190 @@
import React, { useState, useCallback, useEffect } from "react";
import Workspace from "../../../../models/workspace";
import paths from "../../../../utils/paths";
import FileUploadProgress from "./FileUploadProgress";
import { useDropzone } from "react-dropzone";
import { v4 } from "uuid";
import System from "../../../../models/system";
import { Frown } from "react-feather";
export default function UploadToWorkspace({ workspace, fileTypes }) {
const [ready, setReady] = useState(null);
const [files, setFiles] = useState([]);
const onDrop = useCallback(async (acceptedFiles, rejections) => {
const newAccepted = acceptedFiles.map((file) => {
return {
uid: v4(),
file,
};
});
const newRejected = rejections.map((file) => {
return {
uid: v4(),
file: file.file,
rejected: true,
reason: file.errors[0].code,
};
});
setFiles([...files, ...newAccepted, ...newRejected]);
}, []);
useEffect(() => {
async function checkProcessorOnline() {
const online = await System.checkDocumentProcessorOnline();
setReady(online);
}
checkProcessorOnline();
}, []);
const { getRootProps, getInputProps } = useDropzone({
onDrop,
accept: {
...fileTypes,
},
});
const deleteWorkspace = async () => {
if (
!window.confirm(
`You are about to delete your entire ${workspace.name} workspace. This will remove all vector embeddings on your vector database.\n\nThe original source files will remain untouched. This action is irreversible.`
)
)
return false;
await Workspace.delete(workspace.slug);
workspace.slug === slug
? (window.location = paths.home())
: window.location.reload();
};
if (ready === null) {
return (
<ModalWrapper deleteWorkspace={deleteWorkspace}>
<div className="outline-none transition-all cursor-wait duration-300 bg-stone-400 bg-opacity-20 flex h-[20rem] overflow-y-scroll overflow-x-hidden rounded-lg">
<div className="flex flex-col gap-y-1 w-full h-full items-center justify-center">
<p className="text-slate-400 text-xs">
Checking that the document processor is online - please wait.
</p>
<p className="text-slate-400 text-xs">
This should only take a few moments.
</p>
</div>
</div>
</ModalWrapper>
);
}
if (ready === false) {
return (
<ModalWrapper deleteWorkspace={deleteWorkspace}>
<div className="outline-none transition-all duration-300 bg-red-200 flex h-[20rem] overflow-y-scroll overflow-x-hidden rounded-lg">
<div className="flex flex-col gap-y-1 w-full h-full items-center justify-center">
<Frown className="w-8 h-8 text-red-800" />
<p className="text-red-800 text-xs">
Document processor is offline.
</p>
<p className="text-red-800 text-xs">
You cannot upload documents from the UI right now.
</p>
</div>
</div>
</ModalWrapper>
);
}
return (
<ModalWrapper deleteWorkspace={deleteWorkspace}>
<div
{...getRootProps()}
className="outline-none transition-all cursor-pointer duration-300 hover:bg-opacity-40 bg-stone-400 bg-opacity-20 flex h-[20rem] overflow-y-scroll overflow-x-hidden rounded-lg"
>
<input {...getInputProps()} />
{files.length === 0 ? (
<div className="flex flex-col items-center justify-center w-full h-full">
<div className="flex flex-col items-center justify-center pt-5 pb-6">
<svg
aria-hidden="true"
className="w-10 h-10 mb-3 text-gray-600 dark:text-slate-300"
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
xmlns="http://www.w3.org/2000/svg"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="2"
d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12"
></path>
</svg>
<p className="mb-2 text-sm text-gray-600 dark:text-slate-300">
<span className="font-semibold">Click to upload</span> or drag
and drop
</p>
<p className="text-xs text-gray-600 dark:text-slate-300"></p>
</div>
</div>
) : (
<div className="flex flex-col w-full p-4 gap-y-2">
{files.map((file) => (
<FileUploadProgress
key={file.uid}
file={file.file}
slug={workspace.slug}
rejected={file?.rejected}
reason={file?.reason}
/>
))}
</div>
)}
</div>
<p className="text-gray-600 dark:text-stone-400 text-xs ">
Supported file extensions are{" "}
<code className="text-xs bg-gray-200 text-gray-800 dark:bg-stone-800 dark:text-slate-400 font-mono rounded-sm px-1">
{Object.values(fileTypes).flat().join(" ")}
</code>
</p>
</ModalWrapper>
);
}
function ModalWrapper({ deleteWorkspace, children }) {
return (
<>
<div className="p-6 flex h-full w-full max-h-[80vh] overflow-y-scroll">
<div className="flex flex-col gap-y-1 w-full">
<div className="flex flex-col mb-2">
<p className="text-gray-800 dark:text-stone-200 text-base ">
Add documents to your workspace.
</p>
<p className="text-gray-600 dark:text-stone-400 text-xs ">
These files will be uploaded to the document processor running on
this AnythingLLM instance. These files are not sent or shared with
a third party.
</p>
{process.env.NODE_ENV !== "production" && (
<div className="mt-2 text-gray-600 dark:text-stone-400 text-xs">
<div className="w-[1px] bg-stone-400 w-full" />
Local Environment Notice: You must have the{" "}
<code className="text-xs bg-gray-200 text-gray-800 dark:bg-stone-800 dark:text-slate-400 font-mono rounded-sm px-1">
python document processor app
</code>{" "}
running for these documents to process.
</div>
)}
</div>
{children}
</div>
</div>
<div className="flex items-center justify-between p-6 space-x-2 border-t border-gray-200 rounded-b dark:border-gray-600">
<button
onClick={deleteWorkspace}
type="button"
className="border border-transparent text-gray-500 bg-white hover:bg-red-100 rounded-lg text-sm font-medium px-5 py-2.5 hover:text-red-900 focus:z-10 dark:bg-transparent dark:text-gray-300 dark:hover:text-white dark:hover:bg-red-600"
>
Delete Workspace
</button>
</div>
</>
);
}


@ -1,13 +1,16 @@
import React, { useState, useEffect } from "react";
import { Archive, Sliders, X } from "react-feather";
import { Archive, Sliders, UploadCloud, X } from "react-feather";
import DocumentSettings from "./Documents";
import WorkspaceSettings from "./Settings";
import { useParams } from "react-router-dom";
import Workspace from "../../../models/workspace";
import System from "../../../models/system";
import UploadToWorkspace from "./Upload";
const TABS = {
documents: DocumentSettings,
settings: WorkspaceSettings,
upload: UploadToWorkspace,
};
const noop = () => false;
@ -18,6 +21,15 @@ export default function ManageWorkspace({
const { slug } = useParams();
const [selectedTab, setSelectedTab] = useState("documents");
const [workspace, setWorkspace] = useState(null);
const [fileTypes, setFileTypes] = useState(null);
useEffect(() => {
async function checkSupportedFiletypes() {
const acceptedTypes = await System.acceptedDocumentTypes();
setFileTypes(acceptedTypes ?? {});
}
checkSupportedFiletypes();
}, []);
useEffect(() => {
async function fetchWorkspace() {
@ -57,7 +69,11 @@ export default function ManageWorkspace({
changeTab={setSelectedTab}
/>
</div>
<Component hideModal={hideModal} workspace={workspace} />
<Component
hideModal={hideModal}
workspace={workspace}
fileTypes={fileTypes}
/>
</div>
</div>
</div>
@ -75,6 +91,13 @@ function WorkspaceSettingTabs({ selectedTab, changeTab }) {
icon={<Archive className="h-4 w-4" />}
onClick={changeTab}
/>
<WorkspaceTab
active={selectedTab === "upload"}
displayName="Upload Docs"
tabName="upload"
icon={<UploadCloud className="h-4 w-4" />}
onClick={changeTab}
/>
<WorkspaceTab
active={selectedTab === "settings"}
displayName="Settings"


@ -291,3 +291,7 @@ a {
background-position: 100% 50%;
}
}
.grid-loader > circle {
fill: #008eff;
}


@ -59,6 +59,21 @@ const System = {
return { valid: false, message: e.message };
});
},
checkDocumentProcessorOnline: async () => {
return await fetch(`${API_BASE}/system/document-processing-status`, {
headers: baseHeaders(),
})
.then((res) => res.ok)
.catch(() => false);
},
acceptedDocumentTypes: async () => {
return await fetch(`${API_BASE}/system/accepted-document-types`, {
headers: baseHeaders(),
})
.then((res) => res.json())
.then((res) => res?.types)
.catch(() => null);
},
};
export default System;


@ -97,6 +97,14 @@ const Workspace = {
return result;
},
uploadFile: async function (slug, formData) {
const response = await fetch(`${API_BASE}/workspace/${slug}/upload`, {
method: "POST",
body: formData,
headers: baseHeaders(),
});
return response;
},
};
export default Workspace;


@ -14,3 +14,39 @@ export function dollarFormat(input) {
currency: "USD",
}).format(input);
}
export function humanFileSize(bytes, si = false, dp = 1) {
const thresh = si ? 1000 : 1024;
if (Math.abs(bytes) < thresh) {
return bytes + " B";
}
const units = si
? ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
: ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"];
let u = -1;
const r = 10 ** dp;
do {
bytes /= thresh;
++u;
} while (
Math.round(Math.abs(bytes) * r) / r >= thresh &&
u < units.length - 1
);
return bytes.toFixed(dp) + " " + units[u];
}
export function milliToHms(milli = 0) {
const d = parseFloat(milli) / 1_000.0;
var h = Math.floor(d / 3600);
var m = Math.floor((d % 3600) / 60);
var s = parseFloat((d % 3600.0) % 60);
var hDisplay = h >= 1 ? h + "h " : "";
var mDisplay = m >= 1 ? m + "m " : "";
var sDisplay = s >= 0.01 ? s.toFixed(2) + "s" : "";
return hDisplay + mDisplay + sDisplay;
}


@ -590,6 +590,11 @@ array.prototype.tosorted@^1.1.1:
es-shim-unscopables "^1.0.0"
get-intrinsic "^1.1.3"
attr-accept@^2.2.2:
version "2.2.2"
resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b"
integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg==
autoprefixer@^10.4.14:
version "10.4.14"
resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.14.tgz"
@ -1132,6 +1137,13 @@ file-entry-cache@^6.0.1:
dependencies:
flat-cache "^3.0.4"
file-selector@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.6.0.tgz#fa0a8d9007b829504db4d07dd4de0310b65287dc"
integrity sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw==
dependencies:
tslib "^2.4.0"
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
@ -1980,6 +1992,15 @@ react-dom@^18.2.0:
loose-envify "^1.1.0"
scheduler "^0.23.0"
react-dropzone@^14.2.3:
version "14.2.3"
resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-14.2.3.tgz#0acab68308fda2d54d1273a1e626264e13d4e84b"
integrity sha512-O3om8I+PkFKbxCukfIR3QAGftYXDZfOE2N1mr/7qebQJHs7U+/RSL/9xomJNpRg9kM5h9soQSdf0Gc7OHF5Fug==
dependencies:
attr-accept "^2.2.2"
file-selector "^0.6.0"
prop-types "^15.8.1"
react-feather@^2.0.10:
version "2.0.10"
resolved "https://registry.npmjs.org/react-feather/-/react-feather-2.0.10.tgz"
@ -1992,6 +2013,11 @@ react-is@^16.13.1:
resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz"
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
react-loading-icons@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/react-loading-icons/-/react-loading-icons-1.1.0.tgz#c37f2472936ab93c6a7f43c0a2c2fe8efc3ff7c8"
integrity sha512-Y9eZ6HAufmUd8DIQd6rFrx5Bt/oDlTM9Nsjvf8YpajTa3dI8cLNU8jUN5z7KTANU+Yd6/KJuBjxVlrU2dMw33g==
react-loading-skeleton@^3.1.0:
version "3.3.1"
resolved "https://registry.npmjs.org/react-loading-skeleton/-/react-loading-skeleton-3.3.1.tgz"
@ -2472,6 +2498,11 @@ ts-interface-checker@^0.1.9:
resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz"
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
tslib@^2.4.0:
version "2.5.3"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913"
integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==
type-check@^0.4.0, type-check@~0.4.0:
version "0.4.0"
resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz"


@ -1,7 +1,12 @@
process.env.NODE_ENV === "development"
? require("dotenv").config({ path: `.env.${process.env.NODE_ENV}` })
: require("dotenv").config();
const { validateTablePragmas } = require("../utils/database");
const { viewLocalFiles } = require("../utils/files");
const {
checkPythonAppAlive,
acceptedFileTypes,
} = require("../utils/files/documentProcessor");
const { getVectorDbClass } = require("../utils/helpers");
const { reqBody, makeJWT } = require("../utils/http");
@ -12,6 +17,11 @@ function systemEndpoints(app) {
response.sendStatus(200);
});
app.get("/migrate", async (_, response) => {
await validateTablePragmas(true);
response.sendStatus(200);
});
app.get("/setup-complete", (_, response) => {
try {
const vectorDB = process.env.VECTOR_DB || "pinecone";
@ -88,6 +98,31 @@ function systemEndpoints(app) {
response.sendStatus(500).end();
}
});
app.get("/system/document-processing-status", async (_, response) => {
try {
const online = await checkPythonAppAlive();
response.sendStatus(online ? 200 : 503);
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
});
app.get("/system/accepted-document-types", async (_, response) => {
try {
const types = await acceptedFileTypes();
if (!types) {
response.sendStatus(404).end();
return;
}
response.status(200).json({ types });
} catch (e) {
console.log(e.message, e);
response.sendStatus(500).end();
}
});
}
module.exports = { systemEndpoints };


@ -5,6 +5,15 @@ const { DocumentVectors } = require("../models/vectors");
const { WorkspaceChats } = require("../models/workspaceChats");
const { convertToChatHistory } = require("../utils/chats");
const { getVectorDbClass } = require("../utils/helpers");
const { setupMulter } = require("../utils/files/multer");
const {
fileUploadProgress,
} = require("../utils/middleware/fileUploadProgress");
const {
checkPythonAppAlive,
processDocument,
} = require("../utils/files/documentProcessor");
const { handleUploads } = setupMulter();
function workspaceEndpoints(app) {
if (!app) return;
@ -42,6 +51,36 @@ function workspaceEndpoints(app) {
}
});
app.post(
"/workspace/:slug/upload",
fileUploadProgress,
handleUploads.single("file"),
async function (request, _) {
const { originalname } = request.file;
const processingOnline = await checkPythonAppAlive();
if (!processingOnline) {
console.log(
`Python processing API is not online. Document ${originalname} will not be processed automatically.`
);
return;
}
const { success, reason } = await processDocument(originalname);
if (!success) {
console.log(
`Python processing API was not able to process document ${originalname}. Reason: ${reason}`
);
return false;
}
console.log(
`Document ${originalname} uploaded and processed successfully. It is now available in documents.`
);
return;
}
);
app.post("/workspace/:slug/update-embeddings", async (request, response) => {
try {
const { slug = null } = request.params;


@ -22,15 +22,16 @@
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.18.2",
"jsonwebtoken": "^8.5.1",
"langchain": "^0.0.90",
"moment": "^2.29.4",
"multer": "^1.4.5-lts.1",
"openai": "^3.2.1",
"pinecone-client": "^1.1.0",
"slugify": "^1.6.6",
"sqlite": "^4.2.1",
"sqlite3": "^5.1.6",
"uuid": "^9.0.0",
"jsonwebtoken": "^8.5.1",
"vectordb": "0.1.5"
},
"devDependencies": {


@ -37,15 +37,32 @@ async function checkForMigrations(model, db) {
return;
}
async function validateTablePragmas() {
const { Workspace } = require("../../models/workspace");
const { Document } = require("../../models/documents");
const { DocumentVectors } = require("../../models/vectors");
const { WorkspaceChats } = require("../../models/workspaceChats");
await Workspace.migrateTable();
await Document.migrateTable();
await DocumentVectors.migrateTable();
await WorkspaceChats.migrateTable();
// Note(tcarambat): When building in production via Docker the SQLite file will not exist yet,
// and if this function tries to run on boot the server will abort and the container will exit.
// This function runs on each reload in dev, but in production it is stubbed until the
// /api/migrate endpoint is hit with a GET request.
async function validateTablePragmas(force = false) {
try {
if (process.env.NODE_ENV !== "development" && force === false) {
console.log(
`\x1b[34m[MIGRATIONS STUBBED]\x1b[0m Please ping /migrate once server starts to run migrations`
);
return;
}
const { Workspace } = require("../../models/workspace");
const { Document } = require("../../models/documents");
const { DocumentVectors } = require("../../models/vectors");
const { WorkspaceChats } = require("../../models/workspaceChats");
await Workspace.migrateTable();
await Document.migrateTable();
await DocumentVectors.migrateTable();
await WorkspaceChats.migrateTable();
} catch (e) {
console.error(`validateTablePragmas: Migrations failed`, e);
}
return;
}
module.exports = {

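Because migrations are stubbed on non-development boots (see the note above), they have to be triggered once after the server comes up. A minimal sketch, assuming the default server port 3001 and the `/api` prefix used elsewhere in this commit:

```bash
# One-time trigger once the container reports healthy
curl -i http://localhost:3001/api/migrate
```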

@ -0,0 +1,46 @@
// When running locally this will occupy the 0.0.0.0:8888 host space, but when deployed inside
// of Docker this port is not exposed, so it is only reachable on the Docker instance's internal
// network and no additional security is needed on the endpoint directly. Auth is handled by the
// Express middleware before the request leaves the Node side of the application, so that is good enough >:)
const PYTHON_API = "http://0.0.0.0:8888";

async function checkPythonAppAlive() {
  return await fetch(`${PYTHON_API}`)
    .then((res) => res.ok)
    .catch((e) => false);
}

async function acceptedFileTypes() {
  return await fetch(`${PYTHON_API}/accepts`)
    .then((res) => {
      if (!res.ok) throw new Error("Could not reach");
      return res.json();
    })
    .then((res) => res)
    .catch(() => null);
}

async function processDocument(filename = "") {
  if (!filename) return false;
  return await fetch(`${PYTHON_API}/process`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ filename }),
  })
    .then((res) => {
      if (!res.ok) throw new Error("Response could not be completed");
      return res.json();
    })
    .then((res) => res)
    .catch((e) => {
      console.log(e.message);
      return { success: false, reason: e.message };
    });
}

module.exports = {
  checkPythonAppAlive,
  processDocument,
  acceptedFileTypes,
};


@ -0,0 +1,25 @@
function setupMulter() {
  const multer = require("multer");
  // Handle File uploads for auto-uploading.
  const storage = multer.diskStorage({
    destination: function (_, _, cb) {
      const path = require("path");
      const uploadOutput =
        process.env.NODE_ENV === "development"
          ? path.resolve(__dirname, `../../../collector/hotdir`)
          : path.resolve(process.env.STORAGE_DIR, `../../collector/hotdir`);
      cb(null, uploadOutput);
    },
    filename: function (_, file, cb) {
      cb(null, file.originalname);
    },
  });
  const upload = multer({
    storage,
  });
  return { handleUploads: upload };
}

module.exports = {
  setupMulter,
};


@ -0,0 +1,26 @@
async function fileUploadProgress(request, response, next) {
  let progress = 0;
  const fileSize = request.headers["content-length"]
    ? parseInt(request.headers["content-length"])
    : 0;

  // Note(tcarambat): While this is chunked it does not stream back to the UI for some reason.
  // It just waits for the entire request to finish, likely because the frontend is not using
  // EventSource, which is limited to GET.
  // TODO: Someone smarter than me add streaming here to report back real-time progress.
  response.writeHead(200);
  request.on("data", (chunk) => {
    progress += chunk.length;
    const percentage = (progress / fileSize) * 100;
    response.write(`${JSON.stringify({ progress, fileSize, percentage })}\n`);
    if (progress >= fileSize) {
      response.end();
    }
  });
  next();
}

module.exports = {
  fileUploadProgress,
};


@ -189,6 +189,11 @@ apache-arrow@^12.0.0:
pad-left "^2.1.0"
tslib "^2.5.0"
append-field@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/append-field/-/append-field-1.0.0.tgz#1e3440e915f0b1203d23748e78edd7b9b5b43e56"
integrity sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==
"aproba@^1.0.3 || ^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc"
@ -323,6 +328,18 @@ buffer-equal-constant-time@1.0.1:
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==
buffer-from@^1.0.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
busboy@^1.0.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893"
integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==
dependencies:
streamsearch "^1.1.0"
bytes@3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
@ -448,6 +465,16 @@ concat-map@0.0.1:
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
concat-stream@^1.5.2:
version "1.6.2"
resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
dependencies:
buffer-from "^1.0.0"
inherits "^2.0.3"
readable-stream "^2.2.2"
typedarray "^0.0.6"
console-control-strings@^1.0.0, console-control-strings@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
@ -475,6 +502,11 @@ cookie@0.5.0:
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
core-util-is@~1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
cors@^2.8.5:
version "2.8.5"
resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29"
@ -973,7 +1005,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
inherits@2, inherits@2.0.4, inherits@^2.0.3:
inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@ -1032,6 +1064,11 @@ is-stream@^2.0.0:
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
@ -1253,6 +1290,11 @@ minimatch@^3.1.1, minimatch@^3.1.2:
dependencies:
brace-expansion "^1.1.7"
minimist@^1.2.6:
version "1.2.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
minipass-collect@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617"
@ -1312,6 +1354,13 @@ minizlib@^2.0.0, minizlib@^2.1.1:
minipass "^3.0.0"
yallist "^4.0.0"
mkdirp@^0.5.4:
version "0.5.6"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
dependencies:
minimist "^1.2.6"
mkdirp@^1.0.3, mkdirp@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
@ -1373,6 +1422,19 @@ ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
multer@^1.4.5-lts.1:
version "1.4.5-lts.1"
resolved "https://registry.yarnpkg.com/multer/-/multer-1.4.5-lts.1.tgz#803e24ad1984f58edffbc79f56e305aec5cfd1ac"
integrity sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==
dependencies:
append-field "^1.0.0"
busboy "^1.0.0"
concat-stream "^1.5.2"
mkdirp "^0.5.4"
object-assign "^4.1.1"
type-is "^1.6.4"
xtend "^4.0.0"
negotiator@0.6.3, negotiator@^0.6.2:
version "0.6.3"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
@ -1582,6 +1644,11 @@ prettier@^2.4.1:
resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da"
integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==
process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
promise-inflight@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3"
@ -1647,6 +1714,19 @@ raw-body@2.5.2:
iconv-lite "0.4.24"
unpipe "1.0.0"
readable-stream@^2.2.2:
version "2.3.8"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
readable-stream@^3.6.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
@ -1695,6 +1775,11 @@ safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
@ -1837,6 +1922,11 @@ statuses@2.0.1:
resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
streamsearch@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764"
integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==
"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
@ -1853,6 +1943,13 @@ string_decoder@^1.1.1:
dependencies:
safe-buffer "~5.2.0"
string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
dependencies:
safe-buffer "~5.1.0"
strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
@ -1918,7 +2015,7 @@ tslib@^2.5.0:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913"
integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==
type-is@~1.6.18:
type-is@^1.6.4, type-is@~1.6.18:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
@ -1926,6 +2023,11 @@ type-is@~1.6.18:
media-typer "0.3.0"
mime-types "~2.1.24"
typedarray@^0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==
typical@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/typical/-/typical-4.0.0.tgz#cbeaff3b9d7ae1e2bbfaf5a4e6f11eccfde94fc4"
@ -1965,7 +2067,7 @@ url-template@^2.0.8:
resolved "https://registry.yarnpkg.com/url-template/-/url-template-2.0.8.tgz#fc565a3cccbff7730c775f5641f9555791439f21"
integrity sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==
util-deprecate@^1.0.1:
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
@ -2033,6 +2135,11 @@ wrappy@1:
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
xtend@^4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
yallist@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"


@ -1,8 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
chalk@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.2.0.tgz#249623b7d66869c673699fb66d65723e54dfcfb3"
integrity sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==