anything-llm/server/package.json
Timothy Carambat 655ebd9479
[Feature] AnythingLLM can use locally hosted Llama.cpp and GGUF files for inference (#413)
* Implement use of native embedder (all-MiniLM-L6-v2; see the embedding sketch after the commit log)
stop showing Prisma queries during dev

* Add native embedder as an available embedder selection

* wrap model loader in try/catch

* print progress on download

* add built-in LLM support (experimental)

* Update to progress output for embedder

* move embedder selection options to component

* safety checks for modelfile

* update ref

* Hide selection when on hosted subdomain

* update documentation
hide localLlama when on hosted

* safety checks for storage of models

* update dockerfile to pre-build Llama.cpp bindings

* update lockfile

* add langchain doc comment

* remove extraneous --no-metal option

* Show data handling for private LLM

* persist model in memory for N+1 chats (see the caching sketch after package.json)

* update import
update dev comment on token model size

* update primary README

* chore: more readme updates and remove screenshots - too much to maintain, just use the app!

* remove screenshot link
2023-12-07 14:48:27 -08:00
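
The native embedder named in the first commit item (all-MiniLM-L6-v2) runs locally through the @xenova/transformers dependency declared in the package.json below. As a rough illustration only, assuming the Xenova/all-MiniLM-L6-v2 model id and an invented helper name rather than the project's actual code, embedding a batch of text chunks with that library could look like this:

// Sketch only: embed text chunks locally with @xenova/transformers.
// The package is ESM-only, so a CommonJS server ("main": "index.js") loads it
// with a dynamic import. Model id and helper name are illustrative assumptions.
async function embedTextChunks(chunks) {
  const { pipeline } = await import("@xenova/transformers");
  // Downloads the ONNX weights on first use, then reuses the local cache.
  const extractor = await pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2");
  const output = await extractor(chunks, { pooling: "mean", normalize: true });
  return output.tolist(); // one 384-dimensional vector per input chunk
}

// Example: embedTextChunks(["hello world"]).then((v) => console.log(v[0].length)); // 384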

{
  "name": "anything-llm-server",
  "version": "0.2.0",
  "description": "Server endpoints to process or create content for chatting",
  "main": "index.js",
  "author": "Timothy Carambat (Mintplex Labs)",
  "license": "MIT",
  "private": false,
  "engines": {
    "node": ">=18.12.1"
  },
  "scripts": {
    "dev": "NODE_ENV=development nodemon --ignore documents --ignore vector-cache --ignore storage --ignore swagger --trace-warnings index.js",
    "start": "NODE_ENV=production node index.js",
    "lint": "yarn prettier --write ./endpoints ./models ./utils index.js",
    "swagger": "node ./swagger/init.js",
    "sqlite:migrate": "cd ./utils/prisma && node migrateFromSqlite.js"
  },
  "prisma": {
    "seed": "node prisma/seed.js"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.8.1",
    "@azure/openai": "^1.0.0-beta.3",
    "@googleapis/youtube": "^9.0.0",
    "@pinecone-database/pinecone": "^0.1.6",
    "@prisma/client": "5.3.0",
    "@qdrant/js-client-rest": "^1.4.0",
    "@xenova/transformers": "^2.10.0",
    "archiver": "^5.3.1",
    "bcrypt": "^5.1.0",
    "body-parser": "^1.20.2",
    "check-disk-space": "^3.4.0",
    "chromadb": "^1.5.2",
    "cors": "^2.8.5",
    "dotenv": "^16.0.3",
    "express": "^4.18.2",
    "extract-zip": "^2.0.1",
    "graphql": "^16.7.1",
    "joi": "^17.11.0",
    "joi-password-complexity": "^5.2.0",
    "js-tiktoken": "^1.0.7",
    "jsonwebtoken": "^8.5.1",
    "langchain": "0.0.201",
    "mime": "^3.0.0",
    "moment": "^2.29.4",
    "multer": "^1.4.5-lts.1",
    "node-llama-cpp": "^2.8.0",
    "openai": "^3.2.1",
    "pinecone-client": "^1.1.0",
    "posthog-node": "^3.1.1",
    "prisma": "^5.3.1",
    "serve-index": "^1.9.1",
    "slugify": "^1.6.6",
    "sqlite": "^4.2.1",
    "sqlite3": "^5.1.6",
    "swagger-autogen": "^2.23.5",
    "swagger-ui-express": "^5.0.0",
    "uuid": "^9.0.0",
    "uuid-apikey": "^1.5.3",
    "vectordb": "0.1.12",
    "weaviate-ts-client": "^1.4.0"
  },
  "devDependencies": {
    "nodemon": "^2.0.22",
    "prettier": "^2.4.1"
  }
}
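
The node-llama-cpp dependency above is what the built-in Llama.cpp/GGUF support relies on, and the "persist model in memory for N+1 chats" item suggests loading the model once and reusing the session. A minimal sketch of that caching pattern, assuming node-llama-cpp 2.x class names and an invented model path (not the project's actual implementation), could look like this:

// Sketch only: load a local GGUF model once and reuse the session across chats.
// Class names are from node-llama-cpp 2.x; the cache shape and model path are
// illustrative assumptions.
let cachedSession = null;

async function getLlamaSession(modelPath) {
  if (cachedSession) return cachedSession; // keep the model warm for N+1 chats

  // node-llama-cpp is ESM-only, so import it dynamically from CommonJS.
  const { LlamaModel, LlamaContext, LlamaChatSession } = await import("node-llama-cpp");
  const model = new LlamaModel({ modelPath }); // a downloaded *.gguf file
  const context = new LlamaContext({ model });
  cachedSession = new LlamaChatSession({ context });
  return cachedSession;
}

// Example (hypothetical path):
// const session = await getLlamaSession("./storage/models/llama-2-7b.Q4_K_M.gguf");
// const reply = await session.prompt("Summarize this workspace.");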