mirror of https://github.com/stonith404/pingvin-share.git synced 2024-06-30 06:30:11 +02:00

feat: chunk uploads (#76)

* add first concept

* finished first concept

* allow 3 uploads at same time

* retry if chunk failed

* updated clean temporary files job

* fix throttling for chunk uploads

* update tests

* remove multer

* migrate from `MAX_FILE_SIZE` to `MAX_SHARE_SIZE`

* improve error handling if file failed to upload

* fix promise limit

* improve file progress
Elias Schneider 2023-01-09 11:43:48 +01:00 committed by GitHub
parent a5bef5d4a4
commit 653d72bcb9
20 changed files with 364 additions and 246 deletions
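The commit message above summarizes the new upload path: instead of one multipart request per file handled by multer, the client now slices every file into 10 MB chunks, encodes each chunk as a base64 data URL with `FileReader.readAsDataURL`, and POSTs it to `shares/:shareId/files` with `id`, `name`, `chunkIndex` and `totalChunks` query parameters; the server appends the decoded chunks to a `.tmp-chunk` file and registers the file once the last chunk arrives. A minimal standalone sketch of that client loop, using plain `fetch` instead of the project's axios wrapper and leaving out the retry and concurrency handling shown in the diffs further down:

```ts
// Sketch only: mirrors the chunking scheme introduced in this commit, not the exact frontend code.
const CHUNK_SIZE = 10 * 1024 * 1024; // 10 MB, same constant as in the Upload component

async function uploadFileInChunks(apiUrl: string, shareId: string, file: File) {
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  let fileId: string | undefined; // assigned by the server when the first chunk is stored

  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    const blob = file.slice(chunkIndex * CHUNK_SIZE, (chunkIndex + 1) * CHUNK_SIZE);

    // Read the chunk as a base64 data URL ("data:...;base64,<payload>")
    const dataUrl = await new Promise<string>((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = () => resolve(reader.result as string);
      reader.onerror = () => reject(reader.error);
      reader.readAsDataURL(blob);
    });

    const params = new URLSearchParams({
      name: file.name,
      chunkIndex: String(chunkIndex),
      totalChunks: String(totalChunks),
      ...(fileId ? { id: fileId } : {}),
    });

    const response = await fetch(`${apiUrl}/shares/${shareId}/files?${params}`, {
      method: "POST",
      headers: { "Content-Type": "application/octet-stream" },
      body: dataUrl, // the backend splits on "," and decodes the base64 part
    });

    ({ id: fileId } = await response.json()); // response shape: { id, name }
  }
}
```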

View File

@ -20,13 +20,13 @@
"@prisma/client": "^4.7.1",
"archiver": "^5.3.1",
"argon2": "^0.30.2",
"body-parser": "^1.20.1",
"class-transformer": "^0.5.1",
"class-validator": "^0.13.2",
"content-disposition": "^0.5.4",
"cookie-parser": "^1.4.6",
"mime-types": "^2.1.35",
"moment": "^2.29.4",
"multer": "^1.4.5-lts.1",
"nodemailer": "^6.8.0",
"otplib": "^12.0.1",
"passport": "^0.6.0",
@ -47,7 +47,6 @@
"@types/cron": "^2.0.0",
"@types/express": "^4.17.14",
"@types/mime-types": "^2.1.1",
"@types/multer": "^1.4.7",
"@types/node": "^18.11.10",
"@types/nodemailer": "^6.4.6",
"@types/passport-jwt": "^3.0.7",
@ -1275,15 +1274,6 @@
"integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==",
"dev": true
},
"node_modules/@types/multer": {
"version": "1.4.7",
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.7.tgz",
"integrity": "sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==",
"dev": true,
"dependencies": {
"@types/express": "*"
}
},
"node_modules/@types/node": {
"version": "18.11.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.10.tgz",
@ -4987,23 +4977,6 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/multer": {
"version": "1.4.5-lts.1",
"resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz",
"integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==",
"dependencies": {
"append-field": "^1.0.0",
"busboy": "^1.0.0",
"concat-stream": "^1.5.2",
"mkdirp": "^0.5.4",
"object-assign": "^4.1.1",
"type-is": "^1.6.4",
"xtend": "^4.0.0"
},
"engines": {
"node": ">= 6.0.0"
}
},
"node_modules/mute-stream": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",
@ -8566,15 +8539,6 @@
"integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==",
"dev": true
},
"@types/multer": {
"version": "1.4.7",
"resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.7.tgz",
"integrity": "sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==",
"dev": true,
"requires": {
"@types/express": "*"
}
},
"@types/node": {
"version": "18.11.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.10.tgz",
@ -11408,20 +11372,6 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"multer": {
"version": "1.4.5-lts.1",
"resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz",
"integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==",
"requires": {
"append-field": "^1.0.0",
"busboy": "^1.0.0",
"concat-stream": "^1.5.2",
"mkdirp": "^0.5.4",
"object-assign": "^4.1.1",
"type-is": "^1.6.4",
"xtend": "^4.0.0"
}
},
"mute-stream": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",

View File

@ -7,7 +7,7 @@
"prod": "prisma migrate deploy && prisma db seed && node dist/src/main",
"lint": "eslint 'src/**/*.ts'",
"format": "prettier --write 'src/**/*.ts'",
"test:system": "prisma migrate reset -f && nest start & wait-on http://localhost:8080/api/configs && newman run ./test/system/newman-system-tests.json"
"test:system": "prisma migrate reset -f && nest start & wait-on http://localhost:8080/api/configs && newman run ./test/newman-system-tests.json"
},
"prisma": {
"seed": "ts-node prisma/seed/config.seed.ts"
@ -25,13 +25,13 @@
"@prisma/client": "^4.7.1",
"archiver": "^5.3.1",
"argon2": "^0.30.2",
"body-parser": "^1.20.1",
"class-transformer": "^0.5.1",
"class-validator": "^0.13.2",
"content-disposition": "^0.5.4",
"cookie-parser": "^1.4.6",
"mime-types": "^2.1.35",
"moment": "^2.29.4",
"multer": "^1.4.5-lts.1",
"nodemailer": "^6.8.0",
"otplib": "^12.0.1",
"passport": "^0.6.0",
@ -52,7 +52,6 @@
"@types/cron": "^2.0.0",
"@types/express": "^4.17.14",
"@types/mime-types": "^2.1.1",
"@types/multer": "^1.4.7",
"@types/node": "^18.11.10",
"@types/nodemailer": "^6.4.6",
"@types/passport-jwt": "^3.0.7",

View File

@ -44,10 +44,10 @@ const configVariables: Prisma.ConfigCreateInput[] = [
secret: false,
},
{
key: "MAX_FILE_SIZE",
description: "Maximum file size in bytes",
key: "MAX_SHARE_SIZE",
description: "Maximum share size in bytes",
type: "number",
value: "1000000000",
value: "1073741824",
category: "share",
secret: false,
},
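Alongside the rename from `MAX_FILE_SIZE` (per file) to `MAX_SHARE_SIZE` (per share), the default changes from a decimal billion bytes to exactly one binary gigabyte:

```ts
// 1 GiB expressed in bytes — the new default seeded above.
const MAX_SHARE_SIZE = 1024 ** 3; // 1_073_741_824, replacing the old 1_000_000_000 (≈ 0.93 GiB)
```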

View File

@ -1,19 +1,17 @@
import { HttpException, HttpStatus, Module } from "@nestjs/common";
import { Module } from "@nestjs/common";
import { ScheduleModule } from "@nestjs/schedule";
import { AuthModule } from "./auth/auth.module";
import { MulterModule } from "@nestjs/platform-express";
import { ThrottlerModule } from "@nestjs/throttler";
import { Request } from "express";
import { APP_GUARD } from "@nestjs/core";
import { ThrottlerGuard, ThrottlerModule } from "@nestjs/throttler";
import { ConfigModule } from "./config/config.module";
import { ConfigService } from "./config/config.service";
import { EmailModule } from "./email/email.module";
import { FileModule } from "./file/file.module";
import { JobsModule } from "./jobs/jobs.module";
import { PrismaModule } from "./prisma/prisma.module";
import { ShareModule } from "./share/share.module";
import { UserModule } from "./user/user.module";
import { JobsModule } from "./jobs/jobs.module";
@Module({
imports: [
@ -25,29 +23,17 @@ import { JobsModule } from "./jobs/jobs.module";
ConfigModule,
JobsModule,
UserModule,
MulterModule.registerAsync({
useFactory: (config: ConfigService) => ({
fileFilter: (req: Request, file, cb) => {
const MAX_FILE_SIZE = config.get("MAX_FILE_SIZE");
const requestFileSize = parseInt(req.headers["content-length"]);
const isValidFileSize = requestFileSize <= MAX_FILE_SIZE;
cb(
!isValidFileSize &&
new HttpException(
`File must be smaller than ${MAX_FILE_SIZE} bytes`,
HttpStatus.PAYLOAD_TOO_LARGE
),
isValidFileSize
);
},
}),
inject: [ConfigService],
}),
ThrottlerModule.forRoot({
ttl: 60,
limit: 100,
}),
ScheduleModule.forRoot(),
],
providers: [
{
provide: APP_GUARD,
useClass: ThrottlerGuard,
},
],
})
export class AppModule {}
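One side effect of chunking is that a single share can now generate many requests, which would quickly run into the 100-requests-per-60-seconds limit configured here. The module therefore registers `ThrottlerGuard` globally via `APP_GUARD`, and the upload endpoint opts out with `@SkipThrottle()` (visible in the controller diff below). A compact sketch of that pattern, assuming only the standard `@nestjs/throttler` decorators:

```ts
// Sketch of the opt-out pattern only; the real controller is shown in the next diff.
import { Controller, Post } from "@nestjs/common";
import { SkipThrottle } from "@nestjs/throttler";

@Controller("shares/:shareId/files")
export class UploadController {
  @Post()
  @SkipThrottle() // chunk uploads fire one request per 10 MB, so they bypass the global limit
  create() {
    /* ... */
  }
}
```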

View File

@ -1,20 +1,19 @@
import {
Body,
Controller,
Get,
Param,
Post,
Query,
Res,
StreamableFile,
UploadedFile,
UseGuards,
UseInterceptors,
} from "@nestjs/common";
import { FileInterceptor } from "@nestjs/platform-express";
import { SkipThrottle } from "@nestjs/throttler";
import * as contentDisposition from "content-disposition";
import { Response } from "express";
import { JwtGuard } from "src/auth/guard/jwt.guard";
import { FileDownloadGuard } from "src/file/guard/fileDownload.guard";
import { ShareDTO } from "src/share/dto/share.dto";
import { ShareOwnerGuard } from "src/share/guard/shareOwner.guard";
import { ShareSecurityGuard } from "src/share/guard/shareSecurity.guard";
import { FileService } from "./file.service";
@ -24,22 +23,24 @@ export class FileController {
constructor(private fileService: FileService) {}
@Post()
@SkipThrottle()
@UseGuards(JwtGuard, ShareOwnerGuard)
@UseInterceptors(
FileInterceptor("file", {
dest: "./data/uploads/_temp/",
})
)
async create(
@UploadedFile()
file: Express.Multer.File,
@Query() query: any,
@Body() body: string,
@Param("shareId") shareId: string
) {
// Fixes file names with special characters
file.originalname = Buffer.from(file.originalname, "latin1").toString(
"utf8"
const { id, name, chunkIndex, totalChunks } = query;
const data = body.toString().split(",")[1];
return await this.fileService.create(
data,
{ index: parseInt(chunkIndex), total: parseInt(totalChunks) },
{ id, name },
shareId
);
return new ShareDTO().from(await this.fileService.create(file, shareId));
}
@Get(":fileId/download")
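The new `create` handler no longer receives a multer `file` object; it reads the chunk metadata from query parameters and the chunk itself from the raw request body, which is the data URL string produced by `FileReader.readAsDataURL` on the client. The `body.toString().split(",")[1]` call simply drops the `data:...;base64,` prefix. A small standalone illustration of that parsing step:

```ts
// What the endpoint receives as its raw body (illustrative payload, not real upload data):
const body = "data:application/octet-stream;base64,SGVsbG8sIHdvcmxkIQ==";

// Everything after the first comma is the base64 payload...
const data = body.split(",")[1]; // "SGVsbG8sIHdvcmxkIQ=="

// ...which the service decodes back into the original chunk bytes:
const bytes = Buffer.from(data, "base64");
console.log(bytes.toString("utf8")); // "Hello, world!"
```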

View File

@ -3,12 +3,11 @@ import { JwtModule } from "@nestjs/jwt";
import { ShareModule } from "src/share/share.module";
import { FileController } from "./file.controller";
import { FileService } from "./file.service";
import { FileValidationPipe } from "./pipe/fileValidation.pipe";
@Module({
imports: [JwtModule.register({}), ShareModule],
controllers: [FileController],
providers: [FileService, FileValidationPipe],
providers: [FileService],
exports: [FileService],
})
export class FileModule {}

View File

@ -1,10 +1,12 @@
import {
BadRequestException,
HttpException,
HttpStatus,
Injectable,
NotFoundException,
} from "@nestjs/common";
import { JwtService } from "@nestjs/jwt";
import { randomUUID } from "crypto";
import * as crypto from "crypto";
import * as fs from "fs";
import * as mime from "mime-types";
import { ConfigService } from "src/config/config.service";
@ -18,32 +20,85 @@ export class FileService {
private config: ConfigService
) {}
async create(file: Express.Multer.File, shareId: string) {
async create(
data: string,
chunk: { index: number; total: number },
file: { id?: string; name: string },
shareId: string
) {
if (!file.id) file.id = crypto.randomUUID();
const share = await this.prisma.share.findUnique({
where: { id: shareId },
include: { files: true },
});
if (share.uploadLocked)
throw new BadRequestException("Share is already completed");
const fileId = randomUUID();
let diskFileSize: number;
try {
diskFileSize = fs.statSync(
`./data/uploads/shares/${shareId}/${file.id}.tmp-chunk`
).size;
} catch {
diskFileSize = 0;
}
await fs.promises.mkdir(`./data/uploads/shares/${shareId}`, {
recursive: true,
});
fs.promises.rename(
`./data/uploads/_temp/${file.filename}`,
`./data/uploads/shares/${shareId}/${fileId}`
// If the sent chunk index and the expected chunk index doesn't match throw an error
const chunkSize = 10 * 1024 * 1024; // 10MB
const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize);
if (expectedChunkIndex != chunk.index)
throw new BadRequestException({
message: "Unexpected chunk received",
error: "unexpected_chunk_index",
expectedChunkIndex,
});
const buffer = Buffer.from(data, "base64");
// Check if share size limit is exceeded
const fileSizeSum = share.files.reduce(
(n, { size }) => n + parseInt(size),
0
);
return await this.prisma.file.create({
data: {
id: fileId,
name: file.originalname,
size: file.size.toString(),
share: { connect: { id: shareId } },
},
});
if (
fileSizeSum + diskFileSize + buffer.byteLength >
this.config.get("MAX_SHARE_SIZE")
) {
throw new HttpException(
"Max share size exceeded",
HttpStatus.PAYLOAD_TOO_LARGE
);
}
fs.appendFileSync(
`./data/uploads/shares/${shareId}/${file.id}.tmp-chunk`,
buffer
);
const isLastChunk = chunk.index == chunk.total - 1;
if (isLastChunk) {
fs.renameSync(
`./data/uploads/shares/${shareId}/${file.id}.tmp-chunk`,
`./data/uploads/shares/${shareId}/${file.id}`
);
const fileSize = fs.statSync(
`./data/uploads/shares/${shareId}/${file.id}`
).size;
await this.prisma.file.create({
data: {
id: file.id,
name: file.name,
size: fileSize.toString(),
share: { connect: { id: shareId } },
},
});
}
return file;
}
async get(shareId: string, fileId: string) {
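The service keeps each in-progress upload in a single `<fileId>.tmp-chunk` file and derives the chunk it expects next from that file's current size, so duplicate or out-of-order chunks are rejected with an `unexpected_chunk_index` error that also tells the client where to resume. A worked example of that bookkeeping with the 10 MB chunk size used above:

```ts
// Worked example only — the real logic lives in FileService.create above.
const chunkSize = 10 * 1024 * 1024; // 10 MB

// Two full chunks have already been appended to ./data/uploads/shares/<shareId>/<fileId>.tmp-chunk:
const diskFileSize = 2 * chunkSize; // 20 MB on disk

const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize); // 2

// A request carrying chunkIndex 1 (duplicate) or 3 (gap) gets a 400 response shaped like
// { message: "Unexpected chunk received", error: "unexpected_chunk_index", expectedChunkIndex: 2 },
// and the client resumes from index 2; only chunkIndex === 2 is appended.
```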

View File

@ -1,17 +0,0 @@
import {
ArgumentMetadata,
BadRequestException,
Injectable,
PipeTransform,
} from "@nestjs/common";
import { ConfigService } from "src/config/config.service";
@Injectable()
export class FileValidationPipe implements PipeTransform {
constructor(private config: ConfigService) {}
async transform(value: any, metadata: ArgumentMetadata) {
if (value.size > this.config.get("MAX_FILE_SIZE"))
throw new BadRequestException("File is ");
return value;
}
}

View File

@ -38,18 +38,34 @@ export class JobsService {
@Cron("0 0 * * *")
deleteTemporaryFiles() {
const files = fs.readdirSync("./data/uploads/_temp");
let filesDeleted = 0;
for (const file of files) {
const stats = fs.statSync(`./data/uploads/_temp/${file}`);
const isOlderThanOneDay = moment(stats.mtime)
.add(1, "day")
.isBefore(moment());
const shareDirectories = fs
.readdirSync("./data/uploads/shares", { withFileTypes: true })
.filter((dirent) => dirent.isDirectory())
.map((dirent) => dirent.name);
if (isOlderThanOneDay) fs.rmSync(`./data/uploads/_temp/${file}`);
for (const shareDirectory of shareDirectories) {
const temporaryFiles = fs
.readdirSync(`./data/uploads/shares/${shareDirectory}`)
.filter((file) => file.endsWith(".tmp-chunk"));
for (const file of temporaryFiles) {
const stats = fs.statSync(
`./data/uploads/shares/${shareDirectory}/${file}`
);
const isOlderThanOneDay = moment(stats.mtime)
.add(1, "day")
.isBefore(moment());
if (isOlderThanOneDay) {
fs.rmSync(`./data/uploads/shares/${shareDirectory}/${file}`);
filesDeleted++;
}
}
}
console.log(`job: deleted ${files.length} temporary files`);
console.log(`job: deleted ${filesDeleted} temporary files`);
}
@Cron("0 * * * *")

View File

@ -1,6 +1,7 @@
import { ClassSerializerInterceptor, ValidationPipe } from "@nestjs/common";
import { NestFactory, Reflector } from "@nestjs/core";
import { NestExpressApplication } from "@nestjs/platform-express";
import * as bodyParser from "body-parser";
import * as cookieParser from "cookie-parser";
import * as fs from "fs";
import { AppModule } from "./app.module";
@ -10,6 +11,7 @@ async function bootstrap() {
app.useGlobalPipes(new ValidationPipe({ whitelist: true }));
app.useGlobalInterceptors(new ClassSerializerInterceptor(app.get(Reflector)));
app.use(bodyParser.raw({type:'application/octet-stream', limit:'20mb'}));
app.use(cookieParser());
app.set("trust proxy", true);
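With multer removed, the raw body parser has to accept the base64-encoded chunks itself. Base64 maps every 3 bytes to 4 ASCII characters, so a 10 MB chunk arrives as roughly 13.3 MB of text plus the short data URL prefix, which is presumably why the limit is set to `20mb` rather than matching the 10 MB chunk size exactly:

```ts
// Rough size of one 10 MB chunk after base64 encoding (3 raw bytes -> 4 characters):
const chunkSize = 10 * 1024 * 1024;                // 10_485_760 bytes raw
const base64Length = Math.ceil(chunkSize / 3) * 4; // 13_981_016 characters ≈ 13.3 MB
// ...comfortably below the 20 MB raw-body limit configured above.
```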

View File

@ -56,6 +56,10 @@ export class ShareService {
expirationDate = moment(0).toDate();
}
fs.mkdirSync(`./data/uploads/shares/${share.id}`, {
recursive: true,
});
return await this.prisma.share.create({
data: {
...share,

View File

@ -1,6 +1,6 @@
{
"info": {
"_postman_id": "4b16228d-41ef-4c6b-8a0b-294a30a4cfc2",
"_postman_id": "38c7001d-4868-484b-935a-84fd3b5e7cf6",
"name": "Pingvin Share Testing",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "17822132"
@ -478,28 +478,34 @@
"pm.test(\"Response body correct\", () => {",
" const responseBody = pm.response.json();",
" pm.expect(responseBody).to.have.property(\"id\")",
" pm.expect(Object.keys(responseBody).length).be.equal(1)",
" pm.expect(responseBody.name).to.be.equal(\"test-file.txt\")",
" pm.expect(Object.keys(responseBody).length).be.equal(2)",
"});"
],
"type": "text/javascript"
}
}
],
"protocolProfileBehavior": {
"disabledSystemHeaders": {
"content-type": true
}
},
"request": {
"method": "POST",
"header": [],
"header": [
{
"key": "Content-Type",
"value": "application/octet-stream",
"type": "text"
}
],
"body": {
"mode": "formdata",
"formdata": [
{
"key": "file",
"type": "file",
"src": "./test/system/test-file.txt"
}
]
"mode": "raw",
"raw": "data:application/octet-stream;base64,VGhpcyBpcyBhIHRlc3QgZmlsZWQgdXNlZCBmb3IgdXBsb2FkaW5nIGluIHRoZSBzeXN0ZW0gdGVzdC4="
},
"url": {
"raw": "{{API_URL}}/shares/:shareId/files",
"raw": "{{API_URL}}/shares/:shareId/files?name=test-file.txt&chunkIndex=0&totalChunks=1",
"host": [
"{{API_URL}}"
],
@ -508,6 +514,20 @@
":shareId",
"files"
],
"query": [
{
"key": "name",
"value": "test-file.txt"
},
{
"key": "chunkIndex",
"value": "0"
},
{
"key": "totalChunks",
"value": "1"
}
],
"variable": [
{
"key": "shareId",
@ -531,29 +551,34 @@
"",
"pm.test(\"Response body correct\", () => {",
" const responseBody = pm.response.json();",
" pm.expect(responseBody).to.have.property(\"id\")",
" pm.expect(Object.keys(responseBody).length).be.equal(1)",
" pm.expect(responseBody.name).to.be.equal(\"test-file2.txt\")",
" pm.expect(Object.keys(responseBody).length).be.equal(2)",
"});"
],
"type": "text/javascript"
}
}
],
"protocolProfileBehavior": {
"disabledSystemHeaders": {
"content-type": true
}
},
"request": {
"method": "POST",
"header": [],
"header": [
{
"key": "Content-Type",
"value": "application/octet-stream",
"type": "text"
}
],
"body": {
"mode": "formdata",
"formdata": [
{
"key": "file",
"type": "file",
"src": "./test/system/test-file.txt"
}
]
"mode": "raw",
"raw": "data:application/octet-stream;base64,VGhpcyBpcyBhIHRlc3QgZmlsZWQgdXNlZCBmb3IgdXBsb2FkaW5nIGluIHRoZSBzeXN0ZW0gdGVzdC4="
},
"url": {
"raw": "{{API_URL}}/shares/:shareId/files",
"raw": "{{API_URL}}/shares/:shareId/files?name=test-file2.txt&chunkIndex=0&totalChunks=1",
"host": [
"{{API_URL}}"
],
@ -562,6 +587,20 @@
":shareId",
"files"
],
"query": [
{
"key": "name",
"value": "test-file2.txt"
},
{
"key": "chunkIndex",
"value": "0"
},
{
"key": "totalChunks",
"value": "1"
}
],
"variable": [
{
"key": "shareId",

View File

@ -1 +0,0 @@
This is a test filed used for uploading in the system test.
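The standalone `test-file.txt` fixture is removed because its content now travels inline in the Postman collection: the base64 payload embedded in the updated upload requests decodes to exactly this sentence.

```ts
// Decoding the payload from the updated Postman requests reproduces the deleted fixture:
const payload =
  "VGhpcyBpcyBhIHRlc3QgZmlsZWQgdXNlZCBmb3IgdXBsb2FkaW5nIGluIHRoZSBzeXN0ZW0gdGVzdC4=";
console.log(Buffer.from(payload, "base64").toString("utf8"));
// "This is a test filed used for uploading in the system test."
```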

View File

@ -33,9 +33,11 @@ const useStyles = createStyles((theme) => ({
const Dropzone = ({
isUploading,
files,
setFiles,
}: {
isUploading: boolean;
files: FileUpload[];
setFiles: Dispatch<SetStateAction<FileUpload[]>>;
}) => {
const config = useConfig();
@ -45,18 +47,27 @@ const Dropzone = ({
return (
<div className={classes.wrapper}>
<MantineDropzone
maxSize={parseInt(config.get("MAX_FILE_SIZE"))}
onReject={(e) => {
toast.error(e[0].errors[0].message);
}}
disabled={isUploading}
openRef={openRef as ForwardedRef<() => void>}
onDrop={(files) => {
const newFiles = files.map((file) => {
(file as FileUpload).uploadingProgress = 0;
return file as FileUpload;
});
setFiles(newFiles);
onDrop={(newFiles: FileUpload[]) => {
const fileSizeSum = [...newFiles, ...files].reduce((n, { size }) => n + size, 0);
if (fileSizeSum > config.get("MAX_SHARE_SIZE")) {
toast.error(
`Your files exceed the maximum share size of ${byteStringToHumanSizeString(
config.get("MAX_SHARE_SIZE")
)}.`
);
} else {
newFiles = newFiles.map((newFile) => {
newFile.uploadingProgress = 0;
return newFile;
});
setFiles([...newFiles, ...files]);
}
}}
className={classes.dropzone}
radius="md"
@ -71,7 +82,8 @@ const Dropzone = ({
<Text align="center" size="sm" mt="xs" color="dimmed">
Drag&apos;n&apos;drop files here to start your share. We can accept
only files that are less than{" "}
{byteStringToHumanSizeString(config.get("MAX_FILE_SIZE"))} in size.
{byteStringToHumanSizeString(config.get("MAX_SHARE_SIZE"))} in
total.
</Text>
</div>
</MantineDropzone>

View File

@ -1,5 +1,5 @@
import { RingProgress } from "@mantine/core";
import { TbCircleCheck, TbCircleX } from "react-icons/tb";
import { Loader, RingProgress } from "@mantine/core";
import { TbCircleCheck } from "react-icons/tb";
const UploadProgressIndicator = ({ progress }: { progress: number }) => {
if (progress > 0 && progress < 100) {
return (
@ -12,7 +12,7 @@ const UploadProgressIndicator = ({ progress }: { progress: number }) => {
} else if (progress >= 100) {
return <TbCircleCheck color="green" size={22} />;
} else {
return <TbCircleX color="red" size={22} />;
return <Loader color="red" size={19} />;
}
};

View File

@ -37,6 +37,7 @@ const Body = ({ share }: { share: Share }) => {
return (
<Stack align="stretch">
<TextInput
readOnly
variant="filled"
value={link}
rightSection={

View File

@ -1,5 +1,7 @@
import { Button, Group } from "@mantine/core";
import { useModals } from "@mantine/modals";
import { cleanNotifications } from "@mantine/notifications";
import { AxiosError } from "axios";
import { useRouter } from "next/router";
import pLimit from "p-limit";
import { useEffect, useState } from "react";
@ -15,8 +17,10 @@ import { FileUpload } from "../types/File.type";
import { CreateShare, Share } from "../types/share.type";
import toast from "../utils/toast.util";
let createdShare: Share;
const promiseLimit = pLimit(3);
const chunkSize = 10 * 1024 * 1024; // 10MB
let errorToastShown = false;
let createdShare: Share;
const Upload = () => {
const router = useRouter();
@ -29,70 +33,122 @@ const Upload = () => {
const uploadFiles = async (share: CreateShare) => {
setisUploading(true);
try {
setFiles((files) =>
files.map((file) => {
file.uploadingProgress = 1;
return file;
})
);
createdShare = await shareService.create(share);
createdShare = await shareService.create(share);
const uploadPromises = files.map((file, i) => {
// Callback to indicate current upload progress
const progressCallBack = (progress: number) => {
setFiles((files) => {
return files.map((file, callbackIndex) => {
if (i == callbackIndex) {
const fileUploadPromises = files.map(async (file, fileIndex) =>
// Limit the number of concurrent uploads to 3
promiseLimit(async () => {
let fileId: string;
const setFileProgress = (progress: number) => {
setFiles((files) =>
files.map((file, callbackIndex) => {
if (fileIndex == callbackIndex) {
file.uploadingProgress = progress;
}
return file;
});
});
})
);
};
try {
return promiseLimit(() =>
shareService.uploadFile(share.id, file, progressCallBack)
);
} catch {
file.uploadingProgress = -1;
}
});
setFileProgress(1);
await Promise.all(uploadPromises);
} catch (e) {
toast.axiosError(e);
setisUploading(false);
}
const chunks = Math.ceil(file.size / chunkSize);
for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
const from = chunkIndex * chunkSize;
const to = from + chunkSize;
const blob = file.slice(from, to);
try {
await new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = async (event) =>
await shareService
.uploadFile(
createdShare.id,
event,
{
id: fileId,
name: file.name,
},
chunkIndex,
Math.ceil(file.size / chunkSize)
)
.then((response) => {
fileId = response.id;
resolve(response);
})
.catch(reject);
reader.readAsDataURL(blob);
});
setFileProgress(((chunkIndex + 1) / chunks) * 100);
} catch (e) {
if (
e instanceof AxiosError &&
e.response?.data.error == "unexpected_chunk_index"
) {
// Retry with the expected chunk index
chunkIndex = e.response!.data!.expectedChunkIndex - 1;
continue;
} else {
setFileProgress(-1);
// Retry after 5 seconds
await new Promise((resolve) => setTimeout(resolve, 5000));
chunkIndex = -1;
continue;
}
}
}
})
);
Promise.all(fileUploadPromises);
};
useEffect(() => {
// Check if there are any files that failed to upload
const fileErrorCount = files.filter(
(file) => file.uploadingProgress == -1
).length;
if (fileErrorCount > 0) {
if (!errorToastShown) {
toast.error(
`${fileErrorCount} file(s) failed to upload. Trying again.`,
{
disallowClose: true,
autoClose: false,
}
);
}
errorToastShown = true;
} else {
cleanNotifications();
errorToastShown = false;
}
// Complete share
if (
files.length > 0 &&
files.every(
(file) => file.uploadingProgress >= 100 || file.uploadingProgress == -1
)
files.every((file) => file.uploadingProgress >= 100) &&
fileErrorCount == 0
) {
const fileErrorCount = files.filter(
(file) => file.uploadingProgress == -1
).length;
setisUploading(false);
if (fileErrorCount > 0) {
toast.error(`${fileErrorCount} file(s) failed to upload. Try again.`);
} else {
shareService
.completeShare(createdShare.id)
.then(() => {
showCompletedUploadModal(modals, createdShare);
setFiles([]);
})
.catch(() =>
toast.error("An error occurred while finishing your share.")
);
}
shareService
.completeShare(createdShare.id)
.then(() => {
setisUploading(false);
showCompletedUploadModal(modals, createdShare);
setFiles([]);
})
.catch(() =>
toast.error("An error occurred while finishing your share.")
);
}
}, [files]);
if (!user && !config.get("ALLOW_UNAUTHENTICATED_SHARES")) {
router.replace("/");
} else {
@ -120,7 +176,7 @@ const Upload = () => {
Share
</Button>
</Group>
<Dropzone setFiles={setFiles} isUploading={isUploading} />
<Dropzone files={files} setFiles={setFiles} isUploading={isUploading} />
{files.length > 0 && <FileList files={files} setFiles={setFiles} />}
</>
);
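The chunk loop above handles failures in two different ways, both by adjusting the loop variable so the `for` increment lands on the right chunk: an `unexpected_chunk_index` response jumps straight to the index the server reports, while any other error marks the file as failed (`uploadingProgress = -1`), waits five seconds, and restarts the file from chunk 0. A self-contained sketch of just that control flow (the `uploadChunk` and `expectedIndexOnMismatch` parameters stand in for `shareService.uploadFile` and the Axios error inspection):

```ts
// Control-flow sketch only; progress reporting and FileReader handling are omitted.
async function uploadWithRetries(
  totalChunks: number,
  uploadChunk: (index: number) => Promise<void>,
  expectedIndexOnMismatch: (e: unknown) => number | undefined
): Promise<void> {
  for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
    try {
      await uploadChunk(chunkIndex);
    } catch (e) {
      const expected = expectedIndexOnMismatch(e);
      if (expected !== undefined) {
        // "unexpected_chunk_index": resume where the server expects us.
        // Setting (expected - 1) means the loop increment yields exactly `expected`.
        chunkIndex = expected - 1;
      } else {
        // Unknown failure: wait 5 seconds, then restart this file from chunk 0.
        await new Promise((resolve) => setTimeout(resolve, 5000));
        chunkIndex = -1; // incremented back to 0 by the loop
      }
    }
  }
}
```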

View File

@ -1,3 +1,4 @@
import { FileUploadResponse } from "../types/File.type";
import {
CreateShare,
MyShare,
@ -74,22 +75,27 @@ const downloadFile = async (shareId: string, fileId: string) => {
const uploadFile = async (
shareId: string,
file: File,
progressCallBack: (uploadingProgress: number) => void
) => {
let formData = new FormData();
formData.append("file", file);
readerEvent: ProgressEvent<FileReader>,
file: {
id?: string;
name: string;
},
chunkIndex: number,
totalChunks: number
): Promise<FileUploadResponse> => {
const data = readerEvent.target!.result;
const response = await api.post(`shares/${shareId}/files`, formData, {
onUploadProgress: (progressEvent) => {
const uploadingProgress = Math.round(
(100 * progressEvent.loaded) / (progressEvent.total ?? 1)
);
if (uploadingProgress < 100) progressCallBack(uploadingProgress);
},
});
progressCallBack(100);
return response;
return (
await api.post(`shares/${shareId}/files`, data, {
headers: { "Content-Type": "application/octet-stream" },
params: {
id: file.id,
name: file.name,
chunkIndex,
totalChunks,
},
})
).data;
};
export default {

View File

@ -1 +1,3 @@
export type FileUpload = File & { uploadingProgress: number };
export type FileUploadResponse = {id: string, name: string}

View File

@ -1,25 +1,33 @@
import { showNotification } from "@mantine/notifications";
import { NotificationProps, showNotification } from "@mantine/notifications";
import { TbCheck, TbX } from "react-icons/tb";
const error = (message: string) =>
const error = (message: string, config?: Omit<NotificationProps, "message">) =>
showNotification({
icon: <TbX />,
color: "red",
radius: "md",
title: "Error",
message: message,
autoClose: true,
...config,
});
const axiosError = (axiosError: any) =>
error(axiosError?.response?.data?.message ?? "An unknown error occurred");
const success = (message: string) =>
const success = (
message: string,
config?: Omit<NotificationProps, "message">
) =>
showNotification({
icon: <TbCheck />,
color: "green",
radius: "md",
title: "Success",
message: message,
autoClose: true,
...config,
});
const toast = {