mirror of https://github.com/stonith404/pingvin-share.git synced 2024-10-02 09:30:10 +02:00

fix: memory leak while uploading files by disabling base64 encoding of chunks

commit 7a15fbb465 (parent 0bfbaea49a)
Author: Elias Schneider
Date: 2024-04-04 20:55:45 +02:00
Signed with GPG key ID 07E623B294202B6C (no known key found for this signature in database)
4 changed files with 46 additions and 67 deletions
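The gist of the fix: the frontend previously ran every chunk through FileReader.readAsDataURL, which materializes a base64 data-URL string roughly 1.33x the size of the chunk, and the backend then split that string again to recover the payload. The new code hands the raw Blob straight to the HTTP client. A minimal before/after sketch, using fetch instead of the project's axios wrapper; uploadChunkBase64, uploadChunkBinary, and the url parameter are illustrative names, not code from this commit:

// Old path: each chunk is base64-encoded into a string before upload.
async function uploadChunkBase64(url: string, blob: Blob): Promise<Response> {
  const dataUrl = await new Promise<string>((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result as string);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(blob); // allocates a "data:...;base64,..." string ~1.33x the blob size
  });
  const base64 = dataUrl.split(",")[1] ?? ""; // strip the data-URL prefix, as the old backend did
  return fetch(url, { method: "POST", body: base64 });
}

// New path: the Blob goes to the HTTP client as-is; the browser streams the
// bytes without building an encoded copy on the JS heap.
function uploadChunkBinary(url: string, blob: Blob): Promise<Response> {
  return fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/octet-stream" },
    body: blob,
  });
}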

View File (backend FileController)

@@ -27,17 +27,14 @@ export class FileController {
   @UseGuards(CreateShareGuard, ShareOwnerGuard)
   async create(
     @Query() query: any,
     @Body() body: string,
     @Param("shareId") shareId: string,
   ) {
     const { id, name, chunkIndex, totalChunks } = query;

     // Data can be empty if the file is empty
-    const data = body.toString().split(",")[1] ?? "";
-
     return await this.fileService.create(
-      data,
+      body,
       { index: parseInt(chunkIndex), total: parseInt(totalChunks) },
       { id, name },
       shareId,
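The controller now forwards the request body untouched. For @Body() to carry raw binary like this, the HTTP layer needs a body parser registered for application/octet-stream; that wiring is not part of this commit, but with Express under NestJS it is typically something like the following sketch (an assumption, not code from this repo):

// Assumption: raw-body parsing for octet-stream uploads is registered at app
// setup (app being the Nest/Express instance), so @Body() receives the chunk
// bytes instead of a parse error.
import * as express from "express";

app.use(express.raw({ type: "application/octet-stream", limit: "100mb" }));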

View File (frontend EditableUpload page)

@@ -1,22 +1,19 @@
 import { Button, Group } from "@mantine/core";
-import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
-import { useRouter } from "next/router";
 import pLimit from "p-limit";
-import { useEffect, useMemo, useState } from "react";
+import { useEffect, useMemo, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Dropzone from "../../components/upload/Dropzone";
 import FileList from "../../components/upload/FileList";
-import showCompletedUploadModal from "../../components/upload/modals/showCompletedUploadModal";
 import useConfig from "../../hooks/config.hook";
 import useTranslate from "../../hooks/useTranslate.hook";
 import shareService from "../../services/share.service";
 import { FileListItem, FileMetaData, FileUpload } from "../../types/File.type";
 import toast from "../../utils/toast.util";
+import { useRouter } from "next/router";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;

 const EditableUpload = ({
@@ -33,6 +30,8 @@ const EditableUpload = ({
   const router = useRouter();
   const config = useConfig();

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   const [existingFiles, setExistingFiles] =
     useState<Array<FileMetaData & { deleted?: boolean }>>(savedFiles);
   const [uploadingFiles, setUploadingFiles] = useState<FileUpload[]>([]);
@@ -66,7 +65,7 @@ const EditableUpload = ({
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId: string | undefined;

         const setFileProgress = (progress: number) => {
           setUploadingFiles((files) =>
@@ -81,38 +80,30 @@ const EditableUpload = ({
         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);
         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);

           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    shareId,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                shareId,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {
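Two things worth noting in the hunk above: the chunk size is no longer hard-coded to 10 MB but read from the server's share.chunkSize config (held in a useRef so it is resolved once per mount), and the FileReader round-trip is gone entirely. Under the old path each in-flight chunk existed twice on the heap, as the Blob plus its roughly 1.33x-larger base64 string, and with promiseLimit(3) up to three chunks are in flight at once, so at the old 10 MB chunk size around 40 MB of encoded strings could be pinned at any moment before garbage collection caught up, which is the growth the commit title calls a memory leak.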

View File (frontend Upload page)

@@ -3,7 +3,7 @@ import { useModals } from "@mantine/modals";
 import { cleanNotifications } from "@mantine/notifications";
 import { AxiosError } from "axios";
 import pLimit from "p-limit";
-import { useEffect, useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import { FormattedMessage } from "react-intl";
 import Meta from "../../components/Meta";
 import Dropzone from "../../components/upload/Dropzone";
@@ -19,7 +19,6 @@ import { CreateShare, Share } from "../../types/share.type";
 import toast from "../../utils/toast.util";

 const promiseLimit = pLimit(3);
-const chunkSize = 10 * 1024 * 1024; // 10MB
 let errorToastShown = false;

 let createdShare: Share;
@@ -38,6 +37,8 @@ const Upload = ({
   const [files, setFiles] = useState<FileUpload[]>([]);
   const [isUploading, setisUploading] = useState(false);

+  const chunkSize = useRef(parseInt(config.get("share.chunkSize")));
+
   maxShareSize ??= parseInt(config.get("share.maxSize"));

   const uploadFiles = async (share: CreateShare, files: FileUpload[]) => {
@@ -54,7 +55,7 @@ const Upload = ({
     const fileUploadPromises = files.map(async (file, fileIndex) =>
       // Limit the number of concurrent uploads to 3
       promiseLimit(async () => {
-        let fileId: string;
+        let fileId;

         const setFileProgress = (progress: number) => {
           setFiles((files) =>
@@ -69,38 +70,30 @@ const Upload = ({
         setFileProgress(1);

-        let chunks = Math.ceil(file.size / chunkSize);
+        let chunks = Math.ceil(file.size / chunkSize.current);
         // If the file is 0 bytes, we still need to upload 1 chunk
         if (chunks == 0) chunks++;

         for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
-          const from = chunkIndex * chunkSize;
-          const to = from + chunkSize;
+          const from = chunkIndex * chunkSize.current;
+          const to = from + chunkSize.current;
           const blob = file.slice(from, to);

           try {
-            await new Promise((resolve, reject) => {
-              const reader = new FileReader();
-              reader.onload = async (event) =>
-                await shareService
-                  .uploadFile(
-                    createdShare.id,
-                    event,
-                    {
-                      id: fileId,
-                      name: file.name,
-                    },
-                    chunkIndex,
-                    chunks,
-                  )
-                  .then((response) => {
-                    fileId = response.id;
-                    resolve(response);
-                  })
-                  .catch(reject);
-              reader.readAsDataURL(blob);
-            });
+            await shareService
+              .uploadFile(
+                createdShare.id,
+                blob,
+                {
+                  id: fileId,
+                  name: file.name,
+                },
+                chunkIndex,
+                chunks,
+              )
+              .then((response) => {
+                fileId = response.id;
+              });

             setFileProgress(((chunkIndex + 1) / chunks) * 100);
           } catch (e) {

View File (frontend share service)

@@ -77,7 +77,7 @@ const removeFile = async (shareId: string, fileId: string) => {

 const uploadFile = async (
   shareId: string,
-  readerEvent: ProgressEvent<FileReader>,
+  chunk: Blob,
   file: {
     id?: string;
     name: string;
@@ -85,10 +85,8 @@ const uploadFile = async (
   chunkIndex: number,
   totalChunks: number,
 ): Promise<FileUploadResponse> => {
-  const data = readerEvent.target!.result;
-
   return (
-    await api.post(`shares/${shareId}/files`, data, {
+    await api.post(`shares/${shareId}/files`, chunk, {
       headers: { "Content-Type": "application/octet-stream" },
       params: {
         id: file.id,
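For reference, calling the reworked service reduces to slicing the File into Blob chunks and threading the returned id through subsequent chunks, which is exactly what both upload pages above now do. A condensed usage sketch; file, chunkSize, and shareId are assumed to be in scope, and error handling is omitted:

// Slice the file into Blob chunks and upload them sequentially; the server
// assigns the file id on the first chunk and expects it on the rest.
let fileId: string | undefined;
const totalChunks = Math.max(1, Math.ceil(file.size / chunkSize)); // 0-byte files still need 1 chunk
for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
  const chunk = file.slice(chunkIndex * chunkSize, (chunkIndex + 1) * chunkSize);
  const response = await uploadFile(
    shareId,
    chunk,
    { id: fileId, name: file.name },
    chunkIndex,
    totalChunks,
  );
  fileId = response.id;
}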