
fix: memory leak while downloading large files

Elias Schneider 2023-10-09 11:14:51 +02:00
parent 38919003e9
commit 97e7d7190d
7 changed files with 53 additions and 16 deletions


@@ -32,7 +32,9 @@ ENV NODE_ENV=docker
 # Alpine specific dependencies
 RUN apk update --no-cache
 RUN apk upgrade --no-cache
-RUN apk add --no-cache curl
+RUN apk add --no-cache curl nginx
+COPY ./nginx/nginx.conf /etc/nginx/nginx.conf
 WORKDIR /opt/app/frontend
 COPY --from=frontend-builder /opt/app/public ./public
@@ -55,4 +57,4 @@ HEALTHCHECK --interval=10s --timeout=3s CMD curl -f http://localhost:3000/api/he
 # Application startup
 # HOSTNAME=0.0.0.0 fixes https://github.com/vercel/next.js/issues/51684. It can be removed as soon as the issue is fixed
-CMD cp -rn /tmp/img /opt/app/frontend/public && HOSTNAME=0.0.0.0 node frontend/server.js & cd backend && npm run prod
+CMD cp -rn /tmp/img /opt/app/frontend/public && nginx && PORT=3333 HOSTNAME=0.0.0.0 node frontend/server.js & cd backend && npm run prod


@@ -63,6 +63,8 @@ npm run build
 pm2 start --name="pingvin-share-frontend" npm -- run start
 ```
+
+**Uploading Large Files**: By default, Pingvin Share uses a built-in reverse proxy to reduce the installation steps. However, this reverse proxy is not optimized for uploading large files. If you wish to upload larger files, you can either use the Docker installation or set up your own reverse proxy. An example configuration for Nginx can be found in `/nginx/nginx.conf`.

 The website is now listening on `http://localhost:3000`, have fun with Pingvin Share 🐧!

 ### Integrations


@@ -0,0 +1,19 @@
import { Controller, Get, Res } from "@nestjs/common";
import { Response } from "express";

import { PrismaService } from "./prisma/prisma.service";

@Controller("/")
export class AppController {
  constructor(private prismaService: PrismaService) {}

  @Get("health")
  async health(@Res({ passthrough: true }) res: Response) {
    try {
      await this.prismaService.config.findMany();
      return "OK";
    } catch {
      res.statusCode = 500;
      return "ERROR";
    }
  }
}
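
For reference, a minimal sketch of how the new endpoint can be probed from outside the container. It assumes the bundled nginx listens on port 3000 and that the backend routes are mounted under the `api` prefix, matching the `HEALTHCHECK` URL in the Dockerfile above; the script itself is hypothetical and not part of this commit.

```ts
// health-probe.ts — hypothetical standalone probe, not part of this commit.
// Assumes the container's nginx listens on port 3000 and the backend is
// reachable under /api, as in the Dockerfile HEALTHCHECK above.
const baseUrl = process.env.PINGVIN_URL ?? "http://localhost:3000";

async function checkHealth(): Promise<void> {
  const res = await fetch(`${baseUrl}/api/health`);
  const body = await res.text();

  // The controller returns "OK" (200) when the database responds,
  // or "ERROR" (500) when the Prisma query throws.
  console.log(`${res.status} ${body}`);
  if (!res.ok) process.exit(1);
}

checkHealth();
```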


@@ -14,6 +14,7 @@ import { ShareModule } from "./share/share.module";
 import { UserModule } from "./user/user.module";
 import { ClamScanModule } from "./clamscan/clamscan.module";
 import { ReverseShareModule } from "./reverseShare/reverseShare.module";
+import { AppController } from "./app.controller";

 @Module({
   imports: [
@@ -33,6 +34,9 @@ import { ReverseShareModule } from "./reverseShare/reverseShare.module";
     ClamScanModule,
     ReverseShareModule,
   ],
+  controllers: [
+    AppController,
+  ],
   providers: [
     {
       provide: APP_GUARD,


@@ -11,6 +11,8 @@ export const config = {
 const { apiURL } = getConfig().serverRuntimeConfig;

+// A proxy to the API server only used in development.
+// In production this route gets overridden by nginx.
 export default (req: NextApiRequest, res: NextApiResponse) => {
   httpProxyMiddleware(req, res, {
     headers: {
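
The hunk above only shows the top of the dev-only proxy route. As rough orientation, a catch-all API route built on `next-http-proxy-middleware` typically looks like the sketch below; the file name, the `bodyParser`/`externalResolver` settings, and the forwarded header are assumptions for illustration, not the project's actual code.

```ts
// Hypothetical sketch of a dev-only catch-all proxy route
// (e.g. pages/api/[...path].ts); not the project's actual file.
import { NextApiRequest, NextApiResponse } from "next";
import getConfig from "next/config";
import httpProxyMiddleware from "next-http-proxy-middleware";

export const config = {
  api: {
    // Stream request/response bodies through the proxy instead of parsing
    // them, and let http-proxy resolve the request itself.
    bodyParser: false,
    externalResolver: true,
  },
};

const { apiURL } = getConfig().serverRuntimeConfig;

export default (req: NextApiRequest, res: NextApiResponse) =>
  httpProxyMiddleware(req, res, {
    target: apiURL,
    headers: {
      // Pass the original host along so the backend can build absolute URLs.
      "x-forwarded-host": req.headers.host ?? "",
    },
  });
```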


@@ -1,14 +0,0 @@
import axios from "axios";
import { NextApiRequest, NextApiResponse } from "next";
import getConfig from "next/config";

const { apiURL } = getConfig().serverRuntimeConfig;

export default async (req: NextApiRequest, res: NextApiResponse) => {
  const apiStatus = await axios
    .get(`${apiURL}/api/configs`)
    .then(() => "OK")
    .catch(() => "ERROR");

  res.status(apiStatus == "OK" ? 200 : 500).send(apiStatus);
};

nginx/nginx.conf (new file)

@@ -0,0 +1,22 @@
events {}

http {
  server {
    # The bundled reverse proxy listens on the application port
    listen 3000;

    # Allow request bodies (uploads) of up to 100 MB
    client_max_body_size 100M;

    # API requests are proxied to the backend
    location /api {
      proxy_pass http://localhost:8080;
      proxy_set_header X-Forwarded-Host $host:$server_port;
      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Everything else is proxied to the Next.js frontend on port 3333
    location / {
      proxy_pass http://localhost:3333;
      proxy_set_header X-Forwarded-Host $host:$server_port;
      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header X-Forwarded-Proto $scheme;
    }
  }
}