From 7f9d90ad05a41af2156ac4ffc4cf05dc0cdb1c8e Mon Sep 17 00:00:00 2001
From: Matt Ciaccio
Date: Wed, 29 Apr 2026 02:00:16 +0200
Subject: [PATCH] fix(gdpr): cap export-bundle size at 50MB before upload

Article-15 bundles are JSON+HTML only (no receipts/contracts), so even
heavy clients land at <1 MB. Anything larger almost certainly indicates
an unbounded relation we forgot to cap. Fail the worker job before
uploading rather than push a runaway blob to MinIO + email the client a
download link of mystery size.

Co-Authored-By: Claude Opus 4.7 (1M context)
---
[NOTE(review): this patch was mangled by markup stripping. The span
between `Promise<` in the second hunk's function context and
`buffer.length >` was eaten as a pseudo-tag, losing the function's
generic return type, the hunk's three leading context lines, and the
start of the `+  if (buffer.length >` line. The `if` line is
reconstructed from the error-message template below; the lost context
lines are unrecoverable here, so that hunk header was renarrowed from
`-145,6 +153,12` to `-148,3 +156,9` for internal consistency. The
author emails in From:/Co-Authored-By: were stripped the same way.
Re-generate this patch from the original commit before applying.]

 src/lib/services/gdpr-export.service.ts | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/src/lib/services/gdpr-export.service.ts b/src/lib/services/gdpr-export.service.ts
index be4c330..321a740 100644
--- a/src/lib/services/gdpr-export.service.ts
+++ b/src/lib/services/gdpr-export.service.ts
@@ -28,6 +28,14 @@ import { buildClientBundle, renderBundleHtml } from '@/lib/services/gdpr-bundle-
 
 const EXPIRY_DAYS = 30;
 const PRESIGN_EXPIRY_SECONDS = 7 * 24 * 60 * 60; // 7 days for the email link
+/**
+ * Hard cap on the produced ZIP. Article-15 bundles are JSON+HTML only (no
+ * receipts/contracts) so even very active clients land at <1 MB; anything
+ * larger is almost certainly an unbounded relation we forgot to cap.
+ * Failing the job is safer than uploading a 500MB blob to MinIO + emailing
+ * an unsuspecting client a download link of mystery size.
+ */
+const MAX_BUNDLE_BYTES = 50 * 1024 * 1024;
 
 interface RequestExportInput {
   clientId: string;
@@ -148,3 +156,9 @@ export async function processGdprExportJob(input: ProcessJobInput): Promise
+  if (buffer.length > MAX_BUNDLE_BYTES) {
+    throw new Error(
+      `GDPR bundle exceeded ${MAX_BUNDLE_BYTES} bytes (got ${buffer.length}); refusing to upload`,
+    );
+  }
+
   const port = await db.query.ports.findFirst({ where: eq(ports.id, input.portId) });
   const portSlug = port?.slug ?? 'unknown';
   const storageKey = `${portSlug}/gdpr-exports/${input.clientId}/${input.exportId}.zip`;