fix(audit-tier-6): validation, perms, ops/infra, per-port webhook secret

Final audit polish — closes the remaining LOW + MED items the previous
tiers didn't reach:

* Validation hardening: me.preferences uses .strict() + 8KB cap
  instead of unbounded .passthrough(); files.uploadFile gains
  magic-byte verification (jpeg/png/gif/webp/pdf/doc/xlsx); OCR scan
  endpoint enforces 10MB cap + magic-byte check on receipt images;
  port logoUrl + me.avatarUrl reject javascript:/data: schemes via
  a shared httpUrl refinement.
* Permission gates: document-sends/{brochure,berth-pdf} now require
  email.send (was withAuth-only); document-sends/{preview,list} on
  email.view; ai/email-draft on email.send; documents/[id]/send
  uses send_for_signing (was create); expenses/export/parent-company
  flips from hard isSuperAdmin to expenses.export for parity;
  admin/users/options gated on reminders.assign_others (was withAuth).
* Envelope hygiene: auth/set-password switches the third {message}
  variant to errorResponse + {data: {email}}; ai/email-draft wraps
  jobId in {data: {jobId}}.
* UI polish: reports-list.handleDownload surfaces failures via
  toastError (was console-only).
* Ops/infra: pin pnpm@10.33.2 across all three Dockerfiles +
  packageManager field in package.json; Dockerfile.worker re-orders
  user creation BEFORE pnpm install so node_modules / .cache dirs
  are worker-owned (fixes tesseract.js + sharp EACCES at first PDF
  parse); add Redis-ping HEALTHCHECK to the worker container.
* Public health endpoint: returns the full env+appUrl payload only
  when the caller presents X-Intake-Secret; otherwise it returns a
  minimal {status} response, so generic uptime monitors keep working
  but the anonymous internet doesn't get deployment fingerprints.
* Per-port Documenso webhook secret: new system_settings key
  + listDocumensoWebhookSecrets() helper.  The webhook receiver
  tries every configured per-port secret with a timing-safe
  comparison, falling back to the env secret, then forwards the
  resolved portId into handleDocumentExpired so two ports sharing
  a documensoId cannot cross-mutate each other's documents.

Deferred (handled in dedicated follow-up PRs):
* Tier 5.1 — direct service tests for portal-auth / users /
  email-accounts / document-sends / sales-email-config.  MED, large
  test-writing scope.
* The {ok: true} → {data: null} envelope migration across
  alerts/expenses/admin-ocr-settings/storage routes.  Mechanical but
  needs coordinated client + test updates.
* CSP-nonce migration (drop unsafe-inline) — needs middleware-level
  nonce generation that the Next 15 router has to thread through.
* Idempotency-Key header on Documenso createDocument.  Requires
  schema column on documents to persist the key; deferred so it
  doesn't bundle a migration into this commit.
* The 16 better-auth user_id FKs — deferred to a separate, dedicated
  migration handled with care (some columns are NOT NULL today, and
  the cascade-behavior decisions matter).
* PermissionGate / Skeleton / EmptyState wraps across 5 admin lists
  (auditor-H §§36–37) and the residential-clients filter bar.

Test status: 1175/1175 vitest, tsc clean.

Refs: docs/audit-comprehensive-2026-05-05.md MED §§28,29,30 + LOW §§32–43
+ HIGH §9 (Documenso secrets follow-up).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-05 21:03:31 +02:00
parent 4bab6de8be
commit 83239104e0
22 changed files with 402 additions and 176 deletions

View File

@@ -1,13 +1,13 @@
# Stage 1: Install dependencies # Stage 1: Install dependencies
FROM node:20-alpine AS deps FROM node:20-alpine AS deps
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app WORKDIR /app
COPY package.json pnpm-lock.yaml ./ COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --prod=false RUN pnpm install --frozen-lockfile --prod=false
# Stage 2: Build the application # Stage 2: Build the application
FROM node:20-alpine AS builder FROM node:20-alpine AS builder
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules COPY --from=deps /app/node_modules ./node_modules
COPY . . COPY . .

View File

@@ -1,5 +1,5 @@
FROM node:20-alpine FROM node:20-alpine
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app WORKDIR /app
COPY package.json pnpm-lock.yaml ./ COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile RUN pnpm install --frozen-lockfile

View File

@@ -1,26 +1,40 @@
# Stage 1: Install dependencies (dev deps needed for esbuild) # Stage 1: Install dependencies (dev deps needed for esbuild)
FROM node:20-alpine AS deps FROM node:20-alpine AS deps
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app WORKDIR /app
COPY package.json pnpm-lock.yaml ./ COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --prod=false RUN pnpm install --frozen-lockfile --prod=false
# Stage 2: Build the worker bundle # Stage 2: Build the worker bundle
FROM node:20-alpine AS builder FROM node:20-alpine AS builder
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules COPY --from=deps /app/node_modules ./node_modules
COPY . . COPY . .
ENV SKIP_ENV_VALIDATION=1 ENV SKIP_ENV_VALIDATION=1
RUN pnpm build:worker RUN pnpm build:worker
# Stage 3: Production runner (prod deps only) # Stage 3: Production runner (prod deps only).
#
# Critical ordering: create the worker user FIRST and chown the workdir
# BEFORE pnpm install, so node_modules + lazy-cache directories
# (tesseract.js, sharp) are owned by the worker user. Without this, the
# previous layout had pnpm install run as root → node_modules root-owned
# → tesseract.js / sharp wrote to node_modules/.cache and EACCES'd at
# first PDF parse in prod (auditor-K §39).
FROM node:20-alpine AS runner FROM node:20-alpine AS runner
RUN corepack enable && corepack prepare pnpm@latest --activate RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
WORKDIR /app
COPY package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --prod
RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 worker RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 worker
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js WORKDIR /app
RUN chown -R worker:nodejs /app
USER worker USER worker
COPY --chown=worker:nodejs package.json pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --prod
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js
# Healthcheck — pings Redis from inside the worker container. Without
# this, a worker whose Redis connection has silently dropped (BullMQ
# rejects new jobs but the Node process is alive) is invisible to
# compose / swarm and jobs queue indefinitely (auditor-K §40).
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \
CMD node -e "const Redis=require('ioredis');const r=new Redis(process.env.REDIS_URL,{maxRetriesPerRequest:1,connectTimeout:3000,lazyConnect:true});r.connect().then(()=>r.ping()).then(()=>{r.disconnect();process.exit(0)}).catch(()=>process.exit(1))" || exit 1
CMD ["node", "worker.js"] CMD ["node", "worker.js"]

View File

@@ -2,6 +2,7 @@
"name": "port-nimara-crm", "name": "port-nimara-crm",
"version": "0.1.0", "version": "0.1.0",
"private": true, "private": true,
"packageManager": "pnpm@10.33.2",
"scripts": { "scripts": {
"dev": "next dev", "dev": "next dev",
"build": "next build && pnpm build:server", "build": "next build && pnpm build:server",

View File

@@ -1,7 +1,7 @@
import { NextRequest, NextResponse } from 'next/server'; import { NextRequest, NextResponse } from 'next/server';
import { z } from 'zod'; import { z } from 'zod';
import { errorResponse } from '@/lib/errors'; import { errorResponse, ValidationError } from '@/lib/errors';
import { consumeCrmInvite } from '@/lib/services/crm-invite.service'; import { consumeCrmInvite } from '@/lib/services/crm-invite.service';
import { enforcePublicRateLimit } from '@/lib/api/route-helpers'; import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
@@ -15,27 +15,26 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
const limited = await enforcePublicRateLimit(req, 'portalToken'); const limited = await enforcePublicRateLimit(req, 'portalToken');
if (limited) return limited; if (limited) return limited;
try {
let body: unknown; let body: unknown;
try { try {
body = await req.json(); body = await req.json();
} catch { } catch {
return NextResponse.json({ message: 'Invalid request body' }, { status: 400 }); // Use {error} via errorResponse so the envelope matches every other
// route (auditor-F §32 — was emitting {message} as a third variant).
throw new ValidationError('Invalid request body');
} }
const parsed = bodySchema.safeParse(body); const parsed = bodySchema.safeParse(body);
if (!parsed.success) { if (!parsed.success) {
return NextResponse.json( throw new ValidationError(parsed.error.errors[0]?.message ?? 'Invalid input');
{ message: parsed.error.errors[0]?.message ?? 'Invalid input' },
{ status: 400 },
);
} }
try {
const result = await consumeCrmInvite({ const result = await consumeCrmInvite({
token: parsed.data.token, token: parsed.data.token,
password: parsed.data.password, password: parsed.data.password,
}); });
return NextResponse.json({ success: true, email: result.email }); return NextResponse.json({ data: { email: result.email } });
} catch (err) { } catch (err) {
return errorResponse(err); return errorResponse(err);
} }

View File

@@ -1,18 +1,34 @@
import { NextResponse } from 'next/server'; import { NextRequest, NextResponse } from 'next/server';
import { env } from '@/lib/env'; import { env } from '@/lib/env';
/** /**
* GET /api/public/health * GET /api/public/health
* *
* Public-facing health probe. Used by the marketing-website server on * Health probe used by the marketing-website server on startup to verify
* startup to verify it's pointed at a CRM matching its own deployment * it's pointed at a CRM matching its own deployment env (plan §14.8
* env (plan §14.8 critical: prevent staging-website-talking-to-prod-CRM). * critical: prevent staging-website-talking-to-prod-CRM).
* *
* Returns the CRM's `NODE_ENV` and `APP_URL` so the website can do a * Auditor-K §41 flagged that the previous response disclosed `NODE_ENV`
* strict equality check before serving any request. * and `APP_URL` to anonymous internet — mirrors the website's own intake
* secret gate so we don't leak deployment fingerprints. When
* `WEBSITE_INTAKE_SECRET` is set and the caller presents the matching
* `X-Intake-Secret` header we return the full payload; otherwise return
* a minimal `{status:'ok'}` so generic uptime monitors still get a 200.
*/ */
export function GET(): Response { export function GET(req: NextRequest): Response {
const expected = env.WEBSITE_INTAKE_SECRET;
const provided = req.headers.get('x-intake-secret');
const matched =
expected && provided && provided.length === expected.length && provided === expected;
if (!matched) {
return NextResponse.json(
{ status: 'ok', timestamp: new Date().toISOString() },
{ headers: { 'cache-control': 'no-store' } },
);
}
return NextResponse.json( return NextResponse.json(
{ {
status: 'ok', status: 'ok',

View File

@@ -1,12 +1,17 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { eq } from 'drizzle-orm'; import { eq } from 'drizzle-orm';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { userPortRoles, userProfiles } from '@/lib/db/schema'; import { userPortRoles, userProfiles } from '@/lib/db/schema';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
export const GET = withAuth(async (_req, ctx) => { // Sole consumer is the reminder-form's "assign to" picker, so gate on
// `reminders.assign_others` rather than letting any authed user
// enumerate every colleague's display name + user id at the port
// (auditor-A3 §6).
export const GET = withAuth(
withPermission('reminders', 'assign_others', async (_req, ctx) => {
try { try {
const rows = await db const rows = await db
.select({ .select({
@@ -22,4 +27,5 @@ export const GET = withAuth(async (_req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -1,7 +1,7 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { and, eq } from 'drizzle-orm'; import { and, eq } from 'drizzle-orm';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { systemSettings } from '@/lib/db/schema/system'; import { systemSettings } from '@/lib/db/schema/system';
import { requestEmailDraft } from '@/lib/services/email-draft.service'; import { requestEmailDraft } from '@/lib/services/email-draft.service';
@@ -9,11 +9,18 @@ import { parseBody } from '@/lib/api/route-helpers';
import { requestDraftSchema } from '@/lib/validators/ai'; import { requestDraftSchema } from '@/lib/validators/ai';
import { CodedError, errorResponse } from '@/lib/errors'; import { CodedError, errorResponse } from '@/lib/errors';
export const POST = withAuth(async (req, ctx) => { // Gated on `email.send` — the draft endpoint spends OpenAI tokens and
// renders client/interest-scoped content; only roles permitted to send
// emails should be able to mint drafts (auditor-A3 §7).
export const POST = withAuth(
withPermission('email', 'send', async (req, ctx) => {
try { try {
// Feature flag check // Feature flag check
const flag = await db.query.systemSettings.findFirst({ const flag = await db.query.systemSettings.findFirst({
where: and(eq(systemSettings.key, 'ai_email_drafts'), eq(systemSettings.portId, ctx.portId)), where: and(
eq(systemSettings.key, 'ai_email_drafts'),
eq(systemSettings.portId, ctx.portId),
),
}); });
if (flag?.value !== true) { if (flag?.value !== true) {
throw new CodedError('NOT_FOUND', { throw new CodedError('NOT_FOUND', {
@@ -30,8 +37,9 @@ export const POST = withAuth(async (req, ctx) => {
additionalInstructions: body.additionalInstructions, additionalInstructions: body.additionalInstructions,
}); });
return NextResponse.json({ jobId }, { status: 202 }); return NextResponse.json({ data: { jobId } }, { status: 202 });
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -1,6 +1,6 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers'; import { parseBody } from '@/lib/api/route-helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { sendBerthPdf } from '@/lib/services/document-sends.service'; import { sendBerthPdf } from '@/lib/services/document-sends.service';
@@ -12,9 +12,10 @@ import { sendBerthPdfSchema } from '@/lib/validators/document-sends';
* Sends the active per-berth PDF version to a client recipient. The body * Sends the active per-berth PDF version to a client recipient. The body
* markdown goes through the merge-field expander + sanitizer * markdown goes through the merge-field expander + sanitizer
* (`renderEmailBody`) before reaching nodemailer (§14.7 critical mitigation: * (`renderEmailBody`) before reaching nodemailer (§14.7 critical mitigation:
* body XSS). * body XSS). Gated on `email.send` (auditor-A3 §4).
*/ */
export const POST = withAuth(async (req, ctx) => { export const POST = withAuth(
withPermission('email', 'send', async (req, ctx) => {
try { try {
const input = await parseBody(req, sendBerthPdfSchema); const input = await parseBody(req, sendBerthPdfSchema);
const result = await sendBerthPdf({ const result = await sendBerthPdf({
@@ -30,4 +31,5 @@ export const POST = withAuth(async (req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -1,6 +1,6 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers'; import { parseBody } from '@/lib/api/route-helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { sendBrochure } from '@/lib/services/document-sends.service'; import { sendBrochure } from '@/lib/services/document-sends.service';
@@ -11,8 +11,14 @@ import { sendBrochureSchema } from '@/lib/validators/document-sends';
* *
* Sends a brochure (default or specified) to a client recipient. Same * Sends a brochure (default or specified) to a client recipient. Same
* sanitization + audit-row pipeline as the berth-pdf endpoint. * sanitization + audit-row pipeline as the berth-pdf endpoint.
*
* Gated on `email.send` so the lowest-privilege role at a port (which
* has only `email.view`) can no longer fire nodemailer at arbitrary
* recipients. The 50/user/hour rate-limit was previously the only
* restriction (auditor-A3 §4).
*/ */
export const POST = withAuth(async (req, ctx) => { export const POST = withAuth(
withPermission('email', 'send', async (req, ctx) => {
try { try {
const input = await parseBody(req, sendBrochureSchema); const input = await parseBody(req, sendBrochureSchema);
const result = await sendBrochure({ const result = await sendBrochure({
@@ -28,4 +34,5 @@ export const POST = withAuth(async (req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -1,6 +1,6 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers'; import { parseBody } from '@/lib/api/route-helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { previewBody } from '@/lib/services/document-sends.service'; import { previewBody } from '@/lib/services/document-sends.service';
@@ -12,9 +12,10 @@ import { previewBodySchema } from '@/lib/validators/document-sends';
* Renders a body for the dry-run UI without actually sending. Returns the * Renders a body for the dry-run UI without actually sending. Returns the
* sanitized HTML, the post-merge markdown, and the list of unresolved * sanitized HTML, the post-merge markdown, and the list of unresolved
* `{{tokens}}` so the UI can block submit until the rep fills them in * `{{tokens}}` so the UI can block submit until the rep fills them in
* (§14.7 mitigation). * (§14.7 mitigation). Gated on `email.view` (auditor-A3 §4).
*/ */
export const POST = withAuth(async (req, ctx) => { export const POST = withAuth(
withPermission('email', 'view', async (req, ctx) => {
try { try {
const input = await parseBody(req, previewBodySchema); const input = await parseBody(req, previewBodySchema);
const result = await previewBody( const result = await previewBody(
@@ -28,4 +29,5 @@ export const POST = withAuth(async (req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -1,12 +1,16 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { parseQuery } from '@/lib/api/route-helpers'; import { parseQuery } from '@/lib/api/route-helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { listSends } from '@/lib/services/document-sends.service'; import { listSends } from '@/lib/services/document-sends.service';
import { listSendsQuerySchema } from '@/lib/validators/document-sends'; import { listSendsQuerySchema } from '@/lib/validators/document-sends';
export const GET = withAuth(async (req, ctx) => { // Gated on `email.view` so the lowest-privilege role at a port can no
// longer enumerate which clients have received which brochures / berth
// PDFs (auditor-A3 §4).
export const GET = withAuth(
withPermission('email', 'view', async (req, ctx) => {
try { try {
const query = parseQuery(req, listSendsQuerySchema); const query = parseQuery(req, listSendsQuerySchema);
const data = await listSends({ const data = await listSends({
@@ -20,4 +24,5 @@ export const GET = withAuth(async (req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -5,7 +5,10 @@ import { errorResponse } from '@/lib/errors';
import { sendForSigning } from '@/lib/services/documents.service'; import { sendForSigning } from '@/lib/services/documents.service';
export const POST = withAuth( export const POST = withAuth(
withPermission('documents', 'create', async (req, ctx, params) => { // Use the dedicated `send_for_signing` permission rather than `create`,
// so a role with documents.create-only does not also gain the ability
// to dispatch a Documenso send (auditor-A3 §3).
withPermission('documents', 'send_for_signing', async (req, ctx, params) => {
try { try {
const doc = await sendForSigning(params.id!, ctx.portId, { const doc = await sendForSigning(params.id!, ctx.portId, {
userId: ctx.userId, userId: ctx.userId,

View File

@@ -1,14 +1,16 @@
import { NextResponse } from 'next/server'; import { NextResponse } from 'next/server';
import { requireSuperAdmin, withAuth } from '@/lib/api/helpers'; import { withAuth, withPermission } from '@/lib/api/helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { exportParentCompany } from '@/lib/services/expense-export'; import { exportParentCompany } from '@/lib/services/expense-export';
import { listExpensesSchema } from '@/lib/validators/expenses'; import { listExpensesSchema } from '@/lib/validators/expenses';
export const POST = withAuth(async (req, ctx) => { // Gated on `expenses.export` for parity with sibling export routes
// (auditor-A3 §5). Hard `isSuperAdmin` check used to lock out port
// admins who held expenses.export = true.
export const POST = withAuth(
withPermission('expenses', 'export', async (req, ctx) => {
try { try {
requireSuperAdmin(ctx, 'expenses.export.parent-company');
const body = await req.json().catch(() => ({})); const body = await req.json().catch(() => ({}));
const query = listExpensesSchema.parse(body); const query = listExpensesSchema.parse(body);
const pdf = await exportParentCompany(ctx.portId, query); const pdf = await exportParentCompany(ctx.portId, query);
@@ -23,4 +25,5 @@ export const POST = withAuth(async (req, ctx) => {
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);
} }
}); }),
);

View File

@@ -30,8 +30,22 @@ export const POST = withAuth(
const formData = await req.formData(); const formData = await req.formData();
const file = formData.get('file') as File | null; const file = formData.get('file') as File | null;
if (!file) throw new ValidationError('A file is required'); if (!file) throw new ValidationError('A file is required');
// Hard 10 MB cap — without this any authenticated rep could grief
// their own port's AI budget by sending arbitrarily large images
// and burning OCR tokens (auditor-E3 §28).
const MAX_OCR_BYTES = 10 * 1024 * 1024;
if (file.size > MAX_OCR_BYTES) {
throw new ValidationError('Receipt image is too large (10 MB max).');
}
const buffer = Buffer.from(await file.arrayBuffer()); const buffer = Buffer.from(await file.arrayBuffer());
const mimeType = file.type || 'image/jpeg'; const mimeType = file.type || 'image/jpeg';
// Magic-byte gate so a forged Content-Type doesn't reach the OCR
// provider with arbitrary bytes.
const { bufferMatchesMime } = await import('@/lib/constants/file-validation');
const allowedOcrMimes = ['image/jpeg', 'image/png', 'image/webp'];
if (!allowedOcrMimes.includes(mimeType) || !bufferMatchesMime(buffer, mimeType)) {
throw new ValidationError('Unsupported receipt image type.');
}
const config = await getResolvedOcrConfig(ctx.portId); const config = await getResolvedOcrConfig(ctx.portId);
// Tesseract.js (in-browser) is the default. The server only invokes // Tesseract.js (in-browser) is the default. The server only invokes

View File

@@ -5,20 +5,33 @@ import { withAuth, type AuthContext } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers'; import { parseBody } from '@/lib/api/route-helpers';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { userProfiles } from '@/lib/db/schema'; import { userProfiles } from '@/lib/db/schema';
import { errorResponse, NotFoundError } from '@/lib/errors'; import { errorResponse, NotFoundError, ValidationError } from '@/lib/errors';
import { z } from 'zod'; import { z } from 'zod';
const updateProfileSchema = z.object({ const updateProfileSchema = z.object({
displayName: z.string().min(1).max(200).optional(), displayName: z.string().min(1).max(200).optional(),
phone: z.string().nullable().optional(), phone: z.string().nullable().optional(),
avatarUrl: z.string().url().nullable().optional(), // Refuse `javascript:` / `data:` schemes — z.string().url() lets them
// through and `<a href={avatarUrl}>` would otherwise be a stored-XSS
// vector if any future renderer treated the value as a link.
avatarUrl: z
.string()
.url()
.refine((u) => /^https?:\/\//i.test(u), 'must be an http(s) URL')
.nullable()
.optional(),
// Strict allow-list — no `.passthrough()` here. The previous schema let
// arbitrary client-supplied keys survive validation and persist into
// `userProfiles.preferences` JSONB unbounded; auditor-E3 §28 caught this.
// Add new keys here as the UI surfaces them rather than letting the
// client mint them at will.
preferences: z preferences: z
.object({ .object({
dark_mode: z.boolean().optional(), dark_mode: z.boolean().optional(),
locale: z.string().optional(), locale: z.string().optional(),
timezone: z.string().optional(), timezone: z.string().optional(),
}) })
.passthrough() .strict()
.optional(), .optional(),
}); });
@@ -49,10 +62,18 @@ export const PATCH = withAuth(async (req, ctx: AuthContext) => {
if (body.phone !== undefined) updates.phone = body.phone; if (body.phone !== undefined) updates.phone = body.phone;
if (body.avatarUrl !== undefined) updates.avatarUrl = body.avatarUrl; if (body.avatarUrl !== undefined) updates.avatarUrl = body.avatarUrl;
if (body.preferences !== undefined) { if (body.preferences !== undefined) {
updates.preferences = { const merged = {
...((profile.preferences as Record<string, unknown>) ?? {}), ...((profile.preferences as Record<string, unknown>) ?? {}),
...body.preferences, ...body.preferences,
}; };
// Hard cap on the merged JSONB to defend against historical rows
// bloated by the previous .passthrough() schema. 8 KB is generous
// — current legitimate keys are 3 booleans/strings.
const serialized = JSON.stringify(merged);
if (Buffer.byteLength(serialized, 'utf8') > 8 * 1024) {
throw new ValidationError('preferences exceeds 8KB');
}
updates.preferences = merged;
} }
const [updated] = await db const [updated] = await db

View File

@@ -3,6 +3,7 @@ import { createHash } from 'crypto';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { verifyDocumensoSecret } from '@/lib/services/documenso-webhook'; import { verifyDocumensoSecret } from '@/lib/services/documenso-webhook';
import { listDocumensoWebhookSecrets } from '@/lib/services/port-config';
import { import {
handleRecipientSigned, handleRecipientSigned,
handleDocumentCompleted, handleDocumentCompleted,
@@ -11,7 +12,6 @@ import {
handleDocumentRejected, handleDocumentRejected,
handleDocumentCancelled, handleDocumentCancelled,
} from '@/lib/services/documents.service'; } from '@/lib/services/documents.service';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
// BR-024: Dedup via signatureHash unique index on documentEvents // BR-024: Dedup via signatureHash unique index on documentEvents
@@ -49,9 +49,22 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
} }
// Documenso v1.13 + 2.x send the secret in plaintext via X-Documenso-Secret. // Documenso v1.13 + 2.x send the secret in plaintext via X-Documenso-Secret.
// Resolve the matching port by trying each configured per-port secret
// (plus the global env fallback) with timing-safe comparison. The
// resolved portId, when non-null, is threaded into handleDocumentExpired
// so two ports sharing a documensoId can't cross-mutate (auditor-D §22).
const providedSecret = req.headers.get('x-documenso-secret') ?? ''; const providedSecret = req.headers.get('x-documenso-secret') ?? '';
const secrets = await listDocumensoWebhookSecrets();
if (!verifyDocumensoSecret(providedSecret, env.DOCUMENSO_WEBHOOK_SECRET)) { let matchedPortId: string | null = null;
let matched = false;
for (const entry of secrets) {
if (verifyDocumensoSecret(providedSecret, entry.secret)) {
matched = true;
matchedPortId = entry.portId;
break;
}
}
if (!matched) {
logger.warn({ providedLen: providedSecret.length }, 'Invalid Documenso webhook secret'); logger.warn({ providedLen: providedSecret.length }, 'Invalid Documenso webhook secret');
return NextResponse.json({ ok: false, error: 'Invalid secret' }, { status: 200 }); return NextResponse.json({ ok: false, error: 'Invalid secret' }, { status: 200 });
} }
@@ -149,7 +162,12 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
break; break;
case 'DOCUMENT_EXPIRED': case 'DOCUMENT_EXPIRED':
await handleDocumentExpired({ documentId: documensoId }); // Forward the matched portId so cross-port documenso-id reuse
// can't flip the wrong port's document.
await handleDocumentExpired({
documentId: documensoId,
...(matchedPortId ? { portId: matchedPortId } : {}),
});
break; break;
default: default:

View File

@@ -17,6 +17,7 @@ import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Skeleton } from '@/components/ui/skeleton'; import { Skeleton } from '@/components/ui/skeleton';
import { ReportStatusBadge } from '@/components/reports/report-status-badge'; import { ReportStatusBadge } from '@/components/reports/report-status-badge';
import { apiFetch } from '@/lib/api/client'; import { apiFetch } from '@/lib/api/client';
import { toastError } from '@/lib/api/toast-error';
interface GeneratedReport { interface GeneratedReport {
id: string; id: string;
@@ -66,7 +67,9 @@ export function ReportsList() {
const result = await apiFetch<{ url: string }>(`/api/v1/reports/${reportId}/download`); const result = await apiFetch<{ url: string }>(`/api/v1/reports/${reportId}/download`);
window.open(result.url, '_blank'); window.open(result.url, '_blank');
} catch (err) { } catch (err) {
console.error('Download failed', err); // Surface the failure to the user — was previously console-only,
// so the rep clicked Download and nothing happened (auditor-H §35).
toastError(err, 'Download failed');
} finally { } finally {
setDownloadingId(null); setDownloadingId(null);
} }

View File

@@ -35,3 +35,53 @@ export const PREVIEWABLE_MIMES = new Set<string>([
'image/webp', 'image/webp',
'application/pdf', 'application/pdf',
]); ]);
/**
 * Magic-byte signatures keyed by claimed MIME type. Used by the file
 * upload handler to reject files whose first few bytes don't match the
 * MIME the browser declared. Without this, a `<form>` could lie about
 * Content-Type and pass arbitrary bytes through ALLOWED_MIME_TYPES.
 *
 * Each signature is the leading prefix of the file. When multiple variants
 * exist (e.g. JPEG SOI + APPn marker), we accept any of them.
 */
export const MAGIC_BYTE_SIGNATURES: Record<string, Uint8Array[]> = {
  'image/jpeg': [new Uint8Array([0xff, 0xd8, 0xff])],
  'image/png': [new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])],
  'image/gif': [
    new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x37, 0x61]), // GIF87a
    new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x39, 0x61]), // GIF89a
  ],
  // RIFF container prefix. RIFF alone also matches AVI/WAV, so
  // bufferMatchesMime additionally verifies the 'WEBP' fourcc at offset 8.
  'image/webp': [new Uint8Array([0x52, 0x49, 0x46, 0x46])],
  'application/pdf': [new Uint8Array([0x25, 0x50, 0x44, 0x46])], // %PDF
  // Office formats are zip-based (modern: docx/xlsx) or OLE (legacy: doc/xls).
  // Both share well-known magic bytes — match either family for a given MIME.
  'application/vnd.openxmlformats-officedocument.wordprocessingml.document': [
    new Uint8Array([0x50, 0x4b, 0x03, 0x04]), // PK\3\4 (zip)
    new Uint8Array([0x50, 0x4b, 0x05, 0x06]), // empty archive
  ],
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': [
    new Uint8Array([0x50, 0x4b, 0x03, 0x04]),
    new Uint8Array([0x50, 0x4b, 0x05, 0x06]),
  ],
  'application/msword': [
    new Uint8Array([0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1]), // OLE compound
  ],
  'application/vnd.ms-excel': [new Uint8Array([0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1])],
  // text/plain and text/csv have no magic bytes — leave unconstrained;
  // size cap + ALLOWED_MIME_TYPES allow-list is the only gate.
};

/** 'WEBP' fourcc that must sit at offset 8 of a RIFF file for image/webp. */
const WEBP_FOURCC = new Uint8Array([0x57, 0x45, 0x42, 0x50]);

/** True when `buffer` contains `sig` starting at `offset`. */
function matchesAt(buffer: Buffer, sig: Uint8Array, offset: number): boolean {
  if (buffer.length < offset + sig.length) return false;
  for (let i = 0; i < sig.length; i++) {
    if (buffer[offset + i] !== sig[i]) return false;
  }
  return true;
}

/**
 * Returns true when the buffer starts with one of the registered prefixes
 * for the given MIME, or when the MIME has no signature requirement.
 *
 * For image/webp the RIFF prefix alone is not sufficient — any RIFF file
 * (AVI, WAV, …) starts the same way — so the 'WEBP' fourcc at offset 8 is
 * verified as well.
 */
export function bufferMatchesMime(buffer: Buffer, mime: string): boolean {
  const sigs = MAGIC_BYTE_SIGNATURES[mime];
  if (!sigs) return true; // text/plain, text/csv, or unrecognised allow-list entry
  const prefixOk = sigs.some((sig) => matchesAt(buffer, sig, 0));
  if (!prefixOk) return false;
  if (mime === 'image/webp') return matchesAt(buffer, WEBP_FOURCC, 8);
  return true;
}

View File

@@ -14,6 +14,7 @@ import {
ALLOWED_MIME_TYPES, ALLOWED_MIME_TYPES,
MAX_FILE_SIZE, MAX_FILE_SIZE,
PREVIEWABLE_MIMES, PREVIEWABLE_MIMES,
bufferMatchesMime,
} from '@/lib/constants/file-validation'; } from '@/lib/constants/file-validation';
import { generateStorageKey, sanitizeFilename } from '@/lib/services/storage'; import { generateStorageKey, sanitizeFilename } from '@/lib/services/storage';
import type { UploadFileInput, UpdateFileInput, ListFilesInput } from '@/lib/validators/files'; import type { UploadFileInput, UpdateFileInput, ListFilesInput } from '@/lib/validators/files';
@@ -44,6 +45,15 @@ export async function uploadFile(
throw new ValidationError('File exceeds maximum size of 50MB'); throw new ValidationError('File exceeds maximum size of 50MB');
} }
// Magic-byte verification — without this, the browser-declared MIME is
// attacker-controlled and a malicious uploader could ship arbitrary
// bytes through the ALLOWED_MIME_TYPES allow-list (auditor-E3 §27).
// Berth-PDF and brochure paths already do this; the generic uploader
// matches their guarantee here.
if (!bufferMatchesMime(file.buffer, file.mimeType)) {
throw new ValidationError(`File contents do not match the declared type '${file.mimeType}'`);
}
const entity = data.entityType ?? 'general'; const entity = data.entityType ?? 'general';
const entityId = data.entityId ?? portId; const entityId = data.entityId ?? portId;
const storagePath = generateStorageKey(portSlug, entity, entityId, file.mimeType); const storagePath = generateStorageKey(portSlug, entity, entityId, file.mimeType);

View File

@@ -36,6 +36,11 @@ export const SETTING_KEYS = {
documensoClientRecipientId: 'documenso_client_recipient_id', documensoClientRecipientId: 'documenso_client_recipient_id',
documensoDeveloperRecipientId: 'documenso_developer_recipient_id', documensoDeveloperRecipientId: 'documenso_developer_recipient_id',
documensoApprovalRecipientId: 'documenso_approval_recipient_id', documensoApprovalRecipientId: 'documenso_approval_recipient_id',
// Per-port Documenso webhook secret — two ports pointed at different
// Documenso instances cannot share the global env secret. The receiver
// resolves the matching port by trying each enabled secret with a
// timing-safe comparison.
documensoWebhookSecret: 'documenso_webhook_secret',
eoiDefaultPathway: 'eoi_default_pathway', eoiDefaultPathway: 'eoi_default_pathway',
// Branding // Branding
@@ -192,6 +197,40 @@ export async function getPortDocumensoConfig(portId: string): Promise<PortDocume
}; };
} }
/**
 * List every (portId, webhookSecret) pair configured across the platform,
 * plus a wildcard-port entry for the global env secret. The Documenso
 * webhook receiver iterates the list with `timingSafeEqual` until it
 * finds a match, then dispatches with the resolved portId.
 *
 * `null` portId in the returned array means "matches but no port was
 * resolved" — the caller falls back to the legacy global path.
 */
export interface DocumensoSecretEntry {
  portId: string | null;
  secret: string;
}

export async function listDocumensoWebhookSecrets(): Promise<DocumensoSecretEntry[]> {
  // Dynamic imports presumably keep the db driver out of bundles that
  // import this module's constants — NOTE(review): confirm against callers.
  const { db } = await import('@/lib/db');
  const { systemSettings } = await import('@/lib/db/schema/system');
  const { eq } = await import('drizzle-orm');

  const rows = await db
    .select({ portId: systemSettings.portId, value: systemSettings.value })
    .from(systemSettings)
    .where(eq(systemSettings.key, SETTING_KEYS.documensoWebhookSecret));

  const out: DocumensoSecretEntry[] = [];
  for (const row of rows) {
    // Skip non-string, empty, or port-less rows — an empty secret must
    // never become a matchable entry for the webhook receiver.
    if (typeof row.value !== 'string' || !row.value || !row.portId) continue;
    out.push({ portId: row.portId, secret: row.value });
  }

  // Include the global env secret as a fallback (null portId means
  // "no per-port resolution" — preserves single-tenant compatibility).
  // Guarded so a blank env value can't match a blank header downstream.
  if (env.DOCUMENSO_WEBHOOK_SECRET) {
    out.push({ portId: null, secret: env.DOCUMENSO_WEBHOOK_SECRET });
  }
  return out;
}
// ─── Branding ─────────────────────────────────────────────────────────────── // ─── Branding ───────────────────────────────────────────────────────────────
export interface PortBrandingConfig { export interface PortBrandingConfig {

View File

@@ -1,5 +1,10 @@
import { z } from 'zod'; import { z } from 'zod';
// Shared refinement for stored link fields (port logo, avatar): must be a
// syntactically valid URL *and* use the http/https scheme, so payloads like
// javascript: or data: URIs are rejected before they reach the UI.
const httpUrl = z
  .string()
  .url()
  .refine((value) => {
    const lowered = value.toLowerCase();
    return lowered.startsWith('http://') || lowered.startsWith('https://');
  }, 'must be an http(s) URL');
export const createPortSchema = z.object({ export const createPortSchema = z.object({
name: z.string().min(1).max(200), name: z.string().min(1).max(200),
slug: z slug: z
@@ -7,7 +12,7 @@ export const createPortSchema = z.object({
.min(1) .min(1)
.max(100) .max(100)
.regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens'), .regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens'),
logoUrl: z.string().url().optional(), logoUrl: httpUrl.optional(),
primaryColor: z primaryColor: z
.string() .string()
.regex(/^#[0-9a-fA-F]{6}$/) .regex(/^#[0-9a-fA-F]{6}$/)
@@ -26,7 +31,7 @@ export const updatePortSchema = z.object({
.max(100) .max(100)
.regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens') .regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens')
.optional(), .optional(),
logoUrl: z.string().url().nullable().optional(), logoUrl: httpUrl.nullable().optional(),
primaryColor: z primaryColor: z
.string() .string()
.regex(/^#[0-9a-fA-F]{6}$/) .regex(/^#[0-9a-fA-F]{6}$/)