fix(audit-tier-6): validation, perms, ops/infra, per-port webhook secret
Final audit polish — closes the remaining LOW + MED items the previous
tiers didn't reach:
* Validation hardening: me.preferences uses .strict() + 8KB cap
instead of unbounded .passthrough(); files.uploadFile gains
magic-byte verification (jpeg/png/gif/webp/pdf/doc/xlsx); OCR scan
endpoint enforces 10MB cap + magic-byte check on receipt images;
port logoUrl + me.avatarUrl reject javascript:/data: schemes via
a shared httpUrl refinement.
* Permission gates: document-sends/{brochure,berth-pdf} now require
email.send (was withAuth-only); document-sends/{preview,list} on
email.view; ai/email-draft on email.send; documents/[id]/send
uses send_for_signing (was create); expenses/export/parent-company
flips from hard isSuperAdmin to expenses.export for parity;
admin/users/options gated on reminders.assign_others (was withAuth).
* Envelope hygiene: auth/set-password switches the third {message}
variant to errorResponse + {data: {email}}; ai/email-draft wraps
jobId in {data: {jobId}}.
* UI polish: reports-list.handleDownload surfaces failures via
toastError (was console-only).
* Ops/infra: pin pnpm@10.33.2 across all three Dockerfiles +
packageManager field in package.json; Dockerfile.worker re-orders
user creation BEFORE pnpm install so node_modules / .cache dirs
are worker-owned (fixes tesseract.js + sharp EACCES at first PDF
parse); add Redis-ping HEALTHCHECK to the worker container.
* Public health endpoint: returns full env+appUrl payload only when
the caller presents X-Intake-Secret, otherwise a minimal {status}
so generic uptime monitors still work but anonymous internet
doesn't get deployment fingerprints.
* Per-port Documenso webhook secret: new system_settings key
+ listDocumensoWebhookSecrets() helper. The webhook receiver
iterates every configured per-port secret with timing-safe
comparison + falls back to env, then forwards the resolved portId
into handleDocumentExpired so two ports sharing a documensoId
cannot cross-mutate.
Deferred (handled in dedicated follow-up PRs):
* Tier 5.1 — direct service tests for portal-auth / users /
email-accounts / document-sends / sales-email-config. MED, large
test-writing scope.
* The {ok: true} → {data: null} envelope migration across
alerts/expenses/admin-ocr-settings/storage routes. Mechanical but
needs coordinated client + test updates.
* CSP-nonce migration (drop unsafe-inline) — needs middleware-level
nonce generation that the Next 15 router has to thread through.
* Idempotency-Key header on Documenso createDocument. Requires
schema column on documents to persist the key; deferred so it
doesn't bundle a migration into this commit.
* The 16 better-auth user_id FKs — separate dedicated migration
with care (some columns are NOT NULL today and cascade decisions
matter).
* PermissionGate / Skeleton / EmptyState wraps across 5 admin lists
(auditor-H §§36–37) and the residential-clients filter bar.
Test status: 1175/1175 vitest, tsc clean.
Refs: docs/audit-comprehensive-2026-05-05.md MED §§28,29,30 + LOW §§32–43
+ HIGH §9 (Documenso secrets follow-up).
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,13 +1,13 @@
|
||||
# Stage 1: Install dependencies
|
||||
FROM node:20-alpine AS deps
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
WORKDIR /app
|
||||
COPY package.json pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --prod=false
|
||||
|
||||
# Stage 2: Build the application
|
||||
FROM node:20-alpine AS builder
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
FROM node:20-alpine
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
WORKDIR /app
|
||||
COPY package.json pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
@@ -1,26 +1,40 @@
|
||||
# Stage 1: Install dependencies (dev deps needed for esbuild)
|
||||
FROM node:20-alpine AS deps
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
WORKDIR /app
|
||||
COPY package.json pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --prod=false
|
||||
|
||||
# Stage 2: Build the worker bundle
|
||||
FROM node:20-alpine AS builder
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
ENV SKIP_ENV_VALIDATION=1
|
||||
RUN pnpm build:worker
|
||||
|
||||
# Stage 3: Production runner (prod deps only)
|
||||
# Stage 3: Production runner (prod deps only).
|
||||
#
|
||||
# Critical ordering: create the worker user FIRST and chown the workdir
|
||||
# BEFORE pnpm install, so node_modules + lazy-cache directories
|
||||
# (tesseract.js, sharp) are owned by the worker user. Without this, the
|
||||
# previous layout had pnpm install run as root → node_modules root-owned
|
||||
# → tesseract.js / sharp wrote to node_modules/.cache and EACCES'd at
|
||||
# first PDF parse in prod (auditor-K §39).
|
||||
FROM node:20-alpine AS runner
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
WORKDIR /app
|
||||
COPY package.json pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --prod
|
||||
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||
RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 worker
|
||||
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js
|
||||
WORKDIR /app
|
||||
RUN chown -R worker:nodejs /app
|
||||
USER worker
|
||||
COPY --chown=worker:nodejs package.json pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --prod
|
||||
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js
|
||||
# Healthcheck — pings Redis from inside the worker container. Without
|
||||
# this, a worker whose Redis connection has silently dropped (BullMQ
|
||||
# rejects new jobs but the Node process is alive) is invisible to
|
||||
# compose / swarm and jobs queue indefinitely (auditor-K §40).
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \
|
||||
CMD node -e "const Redis=require('ioredis');const r=new Redis(process.env.REDIS_URL,{maxRetriesPerRequest:1,connectTimeout:3000,lazyConnect:true});r.connect().then(()=>r.ping()).then(()=>{r.disconnect();process.exit(0)}).catch(()=>process.exit(1))" || exit 1
|
||||
CMD ["node", "worker.js"]
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"name": "port-nimara-crm",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"packageManager": "pnpm@10.33.2",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build && pnpm build:server",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||
import { consumeCrmInvite } from '@/lib/services/crm-invite.service';
|
||||
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||
|
||||
@@ -15,27 +15,26 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||
const limited = await enforcePublicRateLimit(req, 'portalToken');
|
||||
if (limited) return limited;
|
||||
|
||||
let body: unknown;
|
||||
try {
|
||||
body = await req.json();
|
||||
} catch {
|
||||
return NextResponse.json({ message: 'Invalid request body' }, { status: 400 });
|
||||
}
|
||||
let body: unknown;
|
||||
try {
|
||||
body = await req.json();
|
||||
} catch {
|
||||
// Use {error} via errorResponse so the envelope matches every other
|
||||
// route (auditor-F §32 — was emitting {message} as a third variant).
|
||||
throw new ValidationError('Invalid request body');
|
||||
}
|
||||
|
||||
const parsed = bodySchema.safeParse(body);
|
||||
if (!parsed.success) {
|
||||
return NextResponse.json(
|
||||
{ message: parsed.error.errors[0]?.message ?? 'Invalid input' },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
const parsed = bodySchema.safeParse(body);
|
||||
if (!parsed.success) {
|
||||
throw new ValidationError(parsed.error.errors[0]?.message ?? 'Invalid input');
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await consumeCrmInvite({
|
||||
token: parsed.data.token,
|
||||
password: parsed.data.password,
|
||||
});
|
||||
return NextResponse.json({ success: true, email: result.email });
|
||||
return NextResponse.json({ data: { email: result.email } });
|
||||
} catch (err) {
|
||||
return errorResponse(err);
|
||||
}
|
||||
|
||||
@@ -1,18 +1,34 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
import { env } from '@/lib/env';
|
||||
|
||||
/**
|
||||
* GET /api/public/health
|
||||
*
|
||||
* Public-facing health probe. Used by the marketing-website server on
|
||||
* startup to verify it's pointed at a CRM matching its own deployment
|
||||
* env (plan §14.8 critical: prevent staging-website-talking-to-prod-CRM).
|
||||
* Health probe used by the marketing-website server on startup to verify
|
||||
* it's pointed at a CRM matching its own deployment env (plan §14.8
|
||||
* critical: prevent staging-website-talking-to-prod-CRM).
|
||||
*
|
||||
* Returns the CRM's `NODE_ENV` and `APP_URL` so the website can do a
|
||||
* strict equality check before serving any request.
|
||||
* Auditor-K §41 flagged that the previous response disclosed `NODE_ENV`
|
||||
* and `APP_URL` to anonymous internet — mirrors the website's own intake
|
||||
* secret gate so we don't leak deployment fingerprints. When
|
||||
* `WEBSITE_INTAKE_SECRET` is set and the caller presents the matching
|
||||
* `X-Intake-Secret` header we return the full payload; otherwise return
|
||||
* a minimal `{status:'ok'}` so generic uptime monitors still get a 200.
|
||||
*/
|
||||
export function GET(): Response {
|
||||
export function GET(req: NextRequest): Response {
|
||||
const expected = env.WEBSITE_INTAKE_SECRET;
|
||||
const provided = req.headers.get('x-intake-secret');
|
||||
const matched =
|
||||
expected && provided && provided.length === expected.length && provided === expected;
|
||||
|
||||
if (!matched) {
|
||||
return NextResponse.json(
|
||||
{ status: 'ok', timestamp: new Date().toISOString() },
|
||||
{ headers: { 'cache-control': 'no-store' } },
|
||||
);
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
status: 'ok',
|
||||
|
||||
@@ -1,25 +1,31 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
import { eq } from 'drizzle-orm';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { db } from '@/lib/db';
|
||||
import { userPortRoles, userProfiles } from '@/lib/db/schema';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
|
||||
export const GET = withAuth(async (_req, ctx) => {
|
||||
try {
|
||||
const rows = await db
|
||||
.select({
|
||||
id: userPortRoles.userId,
|
||||
displayName: userProfiles.displayName,
|
||||
})
|
||||
.from(userPortRoles)
|
||||
.innerJoin(userProfiles, eq(userPortRoles.userId, userProfiles.userId))
|
||||
.where(eq(userPortRoles.portId, ctx.portId))
|
||||
.orderBy(userProfiles.displayName);
|
||||
// Sole consumer is the reminder-form's "assign to" picker, so gate on
|
||||
// `reminders.assign_others` rather than letting any authed user
|
||||
// enumerate every colleague's display name + user id at the port
|
||||
// (auditor-A3 §6).
|
||||
export const GET = withAuth(
|
||||
withPermission('reminders', 'assign_others', async (_req, ctx) => {
|
||||
try {
|
||||
const rows = await db
|
||||
.select({
|
||||
id: userPortRoles.userId,
|
||||
displayName: userProfiles.displayName,
|
||||
})
|
||||
.from(userPortRoles)
|
||||
.innerJoin(userProfiles, eq(userPortRoles.userId, userProfiles.userId))
|
||||
.where(eq(userPortRoles.portId, ctx.portId))
|
||||
.orderBy(userProfiles.displayName);
|
||||
|
||||
return NextResponse.json({ data: rows });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
return NextResponse.json({ data: rows });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
import { and, eq } from 'drizzle-orm';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { db } from '@/lib/db';
|
||||
import { systemSettings } from '@/lib/db/schema/system';
|
||||
import { requestEmailDraft } from '@/lib/services/email-draft.service';
|
||||
@@ -9,29 +9,37 @@ import { parseBody } from '@/lib/api/route-helpers';
|
||||
import { requestDraftSchema } from '@/lib/validators/ai';
|
||||
import { CodedError, errorResponse } from '@/lib/errors';
|
||||
|
||||
export const POST = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
// Feature flag check
|
||||
const flag = await db.query.systemSettings.findFirst({
|
||||
where: and(eq(systemSettings.key, 'ai_email_drafts'), eq(systemSettings.portId, ctx.portId)),
|
||||
});
|
||||
if (flag?.value !== true) {
|
||||
throw new CodedError('NOT_FOUND', {
|
||||
internalMessage: 'AI email-draft feature flag disabled for this port',
|
||||
// Gated on `email.send` — the draft endpoint spends OpenAI tokens and
|
||||
// renders client/interest-scoped content; only roles permitted to send
|
||||
// emails should be able to mint drafts (auditor-A3 §7).
|
||||
export const POST = withAuth(
|
||||
withPermission('email', 'send', async (req, ctx) => {
|
||||
try {
|
||||
// Feature flag check
|
||||
const flag = await db.query.systemSettings.findFirst({
|
||||
where: and(
|
||||
eq(systemSettings.key, 'ai_email_drafts'),
|
||||
eq(systemSettings.portId, ctx.portId),
|
||||
),
|
||||
});
|
||||
if (flag?.value !== true) {
|
||||
throw new CodedError('NOT_FOUND', {
|
||||
internalMessage: 'AI email-draft feature flag disabled for this port',
|
||||
});
|
||||
}
|
||||
|
||||
const body = await parseBody(req, requestDraftSchema);
|
||||
const { jobId } = await requestEmailDraft(ctx.userId, {
|
||||
interestId: body.interestId,
|
||||
clientId: body.clientId,
|
||||
portId: ctx.portId,
|
||||
context: body.context,
|
||||
additionalInstructions: body.additionalInstructions,
|
||||
});
|
||||
|
||||
return NextResponse.json({ data: { jobId } }, { status: 202 });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
|
||||
const body = await parseBody(req, requestDraftSchema);
|
||||
const { jobId } = await requestEmailDraft(ctx.userId, {
|
||||
interestId: body.interestId,
|
||||
clientId: body.clientId,
|
||||
portId: ctx.portId,
|
||||
context: body.context,
|
||||
additionalInstructions: body.additionalInstructions,
|
||||
});
|
||||
|
||||
return NextResponse.json({ jobId }, { status: 202 });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { parseBody } from '@/lib/api/route-helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { sendBerthPdf } from '@/lib/services/document-sends.service';
|
||||
@@ -12,22 +12,24 @@ import { sendBerthPdfSchema } from '@/lib/validators/document-sends';
|
||||
* Sends the active per-berth PDF version to a client recipient. The body
|
||||
* markdown goes through the merge-field expander + sanitizer
|
||||
* (`renderEmailBody`) before reaching nodemailer (§14.7 critical mitigation:
|
||||
* body XSS).
|
||||
* body XSS). Gated on `email.send` (auditor-A3 §4).
|
||||
*/
|
||||
export const POST = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, sendBerthPdfSchema);
|
||||
const result = await sendBerthPdf({
|
||||
portId: ctx.portId,
|
||||
berthId: input.berthId,
|
||||
recipient: input.recipient,
|
||||
customBodyMarkdown: input.customBodyMarkdown,
|
||||
sentBy: ctx.userId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
export const POST = withAuth(
|
||||
withPermission('email', 'send', async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, sendBerthPdfSchema);
|
||||
const result = await sendBerthPdf({
|
||||
portId: ctx.portId,
|
||||
berthId: input.berthId,
|
||||
recipient: input.recipient,
|
||||
customBodyMarkdown: input.customBodyMarkdown,
|
||||
sentBy: ctx.userId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { parseBody } from '@/lib/api/route-helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { sendBrochure } from '@/lib/services/document-sends.service';
|
||||
@@ -11,21 +11,28 @@ import { sendBrochureSchema } from '@/lib/validators/document-sends';
|
||||
*
|
||||
* Sends a brochure (default or specified) to a client recipient. Same
|
||||
* sanitization + audit-row pipeline as the berth-pdf endpoint.
|
||||
*
|
||||
* Gated on `email.send` so the lowest-privilege role at a port (which
|
||||
* has only `email.view`) can no longer fire nodemailer at arbitrary
|
||||
* recipients. The 50/user/hour rate-limit was previously the only
|
||||
* restriction (auditor-A3 §4).
|
||||
*/
|
||||
export const POST = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, sendBrochureSchema);
|
||||
const result = await sendBrochure({
|
||||
portId: ctx.portId,
|
||||
brochureId: input.brochureId,
|
||||
recipient: input.recipient,
|
||||
customBodyMarkdown: input.customBodyMarkdown,
|
||||
sentBy: ctx.userId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
export const POST = withAuth(
|
||||
withPermission('email', 'send', async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, sendBrochureSchema);
|
||||
const result = await sendBrochure({
|
||||
portId: ctx.portId,
|
||||
brochureId: input.brochureId,
|
||||
recipient: input.recipient,
|
||||
customBodyMarkdown: input.customBodyMarkdown,
|
||||
sentBy: ctx.userId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { parseBody } from '@/lib/api/route-helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { previewBody } from '@/lib/services/document-sends.service';
|
||||
@@ -12,20 +12,22 @@ import { previewBodySchema } from '@/lib/validators/document-sends';
|
||||
* Renders a body for the dry-run UI without actually sending. Returns the
|
||||
* sanitized HTML, the post-merge markdown, and the list of unresolved
|
||||
* `{{tokens}}` so the UI can block submit until the rep fills them in
|
||||
* (§14.7 mitigation).
|
||||
* (§14.7 mitigation). Gated on `email.view` (auditor-A3 §4).
|
||||
*/
|
||||
export const POST = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, previewBodySchema);
|
||||
const result = await previewBody(
|
||||
ctx.portId,
|
||||
input.documentKind,
|
||||
input.recipient,
|
||||
input.customBodyMarkdown ?? null,
|
||||
{ berthId: input.berthId, brochureLabel: input.brochureId },
|
||||
);
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
export const POST = withAuth(
|
||||
withPermission('email', 'view', async (req, ctx) => {
|
||||
try {
|
||||
const input = await parseBody(req, previewBodySchema);
|
||||
const result = await previewBody(
|
||||
ctx.portId,
|
||||
input.documentKind,
|
||||
input.recipient,
|
||||
input.customBodyMarkdown ?? null,
|
||||
{ berthId: input.berthId, brochureLabel: input.brochureId },
|
||||
);
|
||||
return NextResponse.json({ data: result });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,23 +1,28 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
import { withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { parseQuery } from '@/lib/api/route-helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { listSends } from '@/lib/services/document-sends.service';
|
||||
import { listSendsQuerySchema } from '@/lib/validators/document-sends';
|
||||
|
||||
export const GET = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
const query = parseQuery(req, listSendsQuerySchema);
|
||||
const data = await listSends({
|
||||
portId: ctx.portId,
|
||||
clientId: query.clientId,
|
||||
interestId: query.interestId,
|
||||
berthId: query.berthId,
|
||||
limit: query.limit,
|
||||
});
|
||||
return NextResponse.json({ data });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
// Gated on `email.view` so the lowest-privilege role at a port can no
|
||||
// longer enumerate which clients have received which brochures / berth
|
||||
// PDFs (auditor-A3 §4).
|
||||
export const GET = withAuth(
|
||||
withPermission('email', 'view', async (req, ctx) => {
|
||||
try {
|
||||
const query = parseQuery(req, listSendsQuerySchema);
|
||||
const data = await listSends({
|
||||
portId: ctx.portId,
|
||||
clientId: query.clientId,
|
||||
interestId: query.interestId,
|
||||
berthId: query.berthId,
|
||||
limit: query.limit,
|
||||
});
|
||||
return NextResponse.json({ data });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -5,7 +5,10 @@ import { errorResponse } from '@/lib/errors';
|
||||
import { sendForSigning } from '@/lib/services/documents.service';
|
||||
|
||||
export const POST = withAuth(
|
||||
withPermission('documents', 'create', async (req, ctx, params) => {
|
||||
// Use the dedicated `send_for_signing` permission rather than `create`,
|
||||
// so a role with documents.create-only does not also gain the ability
|
||||
// to dispatch a Documenso send (auditor-A3 §3).
|
||||
withPermission('documents', 'send_for_signing', async (req, ctx, params) => {
|
||||
try {
|
||||
const doc = await sendForSigning(params.id!, ctx.portId, {
|
||||
userId: ctx.userId,
|
||||
|
||||
@@ -1,26 +1,29 @@
|
||||
import { NextResponse } from 'next/server';
|
||||
|
||||
import { requireSuperAdmin, withAuth } from '@/lib/api/helpers';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import { exportParentCompany } from '@/lib/services/expense-export';
|
||||
import { listExpensesSchema } from '@/lib/validators/expenses';
|
||||
|
||||
export const POST = withAuth(async (req, ctx) => {
|
||||
try {
|
||||
requireSuperAdmin(ctx, 'expenses.export.parent-company');
|
||||
// Gated on `expenses.export` for parity with sibling export routes
|
||||
// (auditor-A3 §5). Hard `isSuperAdmin` check used to lock out port
|
||||
// admins who held expenses.export = true.
|
||||
export const POST = withAuth(
|
||||
withPermission('expenses', 'export', async (req, ctx) => {
|
||||
try {
|
||||
const body = await req.json().catch(() => ({}));
|
||||
const query = listExpensesSchema.parse(body);
|
||||
const pdf = await exportParentCompany(ctx.portId, query);
|
||||
|
||||
const body = await req.json().catch(() => ({}));
|
||||
const query = listExpensesSchema.parse(body);
|
||||
const pdf = await exportParentCompany(ctx.portId, query);
|
||||
|
||||
return new NextResponse(Buffer.from(pdf), {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/pdf',
|
||||
'Content-Disposition': `attachment; filename="expenses-parent-company-${Date.now()}.pdf"`,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
});
|
||||
return new NextResponse(Buffer.from(pdf), {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/pdf',
|
||||
'Content-Disposition': `attachment; filename="expenses-parent-company-${Date.now()}.pdf"`,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -30,8 +30,22 @@ export const POST = withAuth(
|
||||
const formData = await req.formData();
|
||||
const file = formData.get('file') as File | null;
|
||||
if (!file) throw new ValidationError('A file is required');
|
||||
// Hard 10 MB cap — without this any authenticated rep could grief
|
||||
// their own port's AI budget by sending arbitrarily large images
|
||||
// and burning OCR tokens (auditor-E3 §28).
|
||||
const MAX_OCR_BYTES = 10 * 1024 * 1024;
|
||||
if (file.size > MAX_OCR_BYTES) {
|
||||
throw new ValidationError('Receipt image is too large (10 MB max).');
|
||||
}
|
||||
const buffer = Buffer.from(await file.arrayBuffer());
|
||||
const mimeType = file.type || 'image/jpeg';
|
||||
// Magic-byte gate so a forged Content-Type doesn't reach the OCR
|
||||
// provider with arbitrary bytes.
|
||||
const { bufferMatchesMime } = await import('@/lib/constants/file-validation');
|
||||
const allowedOcrMimes = ['image/jpeg', 'image/png', 'image/webp'];
|
||||
if (!allowedOcrMimes.includes(mimeType) || !bufferMatchesMime(buffer, mimeType)) {
|
||||
throw new ValidationError('Unsupported receipt image type.');
|
||||
}
|
||||
|
||||
const config = await getResolvedOcrConfig(ctx.portId);
|
||||
// Tesseract.js (in-browser) is the default. The server only invokes
|
||||
|
||||
@@ -5,20 +5,33 @@ import { withAuth, type AuthContext } from '@/lib/api/helpers';
|
||||
import { parseBody } from '@/lib/api/route-helpers';
|
||||
import { db } from '@/lib/db';
|
||||
import { userProfiles } from '@/lib/db/schema';
|
||||
import { errorResponse, NotFoundError } from '@/lib/errors';
|
||||
import { errorResponse, NotFoundError, ValidationError } from '@/lib/errors';
|
||||
import { z } from 'zod';
|
||||
|
||||
const updateProfileSchema = z.object({
|
||||
displayName: z.string().min(1).max(200).optional(),
|
||||
phone: z.string().nullable().optional(),
|
||||
avatarUrl: z.string().url().nullable().optional(),
|
||||
// Refuse `javascript:` / `data:` schemes — z.string().url() lets them
|
||||
// through and `<a href={avatarUrl}>` would otherwise be a stored-XSS
|
||||
// vector if any future renderer treated the value as a link.
|
||||
avatarUrl: z
|
||||
.string()
|
||||
.url()
|
||||
.refine((u) => /^https?:\/\//i.test(u), 'must be an http(s) URL')
|
||||
.nullable()
|
||||
.optional(),
|
||||
// Strict allow-list — no `.passthrough()` here. The previous schema let
|
||||
// arbitrary client-supplied keys survive validation and persist into
|
||||
// `userProfiles.preferences` JSONB unbounded; auditor-E3 §28 caught this.
|
||||
// Add new keys here as the UI surfaces them rather than letting the
|
||||
// client mint them at will.
|
||||
preferences: z
|
||||
.object({
|
||||
dark_mode: z.boolean().optional(),
|
||||
locale: z.string().optional(),
|
||||
timezone: z.string().optional(),
|
||||
})
|
||||
.passthrough()
|
||||
.strict()
|
||||
.optional(),
|
||||
});
|
||||
|
||||
@@ -49,10 +62,18 @@ export const PATCH = withAuth(async (req, ctx: AuthContext) => {
|
||||
if (body.phone !== undefined) updates.phone = body.phone;
|
||||
if (body.avatarUrl !== undefined) updates.avatarUrl = body.avatarUrl;
|
||||
if (body.preferences !== undefined) {
|
||||
updates.preferences = {
|
||||
const merged = {
|
||||
...((profile.preferences as Record<string, unknown>) ?? {}),
|
||||
...body.preferences,
|
||||
};
|
||||
// Hard cap on the merged JSONB to defend against historical rows
|
||||
// bloated by the previous .passthrough() schema. 8 KB is generous
|
||||
// — current legitimate keys are 3 booleans/strings.
|
||||
const serialized = JSON.stringify(merged);
|
||||
if (Buffer.byteLength(serialized, 'utf8') > 8 * 1024) {
|
||||
throw new ValidationError('preferences exceeds 8KB');
|
||||
}
|
||||
updates.preferences = merged;
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
|
||||
@@ -3,6 +3,7 @@ import { createHash } from 'crypto';
|
||||
|
||||
import { db } from '@/lib/db';
|
||||
import { verifyDocumensoSecret } from '@/lib/services/documenso-webhook';
|
||||
import { listDocumensoWebhookSecrets } from '@/lib/services/port-config';
|
||||
import {
|
||||
handleRecipientSigned,
|
||||
handleDocumentCompleted,
|
||||
@@ -11,7 +12,6 @@ import {
|
||||
handleDocumentRejected,
|
||||
handleDocumentCancelled,
|
||||
} from '@/lib/services/documents.service';
|
||||
import { env } from '@/lib/env';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
// BR-024: Dedup via signatureHash unique index on documentEvents
|
||||
@@ -49,9 +49,22 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||
}
|
||||
|
||||
// Documenso v1.13 + 2.x send the secret in plaintext via X-Documenso-Secret.
|
||||
// Resolve the matching port by trying each configured per-port secret
|
||||
// (plus the global env fallback) with timing-safe comparison. The
|
||||
// resolved portId, when non-null, is threaded into handleDocumentExpired
|
||||
// so two ports sharing a documensoId can't cross-mutate (auditor-D §22).
|
||||
const providedSecret = req.headers.get('x-documenso-secret') ?? '';
|
||||
|
||||
if (!verifyDocumensoSecret(providedSecret, env.DOCUMENSO_WEBHOOK_SECRET)) {
|
||||
const secrets = await listDocumensoWebhookSecrets();
|
||||
let matchedPortId: string | null = null;
|
||||
let matched = false;
|
||||
for (const entry of secrets) {
|
||||
if (verifyDocumensoSecret(providedSecret, entry.secret)) {
|
||||
matched = true;
|
||||
matchedPortId = entry.portId;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matched) {
|
||||
logger.warn({ providedLen: providedSecret.length }, 'Invalid Documenso webhook secret');
|
||||
return NextResponse.json({ ok: false, error: 'Invalid secret' }, { status: 200 });
|
||||
}
|
||||
@@ -149,7 +162,12 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||
break;
|
||||
|
||||
case 'DOCUMENT_EXPIRED':
|
||||
await handleDocumentExpired({ documentId: documensoId });
|
||||
// Forward the matched portId so cross-port documenso-id reuse
|
||||
// can't flip the wrong port's document.
|
||||
await handleDocumentExpired({
|
||||
documentId: documensoId,
|
||||
...(matchedPortId ? { portId: matchedPortId } : {}),
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
|
||||
@@ -17,6 +17,7 @@ import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
|
||||
import { Skeleton } from '@/components/ui/skeleton';
|
||||
import { ReportStatusBadge } from '@/components/reports/report-status-badge';
|
||||
import { apiFetch } from '@/lib/api/client';
|
||||
import { toastError } from '@/lib/api/toast-error';
|
||||
|
||||
interface GeneratedReport {
|
||||
id: string;
|
||||
@@ -66,7 +67,9 @@ export function ReportsList() {
|
||||
const result = await apiFetch<{ url: string }>(`/api/v1/reports/${reportId}/download`);
|
||||
window.open(result.url, '_blank');
|
||||
} catch (err) {
|
||||
console.error('Download failed', err);
|
||||
// Surface the failure to the user — was previously console-only,
|
||||
// so the rep clicked Download and nothing happened (auditor-H §35).
|
||||
toastError(err, 'Download failed');
|
||||
} finally {
|
||||
setDownloadingId(null);
|
||||
}
|
||||
|
||||
@@ -35,3 +35,53 @@ export const PREVIEWABLE_MIMES = new Set<string>([
|
||||
'image/webp',
|
||||
'application/pdf',
|
||||
]);
|
||||
|
||||
/**
|
||||
* Magic-byte signatures keyed by claimed MIME type. Used by the file
|
||||
* upload handler to reject files whose first few bytes don't match the
|
||||
* MIME the browser declared. Without this, a `<form>` could lie about
|
||||
* Content-Type and pass arbitrary bytes through ALLOWED_MIME_TYPES.
|
||||
*
|
||||
* Each signature is the leading prefix of the file. When multiple variants
|
||||
* exist (e.g. JPEG SOI + APPn marker), we accept any of them.
|
||||
*/
|
||||
export const MAGIC_BYTE_SIGNATURES: Record<string, Uint8Array[]> = {
|
||||
'image/jpeg': [new Uint8Array([0xff, 0xd8, 0xff])],
|
||||
'image/png': [new Uint8Array([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])],
|
||||
'image/gif': [
|
||||
new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x37, 0x61]), // GIF87a
|
||||
new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x39, 0x61]), // GIF89a
|
||||
],
|
||||
'image/webp': [new Uint8Array([0x52, 0x49, 0x46, 0x46])], // RIFF; WEBP signature follows at offset 8
|
||||
'application/pdf': [new Uint8Array([0x25, 0x50, 0x44, 0x46])], // %PDF
|
||||
// Office formats are zip-based (modern: docx/xlsx) or OLE (legacy: doc/xls).
|
||||
// Both share well-known magic bytes — match either family for a given MIME.
|
||||
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': [
|
||||
new Uint8Array([0x50, 0x4b, 0x03, 0x04]), // PK\3\4 (zip)
|
||||
new Uint8Array([0x50, 0x4b, 0x05, 0x06]), // empty archive
|
||||
],
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': [
|
||||
new Uint8Array([0x50, 0x4b, 0x03, 0x04]),
|
||||
new Uint8Array([0x50, 0x4b, 0x05, 0x06]),
|
||||
],
|
||||
'application/msword': [
|
||||
new Uint8Array([0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1]), // OLE compound
|
||||
],
|
||||
'application/vnd.ms-excel': [new Uint8Array([0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, 0x1a, 0xe1])],
|
||||
// text/plain and text/csv have no magic bytes — leave unconstrained;
|
||||
// size cap + ALLOWED_MIME_TYPES allow-list is the only gate.
|
||||
};
|
||||
|
||||
/** Returns true when the buffer starts with one of the registered prefixes
|
||||
* for the given MIME, or when the MIME has no signature requirement. */
|
||||
export function bufferMatchesMime(buffer: Buffer, mime: string): boolean {
|
||||
const sigs = MAGIC_BYTE_SIGNATURES[mime];
|
||||
if (!sigs) return true; // text/plain, text/csv, or unrecognised allow-list entry
|
||||
return sigs.some((sig) => {
|
||||
if (buffer.length < sig.length) return false;
|
||||
for (let i = 0; i < sig.length; i++) {
|
||||
if (buffer[i] !== sig[i]) return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
ALLOWED_MIME_TYPES,
|
||||
MAX_FILE_SIZE,
|
||||
PREVIEWABLE_MIMES,
|
||||
bufferMatchesMime,
|
||||
} from '@/lib/constants/file-validation';
|
||||
import { generateStorageKey, sanitizeFilename } from '@/lib/services/storage';
|
||||
import type { UploadFileInput, UpdateFileInput, ListFilesInput } from '@/lib/validators/files';
|
||||
@@ -44,6 +45,15 @@ export async function uploadFile(
|
||||
throw new ValidationError('File exceeds maximum size of 50MB');
|
||||
}
|
||||
|
||||
// Magic-byte verification — without this, the browser-declared MIME is
|
||||
// attacker-controlled and a malicious uploader could ship arbitrary
|
||||
// bytes through the ALLOWED_MIME_TYPES allow-list (auditor-E3 §27).
|
||||
// Berth-PDF and brochure paths already do this; the generic uploader
|
||||
// matches their guarantee here.
|
||||
if (!bufferMatchesMime(file.buffer, file.mimeType)) {
|
||||
throw new ValidationError(`File contents do not match the declared type '${file.mimeType}'`);
|
||||
}
|
||||
|
||||
const entity = data.entityType ?? 'general';
|
||||
const entityId = data.entityId ?? portId;
|
||||
const storagePath = generateStorageKey(portSlug, entity, entityId, file.mimeType);
|
||||
|
||||
@@ -36,6 +36,11 @@ export const SETTING_KEYS = {
|
||||
documensoClientRecipientId: 'documenso_client_recipient_id',
|
||||
documensoDeveloperRecipientId: 'documenso_developer_recipient_id',
|
||||
documensoApprovalRecipientId: 'documenso_approval_recipient_id',
|
||||
// Per-port Documenso webhook secret — two ports pointed at different
|
||||
// Documenso instances cannot share the global env secret. The receiver
|
||||
// resolves the matching port by trying each enabled secret with a
|
||||
// timing-safe comparison.
|
||||
documensoWebhookSecret: 'documenso_webhook_secret',
|
||||
eoiDefaultPathway: 'eoi_default_pathway',
|
||||
|
||||
// Branding
|
||||
@@ -192,6 +197,40 @@ export async function getPortDocumensoConfig(portId: string): Promise<PortDocume
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * One candidate secret for authenticating an incoming Documenso webhook.
 *
 * `portId` identifies the port whose per-port secret this entry carries.
 * A `null` portId marks the global env-fallback entry: the secret matched
 * but no specific port was resolved, so the caller takes the legacy
 * global dispatch path (single-tenant compatibility).
 */
export interface DocumensoSecretEntry {
  portId: string | null;
  secret: string;
}
|
||||
|
||||
export async function listDocumensoWebhookSecrets(): Promise<DocumensoSecretEntry[]> {
|
||||
const { db } = await import('@/lib/db');
|
||||
const { systemSettings } = await import('@/lib/db/schema/system');
|
||||
const { eq, isNotNull } = await import('drizzle-orm');
|
||||
const rows = await db
|
||||
.select({ portId: systemSettings.portId, value: systemSettings.value })
|
||||
.from(systemSettings)
|
||||
.where(eq(systemSettings.key, SETTING_KEYS.documensoWebhookSecret));
|
||||
void isNotNull; // imported for future filters
|
||||
const out: DocumensoSecretEntry[] = [];
|
||||
for (const row of rows) {
|
||||
if (typeof row.value !== 'string' || !row.value || !row.portId) continue;
|
||||
out.push({ portId: row.portId, secret: row.value });
|
||||
}
|
||||
// Always include the global env secret as a fallback (null portId means
|
||||
// "no per-port resolution" — preserves single-tenant compatibility).
|
||||
out.push({ portId: null, secret: env.DOCUMENSO_WEBHOOK_SECRET });
|
||||
return out;
|
||||
}
|
||||
|
||||
// ─── Branding ───────────────────────────────────────────────────────────────
|
||||
|
||||
export interface PortBrandingConfig {
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
const httpUrl = z
|
||||
.string()
|
||||
.url()
|
||||
.refine((u) => /^https?:\/\//i.test(u), 'must be an http(s) URL');
|
||||
|
||||
export const createPortSchema = z.object({
|
||||
name: z.string().min(1).max(200),
|
||||
slug: z
|
||||
@@ -7,7 +12,7 @@ export const createPortSchema = z.object({
|
||||
.min(1)
|
||||
.max(100)
|
||||
.regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens'),
|
||||
logoUrl: z.string().url().optional(),
|
||||
logoUrl: httpUrl.optional(),
|
||||
primaryColor: z
|
||||
.string()
|
||||
.regex(/^#[0-9a-fA-F]{6}$/)
|
||||
@@ -26,7 +31,7 @@ export const updatePortSchema = z.object({
|
||||
.max(100)
|
||||
.regex(/^[a-z0-9-]+$/, 'Slug must be lowercase alphanumeric with hyphens')
|
||||
.optional(),
|
||||
logoUrl: z.string().url().nullable().optional(),
|
||||
logoUrl: httpUrl.nullable().optional(),
|
||||
primaryColor: z
|
||||
.string()
|
||||
.regex(/^#[0-9a-fA-F]{6}$/)
|
||||
|
||||
Reference in New Issue
Block a user