fix(audit-v2): platform-wide post-merge hardening across 5 domains
Five-domain audit (security, routes, DB, integrations, UI/UX) ran after
the cf37d09 merge. Critical + high-impact items landed here; deferred
medium/low items indexed in docs/audit-final-deferred.md (now organised
into an "Audit-final v2" section).
Security:
- Storage proxy tokens now bind to op (`'get'` vs `'put'`). A long-lived
download URL minted by `presignDownload` for an emailed brochure can no
longer be replayed against the proxy PUT to overwrite the original
storage object. `verifyProxyToken` requires `expectedOp` and rejects
mismatches; legacy tokens missing `op` fail-closed. Regression tests
added.
- Markdown email merge values are now markdown-escaped (`[`, `]`, `(`,
`)`, `*`, `_`, `\`, backticks, braces) before substitution into the
rep-authored body. A malicious value like `[click here](https://evil)`
stored in `client.fullName` no longer survives `escapeHtml` to render
as a real `<a href>` in the outbound email. Phishing-via-merge-field
closed; regression tests added.
- Middleware now performs an Origin/Referer check on
POST/PUT/PATCH/DELETE to `/api/v1/**`. Defense-in-depth on top of
better-auth's SameSite=Lax cookie. Webhooks/public/auth/portal routes
exempt as they don't carry the session cookie.
Routes:
- Template management routes were calling `withPermission('documents',
'manage', ...)` — but `documents` doesn't have a `manage` action. The
registry has `document_templates.manage`. Every non-superadmin was
getting 403'd on the seven template endpoints. Fixed across the
/admin/templates surface.
- Custom-fields permission resource is hardcoded to `clients` regardless
of which entity (yacht/company/etc.) the values belong to. Documented
as deferred (requires per-entity routes).
DB:
- documentSends: every parent FK (client_id, interest_id, berth_id,
brochure_id, brochure_version_id) now uses ON DELETE SET NULL so the
audit trail outlasts hard-deletes. The denormalized columns
(recipient_email, document_kind, body_markdown, from_address) were
added precisely for this. Migration 0035.
- Polymorphic discriminators on yachts.current_owner_type and
invoices.billing_entity_type now have CHECK constraints — typos like
`'clients'` vs `'client'` were silently inserting unreachable rows
before. Migration 0036.
Integrations:
- Email attachment resolution (`src/lib/email/index.ts`) was importing
MinIO directly instead of `getStorageBackend()`. Filesystem-backend
deployments would have broken every email-with-attachment send. Now
routes through the pluggable abstraction per CLAUDE.md.
- Documenso DOCUMENT_OPENED webhook filter relaxed: v2 may omit
`readStatus` or send lowercase, so an event that was the SIGNAL of an
open was being silently dropped. Now treats any recipient on a
DOCUMENT_OPENED event as opened.
UI/UX:
- Expense detail used to render `receiptFileIds` as opaque UUID badges —
reps couldn't view the receipt they uploaded. Now renders an image
thumbnail (via `/api/v1/files/[id]/preview`) plus a Download link for
PDFs. Closed the "where's my receipt?" loop in the expense flow.
- Expense detail Edit + Archive buttons now `<PermissionGate>` and the
archive mutation surfaces success/error toasts instead of silent 403s.
- Brochures admin: setDefault/archive/create mutations now have onError
toasts (only onSuccess existed before).
- Removed broken bulk-upload link in scan/page (route doesn't exist;
used a raw `<a>` triggering a full reload to a 404).
Test status: 1168/1168 vitest passing. tsc clean.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -241,15 +241,6 @@ export default function ScanReceiptPage() {
|
||||
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
||||
JPEG, PNG, HEIC, WebP up to 10 MB
|
||||
</p>
|
||||
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
||||
Have many receipts?{' '}
|
||||
<a
|
||||
href={`/${params.portSlug}/expenses/bulk-upload`}
|
||||
className="text-primary hover:underline"
|
||||
>
|
||||
Bulk upload →
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
{/* `image/*` is the broadest accept — includes HEIC on iOS,
|
||||
|
||||
@@ -52,7 +52,7 @@ export async function GET(
|
||||
);
|
||||
}
|
||||
|
||||
const result = verifyProxyToken(token, backend.getHmacSecret());
|
||||
const result = verifyProxyToken(token, backend.getHmacSecret(), 'get');
|
||||
if (!result.ok) {
|
||||
logger.warn({ reason: result.reason }, 'Storage proxy token rejected');
|
||||
return NextResponse.json({ error: 'Invalid or expired token' }, { status: 403 });
|
||||
@@ -145,7 +145,7 @@ export async function PUT(
|
||||
);
|
||||
}
|
||||
|
||||
const result = verifyProxyToken(token, backend.getHmacSecret());
|
||||
const result = verifyProxyToken(token, backend.getHmacSecret(), 'put');
|
||||
if (!result.ok) {
|
||||
logger.warn({ reason: result.reason }, 'Storage proxy upload token rejected');
|
||||
return NextResponse.json({ error: 'Invalid or expired token' }, { status: 403 });
|
||||
|
||||
@@ -14,7 +14,7 @@ import { rollbackAdminTemplateSchema } from '@/lib/validators/document-templates
|
||||
* Body: { version: number }
|
||||
*/
|
||||
export const POST = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx, params) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx, params) => {
|
||||
try {
|
||||
const body = await parseBody(req, rollbackAdminTemplateSchema);
|
||||
const result = await rollbackAdminTemplate(
|
||||
|
||||
@@ -15,7 +15,7 @@ import { updateAdminTemplateSchema } from '@/lib/validators/document-templates';
|
||||
* Retrieve a single TipTap-based document template.
|
||||
*/
|
||||
export const GET = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx, params) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx, params) => {
|
||||
try {
|
||||
const template = await getAdminTemplate(ctx.portId, params.templateId!);
|
||||
return NextResponse.json({ data: template });
|
||||
@@ -30,21 +30,15 @@ export const GET = withAuth(
|
||||
* Update a TipTap-based document template. Increments version if content changes.
|
||||
*/
|
||||
export const PATCH = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx, params) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx, params) => {
|
||||
try {
|
||||
const body = await parseBody(req, updateAdminTemplateSchema);
|
||||
const updated = await updateAdminTemplate(
|
||||
ctx.portId,
|
||||
params.templateId!,
|
||||
ctx.userId,
|
||||
body,
|
||||
{
|
||||
userId: ctx.userId,
|
||||
portId: ctx.portId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
);
|
||||
const updated = await updateAdminTemplate(ctx.portId, params.templateId!, ctx.userId, body, {
|
||||
userId: ctx.userId,
|
||||
portId: ctx.portId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return NextResponse.json({ data: updated });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
@@ -57,19 +51,14 @@ export const PATCH = withAuth(
|
||||
* Delete a TipTap-based document template.
|
||||
*/
|
||||
export const DELETE = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx, params) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx, params) => {
|
||||
try {
|
||||
await deleteAdminTemplate(
|
||||
ctx.portId,
|
||||
params.templateId!,
|
||||
ctx.userId,
|
||||
{
|
||||
userId: ctx.userId,
|
||||
portId: ctx.portId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
},
|
||||
);
|
||||
await deleteAdminTemplate(ctx.portId, params.templateId!, ctx.userId, {
|
||||
userId: ctx.userId,
|
||||
portId: ctx.portId,
|
||||
ipAddress: ctx.ipAddress,
|
||||
userAgent: ctx.userAgent,
|
||||
});
|
||||
return new NextResponse(null, { status: 204 });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
|
||||
@@ -9,12 +9,9 @@ import { getAdminTemplateVersions } from '@/lib/services/document-templates.serv
|
||||
* Returns version history for a template, sourced from audit_logs.
|
||||
*/
|
||||
export const GET = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx, params) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx, params) => {
|
||||
try {
|
||||
const versions = await getAdminTemplateVersions(
|
||||
ctx.portId,
|
||||
params.templateId!,
|
||||
);
|
||||
const versions = await getAdminTemplateVersions(ctx.portId, params.templateId!);
|
||||
return NextResponse.json({ data: versions });
|
||||
} catch (error) {
|
||||
return errorResponse(error);
|
||||
|
||||
@@ -25,7 +25,7 @@ import { previewAdminTemplateSchema } from '@/lib/validators/document-templates'
|
||||
* sampleData?: Record<string, string> - variable substitutions
|
||||
*/
|
||||
export const POST = withAuth(
|
||||
withPermission('documents', 'manage', async (req, _ctx) => {
|
||||
withPermission('document_templates', 'manage', async (req, _ctx) => {
|
||||
try {
|
||||
const body = await parseBody(req, previewAdminTemplateSchema);
|
||||
|
||||
|
||||
@@ -3,10 +3,7 @@ import { NextResponse } from 'next/server';
|
||||
import { withAuth, withPermission } from '@/lib/api/helpers';
|
||||
import { parseQuery, parseBody } from '@/lib/api/route-helpers';
|
||||
import { errorResponse } from '@/lib/errors';
|
||||
import {
|
||||
listAdminTemplates,
|
||||
createAdminTemplate,
|
||||
} from '@/lib/services/document-templates.service';
|
||||
import { listAdminTemplates, createAdminTemplate } from '@/lib/services/document-templates.service';
|
||||
import {
|
||||
listAdminTemplatesSchema,
|
||||
createAdminTemplateSchema,
|
||||
@@ -17,7 +14,7 @@ import {
|
||||
* List TipTap-based document templates for the port.
|
||||
*/
|
||||
export const GET = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx) => {
|
||||
try {
|
||||
const query = parseQuery(req, listAdminTemplatesSchema);
|
||||
const data = await listAdminTemplates(ctx.portId, query);
|
||||
@@ -33,7 +30,7 @@ export const GET = withAuth(
|
||||
* Create a new TipTap-based document template.
|
||||
*/
|
||||
export const POST = withAuth(
|
||||
withPermission('documents', 'manage', async (req, ctx) => {
|
||||
withPermission('document_templates', 'manage', async (req, ctx) => {
|
||||
try {
|
||||
const body = await parseBody(req, createAdminTemplateSchema);
|
||||
const template = await createAdminTemplate(ctx.portId, ctx.userId, body, {
|
||||
|
||||
@@ -111,7 +111,15 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||
}
|
||||
|
||||
case 'DOCUMENT_OPENED': {
|
||||
const openedRecipients = recipients.filter((r) => r.readStatus === 'OPENED');
|
||||
// Documenso v1 sends `readStatus: 'OPENED'`; v2 has used both
|
||||
// upper and lower case across releases and may omit the field
|
||||
// entirely (the event itself signals the open). Treat the event
|
||||
// as the signal: dispatch a per-recipient open for every
|
||||
// recipient on the document so v2 deployments stop silently
|
||||
// dropping opens.
|
||||
const openedRecipients = recipients.filter(
|
||||
(r) => !r.readStatus || String(r.readStatus).toUpperCase() === 'OPENED',
|
||||
);
|
||||
for (const r of openedRecipients) {
|
||||
await handleDocumentOpened({
|
||||
documentId: documensoId,
|
||||
|
||||
@@ -123,6 +123,8 @@ function BrochureCard({ brochure, onChange }: { brochure: BrochureRow; onChange:
|
||||
toast.success('Default brochure updated');
|
||||
onChange();
|
||||
},
|
||||
onError: (e) =>
|
||||
toast.error(e instanceof Error ? e.message : 'Could not update default brochure'),
|
||||
});
|
||||
|
||||
const archiveMutation = useMutation({
|
||||
@@ -131,6 +133,7 @@ function BrochureCard({ brochure, onChange }: { brochure: BrochureRow; onChange:
|
||||
toast.success('Brochure archived');
|
||||
onChange();
|
||||
},
|
||||
onError: (e) => toast.error(e instanceof Error ? e.message : 'Archive failed'),
|
||||
});
|
||||
|
||||
async function handleUpload(file: File) {
|
||||
@@ -284,6 +287,7 @@ function CreateBrochureDialog({
|
||||
onCreated();
|
||||
onOpenChange(false);
|
||||
},
|
||||
onError: (e) => toast.error(e instanceof Error ? e.message : 'Could not create brochure'),
|
||||
});
|
||||
|
||||
return (
|
||||
|
||||
@@ -3,17 +3,87 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import { format } from 'date-fns';
|
||||
import { Loader2, Receipt, Edit, Archive } from 'lucide-react';
|
||||
import { Archive, Download, Edit, FileText, Loader2, Receipt } from 'lucide-react';
|
||||
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { Badge } from '@/components/ui/badge';
|
||||
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
|
||||
import { ArchiveConfirmDialog } from '@/components/shared/archive-confirm-dialog';
|
||||
import { PermissionGate } from '@/components/shared/permission-gate';
|
||||
import { toast } from 'sonner';
|
||||
import { apiFetch } from '@/lib/api/client';
|
||||
import { useMobileChrome } from '@/components/layout/mobile/mobile-layout-provider';
|
||||
import type { ExpenseRow } from './expense-columns';
|
||||
import { ExpenseDuplicateBanner } from './expense-duplicate-banner';
|
||||
|
||||
/**
|
||||
* Renders an image thumbnail for previewable receipts (jpeg/png/webp/heic
|
||||
* via the existing /files/[id]/preview presign), falling back to a "Download"
|
||||
* link for PDFs and other non-previewable types. Replaces the prior
|
||||
* impossible-to-use UUID-badge list — reps can finally see the receipt
|
||||
* they uploaded against the expense.
|
||||
*/
|
||||
function ReceiptThumbnail({ fileId }: { fileId: string }) {
|
||||
const { data, isLoading, isError } = useQuery<{
|
||||
data: { url: string; mimeType: string } | null;
|
||||
error?: string;
|
||||
}>({
|
||||
queryKey: ['file-preview', fileId],
|
||||
queryFn: async () => {
|
||||
try {
|
||||
const res = await apiFetch<{ data: { url: string; mimeType: string } }>(
|
||||
`/api/v1/files/${fileId}/preview`,
|
||||
);
|
||||
return res;
|
||||
} catch (e) {
|
||||
// Non-image files raise ValidationError ("This file type cannot be
|
||||
// previewed") — fall through to the Download link.
|
||||
return { data: null, error: e instanceof Error ? e.message : 'preview unavailable' };
|
||||
}
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
});
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex h-32 items-center justify-center rounded border bg-muted/40 text-xs text-muted-foreground">
|
||||
<Loader2 className="mr-2 h-3 w-3 animate-spin" /> Loading preview…
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const url = data?.data?.url;
|
||||
const mime = data?.data?.mimeType ?? '';
|
||||
const isImage = mime.startsWith('image/');
|
||||
|
||||
return (
|
||||
<div className="rounded border bg-muted/40 p-2">
|
||||
{url && isImage ? (
|
||||
<a href={url} target="_blank" rel="noopener noreferrer">
|
||||
<img
|
||||
src={url}
|
||||
alt="Receipt"
|
||||
className="h-32 w-full rounded object-cover hover:opacity-90"
|
||||
/>
|
||||
</a>
|
||||
) : (
|
||||
<div className="flex h-32 items-center justify-center text-muted-foreground">
|
||||
<FileText className="h-8 w-8" />
|
||||
</div>
|
||||
)}
|
||||
<div className="mt-2 flex items-center justify-between text-xs text-muted-foreground">
|
||||
<span className="truncate">{mime || (isError ? 'Receipt' : 'File')}</span>
|
||||
<a
|
||||
href={`/api/v1/files/${fileId}/download`}
|
||||
className="inline-flex items-center gap-1 text-primary hover:underline"
|
||||
>
|
||||
<Download className="h-3 w-3" /> Download
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const PAYMENT_STATUS_COLORS: Record<string, string> = {
|
||||
unpaid: 'bg-red-100 text-red-700 border-red-200',
|
||||
paid: 'bg-green-100 text-green-700 border-green-200',
|
||||
@@ -48,8 +118,13 @@ export function ExpenseDetail({ expenseId, onEdit, onArchived }: ExpenseDetailPr
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['expenses'] });
|
||||
setArchiveOpen(false);
|
||||
toast.success('Expense archived');
|
||||
onArchived?.();
|
||||
},
|
||||
onError: (e) => {
|
||||
toast.error(e instanceof Error ? e.message : 'Archive failed');
|
||||
setArchiveOpen(false);
|
||||
},
|
||||
});
|
||||
|
||||
if (isLoading) {
|
||||
@@ -84,20 +159,24 @@ export function ExpenseDetail({ expenseId, onEdit, onArchived }: ExpenseDetailPr
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{onEdit && (
|
||||
<Button variant="outline" size="sm" onClick={onEdit}>
|
||||
<Edit className="mr-1.5 h-4 w-4" />
|
||||
Edit
|
||||
</Button>
|
||||
<PermissionGate resource="expenses" action="edit">
|
||||
<Button variant="outline" size="sm" onClick={onEdit}>
|
||||
<Edit className="mr-1.5 h-4 w-4" />
|
||||
Edit
|
||||
</Button>
|
||||
</PermissionGate>
|
||||
)}
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="text-destructive"
|
||||
onClick={() => setArchiveOpen(true)}
|
||||
>
|
||||
<Archive className="mr-1.5 h-4 w-4" />
|
||||
Archive
|
||||
</Button>
|
||||
<PermissionGate resource="expenses" action="delete">
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="text-destructive"
|
||||
onClick={() => setArchiveOpen(true)}
|
||||
>
|
||||
<Archive className="mr-1.5 h-4 w-4" />
|
||||
Archive
|
||||
</Button>
|
||||
</PermissionGate>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -172,11 +251,9 @@ export function ExpenseDetail({ expenseId, onEdit, onArchived }: ExpenseDetailPr
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
{(expense.receiptFileIds as string[]).map((fileId: string) => (
|
||||
<Badge key={fileId} variant="secondary" className="font-mono text-xs">
|
||||
{fileId}
|
||||
</Badge>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
|
||||
{(expense.receiptFileIds as string[]).map((fileId) => (
|
||||
<ReceiptThumbnail key={fileId} fileId={fileId} />
|
||||
))}
|
||||
</div>
|
||||
</CardContent>
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
-- Audit-final v2 fix: document_sends FKs default to NO ACTION which means
|
||||
-- a hard-delete of a referenced client/interest/berth/brochure either
|
||||
-- silently blocks the parent delete OR (if a future cascade path is added)
|
||||
-- nukes the send-out audit row. The audit trail must outlast its source —
|
||||
-- recipient_email + document_kind + body_markdown + from_address are
|
||||
-- already denormalized onto the row for exactly this purpose.
|
||||
--
|
||||
-- Switch every parent FK to ON DELETE SET NULL so the audit row keeps a
|
||||
-- timestamp + email even when the source row is gone.
|
||||
|
||||
ALTER TABLE document_sends
|
||||
DROP CONSTRAINT IF EXISTS document_sends_client_id_clients_id_fk,
|
||||
DROP CONSTRAINT IF EXISTS document_sends_interest_id_interests_id_fk,
|
||||
DROP CONSTRAINT IF EXISTS document_sends_berth_id_berths_id_fk,
|
||||
DROP CONSTRAINT IF EXISTS document_sends_brochure_id_brochures_id_fk,
|
||||
DROP CONSTRAINT IF EXISTS document_sends_brochure_version_id_brochure_versions_id_fk;
|
||||
|
||||
ALTER TABLE document_sends
|
||||
ADD CONSTRAINT document_sends_client_id_clients_id_fk
|
||||
FOREIGN KEY (client_id) REFERENCES clients(id) ON DELETE SET NULL,
|
||||
ADD CONSTRAINT document_sends_interest_id_interests_id_fk
|
||||
FOREIGN KEY (interest_id) REFERENCES interests(id) ON DELETE SET NULL,
|
||||
ADD CONSTRAINT document_sends_berth_id_berths_id_fk
|
||||
FOREIGN KEY (berth_id) REFERENCES berths(id) ON DELETE SET NULL,
|
||||
ADD CONSTRAINT document_sends_brochure_id_brochures_id_fk
|
||||
FOREIGN KEY (brochure_id) REFERENCES brochures(id) ON DELETE SET NULL,
|
||||
ADD CONSTRAINT document_sends_brochure_version_id_brochure_versions_id_fk
|
||||
FOREIGN KEY (brochure_version_id) REFERENCES brochure_versions(id) ON DELETE SET NULL;
|
||||
13
src/lib/db/migrations/0036_polymorphic_check_constraints.sql
Normal file
13
src/lib/db/migrations/0036_polymorphic_check_constraints.sql
Normal file
@@ -0,0 +1,13 @@
|
||||
-- Audit-final v2 fix: polymorphic discriminator columns are currently free-text
|
||||
-- and a typo in service code (e.g. `'clients'` vs `'client'`) inserts silently
|
||||
-- and the row becomes unreachable through every read path that uses the
|
||||
-- resolver service. Add CHECK constraints on the two most load-bearing
|
||||
-- discriminators to surface drift at the DB level.
|
||||
|
||||
ALTER TABLE yachts
|
||||
ADD CONSTRAINT yachts_current_owner_type_chk
|
||||
CHECK (current_owner_type IN ('client', 'company'));
|
||||
|
||||
ALTER TABLE invoices
|
||||
ADD CONSTRAINT invoices_billing_entity_type_chk
|
||||
CHECK (billing_entity_type IN ('client', 'company'));
|
||||
@@ -246,6 +246,20 @@
|
||||
"when": 1778000000000,
|
||||
"tag": "0034_normalize_mooring_numbers_broaden",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 35,
|
||||
"version": "7",
|
||||
"when": 1778050000000,
|
||||
"tag": "0035_document_sends_preserve_audit_on_parent_delete",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 36,
|
||||
"version": "7",
|
||||
"when": 1778100000000,
|
||||
"tag": "0036_polymorphic_check_constraints",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -102,17 +102,25 @@ export const documentSends = pgTable(
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id),
|
||||
/** Either client_id or interest_id is set (or both). */
|
||||
clientId: text('client_id').references(() => clients.id),
|
||||
interestId: text('interest_id').references(() => interests.id),
|
||||
/**
|
||||
* Either client_id or interest_id is set (or both). All five FKs use
|
||||
* `onDelete: 'set null'` so the audit row survives if the parent
|
||||
* client/interest/berth/brochure is deleted — `recipient_email`,
|
||||
* `document_kind`, `body_markdown`, and `from_address` are denormalized
|
||||
* onto the row precisely so the audit trail outlasts the source.
|
||||
*/
|
||||
clientId: text('client_id').references(() => clients.id, { onDelete: 'set null' }),
|
||||
interestId: text('interest_id').references(() => interests.id, { onDelete: 'set null' }),
|
||||
recipientEmail: text('recipient_email').notNull(),
|
||||
/** 'berth_pdf' | 'brochure' */
|
||||
documentKind: text('document_kind').notNull(),
|
||||
berthId: text('berth_id').references(() => berths.id),
|
||||
berthId: text('berth_id').references(() => berths.id, { onDelete: 'set null' }),
|
||||
/** Forward FK ref — berth_pdf_versions defined in Phase 6b. Loose-coupled. */
|
||||
berthPdfVersionId: text('berth_pdf_version_id'),
|
||||
brochureId: text('brochure_id').references(() => brochures.id),
|
||||
brochureVersionId: text('brochure_version_id').references(() => brochureVersions.id),
|
||||
brochureId: text('brochure_id').references(() => brochures.id, { onDelete: 'set null' }),
|
||||
brochureVersionId: text('brochure_version_id').references(() => brochureVersions.id, {
|
||||
onDelete: 'set null',
|
||||
}),
|
||||
/** Exact body used (after merge-field expansion + sanitization). */
|
||||
bodyMarkdown: text('body_markdown'),
|
||||
sentByUserId: text('sent_by_user_id').notNull(),
|
||||
|
||||
@@ -69,7 +69,10 @@ async function resolveAttachments(
|
||||
const { files } = await import('@/lib/db/schema/documents');
|
||||
const { eq } = await import('drizzle-orm');
|
||||
const { ForbiddenError, NotFoundError } = await import('@/lib/errors');
|
||||
const { minioClient } = await import('@/lib/minio');
|
||||
// Pluggable storage backend (s3 OR filesystem). Direct MinIO imports
|
||||
// break the filesystem-mode deployment path documented in CLAUDE.md.
|
||||
const { getStorageBackend } = await import('@/lib/storage');
|
||||
const backend = await getStorageBackend();
|
||||
|
||||
return Promise.all(
|
||||
refs.map(async (ref) => {
|
||||
@@ -78,9 +81,9 @@ async function resolveAttachments(
|
||||
if (portId && file.portId !== portId) {
|
||||
throw new ForbiddenError('File belongs to a different port');
|
||||
}
|
||||
const stream = await minioClient.getObject(file.storageBucket, file.storagePath);
|
||||
const stream = await backend.get(file.storagePath);
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of stream) {
|
||||
for await (const chunk of stream as AsyncIterable<Buffer | string>) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
}
|
||||
return {
|
||||
|
||||
@@ -70,6 +70,17 @@ export function validateStorageKey(key: string): void {
|
||||
|
||||
// ─── HMAC token helpers ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Token op binding. `'get'` tokens are issued by `presignDownload` and only
|
||||
* accepted by the proxy GET handler. `'put'` tokens are issued by
|
||||
* `presignUpload` and only accepted by the proxy PUT handler. Without this
|
||||
* binding a long-lived 24h download URL emailed to a customer could be
|
||||
* replayed against the PUT handler to overwrite the original storage object
|
||||
* (since both routes share an HMAC and key — the magic-byte check is also
|
||||
* skipped when `c` is unset).
|
||||
*/
|
||||
export type ProxyTokenOp = 'get' | 'put';
|
||||
|
||||
interface ProxyTokenPayload {
|
||||
/** Storage key (validated). */
|
||||
k: string;
|
||||
@@ -77,6 +88,11 @@ interface ProxyTokenPayload {
|
||||
e: number;
|
||||
/** Random nonce so two URLs for the same (key, expiry) differ. */
|
||||
n: string;
|
||||
/**
|
||||
* Bound operation. Tokens minted before this field was added (legacy)
|
||||
* fail-closed: the proxy handlers require the field's exact value.
|
||||
*/
|
||||
op: ProxyTokenOp;
|
||||
/** Optional download filename. */
|
||||
f?: string;
|
||||
/** Optional content-type override. */
|
||||
@@ -102,6 +118,12 @@ export function signProxyToken(payload: ProxyTokenPayload, secret: string): stri
|
||||
export function verifyProxyToken(
|
||||
token: string,
|
||||
secret: string,
|
||||
/**
|
||||
* Required: the operation the verifier is allowed to perform. The token
|
||||
* must have been minted with the same `op`. Without this argument an
|
||||
* upload token could be replayed as a download (and vice versa).
|
||||
*/
|
||||
expectedOp: ProxyTokenOp,
|
||||
): { ok: true; payload: ProxyTokenPayload } | { ok: false; reason: string } {
|
||||
if (typeof token !== 'string' || !token.includes('.')) {
|
||||
return { ok: false, reason: 'malformed' };
|
||||
@@ -138,6 +160,11 @@ export function verifyProxyToken(
|
||||
} catch {
|
||||
return { ok: false, reason: 'invalid-key' };
|
||||
}
|
||||
// Op-binding: tokens minted before this field was added have no `op`
|
||||
// and are now rejected. Fresh tokens must match `expectedOp` exactly.
|
||||
if (payload.op !== expectedOp) {
|
||||
return { ok: false, reason: 'op-mismatch' };
|
||||
}
|
||||
return { ok: true, payload };
|
||||
}
|
||||
|
||||
@@ -269,7 +296,7 @@ export class FilesystemBackend implements StorageBackend {
|
||||
validateStorageKey(key);
|
||||
const expiresAt = Math.floor(Date.now() / 1000) + (opts.expirySeconds ?? 900);
|
||||
const token = signProxyToken(
|
||||
{ k: key, e: expiresAt, n: randomUUID(), c: opts.contentType },
|
||||
{ k: key, e: expiresAt, n: randomUUID(), op: 'put', c: opts.contentType },
|
||||
this.hmacSecret,
|
||||
);
|
||||
return { url: `/api/storage/${token}`, method: 'PUT' };
|
||||
@@ -280,7 +307,14 @@ export class FilesystemBackend implements StorageBackend {
|
||||
const expirySec = opts.expirySeconds ?? 900;
|
||||
const expiresAtSec = Math.floor(Date.now() / 1000) + expirySec;
|
||||
const token = signProxyToken(
|
||||
{ k: key, e: expiresAtSec, n: randomUUID(), f: opts.filename, c: opts.contentType },
|
||||
{
|
||||
k: key,
|
||||
e: expiresAtSec,
|
||||
n: randomUUID(),
|
||||
op: 'get',
|
||||
f: opts.filename,
|
||||
c: opts.contentType,
|
||||
},
|
||||
this.hmacSecret,
|
||||
);
|
||||
// ABSOLUTE URL: send-out emails interpolate this verbatim into the
|
||||
|
||||
@@ -133,11 +133,41 @@ export function extractTokens(markdown: string): string[] {
|
||||
return matches ? Array.from(new Set(matches)) : [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Markdown-significant characters that need to be neutralized before a merge
|
||||
* value is substituted into the rep-authored body. Without this, a value
|
||||
* like `[click here](https://attacker.tld)` stored on a client/company would
|
||||
* survive `renderEmailBody`'s HTML escape (escapeHtml leaves `[`, `]`, `(`,
|
||||
* `)` intact) and produce a real `<a href>` in the outbound email — a
|
||||
* phishing lure delivered from the legitimate sales account.
|
||||
*
|
||||
* Each char is replaced with its HTML entity. The entity encoding survives
|
||||
* `escapeHtml` (which only re-escapes `&`) and renders as the original
|
||||
* literal character — visually the user still sees their data verbatim,
|
||||
* but the markdown rules (link, emphasis, code) no longer fire on it.
|
||||
*/
|
||||
const MERGE_VALUE_ESCAPE_MAP: Record<string, string> = {
|
||||
'\\': '\',
|
||||
'`': '`',
|
||||
'*': '*',
|
||||
_: '_',
|
||||
'[': '[',
|
||||
']': ']',
|
||||
'(': '(',
|
||||
')': ')',
|
||||
'{': '{',
|
||||
'}': '}',
|
||||
};
|
||||
|
||||
function escapeMergeValue(value: string): string {
|
||||
return value.replace(/[\\`*_[\](){}]/g, (ch) => MERGE_VALUE_ESCAPE_MAP[ch] ?? ch);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace `{{token}}` references with values from the supplied map. Tokens
|
||||
* not present in the map are left intact so the dry-run reporter can flag
|
||||
* them. Values are HTML-escape-safe by virtue of being run BEFORE
|
||||
* `renderEmailBody()`; the caller is expected to pass plain strings.
|
||||
* them. Values are markdown-escaped before substitution so a malicious
|
||||
* field cannot inject a link, emphasis, or another `{{token}}` form.
|
||||
*/
|
||||
export function expandMergeTokens(
|
||||
markdown: string,
|
||||
@@ -147,7 +177,7 @@ export function expandMergeTokens(
|
||||
const key = `{{${raw}}}`;
|
||||
const value = values[key];
|
||||
if (value === null || value === undefined || value === '') return full;
|
||||
return String(value);
|
||||
return escapeMergeValue(String(value));
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -32,9 +32,58 @@ function isApiRoute(pathname: string): boolean {
|
||||
return pathname.startsWith('/api/');
|
||||
}
|
||||
|
||||
const STATE_CHANGING_METHODS = new Set(['POST', 'PUT', 'PATCH', 'DELETE']);
|
||||
|
||||
/**
|
||||
* SameSite=Lax cookies block top-level cross-site POSTs in modern browsers,
|
||||
* but defense-in-depth: every state-changing request to a session-authed
|
||||
* `/api/v1/**` endpoint must originate from the same origin as the app.
|
||||
* Webhooks (`/api/webhooks/**`) and public posts (`/api/public/**`) are
|
||||
* exempt because they're called by external systems with no session
|
||||
* cookie. Auth flows (`/api/auth/**`) and portal (`/api/portal/**`) handle
|
||||
* their own origin/CSRF checks via better-auth.
|
||||
*/
|
||||
function isOriginCheckedPath(pathname: string): boolean {
|
||||
if (!pathname.startsWith('/api/v1/')) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
function originAllowed(request: NextRequest): boolean {
|
||||
const origin = request.headers.get('origin');
|
||||
const referer = request.headers.get('referer');
|
||||
// Same-origin fetch from the app sends both Origin AND a matching host.
|
||||
// Use request.nextUrl.origin (the deployed origin) as the source of truth.
|
||||
const expectedOrigin = request.nextUrl.origin;
|
||||
if (origin) return origin === expectedOrigin;
|
||||
if (referer) {
|
||||
try {
|
||||
return new URL(referer).origin === expectedOrigin;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Neither header present: most browser fetches always send Origin on
|
||||
// POST/PUT/PATCH/DELETE, so this likely means a same-origin server-side
|
||||
// call (e.g. Next.js internal fetch). Allow.
|
||||
return true;
|
||||
}
|
||||
|
||||
export function middleware(request: NextRequest): NextResponse {
|
||||
const { pathname } = request.nextUrl;
|
||||
|
||||
// CSRF defense-in-depth: state-changing requests to authed /api/v1
|
||||
// endpoints must come from the app's own origin.
|
||||
if (
|
||||
STATE_CHANGING_METHODS.has(request.method) &&
|
||||
isOriginCheckedPath(pathname) &&
|
||||
!originAllowed(request)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Cross-origin state-changing request rejected' },
|
||||
{ status: 403 },
|
||||
);
|
||||
}
|
||||
|
||||
// Always allow public paths through
|
||||
if (isPublicPath(pathname)) {
|
||||
return NextResponse.next();
|
||||
|
||||
Reference in New Issue
Block a user