From 8dc16dcd2e1814d5ce613f6a3839ce84e4367dc3 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 8 May 2026 02:20:27 +0200 Subject: [PATCH] =?UTF-8?q?fix(audit):=20non-Documenso=20backlog=20sweep?= =?UTF-8?q?=20=E2=80=94=20port-binding,=20NULLS=20NOT=20DISTINCT,=20custom?= =?UTF-8?q?=20merge=20tokens,=20company=20docs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Wave through the remaining audit-final-deferred items that aren't blocked on the back-burnered Documenso work. Multi-tenant isolation: - Storage proxy ProxyTokenPayload gains optional `p` (port slug) claim; verifier asserts `key.startsWith(${p}/)`. Defense-in-depth against a buggy issuer in some future code path that mixes port scopes — every storage key generated by generateStorageKey() already prefixes the slug. document-sends opts in for 24h emailed download links; other callers continue working unchanged via the optional field. DB schema reconciliation: - Migration 0047 rebuilds system_settings unique index with NULLS NOT DISTINCT (Postgres 15+) so global settings (port_id IS NULL) are uniquely keyed by `key` alone. Surfaced + dedupe'd 65 duplicate (storage_backend, NULL) rows that had accumulated from race-prone delete-then-insert patterns in ocr-config / settings / residential- stages / ai-budget services. All four services converted to true onConflictDoUpdate upserts so the race window is closed. API uniformity: - Response shape standardization: 16 routes converted from `{ success: true }` to 204 No Content. CLAUDE.md documents the convention (`{ data: }` for content, 204 for empty mutations, portal-auth retains `{ success: true }` for the frontend's auth chain). - req.json() → parseBody() migration across 9 admin/CRM routes (custom-fields, expenses/export ×3, currency convert, search/recently-viewed, admin/duplicates, berths/pdf-{upload-url, versions, parse-results}). Uniform 400 error shapes for ZodError-flagged bodies. 
Custom-fields merge tokens (shipped end-to-end): - merge-fields.ts gains CUSTOM_MERGE_TOKEN_RE + helpers for the `{{custom.<fieldName>}}` shape. - document-templates validator accepts the dynamic shape alongside the static catalog tokens. - document-sends.service mergeCustomFieldValues resolver fetches per-port custom_field_definitions for client/interest/berth contexts and substitutes stored values keyed by `{{custom.fieldName}}`. - custom-fields-manager amber banner updated to reflect that merge tokens now expand (search index + entity-diff remain documented design limitations). /api/v1/files cross-entity filtering: - Validator + listFiles + uploadFile accept companyId AND yachtId alongside clientId. file-upload-zone propagates both. - New CompanyFilesTab component mirrors ClientFilesTab; restored as a visible Documents tab in company-tabs.tsx (was a hidden stub). Inline TODOs: - Reviewed remaining two TODOs (per-user reminder schedule, import worker handlers). Both are placeholders for future feature surfaces, not bugs — per-port digest works for every customer; nothing currently enqueues import jobs (verified). Annotated in BACKLOG. BACKLOG.md updated to reflect what landed and what's still pending (Documenso-related items still bundled with the back-burnered phases). Tests: 1185/1185 vitest, tsc clean. 
--- CLAUDE.md | 2 + docs/BACKLOG.md | 74 ++++++++------ src/app/api/v1/admin/brochures/[id]/route.ts | 2 +- .../v1/admin/custom-fields/[fieldId]/route.ts | 99 +++++++++---------- src/app/api/v1/admin/custom-fields/route.ts | 9 +- src/app/api/v1/admin/duplicates/handlers.ts | 21 ++-- .../api/v1/admin/invitations/[id]/route.ts | 2 +- src/app/api/v1/admin/roles/[id]/route.ts | 2 +- src/app/api/v1/admin/settings/route.ts | 2 +- src/app/api/v1/admin/users/[id]/route.ts | 2 +- .../v1/admin/webhooks/[webhookId]/route.ts | 8 +- .../v1/berths/[id]/pdf-upload-url/handlers.ts | 14 +-- .../v1/berths/[id]/pdf-versions/handlers.ts | 39 ++++---- .../parse-results/apply/handlers.ts | 20 ++-- src/app/api/v1/berths/[id]/route.ts | 2 +- .../api/v1/clients/[id]/portal-user/route.ts | 2 +- src/app/api/v1/clients/[id]/tags/route.ts | 2 +- src/app/api/v1/companies/[id]/tags/route.ts | 2 +- src/app/api/v1/currency/convert/route.ts | 4 +- .../api/v1/custom-fields/[entityId]/route.ts | 5 +- src/app/api/v1/expenses/export/csv/route.ts | 4 +- .../expenses/export/parent-company/route.ts | 4 +- src/app/api/v1/expenses/export/pdf/route.ts | 4 +- src/app/api/v1/files/upload/route.ts | 2 + .../api/v1/interests/[id]/restore/route.ts | 2 +- src/app/api/v1/interests/[id]/route.ts | 8 +- .../notifications/[notificationId]/route.ts | 2 +- .../api/v1/notifications/read-all/route.ts | 2 +- src/app/api/v1/reminders/[id]/route.ts | 2 +- .../api/v1/search/recently-viewed/route.ts | 10 +- src/app/api/v1/yachts/[id]/tags/route.ts | 2 +- .../custom-fields/custom-fields-manager.tsx | 11 ++- .../companies/company-files-tab.tsx | 88 +++++++++++++++++ src/components/companies/company-tabs.tsx | 9 +- src/components/files/file-upload-zone.tsx | 23 +++-- src/hooks/use-search.ts | 6 +- ...047_system_settings_nulls_not_distinct.sql | 34 +++++++ src/lib/db/schema/system.ts | 9 +- src/lib/services/ai-budget.service.ts | 27 +++-- src/lib/services/document-sends.service.ts | 96 ++++++++++++++++++ src/lib/services/files.ts 
| 10 +- src/lib/services/ocr-config.service.ts | 33 ++++--- .../services/residential-stages.service.ts | 23 +++-- src/lib/services/settings.service.ts | 25 +++-- src/lib/storage/filesystem.ts | 31 +++++- src/lib/storage/index.ts | 12 ++- src/lib/templates/merge-fields.ts | 22 +++++ src/lib/validators/document-templates.ts | 14 ++- src/lib/validators/files.ts | 4 + 49 files changed, 578 insertions(+), 254 deletions(-) create mode 100644 src/components/companies/company-files-tab.tsx create mode 100644 src/lib/db/migrations/0047_system_settings_nulls_not_distinct.sql diff --git a/CLAUDE.md b/CLAUDE.md index 860ab5f..9d034cd 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -106,6 +106,8 @@ src/ - **Send-from accounts (sales send-outs):** Configurable via `system_settings`; defaults to `sales@portnimara.com` for human-touch and `noreply@portnimara.com` for automation. SMTP/IMAP passwords are AES-256-GCM encrypted at rest; the API never returns decrypted secrets — only `*PassIsSet` boolean markers. Send-out audit goes to `document_sends` (separate from `audit_logs` because of volume + binary refs). Body markdown is XSS-safe via `renderEmailBody()` (escape-then-allowlist; tested against the standard XSS vector list). Rate limit: 50 sends/user/hour individual. Pre-send size threshold: files > `email_attach_threshold_mb` ship as a 24h signed-URL link rather than an attachment (avoids the duplicate-send race from async bounces). The download-link fallback HTML-escapes the filename to prevent injection from admin-supplied brochure names. Bounce monitoring requires IMAP credentials in addition to SMTP — without them, the size-rejection banner stays disabled. - **NocoDB berth import:** `pnpm tsx scripts/import-berths-from-nocodb.ts --apply --port-slug port-nimara` re-imports from the legacy NocoDB Berths table. Idempotent: rows where `updated_at > last_imported_at` (the "human edited this since last import" guard) are skipped unless `--force`. 
Adds `--update-snapshot` to also rewrite `src/lib/db/seed-data/berths.json`. Uses `pg_advisory_xact_lock` so two simultaneous runs serialize. Pure helpers in `src/lib/services/berth-import.ts` are unit-tested. - **Routes:** Multi-tenant via `[portSlug]` dynamic segment. Typed routes enabled. +- **API response shapes:** Conventional envelope is `{ data: }` for any endpoint that returns content (read OR write). Mutations that return nothing emit `204 No Content` (`new NextResponse(null, { status: 204 })`). Don't use `{ success: true }` for CRM mutations — it was a legacy pattern, normalized away in 2026-05-07. Public portal-auth endpoints are an exception: they return `{ success: true }` because the frontend needs a non-error JSON body to chain on. List/paginated reads return `{ data: , total?, hasMore? }` (see `/api/v1/clients` for the shape). Errors always go through `errorResponse(error)` from `@/lib/errors` so request-id propagation and the audit-tier mapping stay uniform. +- **Body parsing:** Always use `parseBody(req, schema)` from `@/lib/api/route-helpers` instead of `await req.json(); schema.parse(body)`. The helper returns a uniform 400 with field-level errors that the frontend's `toastError` hook recognizes; raw `req.json` + `schema.parse` produces a generic 500 because the ZodError isn't caught in the same shape. - **Pre-commit:** Husky + lint-staged runs ESLint fix + Prettier on staged `.ts`/`.tsx` files. The hook also blocks `.env*` files (including `.env.example`) from being committed; pass them via a separate workflow if needed. ## Schema migrations during dev diff --git a/docs/BACKLOG.md b/docs/BACKLOG.md index 6edc195..e137bbf 100644 --- a/docs/BACKLOG.md +++ b/docs/BACKLOG.md @@ -4,13 +4,12 @@ asking "what's left to build/fix?". Items are grouped by source doc; each entry links back to the original spec for full context. 
-Last updated: 2026-05-07 (after the audit-final-deferred sweep — partial -archived indexes, document_sends interestId port-verify, custom-fields -per-entity permission gate, recommender bool parsing, expense PDF cursor -math, berth PDF silent-drop logging, YachtForm preset-owner + interest -form member-company yacht filter + add-new shortcut, invoice detail -typed). Many older items in §C and §F were already resolved by earlier -fix-audit commit waves; the audit doc was stale. +Last updated: 2026-05-08 (second non-Documenso sweep — storage-proxy +port-binding, system_settings NULLS NOT DISTINCT + dedup migration, +response-shape standardization, parseBody migration, custom-field merge +tokens, /api/v1/files companyId+yachtId filter, Company Documents tab, +file-upload zone wired for company/yacht targeting). Documenso phases +2-7 stay back-burnered per user. --- @@ -35,15 +34,14 @@ Remaining phases — explicitly back-burnered by the user on 2026-05-07: --- -## B. Custom-fields hardening (~ongoing, deferred) +## B. Custom-fields hardening **Source:** [`docs/admin-ux-backlog.md`](./admin-ux-backlog.md) §7. -Custom Settings page already shows the amber warning banner. Remediation work: - -- **Search index** — extend the GIN tsvector to include `customFieldValues` content -- **Audit diff** — extend `diffEntity` to walk the `customFieldValues` blob -- **Merge tokens** — add `{{custom.}}` handling at template-render time, plus surface them in the merge-tokens UI +- ✅ **Merge tokens** — `{{custom.}}` validators + resolver shipped 2026-05-08. Tokens expand at template-render time for client/interest/berth contexts via `mergeCustomFieldValues` in `document-sends.service.ts`. Banner updated. +- **Search index** — DEFERRED as design limitation. Adding GIN coverage requires either joining `custom_field_values` per search (slow at scale) or materializing values into a search_text column on the parent (additive maintenance burden). The amber banner documents this. 
+- **Audit diff** — N/A. Custom-field values live in their own table, not as a JSONB blob on the parent entity. The `setValues()` service-layer call already creates its own audit log entry (custom-fields.service.ts:349-358), so changes ARE audited — just separately from the entity-diff. +- **UI surfacing of `{{custom.…}}` tokens in template-edit pickers** — Open. The token list dialog currently only shows static catalog tokens. Surface per-port custom-field definitions as a dynamic group under "Custom" so reps can browse them. Backend already accepts the tokens; this is a UI follow-up. --- @@ -55,15 +53,26 @@ The 2026-05-07 backlog sweep landed every small/concrete item. Remaining entries are deferred because they need design decisions, live external instances, or cross-cutting refactors: -### Deferred — needs design or larger refactor +### Deferred — Documenso-related (back-burnered until phases 2-7 land) -- **Storage proxy token does not bind to port_id** — `src/lib/storage/filesystem.ts:73-84`. Adding a `p` (portId) claim is mechanical; the meaningful security gain requires the proxy verifier to look up the file's owning row + assert `owner.portId === payload.p`. That requires either a routing prefix in the key (currently `${portSlug}/...` already, so a prefix check is plausible) or a per-table lookup across all owners. Decide which approach before implementing — current state ships with `validateStorageKey` + per-issuer port scoping, so this is defense-in-depth rather than an open hole. -- **Documenso webhook does not enforce port_id on document lookups** — `src/app/api/webhooks/documenso/route.ts:96-148`. Adding port scope requires either including the originating Documenso instance/team id in the lookup (Documenso doesn't surface that on the webhook payload today) OR proving `documents(documenso_id)` is globally unique with a DB constraint and a backfill check. Pick the strategy with the audit doc open. 
-- **Webhook dedup vs per-recipient signed events** — `src/app/api/webhooks/documenso/route.ts:103-110`. Replacing the body-hash dedup with a `(documensoDocumentId, recipientEmail, eventType)` composite unique requires schema column for recipient_email on `documentEvents`. Right place to do this is alongside Documenso Phase 2 (webhook handler enhancement) since they touch the same code. -- **v2 voidDocument endpoint shape verification** — `src/lib/services/documenso-client.ts:450-466`. Needs a live Documenso 2.x instance to confirm `POST /api/v2/envelope/delete` body shape. Bundle with Documenso Phase 5. -- **Public POST routes bypass service layer** — `src/app/api/public/{interests,website-inquiries,residential-inquiries}/route.ts`. Multi-route refactor extracting a shared `publicInterestService.create(...)`. Worth doing but big enough to deserve its own session. -- **Inconsistent response shapes** — most endpoints return `{ data: ... }`, but `notifications/[notificationId]` returns `{ success: true }`, `website-inquiries` returns `{ id, deduped }`. Codebase-wide migration; document a convention in CLAUDE.md first. -- **`systemSettings` PK / unique-index drift** — `src/lib/db/schema/system.ts:119-133`. Schema declares `uniqueIndex` on `(key, port_id)`, migration uses `key` as PK. `port_id` is nullable so `(key, port_id)` cannot serve as a PK with default NULLs-not-equal semantics. Reconcile by either making `portId` non-null with a sentinel ("**global**") and declaring composite PK, OR by dropping the schema-level unique index and using partial unique indexes for global vs per-port. Either path is a data migration. +- **Documenso webhook does not enforce port_id on document lookups** — `src/app/api/webhooks/documenso/route.ts:96-148`. Bundle with Documenso Phase 2 (webhook handler enhancement) since they touch the same code. +- **Webhook dedup vs per-recipient signed events** — `src/app/api/webhooks/documenso/route.ts:103-110`. 
Replacing the body-hash dedup with a `(documensoDocumentId, recipientEmail, eventType)` composite unique requires a recipient_email column on `documentEvents`. Bundle with Phase 2. +- **v2 voidDocument endpoint shape verification** — `src/lib/services/documenso-client.ts:450-466`. Needs a live Documenso 2.x instance. Bundle with Phase 5. + +### Deferred — pure refactor (no active bug) + +- **Public POST routes bypass service layer** — `src/app/api/public/{interests,website-inquiries,residential-inquiries}/route.ts`. The audit's `userId: null as unknown as string` cast was already cleaned up to a proper `userId: null`. Remaining concern is testability: extract a shared `publicInterestService.create(...)`. Pure ergonomics — no active bug or security issue. + +### Done in 2026-05-08 sweep (latest) + +- ✅ Storage proxy port_id binding: `ProxyTokenPayload` gains optional `p` (port slug) claim; verifier asserts `key.startsWith(${p}/)`. document-sends 24h URLs opt in; other issuers continue working unchanged. +- ✅ system_settings index rebuilt with `NULLS NOT DISTINCT` (migration 0047) — global settings are now uniquely keyed by `key` alone. Surfaced + cleaned 65 duplicate `(storage_backend, NULL)` rows that had accumulated from race-prone delete-then-insert patterns. +- ✅ All 4 read-then-write systemSettings sites converted to true `onConflictDoUpdate` upserts (ocr-config, settings, residential-stages, ai-budget). +- ✅ Response shape standardization: 16 routes converted from `{ success: true }` → `204 No Content`. CLAUDE.md documents the convention. +- ✅ `req.json()` → `parseBody()` migration across 9 admin/CRM routes (custom-fields, expenses/export ×3, currency convert, search/recently-viewed, admin/duplicates, berths/pdf-{upload-url,versions,parse-results}). Portal-auth routes intentionally retained `{ success: true }`. 
+- ✅ Custom-field merge tokens: validator accepts `{{custom.}}` shape; resolver in `mergeCustomFieldValues` substitutes from per-port custom_field_definitions + per-entity values for client/interest/berth contexts. Banner updated. +- ✅ `/api/v1/files` accepts `companyId` and `yachtId` filters. uploadFile service writes both. file-upload-zone component accepts both props. +- ✅ Company Documents tab (CompanyFilesTab) re-enabled and added to company detail tabs. ### Done in 2026-05-07 sweep (commits in this session) @@ -91,28 +100,29 @@ instances, or cross-cutting refactors: - ✅ All FK indexes called out in audit doc (already in place — audit was stale) - ✅ `documentSends.sentByUserId` FK (already had `.references(...)`) -### Still open — small enough to bundle next time +### Documented limitations (no action planned) -- **`berths.current_pdf_version_id` lacks Drizzle FK** — `src/lib/db/schema/berths.ts:83`. The in-line comment fully documents why (circular FK between `berths` ↔ `berth_pdf_versions` makes column-level `.references()` infeasible). FK is enforced via migration 0030. Treat as documented limitation; revisit if Drizzle adds deferred-FK support. -- **`req.json()` without `parseBody` helper** — admin custom-fields routes use `await req.json(); schema.parse(body)` directly. Migrate for uniform 400 error shapes when the surface area calms down. +- **`berths.current_pdf_version_id` lacks Drizzle FK** — `src/lib/db/schema/berths.ts:83`. The in-line comment fully documents why (circular FK between `berths` ↔ `berth_pdf_versions` makes column-level `.references()` infeasible). FK is enforced via migration 0030. Revisit if Drizzle adds deferred-FK support. +- **`systemSettings` schema declares `uniqueIndex` instead of `NULLS NOT DISTINCT`** — Drizzle's `uniqueIndex` builder doesn't surface the flag. Migration 0047 is the source of truth; `db:push` against an empty DB would skip the flag. Same documented-limitation pattern as `berths.current_pdf_version_id`. 
+- **One remaining `req.json()` in admin/custom-fields/[fieldId]** — intentional. The handler inspects raw body to detect `fieldType` mutation attempts; parseBody would lose the raw view. Documented inline. --- ## D. Inline TODOs in code (2 remaining) -| File:line | Note | Status | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------ | -| ~~`client-yachts-tab.tsx:93`~~ | YachtForm preset owner prop | ✅ landed 2026-05-07 (`initialOwner` prop) | -| ~~`interest-form.tsx:329`~~ | Include company-owned yachts where client is a member | ✅ landed 2026-05-07 (`yachtOwnerFilter` array filter) | -| ~~`interest-form.tsx:330`~~ | "Add new yacht" inline shortcut | ✅ landed 2026-05-07 (Plus button + YachtForm sheet) | -| [`src/lib/queue/scheduler.ts:44`](../src/lib/queue/scheduler.ts#L44) | Per-user reminder schedule configurable from `user_settings` | Open — needs `user_settings` UI surface | -| [`src/lib/queue/workers/import.ts:13`](../src/lib/queue/workers/import.ts#L13) | Import job handlers — worker is a stub | Open — entire feature surface | +| File:line | Note | Status | +| ------------------------------------------------------------------------------ | --------------------------------------------------------------- | --------------------------------------------------------------------------------------- | +| ~~`client-yachts-tab.tsx:93`~~ | YachtForm preset owner prop | ✅ landed 2026-05-07 (`initialOwner` prop) | +| ~~`interest-form.tsx:329`~~ | Include company-owned yachts where client is a member | ✅ landed 2026-05-07 (`yachtOwnerFilter` array filter) | +| ~~`interest-form.tsx:330`~~ | "Add new yacht" inline shortcut | ✅ landed 2026-05-07 (Plus button + YachtForm sheet) | +| [`src/lib/queue/scheduler.ts:44`](../src/lib/queue/scheduler.ts#L44) | Per-user reminder schedule (override on top of per-port digest) | Placeholder — 
per-port digest works; revisit when a customer asks for per-user override | +| [`src/lib/queue/workers/import.ts:13`](../src/lib/queue/workers/import.ts#L13) | CSV/Excel import worker — entire feature surface | Placeholder — nothing currently enqueues `import` jobs (verified) | --- ## E. Hidden / stubbed UI tabs -- **Company Documents tab** — `src/components/companies/company-tabs.tsx:229`. Hidden until `/api/v1/files` accepts a `companyId` filter (schema supports it, validator doesn't). +- ✅ **Company Documents tab** — landed 2026-05-08. `/api/v1/files` accepts `companyId`+`yachtId` filters; CompanyFilesTab + uploadZone wired through the storage abstraction. - **Berth Waiting List + Maintenance Log tabs** — `src/components/berths/berth-tabs.tsx:346`. Removed entirely; revisit if/when product asks. - **Interest Contract / Reservation tabs** — `src/components/interests/interest-{contract,reservation}-tab.tsx`. Render a "coming soon" friendly card; the real flow is gated on Documenso Phases 2–6. 
diff --git a/src/app/api/v1/admin/brochures/[id]/route.ts b/src/app/api/v1/admin/brochures/[id]/route.ts index a650d10..e194628 100644 --- a/src/app/api/v1/admin/brochures/[id]/route.ts +++ b/src/app/api/v1/admin/brochures/[id]/route.ts @@ -36,7 +36,7 @@ export const DELETE = withAuth( try { const id = params.id!; await archiveBrochure(ctx.portId, id); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/admin/custom-fields/[fieldId]/route.ts b/src/app/api/v1/admin/custom-fields/[fieldId]/route.ts index b711a12..7b888a9 100644 --- a/src/app/api/v1/admin/custom-fields/[fieldId]/route.ts +++ b/src/app/api/v1/admin/custom-fields/[fieldId]/route.ts @@ -3,67 +3,58 @@ import { NextRequest, NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; import { errorResponse, NotFoundError } from '@/lib/errors'; import { updateFieldSchema } from '@/lib/validators/custom-fields'; -import { - updateDefinition, - deleteDefinition, -} from '@/lib/services/custom-fields.service'; +import { updateDefinition, deleteDefinition } from '@/lib/services/custom-fields.service'; export const PATCH = withAuth( - withPermission( - 'admin', - 'manage_custom_fields', - async (req: NextRequest, ctx, params) => { - try { - const { fieldId } = params; - if (!fieldId) throw new NotFoundError('Custom field'); + withPermission('admin', 'manage_custom_fields', async (req: NextRequest, ctx, params) => { + try { + const { fieldId } = params; + if (!fieldId) throw new NotFoundError('Custom field'); - const body = await req.json(); + // Read raw body before parsing so we can inspect `fieldType` + // (the schema strips it; the service rejects any change). Using + // req.json() directly here is intentional — parseBody would lose + // the raw view we need for the mutation-attempt detection below. 
+ const body = (await req.json()) as Record<string, unknown>; + const data = updateFieldSchema.parse(body); - // Parse only allowed fields; if fieldType sneaks in, the service will catch it - const data = updateFieldSchema.parse(body); - - // Pass raw body too so service can detect fieldType mutation attempts - const updated = await updateDefinition( - ctx.portId, - fieldId, - ctx.userId, - { ...data, ...(body.fieldType !== undefined && { fieldType: body.fieldType }) }, - { - userId: ctx.userId, - portId: ctx.portId, - ipAddress: ctx.ipAddress, - userAgent: ctx.userAgent, - }, - ); - - return NextResponse.json({ data: updated }); - } catch (error) { - return errorResponse(error); - } - }, - ), -); - -export const DELETE = withAuth( - withPermission( - 'admin', - 'manage_custom_fields', - async (_req: NextRequest, ctx, params) => { - try { - const { fieldId } = params; - if (!fieldId) throw new NotFoundError('Custom field'); - - const result = await deleteDefinition(ctx.portId, fieldId, ctx.userId, { + // Pass raw body too so service can detect fieldType mutation attempts + const updated = await updateDefinition( + ctx.portId, + fieldId, + ctx.userId, + { ...data, ...(body.fieldType !== undefined && { fieldType: body.fieldType }) }, + { userId: ctx.userId, portId: ctx.portId, ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, - }); + }, + ); - return NextResponse.json({ data: result }); - } catch (error) { - return errorResponse(error); - } - }, - ), + return NextResponse.json({ data: updated }); + } catch (error) { + return errorResponse(error); + } + }), +); + +export const DELETE = withAuth( + withPermission('admin', 'manage_custom_fields', async (_req: NextRequest, ctx, params) => { + try { + const { fieldId } = params; + if (!fieldId) throw new NotFoundError('Custom field'); + + const result = await deleteDefinition(ctx.portId, fieldId, ctx.userId, { + userId: ctx.userId, + portId: ctx.portId, + ipAddress: ctx.ipAddress, + userAgent: ctx.userAgent, + }); + + return 
NextResponse.json({ data: result }); + } catch (error) { + return errorResponse(error); + } + }), ); diff --git a/src/app/api/v1/admin/custom-fields/route.ts b/src/app/api/v1/admin/custom-fields/route.ts index 6d614da..0626c79 100644 --- a/src/app/api/v1/admin/custom-fields/route.ts +++ b/src/app/api/v1/admin/custom-fields/route.ts @@ -1,12 +1,10 @@ import { NextRequest, NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; import { createFieldSchema } from '@/lib/validators/custom-fields'; -import { - listDefinitions, - createDefinition, -} from '@/lib/services/custom-fields.service'; +import { listDefinitions, createDefinition } from '@/lib/services/custom-fields.service'; export const GET = withAuth( withPermission('admin', 'manage_custom_fields', async (req: NextRequest, ctx) => { @@ -25,8 +23,7 @@ export const GET = withAuth( export const POST = withAuth( withPermission('admin', 'manage_custom_fields', async (req: NextRequest, ctx) => { try { - const body = await req.json(); - const data = createFieldSchema.parse(body); + const data = await parseBody(req, createFieldSchema); const definition = await createDefinition(ctx.portId, ctx.userId, data, { userId: ctx.userId, diff --git a/src/app/api/v1/admin/duplicates/handlers.ts b/src/app/api/v1/admin/duplicates/handlers.ts index 36d6586..b58f552 100644 --- a/src/app/api/v1/admin/duplicates/handlers.ts +++ b/src/app/api/v1/admin/duplicates/handlers.ts @@ -1,7 +1,9 @@ -import { NextResponse } from 'next/server'; +import { NextRequest, NextResponse } from 'next/server'; +import { z } from 'zod'; import { and, eq, inArray } from 'drizzle-orm'; import type { AuthContext } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { db } from '@/lib/db'; import { clients, clientMergeCandidates } from '@/lib/db/schema/clients'; import { errorResponse, 
NotFoundError, ValidationError } from '@/lib/errors'; @@ -11,6 +13,11 @@ import { type MergeFieldChoices, } from '@/lib/services/client-merge.service'; +const confirmMergeSchema = z.object({ + winnerId: z.string().min(1), + fieldChoices: z.record(z.string(), z.string()).optional(), +}); + /** * GET /api/v1/admin/duplicates * @@ -70,19 +77,13 @@ export async function listHandler(_req: Request, ctx: AuthContext): Promise { try { const id = params.id ?? ''; - const body = (await req.json().catch(() => ({}))) as { - winnerId?: string; - fieldChoices?: MergeFieldChoices; - }; - if (!body.winnerId) { - throw new ValidationError('winnerId is required'); - } + const body = await parseBody(req, confirmMergeSchema); const [candidate] = await db .select() @@ -111,7 +112,7 @@ export async function confirmMergeHandler( loserId, mergedBy: ctx.userId, callerPortId: ctx.portId, - fieldChoices: body.fieldChoices, + fieldChoices: body.fieldChoices as MergeFieldChoices | undefined, }); return NextResponse.json({ data: result }); diff --git a/src/app/api/v1/admin/invitations/[id]/route.ts b/src/app/api/v1/admin/invitations/[id]/route.ts index dee4d6e..e10d7f1 100644 --- a/src/app/api/v1/admin/invitations/[id]/route.ts +++ b/src/app/api/v1/admin/invitations/[id]/route.ts @@ -18,7 +18,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/admin/roles/[id]/route.ts b/src/app/api/v1/admin/roles/[id]/route.ts index 38ff9e4..2dbe5d4 100644 --- a/src/app/api/v1/admin/roles/[id]/route.ts +++ b/src/app/api/v1/admin/roles/[id]/route.ts @@ -43,7 +43,7 @@ export const DELETE = withAuth(async (_req, ctx, params) => { ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { 
return errorResponse(error); } diff --git a/src/app/api/v1/admin/settings/route.ts b/src/app/api/v1/admin/settings/route.ts index 4e5d9ad..5b09f41 100644 --- a/src/app/api/v1/admin/settings/route.ts +++ b/src/app/api/v1/admin/settings/route.ts @@ -44,7 +44,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/admin/users/[id]/route.ts b/src/app/api/v1/admin/users/[id]/route.ts index 4819e12..01f9bea 100644 --- a/src/app/api/v1/admin/users/[id]/route.ts +++ b/src/app/api/v1/admin/users/[id]/route.ts @@ -43,7 +43,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/admin/webhooks/[webhookId]/route.ts b/src/app/api/v1/admin/webhooks/[webhookId]/route.ts index cc56a4a..3209bea 100644 --- a/src/app/api/v1/admin/webhooks/[webhookId]/route.ts +++ b/src/app/api/v1/admin/webhooks/[webhookId]/route.ts @@ -4,11 +4,7 @@ import { withAuth, withPermission } from '@/lib/api/helpers'; import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; import { updateWebhookSchema } from '@/lib/validators/webhooks'; -import { - getWebhook, - updateWebhook, - deleteWebhook, -} from '@/lib/services/webhooks.service'; +import { getWebhook, updateWebhook, deleteWebhook } from '@/lib/services/webhooks.service'; // ─── GET /api/v1/admin/webhooks/[webhookId] ─────────────────────────────────── @@ -56,7 +52,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); 
} diff --git a/src/app/api/v1/berths/[id]/pdf-upload-url/handlers.ts b/src/app/api/v1/berths/[id]/pdf-upload-url/handlers.ts index f77ad9a..aae1047 100644 --- a/src/app/api/v1/berths/[id]/pdf-upload-url/handlers.ts +++ b/src/app/api/v1/berths/[id]/pdf-upload-url/handlers.ts @@ -8,8 +8,10 @@ */ import { NextResponse } from 'next/server'; +import { z } from 'zod'; import { type RouteHandler } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { db } from '@/lib/db'; import { berths } from '@/lib/db/schema/berths'; import { and, eq } from 'drizzle-orm'; @@ -17,17 +19,17 @@ import { errorResponse, NotFoundError, ValidationError } from '@/lib/errors'; import { getMaxUploadMb } from '@/lib/services/berth-pdf.service'; import { getStorageBackend } from '@/lib/storage'; -interface PostBody { - fileName: string; +const postBodySchema = z.object({ + fileName: z.string().min(1).max(255), /** Size hint in bytes — used to early-reject oversized uploads before we * burn a presigned URL. */ - sizeBytes?: number; -} + sizeBytes: z.number().int().nonnegative().optional(), +}); export const postHandler: RouteHandler = async (req, ctx, params) => { try { - const body = (await req.json()) as Partial<PostBody>; - const fileName = (body.fileName ?? '').trim(); + const body = await parseBody(req, postBodySchema); + const fileName = body.fileName.trim(); if (!fileName) throw new ValidationError('fileName is required'); // Tenant-scoped berth lookup. 
Without `eq(berths.portId, ctx.portId)` a diff --git a/src/app/api/v1/berths/[id]/pdf-versions/handlers.ts b/src/app/api/v1/berths/[id]/pdf-versions/handlers.ts index 955adf9..eab0faf 100644 --- a/src/app/api/v1/berths/[id]/pdf-versions/handlers.ts +++ b/src/app/api/v1/berths/[id]/pdf-versions/handlers.ts @@ -7,23 +7,27 @@ */ import { NextResponse } from 'next/server'; +import { z } from 'zod'; import { type RouteHandler } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse, ValidationError } from '@/lib/errors'; import { listBerthPdfVersions, uploadBerthPdf } from '@/lib/services/berth-pdf.service'; -interface PostBody { - storageKey: string; - fileName: string; - fileSizeBytes: number; - sha256: string; - parseResults?: { - engine: 'acroform' | 'ocr' | 'ai'; - extracted?: Record; - meanConfidence?: number; - warnings?: string[]; - }; -} +const postBodySchema = z.object({ + storageKey: z.string().min(1), + fileName: z.string().min(1).max(255), + fileSizeBytes: z.number().int().positive(), + sha256: z.string().min(1), + parseResults: z + .object({ + engine: z.enum(['acroform', 'ocr', 'ai']), + extracted: z.record(z.string(), z.unknown()).optional(), + meanConfidence: z.number().optional(), + warnings: z.array(z.string()).optional(), + }) + .optional(), +}); export const getHandler: RouteHandler = async (_req, ctx, params) => { try { @@ -47,16 +51,7 @@ const STORAGE_KEY_RE = export const postHandler: RouteHandler = async (req, ctx, params) => { try { - const body = (await req.json()) as Partial; - if (!body.storageKey || !body.fileName) { - throw new ValidationError('storageKey and fileName are required'); - } - if (typeof body.fileSizeBytes !== 'number' || body.fileSizeBytes <= 0) { - throw new ValidationError('fileSizeBytes must be a positive integer'); - } - if (!body.sha256 || typeof body.sha256 !== 'string') { - throw new ValidationError('sha256 is required'); - } + const body = await parseBody(req, 
postBodySchema); const expectedPrefix = `berths/${params.id!}/uploads/`; if (!body.storageKey.startsWith(expectedPrefix) || !STORAGE_KEY_RE.test(body.storageKey)) { throw new ValidationError( diff --git a/src/app/api/v1/berths/[id]/pdf-versions/parse-results/apply/handlers.ts b/src/app/api/v1/berths/[id]/pdf-versions/parse-results/apply/handlers.ts index a6bc022..3d3630b 100644 --- a/src/app/api/v1/berths/[id]/pdf-versions/parse-results/apply/handlers.ts +++ b/src/app/api/v1/berths/[id]/pdf-versions/parse-results/apply/handlers.ts @@ -1,25 +1,23 @@ import { NextResponse } from 'next/server'; +import { z } from 'zod'; import { type RouteHandler } from '@/lib/api/helpers'; -import { errorResponse, ValidationError } from '@/lib/errors'; +import { parseBody } from '@/lib/api/route-helpers'; +import { errorResponse } from '@/lib/errors'; import { applyParseResults, type ExtractedBerthFields } from '@/lib/services/berth-pdf.service'; -interface PostBody { - versionId: string; - fieldsToApply: Partial; -} +const postBodySchema = z.object({ + versionId: z.string().min(1), + fieldsToApply: z.record(z.string(), z.unknown()), +}); export const postHandler: RouteHandler = async (req, ctx, params) => { try { - const body = (await req.json()) as Partial; - if (!body.versionId) throw new ValidationError('versionId is required'); - if (!body.fieldsToApply || typeof body.fieldsToApply !== 'object') { - throw new ValidationError('fieldsToApply must be an object'); - } + const body = await parseBody(req, postBodySchema); const result = await applyParseResults( params.id!, body.versionId, - body.fieldsToApply, + body.fieldsToApply as Partial, ctx.portId, ); return NextResponse.json({ data: result }); diff --git a/src/app/api/v1/berths/[id]/route.ts b/src/app/api/v1/berths/[id]/route.ts index 5df2ee9..e79150d 100644 --- a/src/app/api/v1/berths/[id]/route.ts +++ b/src/app/api/v1/berths/[id]/route.ts @@ -46,7 +46,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: 
ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/clients/[id]/portal-user/route.ts b/src/app/api/v1/clients/[id]/portal-user/route.ts index c7b1de9..fac2801 100644 --- a/src/app/api/v1/clients/[id]/portal-user/route.ts +++ b/src/app/api/v1/clients/[id]/portal-user/route.ts @@ -38,7 +38,7 @@ export const POST = withAuth( }); if (!existing) throw new NotFoundError('portal user'); await resendActivation(existing.id, ctx.portId); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } const body = await parseBody(req, inviteSchema); diff --git a/src/app/api/v1/clients/[id]/tags/route.ts b/src/app/api/v1/clients/[id]/tags/route.ts index 29419d2..3a3f6b0 100644 --- a/src/app/api/v1/clients/[id]/tags/route.ts +++ b/src/app/api/v1/clients/[id]/tags/route.ts @@ -20,7 +20,7 @@ export const PUT = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/companies/[id]/tags/route.ts b/src/app/api/v1/companies/[id]/tags/route.ts index e4654bf..2e3fa29 100644 --- a/src/app/api/v1/companies/[id]/tags/route.ts +++ b/src/app/api/v1/companies/[id]/tags/route.ts @@ -20,7 +20,7 @@ export const PUT = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/currency/convert/route.ts b/src/app/api/v1/currency/convert/route.ts index 1e715aa..86bd782 100644 --- a/src/app/api/v1/currency/convert/route.ts +++ b/src/app/api/v1/currency/convert/route.ts @@ -2,6 +2,7 @@ import { NextResponse } from 'next/server'; import { z } from 'zod'; import { 
withAuth } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; import { convert } from '@/lib/services/currency'; @@ -13,8 +14,7 @@ const convertSchema = z.object({ export const POST = withAuth(async (req, _ctx) => { try { - const body = await req.json(); - const { amount, from, to } = convertSchema.parse(body); + const { amount, from, to } = await parseBody(req, convertSchema); const result = await convert(amount, from, to); diff --git a/src/app/api/v1/custom-fields/[entityId]/route.ts b/src/app/api/v1/custom-fields/[entityId]/route.ts index 65f5d5c..00cac6d 100644 --- a/src/app/api/v1/custom-fields/[entityId]/route.ts +++ b/src/app/api/v1/custom-fields/[entityId]/route.ts @@ -2,7 +2,7 @@ import { NextRequest, NextResponse } from 'next/server'; import { z } from 'zod'; import { withAuth } from '@/lib/api/helpers'; -import { parseQuery } from '@/lib/api/route-helpers'; +import { parseBody, parseQuery } from '@/lib/api/route-helpers'; import { errorResponse, NotFoundError, ValidationError } from '@/lib/errors'; import { requirePermission } from '@/lib/auth/permissions'; import { setValuesSchema } from '@/lib/validators/custom-fields'; @@ -91,8 +91,7 @@ export const PUT = withAuth(async (req: NextRequest, ctx, params) => { const { entityType } = parseQuery(req, querySchema); gateForEdit(entityType, ctx); - const body = await req.json(); - const { values } = setValuesSchema.parse(body); + const { values } = await parseBody(req, setValuesSchema); const result = await setValues( entityId, diff --git a/src/app/api/v1/expenses/export/csv/route.ts b/src/app/api/v1/expenses/export/csv/route.ts index befd283..7668fd1 100644 --- a/src/app/api/v1/expenses/export/csv/route.ts +++ b/src/app/api/v1/expenses/export/csv/route.ts @@ -1,6 +1,7 @@ import { NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import 
{ errorResponse } from '@/lib/errors'; import { exportCsv } from '@/lib/services/expense-export'; import { listExpensesSchema } from '@/lib/validators/expenses'; @@ -9,8 +10,7 @@ import { createAuditLog } from '@/lib/audit'; export const POST = withAuth( withPermission('expenses', 'view', async (req, ctx) => { try { - const body = await req.json().catch(() => ({})); - const query = listExpensesSchema.parse(body); + const query = await parseBody(req, listExpensesSchema); const csv = await exportCsv(ctx.portId, query); void createAuditLog({ diff --git a/src/app/api/v1/expenses/export/parent-company/route.ts b/src/app/api/v1/expenses/export/parent-company/route.ts index 04f561a..a4d74bc 100644 --- a/src/app/api/v1/expenses/export/parent-company/route.ts +++ b/src/app/api/v1/expenses/export/parent-company/route.ts @@ -1,6 +1,7 @@ import { NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; import { exportParentCompany } from '@/lib/services/expense-export'; import { listExpensesSchema } from '@/lib/validators/expenses'; @@ -11,8 +12,7 @@ import { listExpensesSchema } from '@/lib/validators/expenses'; export const POST = withAuth( withPermission('expenses', 'export', async (req, ctx) => { try { - const body = await req.json().catch(() => ({})); - const query = listExpensesSchema.parse(body); + const query = await parseBody(req, listExpensesSchema); const pdf = await exportParentCompany(ctx.portId, query); return new NextResponse(Buffer.from(pdf), { diff --git a/src/app/api/v1/expenses/export/pdf/route.ts b/src/app/api/v1/expenses/export/pdf/route.ts index 7b7548f..8667bb3 100644 --- a/src/app/api/v1/expenses/export/pdf/route.ts +++ b/src/app/api/v1/expenses/export/pdf/route.ts @@ -1,6 +1,7 @@ import { NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; +import { parseBody } from 
'@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; import { streamExpensePdf } from '@/lib/services/expense-pdf.service'; import { exportExpensePdfSchema } from '@/lib/validators/expenses'; @@ -32,8 +33,7 @@ export const dynamic = 'force-dynamic'; export const POST = withAuth( withPermission('expenses', 'export', async (req, ctx) => { try { - const body = await req.json().catch(() => ({})); - const input = exportExpensePdfSchema.parse(body); + const input = await parseBody(req, exportExpensePdfSchema); const { stream, suggestedFilename } = await streamExpensePdf({ portId: ctx.portId, diff --git a/src/app/api/v1/files/upload/route.ts b/src/app/api/v1/files/upload/route.ts index b71cdb0..901cf62 100644 --- a/src/app/api/v1/files/upload/route.ts +++ b/src/app/api/v1/files/upload/route.ts @@ -20,6 +20,8 @@ export const POST = withAuth( const metadata = uploadFileSchema.parse({ filename: (formData.get('filename') as string | null) ?? file.name, clientId: formData.get('clientId') as string | undefined, + yachtId: formData.get('yachtId') as string | undefined, + companyId: formData.get('companyId') as string | undefined, category: formData.get('category') as string | undefined, entityType: formData.get('entityType') as string | undefined, entityId: formData.get('entityId') as string | undefined, diff --git a/src/app/api/v1/interests/[id]/restore/route.ts b/src/app/api/v1/interests/[id]/restore/route.ts index d739472..0af362f 100644 --- a/src/app/api/v1/interests/[id]/restore/route.ts +++ b/src/app/api/v1/interests/[id]/restore/route.ts @@ -13,7 +13,7 @@ export const POST = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/interests/[id]/route.ts b/src/app/api/v1/interests/[id]/route.ts index 25e304b..5af4dab 100644 --- a/src/app/api/v1/interests/[id]/route.ts +++ 
b/src/app/api/v1/interests/[id]/route.ts @@ -3,11 +3,7 @@ import { NextResponse } from 'next/server'; import { withAuth, withPermission } from '@/lib/api/helpers'; import { parseBody } from '@/lib/api/route-helpers'; import { errorResponse } from '@/lib/errors'; -import { - getInterestById, - updateInterest, - archiveInterest, -} from '@/lib/services/interests.service'; +import { getInterestById, updateInterest, archiveInterest } from '@/lib/services/interests.service'; import { updateInterestSchema } from '@/lib/validators/interests'; export const GET = withAuth( @@ -47,7 +43,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/notifications/[notificationId]/route.ts b/src/app/api/v1/notifications/[notificationId]/route.ts index 2a425c0..0cbd2db 100644 --- a/src/app/api/v1/notifications/[notificationId]/route.ts +++ b/src/app/api/v1/notifications/[notificationId]/route.ts @@ -9,7 +9,7 @@ export const PATCH = withAuth(async (_req, ctx, params) => { const { notificationId } = params; if (!notificationId) throw new NotFoundError('Notification'); await notificationsService.markRead(notificationId, ctx.userId); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/notifications/read-all/route.ts b/src/app/api/v1/notifications/read-all/route.ts index ab7f623..cae0f4c 100644 --- a/src/app/api/v1/notifications/read-all/route.ts +++ b/src/app/api/v1/notifications/read-all/route.ts @@ -7,7 +7,7 @@ import * as notificationsService from '@/lib/services/notifications.service'; export const POST = withAuth(async (_req, ctx) => { try { await notificationsService.markAllRead(ctx.userId, ctx.portId); - return NextResponse.json({ success: true }); + 
return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/reminders/[id]/route.ts b/src/app/api/v1/reminders/[id]/route.ts index 81cd346..58dad85 100644 --- a/src/app/api/v1/reminders/[id]/route.ts +++ b/src/app/api/v1/reminders/[id]/route.ts @@ -43,7 +43,7 @@ export const DELETE = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/app/api/v1/search/recently-viewed/route.ts b/src/app/api/v1/search/recently-viewed/route.ts index 50f6490..e6b6ef3 100644 --- a/src/app/api/v1/search/recently-viewed/route.ts +++ b/src/app/api/v1/search/recently-viewed/route.ts @@ -2,6 +2,7 @@ import { NextRequest, NextResponse } from 'next/server'; import { sql } from 'drizzle-orm'; import { withAuth } from '@/lib/api/helpers'; +import { parseBody } from '@/lib/api/route-helpers'; import { db } from '@/lib/db'; import { errorResponse } from '@/lib/errors'; import { getRecentlyViewed, trackView } from '@/lib/services/recently-viewed.service'; @@ -255,7 +256,7 @@ export const GET = withAuth(async (req: NextRequest, ctx) => { const pairs = await getRecentlyViewed(ctx.userId, ctx.portId, limit); const items = await hydrate(ctx.portSlug, ctx.portId, pairs); - return NextResponse.json({ items }); + return NextResponse.json({ data: items }); } catch (error) { return errorResponse(error); } @@ -263,12 +264,9 @@ export const GET = withAuth(async (req: NextRequest, ctx) => { export const POST = withAuth(async (req: NextRequest, ctx) => { try { - const body = await req.json(); - const parsed = trackViewSchema.parse(body); - + const parsed = await parseBody(req, trackViewSchema); trackView(ctx.userId, ctx.portId, parsed.type, parsed.id); - - return NextResponse.json({ ok: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return 
errorResponse(error); } diff --git a/src/app/api/v1/yachts/[id]/tags/route.ts b/src/app/api/v1/yachts/[id]/tags/route.ts index 1721dd5..527b84d 100644 --- a/src/app/api/v1/yachts/[id]/tags/route.ts +++ b/src/app/api/v1/yachts/[id]/tags/route.ts @@ -20,7 +20,7 @@ export const PUT = withAuth( ipAddress: ctx.ipAddress, userAgent: ctx.userAgent, }); - return NextResponse.json({ success: true }); + return new NextResponse(null, { status: 204 }); } catch (error) { return errorResponse(error); } diff --git a/src/components/admin/custom-fields/custom-fields-manager.tsx b/src/components/admin/custom-fields/custom-fields-manager.tsx index d69be89..15bd710 100644 --- a/src/components/admin/custom-fields/custom-fields-manager.tsx +++ b/src/components/admin/custom-fields/custom-fields-manager.tsx @@ -169,11 +169,12 @@ export function CustomFieldsManager() {
Heads up: custom fields render in detail-page sidebars and the entity - export, but they don’t plug into core platform behaviour: search doesn’t index - them, the recommender doesn’t score on them, audit logs don’t diff them, and - merge-tokens won’t expand them in EOI/contract templates. Use them for rep-only - annotations (e.g. “Berth visit notes”, “Referral source”) — anything - load-bearing for the deal flow needs a first-class column. + export, and merge-tokens of the form{' '} + {`{{custom.fieldName}}`} now expand in + EOI/contract/email templates for client/interest/berth contexts. They still don’t plug + into the global search index, the berth recommender, or the entity-diff audit log — use them + for rep-only annotations and template-merge values, but anything load-bearing for the deal + flow still needs a first-class column.
setActiveTab(v as EntityTab)}> diff --git a/src/components/companies/company-files-tab.tsx b/src/components/companies/company-files-tab.tsx new file mode 100644 index 0000000..a47a066 --- /dev/null +++ b/src/components/companies/company-files-tab.tsx @@ -0,0 +1,88 @@ +'use client'; + +import { useState } from 'react'; +import { useQueryClient } from '@tanstack/react-query'; + +import { FileGrid } from '@/components/files/file-grid'; +import { FileUploadZone } from '@/components/files/file-upload-zone'; +import { FilePreviewDialog } from '@/components/files/file-preview-dialog'; +import { PermissionGate } from '@/components/shared/permission-gate'; +import { usePaginatedQuery } from '@/hooks/use-paginated-query'; +import { useRealtimeInvalidation } from '@/hooks/use-realtime-invalidation'; +import { apiFetch } from '@/lib/api/client'; +import type { FileRow } from '@/components/files/file-grid'; + +interface CompanyFilesTabProps { + companyId: string; +} + +export function CompanyFilesTab({ companyId }: CompanyFilesTabProps) { + const queryClient = useQueryClient(); + const [previewFile, setPreviewFile] = useState(null); + + const { data, isLoading } = usePaginatedQuery({ + queryKey: ['files', { companyId }], + endpoint: `/api/v1/files?companyId=${encodeURIComponent(companyId)}`, + filterDefinitions: [], + }); + + useRealtimeInvalidation({ + 'file:uploaded': [['files', { companyId }]], + 'file:updated': [['files', { companyId }]], + 'file:deleted': [['files', { companyId }]], + }); + + const handleDownload = async (file: FileRow) => { + try { + const res = await apiFetch<{ data: { url: string; filename: string } }>( + `/api/v1/files/${file.id}/download`, + ); + const a = document.createElement('a'); + a.href = res.data.url; + a.download = res.data.filename; + a.click(); + } catch { + // silent + } + }; + + const handleDelete = async (file: FileRow) => { + if (!confirm(`Delete "${file.filename}"? 
This cannot be undone.`)) return; + try { + await apiFetch(`/api/v1/files/${file.id}`, { method: 'DELETE' }); + queryClient.invalidateQueries({ queryKey: ['files', { companyId }] }); + } catch { + // silent + } + }; + + return ( +
+ + { + queryClient.invalidateQueries({ queryKey: ['files', { companyId }] }); + }} + /> + + + {}} + onDelete={handleDelete} + isLoading={isLoading} + /> + + !open && setPreviewFile(null)} + fileId={previewFile?.id} + fileName={previewFile?.filename} + mimeType={previewFile?.mimeType ?? undefined} + /> +
+ ); +} diff --git a/src/components/companies/company-tabs.tsx b/src/components/companies/company-tabs.tsx index 0f77be9..037e7fe 100644 --- a/src/components/companies/company-tabs.tsx +++ b/src/components/companies/company-tabs.tsx @@ -11,6 +11,7 @@ import { NotesList } from '@/components/shared/notes-list'; import { EntityActivityFeed } from '@/components/shared/entity-activity-feed'; import { CompanyMembersTab } from '@/components/companies/company-members-tab'; import { CompanyOwnedYachtsTab } from '@/components/companies/company-owned-yachts-tab'; +import { CompanyFilesTab } from '@/components/companies/company-files-tab'; import { AddressesEditor, type Address } from '@/components/shared/addresses-editor'; import { apiFetch } from '@/lib/api/client'; import type { CountryCode } from '@/lib/i18n/countries'; @@ -226,9 +227,11 @@ export function getCompanyTabs({ /> ), }, - // The Documents tab was a "Coming soon" stub. Hidden until the - // /api/v1/files endpoint accepts a companyId filter (the schema - // supports it; the validator doesn't). 
+ { + id: 'documents', + label: 'Documents', + content: , + }, { id: 'notes', label: 'Notes', diff --git a/src/components/files/file-upload-zone.tsx b/src/components/files/file-upload-zone.tsx index 7200a33..5ae32c6 100644 --- a/src/components/files/file-upload-zone.tsx +++ b/src/components/files/file-upload-zone.tsx @@ -16,6 +16,8 @@ interface FileUploadZoneProps { entityType?: string; entityId?: string; clientId?: string; + yachtId?: string; + companyId?: string; onUploadComplete?: () => void; } @@ -23,6 +25,8 @@ export function FileUploadZone({ entityType, entityId, clientId, + yachtId, + companyId, onUploadComplete, }: FileUploadZoneProps) { const [isDragOver, setIsDragOver] = useState(false); @@ -46,6 +50,8 @@ export function FileUploadZone({ formData.append('file', file); formData.append('filename', file.name); if (clientId) formData.append('clientId', clientId); + if (yachtId) formData.append('yachtId', yachtId); + if (companyId) formData.append('companyId', companyId); if (entityType) formData.append('entityType', entityType); if (entityId) formData.append('entityId', entityId); @@ -54,8 +60,7 @@ export function FileUploadZone({ ); // Use fetch directly for FormData (apiFetch JSON-encodes body) - const portId = (await import('@/stores/ui-store')) - .useUIStore.getState().currentPortId; + const portId = (await import('@/stores/ui-store')).useUIStore.getState().currentPortId; const headers = new Headers(); if (portId) headers.set('X-Port-Id', portId); const uploadRes = await fetch('/api/v1/files/upload', { @@ -73,9 +78,7 @@ export function FileUploadZone({ ); } catch { setUploading((prev) => - prev.map((u) => - u.id === uploadId ? { ...u, error: 'Upload failed' } : u, - ), + prev.map((u) => (u.id === uploadId ? 
{ ...u, error: 'Upload failed' } : u)), ); } }), @@ -87,7 +90,7 @@ export function FileUploadZone({ onUploadComplete?.(); }, 1500); }, - [clientId, entityType, entityId, onUploadComplete], + [clientId, yachtId, companyId, entityType, entityId, onUploadComplete], ); const handleDrop = useCallback( @@ -135,9 +138,7 @@ export function FileUploadZone({ >

Drop files here or click to upload

-

- PDF, Word, Excel, images up to 50MB -

+

PDF, Word, Excel, images up to 50MB

- setUploading((prev) => prev.filter((x) => x.id !== u.id)) - } + onClick={() => setUploading((prev) => prev.filter((x) => x.id !== u.id))} > diff --git a/src/hooks/use-search.ts b/src/hooks/use-search.ts index df5c9e6..f41e59e 100644 --- a/src/hooks/use-search.ts +++ b/src/hooks/use-search.ts @@ -225,10 +225,10 @@ export function useSearch(query: string, opts: UseSearchOptions = {}) { staleTime: 60_000, }); - const recentlyViewedQuery = useQuery<{ items: RecentlyViewedItem[] }>({ + const recentlyViewedQuery = useQuery<{ data: RecentlyViewedItem[] }>({ queryKey: ['search', 'recently-viewed'], queryFn: ({ signal }) => - apiFetch<{ items: RecentlyViewedItem[] }>('/api/v1/search/recently-viewed', { signal }), + apiFetch<{ data: RecentlyViewedItem[] }>('/api/v1/search/recently-viewed', { signal }), staleTime: 30_000, }); @@ -238,7 +238,7 @@ export function useSearch(query: string, opts: UseSearchOptions = {}) { isFetching: searchQuery.isFetching, enabled, recentSearches: recentSearchQuery.data?.searches ?? [], - recentlyViewed: recentlyViewedQuery.data?.items ?? [], + recentlyViewed: recentlyViewedQuery.data?.data ?? [], }; } diff --git a/src/lib/db/migrations/0047_system_settings_nulls_not_distinct.sql b/src/lib/db/migrations/0047_system_settings_nulls_not_distinct.sql new file mode 100644 index 0000000..680e8d8 --- /dev/null +++ b/src/lib/db/migrations/0047_system_settings_nulls_not_distinct.sql @@ -0,0 +1,34 @@ +-- Reconcile the system_settings unique-index drift surfaced in the +-- final-deferred audit. The Drizzle schema declares a uniqueIndex on +-- (key, port_id), but Postgres treats NULL values as distinct by default. +-- That means two rows with `(same_key, NULL)` would BOTH be allowed — +-- a global-setting collision the index claims to prevent. +-- +-- This was not just theoretical: the dev DB had 60+ duplicate +-- `(storage_backend, NULL)` rows from buggy non-upsert call sites that +-- predated the upsert hardening. 
Those rows accumulated invisibly because +-- the index allowed them. Step 1 dedupes (keeps the most recent row per +-- `(key, port_id)` group); step 2 rebuilds the unique index with +-- `NULLS NOT DISTINCT` (Postgres 15+) so future inserts can't recreate the +-- ambiguity. + +-- Step 1: dedupe duplicate rows, keeping the row with the latest updated_at. +-- Uses a CTE + ROW_NUMBER() so the keeper is deterministic across reruns. +WITH ranked AS ( + SELECT ctid, + ROW_NUMBER() OVER ( + PARTITION BY "key", "port_id" + ORDER BY "updated_at" DESC, ctid DESC + ) AS rn + FROM "system_settings" +) +DELETE FROM "system_settings" +USING ranked +WHERE "system_settings".ctid = ranked.ctid AND ranked.rn > 1; + +-- Step 2: replace the unique index with one that treats NULLs as equal, +-- so global settings (port_id IS NULL) are unique by key alone. +DROP INDEX IF EXISTS "system_settings_key_port_idx"; +CREATE UNIQUE INDEX "system_settings_key_port_idx" + ON "system_settings" ("key", "port_id") + NULLS NOT DISTINCT; diff --git a/src/lib/db/schema/system.ts b/src/lib/db/schema/system.ts index ee2f935..21ef554 100644 --- a/src/lib/db/schema/system.ts +++ b/src/lib/db/schema/system.ts @@ -135,9 +135,14 @@ export const systemSettings = pgTable( updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(), }, (table) => [ + // Migration 0047 rebuilds this index with `NULLS NOT DISTINCT` so a + // global setting (port_id IS NULL) is unique by key alone — the + // default `NULLS DISTINCT` semantics let duplicates accumulate. + // Drizzle's `uniqueIndex` builder doesn't surface NULLS NOT DISTINCT, + // so the migration is the source of truth for that flag and + // `db:push` against an empty DB would skip it (matches the + // documented limitation for `berths.current_pdf_version_id`). 
uniqueIndex('system_settings_key_port_idx').on(table.key, table.portId), - // Note: the PRIMARY KEY is `key` alone based on schema, but unique on (key, port_id) - // We use key as primary key per SQL schema ], ); diff --git a/src/lib/services/ai-budget.service.ts b/src/lib/services/ai-budget.service.ts index 619bbad..1e91697 100644 --- a/src/lib/services/ai-budget.service.ts +++ b/src/lib/services/ai-budget.service.ts @@ -76,15 +76,26 @@ export async function setAiBudget( if (next.softCapTokens > next.hardCapTokens) { throw new ValidationError('softCapTokens cannot exceed hardCapTokens'); } + // True upsert (atomic on the (key, port_id) NULLS NOT DISTINCT index + // — migration 0047). Replaces a delete-then-insert pattern that had a + // race window where two concurrent updates could both DELETE and both + // INSERT, accumulating duplicates. await db - .delete(systemSettings) - .where(and(eq(systemSettings.key, KEY), eq(systemSettings.portId, portId))); - await db.insert(systemSettings).values({ - key: KEY, - portId, - value: next as unknown as Record, - updatedBy: userId, - }); + .insert(systemSettings) + .values({ + key: KEY, + portId, + value: next as unknown as Record, + updatedBy: userId, + }) + .onConflictDoUpdate({ + target: [systemSettings.key, systemSettings.portId], + set: { + value: next as unknown as Record, + updatedBy: userId, + updatedAt: new Date(), + }, + }); return next; } diff --git a/src/lib/services/document-sends.service.ts b/src/lib/services/document-sends.service.ts index 71ce266..4f2bd74 100644 --- a/src/lib/services/document-sends.service.ts +++ b/src/lib/services/document-sends.service.ts @@ -38,9 +38,12 @@ import { berthPdfVersions, clients, clientContacts, + customFieldDefinitions, + customFieldValues, interests, ports, } from '@/lib/db/schema'; +import { inArray } from 'drizzle-orm'; import type { DocumentSend } from '@/lib/db/schema'; import { ForbiddenError, NotFoundError, ValidationError } from '@/lib/errors'; import { logger } from 
'@/lib/logger'; @@ -162,9 +165,93 @@ export async function buildMergeValues( } } + // Custom-field tokens (`{{custom.<fieldName>}}`). The validator allows + // any matching shape; the resolver here looks up real values per-port, + // per-entity and substitutes them. Unknown field names stay + // unresolved — `findUnresolvedTokens` flags them at preview time so + // the rep can edit the template before sending. + await mergeCustomFieldValues(values, portId, recipient, context); + return values; } +interface CustomMergeContext { + berthId?: string; + brochureLabel?: string; +} + +/** + * Resolve `{{custom.<fieldName>}}` tokens. Reads every per-port custom + * field definition for the entity types currently in scope (client, + * interest, berth) and joins to the actual stored value for each entity + * id we have on hand. Boolean values render as 'true' / 'false', dates + * as ISO yyyy-mm-dd, numbers as plain numerics, selects/text verbatim. + */ +async function mergeCustomFieldValues( + values: Record<string, string>, + portId: string, + recipient: SendRecipientInput, + context: CustomMergeContext, +): Promise<void> { + // Build the (entityType → entityId) map for the current send context. 
+ const entityIdsByType = new Map<string, string>(); + if (recipient.clientId) entityIdsByType.set('client', recipient.clientId); + if (recipient.interestId) entityIdsByType.set('interest', recipient.interestId); + if (context.berthId) entityIdsByType.set('berth', context.berthId); + if (entityIdsByType.size === 0) return; + + const definitions = await db + .select() + .from(customFieldDefinitions) + .where( + and( + eq(customFieldDefinitions.portId, portId), + inArray(customFieldDefinitions.entityType, Array.from(entityIdsByType.keys())), + ), + ); + if (definitions.length === 0) return; + + const fieldIds = definitions.map((d) => d.id); + const entityIds = Array.from(entityIdsByType.values()); + const valueRows = await db + .select() + .from(customFieldValues) + .where( + and( + inArray(customFieldValues.fieldId, fieldIds), + inArray(customFieldValues.entityId, entityIds), + ), + ); + + const valueByFieldEntity = new Map<string, unknown>(); + for (const row of valueRows) { + valueByFieldEntity.set(`${row.fieldId}|${row.entityId}`, row.value); + } + + for (const def of definitions) { + const entityId = entityIdsByType.get(def.entityType); + if (!entityId) continue; + const raw = valueByFieldEntity.get(`${def.id}|${entityId}`); + if (raw === undefined || raw === null) continue; + const token = `{{custom.${def.fieldName}}}`; + values[token] = stringifyCustomValue(raw, def.fieldType); + } +} + +function stringifyCustomValue(raw: unknown, fieldType: string): string { + if (raw === null || raw === undefined) return ''; + switch (fieldType) { + case 'boolean': + return raw ? 'true' : 'false'; + case 'date': + return typeof raw === 'string' ? raw.slice(0, 10) : String(raw); + case 'number': + return String(raw); + default: + return typeof raw === 'string' ? raw : JSON.stringify(raw); + } +} + +/** + * Render a body for the dry-run UI. Returns `{ html, unresolved }`. 
The UI * uses `unresolved` to populate the warning chip; the rep can't submit @@ -295,9 +382,18 @@ async function streamAttachmentOrLink( // to the body. Per §11.1 the size decision is made BEFORE the SMTP relay, // so we never produce duplicate sends. const storage = await getStorageBackend(); + // Bind the proxy token to the issuing port slug. The storage key is + // already structured `${portSlug}/...` via generateStorageKey() — this + // closes the loop so a buggy future call site that hands us a key from + // a different port can't mint a valid 24h URL for it. + const portRow = await db.query.ports.findFirst({ + where: eq(ports.id, portId), + columns: { slug: true }, + }); const { url } = await storage.presignDownload(attachment.storageKey, { expirySeconds: 24 * 60 * 60, filename: attachment.fileName, + portSlug: portRow?.slug, }); // HTML-escape the filename: brochure filenames are admin-supplied and // could in theory carry markup (e.g. `">