Compare commits
135 Commits
868b1f40c0
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 5c8c12ba1f | |||
| 3e4d9d6310 | |||
| 267c2b6d1f | |||
| a0e68eb060 | |||
|
|
05babe57a0 | ||
|
|
1a87f28fd4 | ||
|
|
f3143d7561 | ||
|
|
0f648a924b | ||
|
|
b4fb3b2ca6 | ||
|
|
da7ede71d6 | ||
|
|
0a5f085a9e | ||
|
|
c312cd3685 | ||
|
|
59b9e8f177 | ||
|
|
5fc68a5f34 | ||
|
|
a8c6c071e6 | ||
|
|
94331bd6ec | ||
|
|
588f8bc43c | ||
|
|
c5b41ca4b5 | ||
|
|
9890d065f8 | ||
|
|
d2171ea79b | ||
|
|
4592789712 | ||
|
|
758d8628cf | ||
|
|
44db579988 | ||
|
|
7274baf1e1 | ||
|
|
70105715a7 | ||
|
|
472c12280b | ||
|
|
1ae5d88af4 | ||
|
|
8c02f88cbd | ||
|
|
789656bc70 | ||
|
|
fb02f3d5e1 | ||
|
|
e95316bd8a | ||
|
|
d07f1ed5e0 | ||
|
|
f10334683d | ||
|
|
8690352c56 | ||
|
|
9240cf1808 | ||
|
|
adba73fcca | ||
|
|
c60cbf4014 | ||
|
|
f93de75bb5 | ||
|
|
64f0e0a1b8 | ||
|
|
3f6a8aa3b8 | ||
|
|
c90876abad | ||
|
|
8cdee99310 | ||
|
|
d19b74b935 | ||
|
|
1b78eadd36 | ||
|
|
1fb3aa3aeb | ||
|
|
7bd969b41a | ||
|
|
63c4073e64 | ||
|
|
83239104e0 | ||
|
|
4bab6de8be | ||
|
|
4eea4ceff9 | ||
|
|
7854cbabe4 | ||
|
|
d3a6a9beef | ||
|
|
fc7595faf8 | ||
|
|
6a609ecf94 | ||
|
|
cf430d70c3 | ||
|
|
312779c0c5 | ||
|
|
4723994bdc | ||
|
|
c4a41d5f5b | ||
|
|
687a1f1c2f | ||
|
|
ade4c9e77d | ||
|
|
d4b3a1338f | ||
|
|
cf37d09519 | ||
|
|
180912ba9f | ||
|
|
014bbe1923 | ||
|
|
a3e002852b | ||
|
|
312ebf1a88 | ||
|
|
0b8d08b57e | ||
|
|
86372a857f | ||
|
|
b4776b4c3c | ||
|
|
a0091e4ca6 | ||
|
|
249ffe3e4a | ||
|
|
83693dd993 | ||
|
|
15d4849030 | ||
|
|
e00e812199 | ||
|
|
b1e787e55c | ||
|
|
fb1116f1d4 | ||
|
|
5b70e9b04b | ||
|
|
57cbc9a506 | ||
|
|
6e3d910c76 | ||
|
|
ff92a08620 | ||
|
|
05257723f6 | ||
|
|
3017ce4b3a | ||
|
|
a2588f2c4a | ||
|
|
18119644ae | ||
|
|
61e2fbb2db | ||
|
|
05be89ec6f | ||
|
|
8699f81879 | ||
|
|
d62822c284 | ||
|
|
089f4a67a4 | ||
|
|
77ad10ced1 | ||
|
|
e598cc0708 | ||
|
|
f5772ce318 | ||
|
|
49d34e00c8 | ||
|
|
c612bbdfd9 | ||
|
|
872c75f1a1 | ||
|
|
c45aac551d | ||
|
|
9ad1df85d2 | ||
|
|
8e4d2fc5b4 | ||
|
|
78f2f46d41 | ||
|
|
3a9419fe10 | ||
|
|
b703684285 | ||
|
|
a792d9a182 | ||
|
|
d7ec2a8507 | ||
|
|
cb83b09b2d | ||
|
|
7574c3b575 | ||
|
|
bb105f5365 | ||
|
|
caafae15dd | ||
|
|
46c7389930 | ||
|
|
80fc5932be | ||
|
|
b26b87b2fa | ||
|
|
88f76b6b04 | ||
|
|
a32f41b91d | ||
|
|
cf1c8b66db | ||
|
|
596476280d | ||
|
|
e9359fc431 | ||
|
|
4767caec01 | ||
|
|
49d92234dd | ||
|
|
cad55e3565 | ||
|
|
21868ee5fc | ||
|
|
c7ab816c99 | ||
|
|
e40b6c3d99 | ||
|
|
4bcc7f8be6 | ||
|
|
18e5c124b0 | ||
|
|
8b077e1999 | ||
|
|
36b92eb827 | ||
|
|
e2398099c4 | ||
|
|
d364b09885 | ||
|
|
57a099acc4 | ||
|
|
a391934b73 | ||
|
|
e3e0e69c04 | ||
|
|
6af2ac9680 | ||
|
|
a767652d74 | ||
|
|
c824b2df12 | ||
|
|
d197f8b321 | ||
|
|
76a7387dcc |
@@ -1 +0,0 @@
|
|||||||
{"sessionId":"fd05cbd7-d695-4a70-9223-4b25f3369829","pid":88534,"acquiredAt":1776866083076}
|
|
||||||
19
.gitignore
vendored
19
.gitignore
vendored
@@ -28,9 +28,28 @@ docker-compose.override.yml
|
|||||||
|
|
||||||
# Ad-hoc screenshots / scratch artifacts at repo root
|
# Ad-hoc screenshots / scratch artifacts at repo root
|
||||||
/*.png
|
/*.png
|
||||||
|
/*.jpg
|
||||||
|
|
||||||
# Legacy Nuxt portal — kept on disk for reference, not tracked here
|
# Legacy Nuxt portal — kept on disk for reference, not tracked here
|
||||||
/client-portal/
|
/client-portal/
|
||||||
|
|
||||||
|
# Sister marketing site — separate Nuxt project, not part of CRM tracking
|
||||||
|
/website/
|
||||||
|
|
||||||
# Mobile audit screenshots — generated locally, regenerable
|
# Mobile audit screenshots — generated locally, regenerable
|
||||||
/.audit/
|
/.audit/
|
||||||
|
/.audit-screenshots/
|
||||||
|
|
||||||
|
# Migration script output (CSV reports, transcripts)
|
||||||
|
.migration/
|
||||||
|
|
||||||
|
# Tool caches / runtime state
|
||||||
|
/.claude/
|
||||||
|
/.serena/
|
||||||
|
/ruvector.db
|
||||||
|
|
||||||
|
# Filesystem storage backend root (FilesystemBackend default location)
|
||||||
|
/storage/
|
||||||
|
|
||||||
|
# Local berth-PDF + brochure samples used as upload fixtures during dev.
|
||||||
|
/berth_pdf_example/
|
||||||
|
|||||||
20
CLAUDE.md
20
CLAUDE.md
@@ -95,6 +95,16 @@ src/
|
|||||||
- **Inline editing pattern:** detail pages (clients, yachts, companies, interests, residential clients/interests) use `<InlineEditableField>` (`src/components/shared/inline-editable-field.tsx`) for click-to-edit text/select/textarea fields and `<InlineTagEditor>` (`src/components/shared/inline-tag-editor.tsx`) for tag chips. Each entity exposes a `PUT /api/v1/<entity>/[id]/tags` endpoint backed by a `set<Entity>Tags` service helper that wipes-and-rewrites the join table inside a single transaction. There are no separate "Edit" modal forms on detail pages — the entire overview tab is editable in place.
|
- **Inline editing pattern:** detail pages (clients, yachts, companies, interests, residential clients/interests) use `<InlineEditableField>` (`src/components/shared/inline-editable-field.tsx`) for click-to-edit text/select/textarea fields and `<InlineTagEditor>` (`src/components/shared/inline-tag-editor.tsx`) for tag chips. Each entity exposes a `PUT /api/v1/<entity>/[id]/tags` endpoint backed by a `set<Entity>Tags` service helper that wipes-and-rewrites the join table inside a single transaction. There are no separate "Edit" modal forms on detail pages — the entire overview tab is editable in place.
|
||||||
- **Notes (polymorphic across entity types):** `notes.service.ts` dispatches across `clientNotes`, `interestNotes`, `yachtNotes`, `companyNotes` based on an `entityType` discriminator. `<NotesList entityType="…" />` works for all four. `companyNotes` lacks an `updatedAt` column — the service substitutes `createdAt` so callers get a uniform shape.
|
- **Notes (polymorphic across entity types):** `notes.service.ts` dispatches across `clientNotes`, `interestNotes`, `yachtNotes`, `companyNotes` based on an `entityType` discriminator. `<NotesList entityType="…" />` works for all four. `companyNotes` lacks an `updatedAt` column — the service substitutes `createdAt` so callers get a uniform shape.
|
||||||
- **Route handler exports:** Next.js App Router `route.ts` files only allow specific named exports (`GET|POST|…`). Service-tested handler functions live in sibling `handlers.ts` files (e.g. `src/app/api/v1/yachts/[id]/handlers.ts`) and are imported by the colocated `route.ts` for `withAuth(withPermission(...))` wrapping. Integration tests import from `handlers.ts` directly to bypass auth/permission middleware.
|
- **Route handler exports:** Next.js App Router `route.ts` files only allow specific named exports (`GET|POST|…`). Service-tested handler functions live in sibling `handlers.ts` files (e.g. `src/app/api/v1/yachts/[id]/handlers.ts`) and are imported by the colocated `route.ts` for `withAuth(withPermission(...))` wrapping. Integration tests import from `handlers.ts` directly to bypass auth/permission middleware.
|
||||||
|
- **Multi-berth interest model:** `interest_berths` is the source of truth for which berths an interest is linked to; `interests.berth_id` does not exist (dropped in migration 0029). Three role flags: `is_primary` (≤1 row per interest, enforced by partial unique index — surfaces as "the berth for this deal" in templates / forms / list views), `is_specific_interest` (true → berth shows as "Under Offer" on the public map; false → legal/EOI-only link), `is_in_eoi_bundle` (covered by the interest's EOI signature). Read/write through `src/lib/services/interest-berths.service.ts` helpers (`getPrimaryBerth`, `getPrimaryBerthsForInterests`, `upsertInterestBerth`, `setPrimaryBerth`, `removeInterestBerth`); never query `interest_berths` from outside that service.
|
||||||
|
- **Mooring number canonical format:** `^[A-Z]+\d+$` (e.g. `A1`, `B12`, `E18`) — no hyphen, no leading zeros. Stored, displayed, URL-encoded, and rendered in EOIs in this exact form. Phase 0 normalized the entire CRM dataset; the mooring-pattern regex gates the public `/api/public/berths/[mooringNumber]` route before any DB hit.
|
||||||
|
- **Public berths API:** `/api/public/berths` (list) and `/api/public/berths/[mooringNumber]` (single) are the public-facing data feed for the marketing website. Output shape mirrors the legacy NocoDB Berths shape verbatim (`"Mooring Number"`, `"Side Pontoon"`, etc.) — see `src/lib/services/public-berths.ts`. Cache headers: `s-maxage=300, stale-while-revalidate=60`. Status mapping: `"Sold"` (berth.status=sold) > `"Under Offer"` (status=under_offer OR has any active `interest_berths.is_specific_interest=true` link with `interests.outcome IS NULL`) > `"Available"`. The companion `/api/public/health` endpoint returns `{env, appUrl}` so the website refuses to start when its `CRM_PUBLIC_URL` points at a different deployment env.
|
||||||
|
- **Berth recommender:** Pure SQL ranking (no AI). Lives in `src/lib/services/berth-recommender.service.ts`. Tier ladder A/B/C/D classifies each feasible berth based on its `interest_berths` aggregates. Heat scoring (recency / furthest stage / interest count / EOI count) only fires for tier B (lost/cancelled-only history); per-port admin tunes weights via `system_settings` keys (`heat_weight_*`, `recommender_max_oversize_pct`, `recommender_top_n_default`, `fallthrough_policy`, `fallthrough_cooldown_days`, `tier_ladder_hide_late_stage`). The recommender enforces multi-port isolation both at the entry point (rejects cross-port interest lookups) AND inside the SQL aggregates CTE (defense-in-depth `i.port_id` filter).
|
||||||
|
- **EOI bundle / range formatter:** Multi-berth EOIs render the in-bundle berth set as a compact range string ("A1-A3, B5-B7") via `formatBerthRange()` in `src/lib/templates/berth-range.ts`. Used only inside the Documenso `Berth Range` form field — CRM UI always shows berths as individual chips. The `{{eoi.berthRange}}` token is in `VALID_MERGE_TOKENS`.
|
||||||
|
- **Pluggable storage backend:** Code never imports MinIO/S3 directly. All file I/O goes through `getStorageBackend()` from `src/lib/storage/`. Configured via `system_settings.storage_backend` ('s3' | 'filesystem'). Switching backends is a settings change + `pnpm tsx scripts/migrate-storage.ts` run. **Filesystem backend is single-node only**: refuses to start when `MULTI_NODE_DEPLOYMENT=true`. Multi-node deployments must use the s3-compatible backend.
|
||||||
|
- **Per-berth PDFs:** Versioned via `berth_pdf_versions`; `berths.current_pdf_version_id` always points to the latest active version. Storage key is UUID-based per upload (not version-numbered) so concurrent uploads can't collide on blob paths; `pg_advisory_xact_lock` per berth_id serializes the version-number allocation. 3-tier parser: AcroForm → OCR (Tesseract.js with positional heuristics) → optional AI (rep clicks "AI parse" only when OCR confidence is low). Magic-byte (`%PDF-`) check enforced on BOTH the in-server upload path AND the presigned-PUT path (the post-upload service streams the first 5 bytes via the storage backend). Mooring-number mismatch between PDF and target berth surfaces as a service-level `ConflictError` unless the apply call passes `confirmMooringMismatch: true`.
|
||||||
|
- **Brochures:** Per-port; default brochure marked via `is_default` (enforced by partial unique index on `(port_id) WHERE is_default=true AND archived_at IS NULL`). Archived brochures retain version history. Same upload flow as berth PDFs (presign + magic-byte verification on the post-upload register endpoint).
|
||||||
|
- **Send-from accounts (sales send-outs):** Configurable via `system_settings`; defaults to `sales@portnimara.com` for human-touch and `noreply@portnimara.com` for automation. SMTP/IMAP passwords are AES-256-GCM encrypted at rest; the API never returns decrypted secrets — only `*PassIsSet` boolean markers. Send-out audit goes to `document_sends` (separate from `audit_logs` because of volume + binary refs). Body markdown is XSS-safe via `renderEmailBody()` (escape-then-allowlist; tested against the standard XSS vector list). Rate limit: 50 sends/user/hour individual. Pre-send size threshold: files > `email_attach_threshold_mb` ship as a 24h signed-URL link rather than an attachment (avoids the duplicate-send race from async bounces). The download-link fallback HTML-escapes the filename to prevent injection from admin-supplied brochure names. Bounce monitoring requires IMAP credentials in addition to SMTP — without them, the size-rejection banner stays disabled.
|
||||||
|
- **NocoDB berth import:** `pnpm tsx scripts/import-berths-from-nocodb.ts --apply --port-slug port-nimara` re-imports from the legacy NocoDB Berths table. Idempotent: rows where `updated_at > last_imported_at` (the "human edited this since last import" guard) are skipped unless `--force`. Adds `--update-snapshot` to also rewrite `src/lib/db/seed-data/berths.json`. Uses `pg_advisory_xact_lock` so two simultaneous runs serialize. Pure helpers in `src/lib/services/berth-import.ts` are unit-tested.
|
||||||
- **Routes:** Multi-tenant via `[portSlug]` dynamic segment. Typed routes enabled.
|
- **Routes:** Multi-tenant via `[portSlug]` dynamic segment. Typed routes enabled.
|
||||||
- **Pre-commit:** Husky + lint-staged runs ESLint fix + Prettier on staged `.ts`/`.tsx` files. The hook also blocks `.env*` files (including `.env.example`) from being committed; pass them via a separate workflow if needed.
|
- **Pre-commit:** Husky + lint-staged runs ESLint fix + Prettier on staged `.ts`/`.tsx` files. The hook also blocks `.env*` files (including `.env.example`) from being committed; pass them via a separate workflow if needed.
|
||||||
|
|
||||||
@@ -139,6 +149,14 @@ Domain-specific references:
|
|||||||
|
|
||||||
- `docs/eoi-documenso-field-mapping.md` — canonical mapping from `EoiContext`
|
- `docs/eoi-documenso-field-mapping.md` — canonical mapping from `EoiContext`
|
||||||
paths to the Documenso template's `formValues` keys, with the matching
|
paths to the Documenso template's `formValues` keys, with the matching
|
||||||
AcroForm field names used by the in-app pathway.
|
AcroForm field names used by the in-app pathway. **Note:** the multi-
|
||||||
|
berth EOI bundle adds a new `Berth Range` form field populated by
|
||||||
|
`formatBerthRange()` from `src/lib/templates/berth-range.ts` — the live
|
||||||
|
Documenso template needs the field added before multi-berth EOIs render
|
||||||
|
with the compact range string instead of just the primary mooring.
|
||||||
- `assets/README.md` — what the in-app EOI source PDF must contain and how
|
- `assets/README.md` — what the in-app EOI source PDF must contain and how
|
||||||
to override its path in dev/test.
|
to override its path in dev/test.
|
||||||
|
- `docs/berth-recommender-and-pdf-plan.md` — the comprehensive plan for the
|
||||||
|
Phase 0–8 berth-recommender + PDF + send-outs work bundle. Single source
|
||||||
|
of truth for the multi-berth interest model, recommender tier ladder,
|
||||||
|
pluggable storage, per-berth PDF parser, and sales send-out flows.
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
# Stage 1: Install dependencies
|
# Stage 1: Install dependencies
|
||||||
FROM node:20-alpine AS deps
|
FROM node:20-alpine AS deps
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY package.json pnpm-lock.yaml ./
|
COPY package.json pnpm-lock.yaml ./
|
||||||
RUN pnpm install --frozen-lockfile --prod=false
|
RUN pnpm install --frozen-lockfile --prod=false
|
||||||
|
|
||||||
# Stage 2: Build the application
|
# Stage 2: Build the application
|
||||||
FROM node:20-alpine AS builder
|
FROM node:20-alpine AS builder
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY --from=deps /app/node_modules ./node_modules
|
COPY --from=deps /app/node_modules ./node_modules
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
FROM node:20-alpine
|
FROM node:20-alpine
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY package.json pnpm-lock.yaml ./
|
COPY package.json pnpm-lock.yaml ./
|
||||||
RUN pnpm install --frozen-lockfile
|
RUN pnpm install --frozen-lockfile
|
||||||
|
|||||||
@@ -1,26 +1,40 @@
|
|||||||
# Stage 1: Install dependencies (dev deps needed for esbuild)
|
# Stage 1: Install dependencies (dev deps needed for esbuild)
|
||||||
FROM node:20-alpine AS deps
|
FROM node:20-alpine AS deps
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY package.json pnpm-lock.yaml ./
|
COPY package.json pnpm-lock.yaml ./
|
||||||
RUN pnpm install --frozen-lockfile --prod=false
|
RUN pnpm install --frozen-lockfile --prod=false
|
||||||
|
|
||||||
# Stage 2: Build the worker bundle
|
# Stage 2: Build the worker bundle
|
||||||
FROM node:20-alpine AS builder
|
FROM node:20-alpine AS builder
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY --from=deps /app/node_modules ./node_modules
|
COPY --from=deps /app/node_modules ./node_modules
|
||||||
COPY . .
|
COPY . .
|
||||||
ENV SKIP_ENV_VALIDATION=1
|
ENV SKIP_ENV_VALIDATION=1
|
||||||
RUN pnpm build:worker
|
RUN pnpm build:worker
|
||||||
|
|
||||||
# Stage 3: Production runner (prod deps only)
|
# Stage 3: Production runner (prod deps only).
|
||||||
|
#
|
||||||
|
# Critical ordering: create the worker user FIRST and chown the workdir
|
||||||
|
# BEFORE pnpm install, so node_modules + lazy-cache directories
|
||||||
|
# (tesseract.js, sharp) are owned by the worker user. Without this, the
|
||||||
|
# previous layout had pnpm install run as root → node_modules root-owned
|
||||||
|
# → tesseract.js / sharp wrote to node_modules/.cache and EACCES'd at
|
||||||
|
# first PDF parse in prod (auditor-K §39).
|
||||||
FROM node:20-alpine AS runner
|
FROM node:20-alpine AS runner
|
||||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
RUN corepack enable && corepack prepare pnpm@10.33.2 --activate
|
||||||
WORKDIR /app
|
|
||||||
COPY package.json pnpm-lock.yaml ./
|
|
||||||
RUN pnpm install --frozen-lockfile --prod
|
|
||||||
RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 worker
|
RUN addgroup --system --gid 1001 nodejs && adduser --system --uid 1001 worker
|
||||||
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js
|
WORKDIR /app
|
||||||
|
RUN chown -R worker:nodejs /app
|
||||||
USER worker
|
USER worker
|
||||||
|
COPY --chown=worker:nodejs package.json pnpm-lock.yaml ./
|
||||||
|
RUN pnpm install --frozen-lockfile --prod
|
||||||
|
COPY --from=builder --chown=worker:nodejs /app/dist/worker.js ./worker.js
|
||||||
|
# Healthcheck — pings Redis from inside the worker container. Without
|
||||||
|
# this, a worker whose Redis connection has silently dropped (BullMQ
|
||||||
|
# rejects new jobs but the Node process is alive) is invisible to
|
||||||
|
# compose / swarm and jobs queue indefinitely (auditor-K §40).
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \
|
||||||
|
CMD node -e "const Redis=require('ioredis');const r=new Redis(process.env.REDIS_URL,{maxRetriesPerRequest:1,connectTimeout:3000,lazyConnect:true});r.connect().then(()=>r.ping()).then(()=>{r.disconnect();process.exit(0)}).catch(()=>process.exit(1))" || exit 1
|
||||||
CMD ["node", "worker.js"]
|
CMD ["node", "worker.js"]
|
||||||
|
|||||||
@@ -46,6 +46,10 @@ services:
|
|||||||
interval: 15s
|
interval: 15s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
# Give the SIGTERM handler in src/server.ts time to drain in-flight
|
||||||
|
# HTTP requests, close Socket.io, and disconnect Redis before Docker
|
||||||
|
# SIGKILLs the process. The internal hard timeout is 25s.
|
||||||
|
stop_grace_period: 30s
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
@@ -58,6 +62,9 @@ services:
|
|||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
redis:
|
redis:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
# Match the app: BullMQ jobs need time to finish or be released back
|
||||||
|
# to the queue when worker.ts handles SIGTERM.
|
||||||
|
stop_grace_period: 30s
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
|
|||||||
196
docs/admin-ux-backlog.md
Normal file
196
docs/admin-ux-backlog.md
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
# Admin / settings UX backlog — STATUS
|
||||||
|
|
||||||
|
Living tracker for the admin/UX backlog. Items are marked DONE or
|
||||||
|
REMAINING based on what landed in the autonomous-push session.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## DONE in the autonomous push
|
||||||
|
|
||||||
|
### Foundations
|
||||||
|
|
||||||
|
- **Currency API verified end-to-end**. `scripts/test-currency-api.ts`
|
||||||
|
fetches live Frankfurter rates → upserts → reads back → converts.
|
||||||
|
Inverse-rate drift confirmed at ≤0.001.
|
||||||
|
- **Storage abstraction audit complete**. Every byte path
|
||||||
|
(signed EOIs, contracts, brochures, berth PDFs, files, avatars,
|
||||||
|
branding logos) goes through `getStorageBackend()`. `/api/ready`
|
||||||
|
and the system-monitoring health probe now check the active
|
||||||
|
backend (S3 or filesystem) instead of always probing MinIO.
|
||||||
|
|
||||||
|
### User settings
|
||||||
|
|
||||||
|
- Country + Timezone selectors with cross-defaulting + auto-detect
|
||||||
|
banner ("Looks like you're in Europe/Paris — Update?")
|
||||||
|
- Email change with verification flow (`user_email_changes` table,
|
||||||
|
`/api/v1/me/email/confirm/<token>`, `/api/v1/me/email/cancel/<token>`)
|
||||||
|
- Password reset triggered via better-auth `requestPasswordReset`
|
||||||
|
- Profile photo upload + crop (square 256×256) via shared
|
||||||
|
`<ImageCropperDialog>` + `/api/v1/me/avatar`
|
||||||
|
|
||||||
|
### Branding
|
||||||
|
|
||||||
|
- Logo upload + crop modal in admin/branding (uses the same shared
|
||||||
|
cropper, persists via `/api/v1/admin/settings/image` → storage backend)
|
||||||
|
- Email header/footer HTML defaults injectable via "Insert default" button
|
||||||
|
- Brand colour picker, app-name field, logo URL all in one card
|
||||||
|
|
||||||
|
### Storage admin
|
||||||
|
|
||||||
|
- New layout: S3 config form FIRST, swap action SECOND
|
||||||
|
- Test connection button before any switch
|
||||||
|
- Two-button switch: "Switch + migrate" vs "Switch only" with warning modal
|
||||||
|
- `runMigration()` honours `skipMigration` flag
|
||||||
|
|
||||||
|
### Backup management
|
||||||
|
|
||||||
|
- Real `/admin/backup` page driven by new `backup_jobs` table
|
||||||
|
- `runBackup()` service spawns `pg_dump --format=custom`, streams to
|
||||||
|
active storage backend, records size + path
|
||||||
|
- Download button presigns the .dump for offline restore
|
||||||
|
- Super-admin gated
|
||||||
|
|
||||||
|
### AI admin panel
|
||||||
|
|
||||||
|
- Dedicated `/admin/ai` page consolidating master switch +
|
||||||
|
monthly token cap + provider credentials
|
||||||
|
- Per-feature settings (OCR, berth-PDF parser, recommender)
|
||||||
|
linked from the same page
|
||||||
|
|
||||||
|
### Onboarding
|
||||||
|
|
||||||
|
- Real `/admin/onboarding` page with auto-checked steps
|
||||||
|
- Reads each setting key + lists endpoint (roles / users / tags) to
|
||||||
|
decide completion
|
||||||
|
- Manual checkboxes for steps without an auto-detect signal
|
||||||
|
- Progress bar + "Mark done"/"Mark incomplete" buttons
|
||||||
|
- State persisted in `system_settings.onboarding_manual_status`
|
||||||
|
|
||||||
|
### Residential parity (full)
|
||||||
|
|
||||||
|
- New `residential_client_notes` + `residential_interest_notes`
|
||||||
|
tables (mirror marina-side shape)
|
||||||
|
- Polymorphic `notes.service.ts` extended with two new entity types
|
||||||
|
through verifyParent + listForEntity + create + update + delete
|
||||||
|
- New `<NotesList>` accepts `residential_clients` /
|
||||||
|
`residential_interests` entity types
|
||||||
|
- Activity endpoints: `/api/v1/residential/clients/[id]/activity` +
|
||||||
|
`/api/v1/residential/interests/[id]/activity`
|
||||||
|
- Notes endpoints: 4 new routes covering GET/POST/PATCH/DELETE
|
||||||
|
- `residential-client-tabs.tsx` + `residential-interest-tabs.tsx`
|
||||||
|
built using the marina-side `DetailLayout` pattern (Overview +
|
||||||
|
Notes + Activity tabs, Interests tab on the client)
|
||||||
|
- Detail header components mirror the marina-side strip
|
||||||
|
- `useBreadcrumbHint` wired into both detail components
|
||||||
|
|
||||||
|
### Residential pipeline stages — configurable
|
||||||
|
|
||||||
|
- New `residential-stages.service.ts` with list/save + orphan-check
|
||||||
|
- `/api/v1/residential/stages` GET/PUT
|
||||||
|
- `/admin/residential-stages` admin UI with reassign-on-remove
|
||||||
|
modal (select new stage per affected interest before save)
|
||||||
|
- Validators relaxed from `z.enum(...)` to `z.string()` so any
|
||||||
|
admin-defined stage id round-trips
|
||||||
|
|
||||||
|
### Documenso Phase 1 (EOI generate flow polish)
|
||||||
|
|
||||||
|
- Schema migrations applied:
|
||||||
|
`document_signers.invited_at / opened_at / last_reminder_sent_at / signing_token`,
|
||||||
|
`documents.completion_cc_emails / auto_reminder_interval_days`
|
||||||
|
- `transformSigningUrl()` now maps SignerRole → URL segment correctly
|
||||||
|
(approver→cc, witness→witness) so emails don't land on `/sign/error`
|
||||||
|
- New `POST /api/v1/documents/[id]/send-invitation` endpoint with
|
||||||
|
next-pending-signer auto-pick
|
||||||
|
- Per-port settings added: `documenso_developer_label`,
|
||||||
|
`documenso_approver_label`, `documenso_developer_user_id`,
|
||||||
|
`documenso_approver_user_id` (Phase 7 RBAC binding fields)
|
||||||
|
|
||||||
|
### Misc UI/UX
|
||||||
|
|
||||||
|
- Sidebar collapse removed (always expanded)
|
||||||
|
- Audit log filter inputs sized + dates widened
|
||||||
|
- Custom Settings section got a long-form description
|
||||||
|
- Reminder digest timezone uses `TimezoneCombobox`
|
||||||
|
- Port form: currency dropdown + timezone combobox + brand color
|
||||||
|
- Permissions count badge opens a modal with granted/denied
|
||||||
|
- Role names display-normalized via `prettifyRoleName`
|
||||||
|
- Sales email config: token list + tooltips on threshold + body fields
|
||||||
|
- Custom Fields page: amber heads-up about non-integration with
|
||||||
|
search / recommender / audit / merge tokens
|
||||||
|
- Tag form: native `<input type="color">`
|
||||||
|
- FilterBar Select crash fixed (no empty-string item values)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## REMAINING — large pieces that didn't fit this push
|
||||||
|
|
||||||
|
### 1. Documenso Phase 2 — Webhook handler enhancement (~3-4 hours)
|
||||||
|
|
||||||
|
Cascading "your turn" emails when each signer completes; on-completion
|
||||||
|
PDF distribution; token-based recipient matching; idempotency lock.
|
||||||
|
File to extend: `src/app/api/webhooks/documenso/route.ts`. The
|
||||||
|
schema columns are already in place (Phase 1).
|
||||||
|
|
||||||
|
### 2. Documenso Phase 3 — Custom doc upload-to-Documenso (~6-8 hours)
|
||||||
|
|
||||||
|
Backend service `custom-document-upload.service.ts` + endpoint
|
||||||
|
`POST /api/v1/interests/[id]/upload-for-signing`. Accepts a PDF +
|
||||||
|
recipient list + field-placement JSON, calls `createDocument` →
|
||||||
|
`placeFields` → `sendDocument` on the per-port Documenso client.
|
||||||
|
Persists a row in `documents` table.
|
||||||
|
|
||||||
|
### 3. Documenso Phase 4 — Field placement UI (~10-14 hours)
|
||||||
|
|
||||||
|
The biggest piece. Needs:
|
||||||
|
|
||||||
|
- 4a: Recipient configurator dialog (~2-3h)
|
||||||
|
- 4b: PDF rendering with `react-pdf` (~3-4h)
|
||||||
|
- 4c: Auto-detect anchor scanner via `pdfjs-dist.getTextContent` (~4-6h)
|
||||||
|
- 4d: Drag-drop overlay using `dnd-kit` (~3-4h)
|
||||||
|
- 4e: Send button → calls Phase 3 endpoint (~1h)
|
||||||
|
|
||||||
|
Plan locked in `docs/documenso-build-plan.md` Phase 4 — the
|
||||||
|
field-detector regexes, the anchor patterns, and the type-to-bbox
|
||||||
|
sizing table are all spelled out.
|
||||||
|
|
||||||
|
### 4. Documenso Phase 5 — Embedded signing URL emission verification (~1-2 hours)
|
||||||
|
|
||||||
|
Verify the website's `/sign/<type>/<token>` page handles every signer
|
||||||
|
role + every documentType combination. Update website's
|
||||||
|
`signerMessages` map keyed on `(documentType, role)`. Apply the
|
||||||
|
nginx CORS block from `docs/documenso-integration-audit.md`.
|
||||||
|
|
||||||
|
### 5. Documenso Phase 6 — Polish items (deferred)
|
||||||
|
|
||||||
|
Auto-send delay, audit-log additions, per-document customisation,
|
||||||
|
document expiration, reminder rate-limit display, failed-webhook
|
||||||
|
recovery UI. Each ~2-3 hours; all deferred until Phases 1-4 ship.
|
||||||
|
|
||||||
|
### 6. Project Director — UI binding for the developer-user fields
|
||||||
|
|
||||||
|
Schema + setting keys are now in place
|
||||||
|
(`documenso_developer_user_id`, `documenso_approver_user_id` +
|
||||||
|
`documenso_developer_label` / `_approver_label`). The remaining
|
||||||
|
work is: add a "Linked to CRM user" dropdown in
|
||||||
|
`/admin/documenso/page.tsx` that lists port users; when bound,
|
||||||
|
auto-fill name/email from the user profile and mark name/email
|
||||||
|
fields read-only. Webhook handler can then match against the
|
||||||
|
linked user's email for in-CRM signing-status updates.
|
||||||
|
|
||||||
|
### 7. Custom-fields hardening (~ongoing)
|
||||||
|
|
||||||
|
Remediation paths for the heads-up banner concerns:
|
||||||
|
|
||||||
|
- **Search index**: extend the GIN tsvector to include
|
||||||
|
customFieldValues content
|
||||||
|
- **Audit diff**: extend `diffEntity` to walk the
|
||||||
|
customFieldValues blob
|
||||||
|
- **Merge tokens**: add `{{custom.<fieldName>}}` handling at
|
||||||
|
template-render time, plus surface them in the merge-tokens UI
|
||||||
|
|
||||||
|
### 8. Documenso v2 webhook payload audit (small)
|
||||||
|
|
||||||
|
Risk #4 from `docs/documenso-build-plan.md` — confirm v2 payload
|
||||||
|
shape (`payload.documentId` vs `payload.id`, recipient.token vs
|
||||||
|
`recipient.recipientId`) against a live v2 instance before relying
|
||||||
|
on Phase 2 cascading emails.
|
||||||
1126
docs/audit-comprehensive-2026-05-05.md
Normal file
1126
docs/audit-comprehensive-2026-05-05.md
Normal file
File diff suppressed because it is too large
Load Diff
753
docs/audit-comprehensive-2026-05-06.md
Normal file
753
docs/audit-comprehensive-2026-05-06.md
Normal file
@@ -0,0 +1,753 @@
|
|||||||
|
# Comprehensive Audit — 2026-05-06
|
||||||
|
|
||||||
|
Conducted directly after the smart-archive / hard-delete / bulk-wizard /
|
||||||
|
audit-overhaul / synthetic-seed batches landed (commits `d07f1ed`
|
||||||
|
through `9890d06`). Prior comprehensive audit:
|
||||||
|
`docs/audit-comprehensive-2026-05-05.md`.
|
||||||
|
|
||||||
|
Findings are sorted by severity. Each has a concrete file:line, a
|
||||||
|
scenario, and a fix recommendation.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## CRITICAL
|
||||||
|
|
||||||
|
### C1. 5 of 10 BullMQ workers are never imported (production + dev)
|
||||||
|
|
||||||
|
**Files:** `src/worker.ts:13-17`, `src/server.ts:72-76`
|
||||||
|
|
||||||
|
`src/worker.ts` (production) and `src/server.ts` (dev fallback) both
|
||||||
|
import only:
|
||||||
|
|
||||||
|
- `emailWorker`
|
||||||
|
- `documentsWorker`
|
||||||
|
- `notificationsWorker`
|
||||||
|
- `importWorker`
|
||||||
|
- `exportWorker`
|
||||||
|
|
||||||
|
**Missing:** `aiWorker`, `bulkWorker`, `maintenanceWorker`, `reportsWorker`, `webhooksWorker`.
|
||||||
|
|
||||||
|
Because BullMQ workers are constructed at the top of each worker
|
||||||
|
module and only "start" when the module is imported, never importing
|
||||||
|
them means:
|
||||||
|
|
||||||
|
- **Webhooks never deliver.** `webhooksWorker` is what processes the
|
||||||
|
`webhooks` queue; the admin "Replay" button we just shipped enqueues
|
||||||
|
jobs that pile up in `pending` forever.
|
||||||
|
- **All maintenance crons silently no-op.** `maintenanceWorker` handles
|
||||||
|
`database-backup`, `backup-cleanup`, `session-cleanup`,
|
||||||
|
`currency-refresh`, `gdpr-export-cleanup`, `ai-usage-retention`,
|
||||||
|
`error-events-retention`, `website-submissions-retention`,
|
||||||
|
`alerts-evaluate`, `analytics-refresh`, `calendar-sync`,
|
||||||
|
`temp-file-cleanup`, `form-expiry-check` — none run.
|
||||||
|
- **Scheduled reports never generate.** `reportsWorker` handles
|
||||||
|
`report-scheduler` (every minute).
|
||||||
|
- **Bulk jobs never process** (the synchronous bulk endpoints work, but
|
||||||
|
any deferred-bulk path is dead).
|
||||||
|
- **AI usage features never run.**
|
||||||
|
|
||||||
|
**Impact:** Production CRM has been silently shedding webhook
|
||||||
|
deliveries, never running retention/cleanup, never sending scheduled
|
||||||
|
reports.
|
||||||
|
|
||||||
|
**Fix:**
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// Append to src/worker.ts AND the inline section of src/server.ts:
|
||||||
|
import { aiWorker } from '@/lib/queue/workers/ai';
|
||||||
|
import { bulkWorker } from '@/lib/queue/workers/bulk';
|
||||||
|
import { maintenanceWorker } from '@/lib/queue/workers/maintenance';
|
||||||
|
import { reportsWorker } from '@/lib/queue/workers/reports';
|
||||||
|
import { webhooksWorker } from '@/lib/queue/workers/webhooks';
|
||||||
|
|
||||||
|
const workers = [
|
||||||
|
emailWorker,
|
||||||
|
documentsWorker,
|
||||||
|
notificationsWorker,
|
||||||
|
importWorker,
|
||||||
|
exportWorker,
|
||||||
|
aiWorker,
|
||||||
|
bulkWorker,
|
||||||
|
maintenanceWorker,
|
||||||
|
reportsWorker,
|
||||||
|
webhooksWorker,
|
||||||
|
];
|
||||||
|
```
|
||||||
|
|
||||||
|
After fix, run `pnpm dev` and watch `/admin/webhooks/{id}` deliveries
|
||||||
|
go from `pending` → `success` to confirm.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## HIGH
|
||||||
|
|
||||||
|
### H1. Hard-delete request endpoints have zero rate limiting
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/app/api/v1/clients/[id]/hard-delete-request/route.ts:1-37`
|
||||||
|
- `src/app/api/v1/clients/bulk-hard-delete-request/route.ts:1-32`
|
||||||
|
|
||||||
|
Each call writes a fresh code to Redis and emails it to the operator's
|
||||||
|
address. No `withRateLimit(...)`. An attacker who has compromised an
|
||||||
|
admin account (or even just the new `permanently_delete_clients`
|
||||||
|
permission) can:
|
||||||
|
|
||||||
|
1. Email-bomb the admin's own inbox (every request → email).
|
||||||
|
2. Probe whether arbitrary client IDs exist (200 + `sentToMaskedEmail`
|
||||||
|
vs 404 `client not found` is a UID oracle).
|
||||||
|
3. Burn SMTP quota.
|
||||||
|
|
||||||
|
**Fix:** add `withRateLimit('auth', ...)` or a new dedicated bucket
|
||||||
|
(e.g. 5 per hour per user). Pattern is already in
|
||||||
|
`src/app/api/v1/clients/[id]/gdpr-export/route.ts`.
|
||||||
|
|
||||||
|
### H2. Audit-page view fires on every paginated reload (log spam)
|
||||||
|
|
||||||
|
**File:** `src/app/api/v1/admin/audit/route.ts:48-72`
|
||||||
|
|
||||||
|
I added a "watch the watchers" `view` audit row for first-page audit
|
||||||
|
fetches. That's the right idea, but the page also re-fires the request
|
||||||
|
on every filter change (severity, source, action, date range, search).
|
||||||
|
A diligent admin filtering through the inspector for an investigation
|
||||||
|
will write dozens of `view` audit rows per minute — making it harder to
|
||||||
|
find the actual events they're looking for.
|
||||||
|
|
||||||
|
**Fix:** dedupe in Redis with a 60-second per-user TTL key, only emit
|
||||||
|
if the key didn't exist. Or only fire when no filters are active.
|
||||||
|
|
||||||
|
### H3. Hard-delete error messages distinguish "no code" vs "wrong code"
|
||||||
|
|
||||||
|
**File:** `src/lib/services/client-hard-delete.service.ts:166-174`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
if (!stored) throw new ValidationError('Confirmation code expired or not requested');
|
||||||
|
if (!safeEqualStr(stored, args.code.trim())) {
|
||||||
|
throw new ValidationError('Confirmation code is incorrect');
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The two messages let an attacker distinguish "you've never requested a
|
||||||
|
code" (so spam the request endpoint to open the window) from "wrong
|
||||||
|
code" (so brute-force more codes). 4-digit space is only 10,000 — with
|
||||||
|
distinguishable feedback an attacker can confirm code validity in
|
||||||
|
≤5,000 attempts on average.
|
||||||
|
|
||||||
|
**Fix:** collapse to a single `'Invalid or expired code'` message; the
|
||||||
|
operator already has the email open and knows what they typed.
|
||||||
|
|
||||||
|
### H4. Synthetic seed leaves `super_admin` linked-port-roles empty
|
||||||
|
|
||||||
|
**File:** `src/lib/db/seed-bootstrap.ts:147-160`
|
||||||
|
|
||||||
|
The bootstrap creates the `userProfiles` row with
|
||||||
|
`isSuperAdmin: true` for `super-admin-matt-portnimara`, but doesn't
|
||||||
|
create `userPortRoles` rows. The actual real `user` rows (admin@,
|
||||||
|
agent@, viewer@) are only created via the Playwright global-setup.
|
||||||
|
Anyone running `pnpm db:seed:synthetic` then `pnpm dev` and trying to
|
||||||
|
log in via the UI hits an unauthenticated state until they also run
|
||||||
|
playwright setup or sign up via better-auth manually.
|
||||||
|
|
||||||
|
**Fix:** either document this in `CLAUDE.md` Quick Reference, or add a
|
||||||
|
`pnpm db:seed:dev-users` companion script that signs up the three
|
||||||
|
test users + links roles. Today's synthetic-seed flow felt clean
|
||||||
|
because the playwright setup was still applied; in a fresh clone it
|
||||||
|
will surprise.
|
||||||
|
|
||||||
|
### H5. Documenso bad-secret 200 response is correct, but enables enum oracle
|
||||||
|
|
||||||
|
**File:** `src/app/api/webhooks/documenso/route.ts:67-86`
|
||||||
|
|
||||||
|
The route returns `200 ok=false error=Invalid secret` for a wrong
|
||||||
|
secret. That's webhook best-practice (don't leak signal to attackers),
|
||||||
|
but combined with the new audit row that captures
|
||||||
|
`metadata.providedLen`, an attacker can probe secret-length over time
|
||||||
|
without being detected (just a "warning" row per attempt). On an admin
|
||||||
|
inspector with 1000s of rows, a slow-rate probe is invisible.
|
||||||
|
|
||||||
|
**Fix:** add per-IP rate limit (5/min) to `/api/webhooks/documenso/`
|
||||||
|
when secret check fails. Don't block real Documenso traffic — it
|
||||||
|
shouldn't fail the secret check.
|
||||||
|
|
||||||
|
### H6. The audit-log inspector page itself isn't backed by a real "view" gate beyond `admin.view_audit_log`
|
||||||
|
|
||||||
|
**File:** `src/app/api/v1/admin/audit/route.ts:31`
|
||||||
|
|
||||||
|
Audit log has the most sensitive cross-cutting data in the system
|
||||||
|
(every login attempt with attempted email, every secret-regenerate,
|
||||||
|
every hard-delete). It's gated only by `admin.view_audit_log`. The
|
||||||
|
seed grants this to `director` AND `super_admin`. Consider:
|
||||||
|
|
||||||
|
- making the page super-admin-only for production, OR
|
||||||
|
- adding a secondary confirmation when viewing rows that contain
|
||||||
|
attempted emails / IP ranges (PII).
|
||||||
|
|
||||||
|
**Fix:** change `withPermission('admin', 'view_audit_log', ...)` to
|
||||||
|
add `if (!ctx.isSuperAdmin) check sensitive_audit_view`. Or accept
|
||||||
|
the current model but document it in the role docs.
|
||||||
|
|
||||||
|
### H7. Three "coming soon" stubs in production UI
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/components/clients/client-tabs.tsx:276` — "File attachments coming soon."
|
||||||
|
- `src/components/clients/client-reservations-tab.tsx:41` — "History is coming soon."
|
||||||
|
- `src/components/berths/berth-tabs.tsx:327` — "{label} coming soon"
|
||||||
|
|
||||||
|
Visible to every user on every client / berth detail page. Either ship
|
||||||
|
the feature or hide the tab.
|
||||||
|
|
||||||
|
**Fix:** for `client-tabs.tsx` line 276 (Files), the `files` table
|
||||||
|
already exists and supports clientId — ship a list view.
|
||||||
|
For `berth-tabs.tsx` line 327 — find the calling tab labels and
|
||||||
|
either implement or remove from the tabs array.
|
||||||
|
For `client-reservations-tab.tsx` line 41 — query past reservations
|
||||||
|
when the user toggles a "show history" filter.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MEDIUM
|
||||||
|
|
||||||
|
### M1. `attachWorkerAudit` recurring job names list duplicates scheduler.ts (drift risk)
|
||||||
|
|
||||||
|
**File:** `src/lib/queue/audit-helpers.ts:23-46`
|
||||||
|
|
||||||
|
The 20 recurring job names are hardcoded in the audit helper; the
|
||||||
|
scheduler also has its own list. If someone adds a new cron without
|
||||||
|
updating both, the cron_run audit row never fires for that job.
|
||||||
|
|
||||||
|
**Fix:** export the list from `scheduler.ts` and import it in
|
||||||
|
`audit-helpers.ts`. Single source of truth.
|
||||||
|
|
||||||
|
### M2. `client-merge-log.surviving_client_id` deleted by hard-delete (history loss)
|
||||||
|
|
||||||
|
**File:** `src/lib/services/client-hard-delete.service.ts:200-202`
|
||||||
|
|
||||||
|
Hard-delete drops every `client_merge_log` row whose surviving id
|
||||||
|
matches. Those rows are the audit trail of WHO was merged INTO this
|
||||||
|
client. Once deleted, you've lost evidence of the prior merge.
|
||||||
|
|
||||||
|
**Fix:** replace `delete` with a column nullification, or move the row
|
||||||
|
to a `client_merge_log_archive` table. Audit trail per GDPR Article 5
|
||||||
|
should outlive the data.
|
||||||
|
|
||||||
|
### M3. Bulk hard-delete loops one-shot codes through Redis (5x writes)
|
||||||
|
|
||||||
|
**File:** `src/lib/services/client-hard-delete.service.ts:382-396`
|
||||||
|
|
||||||
|
For a 100-client bulk delete, the function writes 100 single-client
|
||||||
|
codes to Redis just to satisfy `hardDeleteClient`'s expectation. Each
|
||||||
|
write is a round-trip; on a Redis hiccup mid-loop, you can end up
|
||||||
|
with a half-deleted batch.
|
||||||
|
|
||||||
|
**Fix:** refactor `hardDeleteClient` so the inner deletion can be called
|
||||||
|
without the per-client code check (extract `_doHardDelete()` private
|
||||||
|
helper used by both single and bulk paths). Keeps Redis clean.
|
||||||
|
|
||||||
|
### M4. Smart-restore wizard has dead reversal applier for `berth_released`
|
||||||
|
|
||||||
|
**File:** `src/lib/services/client-restore.service.ts:360-372`
|
||||||
|
|
||||||
|
The `applyReversal` switch case for `'berth_released'` does nothing —
|
||||||
|
it just leaves the berth available. The wizard surfaces this as
|
||||||
|
"auto-reversible" if the berth is still free, but the actual restore
|
||||||
|
doesn't re-attach the berth to any interest. Operator clicks Restore
|
||||||
|
expecting their berth back; nothing changes on the berth.
|
||||||
|
|
||||||
|
**Fix:** either (a) at archive time, persist the original interestId
|
||||||
|
in the decision metadata so we can re-link, or (b) update the wizard
|
||||||
|
copy to make clear the berth is "available for re-attach" rather than
|
||||||
|
"will be re-attached."
|
||||||
|
|
||||||
|
### M5. Several services use `void createAuditLog(...)` without `.catch()`
|
||||||
|
|
||||||
|
**Files:** widespread; e.g. `src/lib/services/client-hard-delete.service.ts:127-136, 230-240`,
|
||||||
|
`src/lib/services/portal-auth.service.ts:269-276`
|
||||||
|
|
||||||
|
`createAuditLog` is documented as never-throwing (catches internally),
|
||||||
|
but defense-in-depth: a `void` Promise that throws produces an
|
||||||
|
unhandled rejection event. Most paths are fine because the helper
|
||||||
|
catches; if anyone refactors `createAuditLog` and removes the catch,
|
||||||
|
this becomes a process-killer.
|
||||||
|
|
||||||
|
**Fix:** convention rule: every `void someAsync()` must have a `.catch()`.
|
||||||
|
Codify with a custom ESLint rule, or wrap at call sites:
|
||||||
|
`void createAuditLog({...}).catch(() => undefined);`
|
||||||
|
|
||||||
|
### M6. Hard-delete audit metadata leaks client `fullName`
|
||||||
|
|
||||||
|
**File:** `src/lib/services/client-hard-delete.service.ts:241-247`
|
||||||
|
|
||||||
|
After the hard-delete the audit row carries
|
||||||
|
`metadata: { fullName: client.fullName }`. The client record itself is
|
||||||
|
gone but their name lives on in the audit log. For a GDPR data subject
|
||||||
|
who exercised their right-to-erasure, this is technically a retention
|
||||||
|
of personal data in audit history. Not necessarily wrong (audit logs
|
||||||
|
have a legitimate-interest basis), but should be conscious.
|
||||||
|
|
||||||
|
**Fix:** decide policy: either (a) keep as-is and document, (b) replace
|
||||||
|
with a hash of the name, or (c) substitute a tombstone identifier.
|
||||||
|
|
||||||
|
### M7. Webhook delivery DLQ admin-replay can re-trigger downstream side-effects
|
||||||
|
|
||||||
|
**File:** `src/lib/services/webhooks.service.ts:282-326`
|
||||||
|
|
||||||
|
Replaying a successful webhook (operator presses Replay on a delivery
|
||||||
|
that already had `status: 'success'`) re-fires the same payload to the
|
||||||
|
recipient. If the recipient's idempotency check is weak, you've just
|
||||||
|
caused a duplicate. The replay payload includes `retried_from` /
|
||||||
|
`retried_at` markers, which is good — but most recipients won't honor
|
||||||
|
them.
|
||||||
|
|
||||||
|
**Fix:** disable the Replay button when `status === 'success'`. The UI
|
||||||
|
already gates on `'failed' || 'dead_letter'` — verify it stays that
|
||||||
|
way (`webhook-delivery-log.tsx:118-131` looks correct; double-check
|
||||||
|
no regressions).
|
||||||
|
|
||||||
|
### M8. `audit_logs` table has no DELETE permission gate
|
||||||
|
|
||||||
|
**Files:** schema and routes
|
||||||
|
|
||||||
|
There's no admin endpoint to delete audit rows (good). But there's no
|
||||||
|
DB-level guard either. A super_admin who runs `db:reset` wipes audit
|
||||||
|
history. Audit retention should be enforced at the schema level so
|
||||||
|
even a misconfigured operator can't blow away the trail.
|
||||||
|
|
||||||
|
**Fix:** create an `audit_logs_no_delete_role` Postgres role that lacks
|
||||||
|
DELETE on the table; document that the app's DB user should not have
|
||||||
|
DELETE on `audit_logs` in production deployments.
|
||||||
|
|
||||||
|
### M9. Documenso void worker uses dynamic import every time
|
||||||
|
|
||||||
|
**File:** `src/lib/queue/workers/documents.ts:25`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const { voidDocument } = await import('@/lib/services/documenso-client');
|
||||||
|
```
|
||||||
|
|
||||||
|
Dynamic import inside a hot per-job path is fine the first time but
|
||||||
|
slows every subsequent call slightly. Move to top-of-file import
|
||||||
|
unless there's a deliberate reason (circular dep?).
|
||||||
|
|
||||||
|
**Fix:** test moving to top-level import; if it works (no circular
|
||||||
|
deps), keep it there.
|
||||||
|
|
||||||
|
### M10. Bulk archive wizard "blocked" reason copy truncates at first line
|
||||||
|
|
||||||
|
**File:** `src/components/clients/bulk-archive-wizard.tsx:153-163`
|
||||||
|
|
||||||
|
The wizard shows `b.blockers[0]` for blocked clients. If the dossier
|
||||||
|
has multiple blockers, only the first is shown. Operators may fix the
|
||||||
|
first one, retry, and discover a second.
|
||||||
|
|
||||||
|
**Fix:** show all blockers (joined with `·`) or a "+N more" badge
|
||||||
|
with click-to-expand.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LOW
|
||||||
|
|
||||||
|
### L1. `next-in-line-notify.service.ts` could double-fire on archive retry
|
||||||
|
|
||||||
|
**File:** `src/app/api/v1/clients/[id]/archive/route.ts:114-135`
|
||||||
|
|
||||||
|
If the smart-archive request succeeds at the DB transaction level but
|
||||||
|
the response upload-side fails (network blip, browser closes), the
|
||||||
|
operator may retry. Each retry re-fires the next-in-line notification
|
||||||
|
to all sales recipients. The `dedupeKey: berth-released:{berthId}`
|
||||||
|
inside the notification helper deduplicates within a cooldown window —
|
||||||
|
so this is mitigated, but worth verifying the cooldown is set and
|
||||||
|
not 0.
|
||||||
|
|
||||||
|
### L2. `interests.berth_id` reference in `seed-data.ts` (legacy seed)
|
||||||
|
|
||||||
|
**File:** `src/lib/db/seed-data.ts:973`
|
||||||
|
|
||||||
|
The realistic seed inserts `berthId: ...` on the interests table. Per
|
||||||
|
`CLAUDE.md`, that column was dropped in migration 0029 and replaced
|
||||||
|
with `interest_berths` junction. The synthetic seed uses the junction
|
||||||
|
correctly. The realistic seed will FAIL at insert time if anyone
|
||||||
|
tries to run it on a freshly-migrated DB.
|
||||||
|
|
||||||
|
**Fix:** rewrite `seed-data.ts:969-982` to insert into `interests`
|
||||||
|
without `berthId`, then insert the junction rows separately (mirror
|
||||||
|
the synthetic seed's pattern).
|
||||||
|
|
||||||
|
### L3. Audit log entry for failed login uses `entityId = attemptedEmail` (unbounded)
|
||||||
|
|
||||||
|
**File:** `src/app/api/auth/[...all]/route.ts:53-68`
|
||||||
|
|
||||||
|
If the entityId is very long (a 500-char "email"), it goes into the
|
||||||
|
DB column. The column is `text` (unbounded) so no DB error, but FTS
|
||||||
|
search-text may bloat.
|
||||||
|
|
||||||
|
**Fix:** truncate attempted email to 256 chars before using as
|
||||||
|
entityId.
|
||||||
|
|
||||||
|
### L4. The "watch the watchers" audit fires for filtered queries too
|
||||||
|
|
||||||
|
**File:** `src/app/api/v1/admin/audit/route.ts:48-72`
|
||||||
|
|
||||||
|
(See H2 above for the page-spam variant.) Even on a single search,
|
||||||
|
an audit row containing the search term is written. If the search
|
||||||
|
term itself is sensitive (e.g. an admin searches for a specific
|
||||||
|
client's name in audit logs), it's now in the audit log of audit-log
|
||||||
|
viewing. Acceptable but worth documenting.
|
||||||
|
|
||||||
|
### L5. Import worker is a stub
|
||||||
|
|
||||||
|
**File:** `src/lib/queue/workers/import.ts:13`
|
||||||
|
|
||||||
|
`// TODO(L2): implement import job handlers` — the worker is wired
|
||||||
|
into the queue and registered, but does nothing. If anyone enqueues
|
||||||
|
an `import:*` job, it returns immediately. Either ship the feature
|
||||||
|
or remove the queue.
|
||||||
|
|
||||||
|
### L6. `interest-form.tsx` two TODOs about company-yacht filter + add-yacht inline
|
||||||
|
|
||||||
|
**File:** `src/components/interests/interest-form.tsx:332-333`
|
||||||
|
|
||||||
|
Real product gaps. When creating an interest for a client who's a
|
||||||
|
member of a company, you can't pick a yacht owned by that company.
|
||||||
|
And there's no inline "Add yacht" shortcut in the form.
|
||||||
|
|
||||||
|
### L7. `berth-spec-template.ts` defaults to `'Price: TBD'` when price is null
|
||||||
|
|
||||||
|
**File:** `src/lib/pdf/templates/berth-spec-template.ts:128`
|
||||||
|
|
||||||
|
Generated berth-spec PDFs say "Price: TBD" for any berth without a
|
||||||
|
price. Cosmetic — verify whether sales considers this an acceptable
|
||||||
|
fallback or wants to suppress the line entirely.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Things checked and found OK (so we don't re-audit)
|
||||||
|
|
||||||
|
- Tenant isolation on hard-delete (`portId` filter on every query and
|
||||||
|
inside the tx).
|
||||||
|
- `withPermission` gates on every new route (bulk-archive-preflight,
|
||||||
|
  hard-delete-*, bulk-hard-delete-*, redeliver).
|
||||||
|
- Audit log: no public DELETE endpoint, no PATCH endpoint.
|
||||||
|
- Sidebar nav properly gates marina sections from `residential_partner`
|
||||||
|
via `hasMarinaAccess`.
|
||||||
|
- Auth wrapper rebuilds the request body correctly so the upstream
|
||||||
|
better-auth handler can re-read it (no body-already-consumed bug).
|
||||||
|
- Webhook outbound SSRF guard with DNS rebinding protection still
|
||||||
|
intact.
|
||||||
|
- 1175/1175 vitest suite passing as of last run.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Recommended fix order (ROUND 1 + 2 combined — see below for Round 2)
|
||||||
|
|
||||||
|
See **"Triage list" at the end** of this document — combined ranking
|
||||||
|
across both audit rounds.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Round 2 — focused agents (added 2026-05-06 evening)
|
||||||
|
|
||||||
|
After the original synthesis above, four scoped agents (smaller blast
|
||||||
|
radius, hard finding caps) successfully audited their domains and
|
||||||
|
produced dedicated docs. Findings are linked here with `R2-`-prefixed
|
||||||
|
IDs. Detail in:
|
||||||
|
|
||||||
|
- [audit-reliability-2026-05-06.md](audit-reliability-2026-05-06.md) — 11 findings
|
||||||
|
- [audit-frontend-2026-05-06.md](audit-frontend-2026-05-06.md) — 12 findings
|
||||||
|
- [audit-permissions-2026-05-06.md](audit-permissions-2026-05-06.md) — 9 findings
|
||||||
|
- [audit-missing-features-2026-05-06.md](audit-missing-features-2026-05-06.md) — 12 findings
|
||||||
|
|
||||||
|
### Round 2 — CRITICAL
|
||||||
|
|
||||||
|
**R2-C1. Bulk archive discards post-commit side effects** ([reliability C1](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/app/api/v1/clients/bulk/route.ts:68-134`
|
||||||
|
- The bulk wizard's `runBulk` callback discards the return value from
|
||||||
|
`archiveClientWithDecisions`. **Documenso envelopes marked
|
||||||
|
`void_documenso` are never queued for void; "next-in-line" sales
|
||||||
|
notifications never fire**. The CRM ends up showing `documents.status='cancelled'`
|
||||||
|
while the live envelope is still out for signature — a signer can
|
||||||
|
legally complete a doc the CRM thinks is voided.
|
||||||
|
- Same severity tier as the original C1 (worker-imports).
|
||||||
|
|
||||||
|
**R2-C2. Frontend: Restore icon hovers destructive-red on archived clients** ([frontend C1](audit-frontend-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/clients/client-detail-header.tsx:174-186`
|
||||||
|
- Conditional `hover:text-destructive` is overridden by an unconditional
|
||||||
|
`hover:text-foreground` earlier in the class string. Result: the
|
||||||
|
Restore button on archived clients hovers blood-red, signalling
|
||||||
|
"destructive" on a fully reversible action. Users hesitate to click.
|
||||||
|
Promoted to "critical UX" because it's directly misleading on every
|
||||||
|
archived client view.
|
||||||
|
|
||||||
|
### Round 2 — HIGH
|
||||||
|
|
||||||
|
**R2-H1. Smart-restore wizard's `berth_released` reversal is a no-op but the audit log claims success**
|
||||||
|
([reliability H1](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/lib/services/client-restore.service.ts:359-372`
|
||||||
|
- Already noted as M4 in the original synthesis. Round-2 reliability
|
||||||
|
agent escalated to HIGH because the wizard counter increments and
|
||||||
|
the audit log records "1 auto-reversed" — operator believes the berth
|
||||||
|
was re-attached when nothing happened. Same fix path: persist the
|
||||||
|
original `interestId` in the decision detail and re-link on restore.
|
||||||
|
|
||||||
|
**R2-H2. Smart-archive berth status update has TOCTOU race**
|
||||||
|
([reliability H2](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/lib/services/client-archive.service.ts:191-207`
|
||||||
|
- Berth row read outside tx, mutated inside tx without `for update`
|
||||||
|
lock. Concurrent archive + sale of the same berth can race: the
|
||||||
|
archive flow flips a freshly-sold berth back to `available`. Add
|
||||||
|
`select … for update` on `berths` before the status flip.
|
||||||
|
|
||||||
|
**R2-H3. Bulk archive can pick the wrong interest for berth release**
|
||||||
|
([reliability H3](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/app/api/v1/clients/bulk/route.ts:95-103`
|
||||||
|
- Lookup by `primaryBerthMooring` falls back to `dossier.interests[0]?.interestId ?? ''`.
|
||||||
|
Empty-string `interestId` reaches the delete and silently matches
|
||||||
|
zero rows; the link is silently retained while the audit log claims
|
||||||
|
it was removed.
|
||||||
|
|
||||||
|
**R2-H4. External EOI runs five operations outside a transaction**
|
||||||
|
([reliability H4](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/lib/services/external-eoi.service.ts:67-155`
|
||||||
|
- Storage upload + 4 DB writes are independent. Mid-flight failure
|
||||||
|
leaves orphan PDFs in S3/MinIO and partial DB state.
|
||||||
|
|
||||||
|
**R2-H5. Bulk wizard double-submit treats `ConflictError('already archived')` as a per-row error**
|
||||||
|
([reliability H5](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/app/api/v1/clients/bulk/route.ts:68-120`
|
||||||
|
- No idempotency key on the bulk endpoint. A double-submit (network
|
||||||
|
retry, double click) makes the second response look like all rows
|
||||||
|
failed even though the first succeeded.
|
||||||
|
|
||||||
|
**R2-H6. Webhook replay button has no UI permission gate (403 toast spam)**
|
||||||
|
([permissions H1](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/admin/webhooks/webhook-delivery-log.tsx:118-131`
|
||||||
|
- Replay button renders for any user who can load the page. Server gates
|
||||||
|
on `admin.manage_webhooks`. Non-admins see enabled buttons; clicking
|
||||||
|
surfaces a generic 403 toast.
|
||||||
|
|
||||||
|
**R2-H7. Bulk Archive bulk action exposed to roles without `clients.delete`**
|
||||||
|
([permissions H2](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/clients/client-list.tsx:182-190`
|
||||||
|
- `sales_agent` and `viewer` see the Archive bulk action; clicking
|
||||||
|
surfaces a 403 from preflight. Mirror the `canHardDelete` pattern:
|
||||||
|
`const canBulkArchive = can('clients', 'delete');`
|
||||||
|
|
||||||
|
**R2-H8. Bulk add_tag / remove_tag exposed to viewer**
|
||||||
|
([permissions H3](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/clients/client-list.tsx:165-181`
|
||||||
|
- Same pattern as R2-H7 — no UI gate; server gates on `clients.edit`.
|
||||||
|
|
||||||
|
**R2-H9. Bulk hard-delete silently skips rows that vanish between preflight and execute**
|
||||||
|
([permissions H4](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/lib/services/client-hard-delete.service.ts:377`
|
||||||
|
- `if (!c) continue;` swallows any client that was archived/restored/
|
||||||
|
deleted by another operator between preflight and execute. Operator
|
||||||
|
sees a `deletedCount` lower than requested and no signal which IDs
|
||||||
|
were skipped.
|
||||||
|
|
||||||
|
**R2-H10. Frontend: `webhook-delivery-log` and `audit-log-list` swallow fetch errors silently**
|
||||||
|
([frontend H3, H4](audit-frontend-2026-05-06.md))
|
||||||
|
|
||||||
|
- Files: `src/components/admin/webhooks/webhook-delivery-log.tsx:61-74`,
|
||||||
|
`src/components/admin/audit/audit-log-list.tsx:150-175`
|
||||||
|
- Both wrap fetches in `try/finally` with no `catch`. Failed loads show
|
||||||
|
spinner forever or stale data; user has no signal that anything
|
||||||
|
failed. Surface via `toast.error` + inline retry banner.
|
||||||
|
|
||||||
|
**R2-H11. Frontend: `audit-log-card` renders as `<a href="#">` — page-jumps on mobile tap**
|
||||||
|
([frontend H5](audit-frontend-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/admin/audit/audit-log-card.tsx:96`
|
||||||
|
- Card view rows on mobile insert `#` in URL on tap (back-button trap).
|
||||||
|
Render as button or div, or link to a useful destination.
|
||||||
|
|
||||||
|
**R2-H12. Frontend: `smart-archive-dialog` doesn't invalidate the dossier or single-client query**
|
||||||
|
([frontend H6](audit-frontend-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/clients/smart-archive-dialog.tsx:197-212`
|
||||||
|
- Detail page header keeps showing client as un-archived after a
|
||||||
|
successful archive until hard reload. Add
|
||||||
|
`qc.invalidateQueries({queryKey: ['clients', clientId]})` and
|
||||||
|
`qc.removeQueries({queryKey: ['client-archive-dossier', clientId]})`.
|
||||||
|
|
||||||
|
**R2-H13. Frontend: bulk tag mutation uses `alert()` and lacks `onError`**
|
||||||
|
([frontend H2](audit-frontend-2026-05-06.md))
|
||||||
|
|
||||||
|
- File: `src/components/clients/client-list.tsx:88-106`
|
||||||
|
- Native `alert()` blocks the page on partial failure; pure network
|
||||||
|
failure shows nothing. Replace with `toast.warning` / `toast.error`.
|
||||||
|
|
||||||
|
**R2-H14. Email-template subject overrides are no-ops for 6 of 8 templates**
|
||||||
|
([missing-features V1](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Files: `src/components/admin/email-templates-admin.tsx:24-72` (UI),
|
||||||
|
`src/lib/services/portal-auth.service.ts:120,332` (only consumers)
|
||||||
|
- Admin sees an "Overridden" badge after saving a custom subject for
|
||||||
|
CRM invite, inquiry confirmation, residential templates, etc. — but
|
||||||
|
the senders ship the hardcoded subject regardless. Wire
|
||||||
|
`loadSubjectOverride(portId, key)` into the 6 missing senders.
|
||||||
|
|
||||||
|
**R2-H15. Branding admin saves 5 settings that nothing reads**
|
||||||
|
([missing-features V2](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Files: `src/app/(dashboard)/[portSlug]/admin/branding/page.tsx`,
|
||||||
|
`src/lib/services/port-config.ts:240-272`
|
||||||
|
- Logo URL, app name, primary color, header HTML, footer HTML all
|
||||||
|
dead-end. `getPortBrandingConfig` has zero callers. **Multi-tenant
|
||||||
|
promise broken — every port's emails ship Port Nimara's branding.**
|
||||||
|
|
||||||
|
**R2-H16. Reminder admin saves digest defaults that no scheduler applies**
|
||||||
|
([missing-features V3](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Files: `src/app/(dashboard)/[portSlug]/admin/reminders/page.tsx`,
|
||||||
|
`src/lib/services/port-config.ts:284-306`
|
||||||
|
- Sales reps think they configured a daily digest at 09:00 in their
|
||||||
|
TZ; they get immediate per-event notifications instead. The digest
|
||||||
|
scheduler doesn't exist.
|
||||||
|
|
||||||
|
### Round 2 — MEDIUM (selected highlights)
|
||||||
|
|
||||||
|
**R2-M1. Portal "My Memberships" tile is a dead-end** ([missing-features V4](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Tile on `/portal/dashboard` has no `href`; route doesn't exist. Either
|
||||||
|
ship `/portal/memberships` or remove the tile.
|
||||||
|
|
||||||
|
**R2-M2. Company detail Documents tab is a "Coming soon" stub** ([missing-features V5](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- `src/components/companies/company-tabs.tsx:230-234`. Same problem
|
||||||
|
as the three already-noted "coming soon" stubs but on a different
|
||||||
|
entity.
|
||||||
|
|
||||||
|
**R2-M3. Onboarding page is a static checklist not the wizard it advertises** ([missing-features V6](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- The page literally says "what this page will become". Either build
|
||||||
|
the wizard or relabel the landing card.
|
||||||
|
|
||||||
|
**R2-M4. Backup admin page is a docs page despite landing copy promising "on-demand exports"** ([missing-features V7](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Once C1 (worker imports) is fixed, the existing `database-backup`
|
||||||
|
job is reachable; small lift to wire a "Take backup now" button.
|
||||||
|
|
||||||
|
**R2-M5. Inquiry inbox has zero triage actions** ([missing-features V8](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- No "Convert to client", no "Resolve", no "Assign". `website_submissions`
|
||||||
|
table is permanent; sales has to copy-paste emails into client forms.
|
||||||
|
|
||||||
|
**R2-M6. external-eoi grants only `documents.upload_signed` but mutates interest state** ([permissions M1](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- A custom role with `documents.upload_signed:true` + `interests.edit:false`
|
||||||
|
can flip an interest to "signed" via the external-EOI route.
|
||||||
|
|
||||||
|
**R2-M7. `InlineStagePicker` never sends `override:true` — `override_stage` permission unreachable from the most-used UI path** ([permissions M2](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- Users with the perm have to fall back to the modal `InterestStagePicker`
|
||||||
|
to actually use it.
|
||||||
|
|
||||||
|
**R2-M8. `sales_agent` granted `interests.override_stage:true` — likely copy-paste from sales_manager** ([permissions M3](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- All other trust-elevated flags are stripped from sales_agent. Needs a
|
||||||
|
product decision; either flip to false or document intent.
|
||||||
|
|
||||||
|
**R2-M9. `bulk-archive-preflight` leaks dossier-loader error text in `blockers`** ([permissions M4](audit-permissions-2026-05-06.md))
|
||||||
|
|
||||||
|
- An attacker enumerating UUIDs can distinguish "doesn't exist" vs
|
||||||
|
"exists but you can't see it". Replace with generic "Could not load
|
||||||
|
dossier".
|
||||||
|
|
||||||
|
**R2-M10. Documenso void worker has no max-retry alert hook** ([reliability M2](audit-reliability-2026-05-06.md))
|
||||||
|
|
||||||
|
- A persistent 401/403 retries forever. On exhaustion, write back to
|
||||||
|
`documents` (`cancellation_failed=true`) and notify admin.
|
||||||
|
|
||||||
|
**R2-M11. Mobile More-sheet missing residential, notifications, berth-reservations, website-analytics** ([missing-features V9](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Mobile users have zero path to entire feature domains. Add to
|
||||||
|
`MORE_ITEMS`.
|
||||||
|
|
||||||
|
**R2-M12. Portal has no profile / change-password surface** ([missing-features V10](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Forces every portal user to use the forgot-password flow even when
|
||||||
|
they remember their old password. Ship `/portal/profile`.
|
||||||
|
|
||||||
|
**R2-M13. Portal invoices show amounts but no PDF download** ([missing-features V11](audit-missing-features-2026-05-06.md))
|
||||||
|
|
||||||
|
- Documents page does have downloads; mirror the pattern.
|
||||||
|
|
||||||
|
(Plus several more medium/low items in the dedicated docs; see those
|
||||||
|
for the full set.)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## TRIAGE LIST (combined Round 1 + Round 2)
|
||||||
|
|
||||||
|
### Ship now — CRITICAL
|
||||||
|
|
||||||
|
1. **C1** — wire the 5 missing BullMQ workers (`worker.ts`, `server.ts`)
|
||||||
|
— 5-line fix; every webhook + cron flow is currently dead.
|
||||||
|
2. **R2-C1** — make bulk archive enqueue Documenso voids + next-in-line
|
||||||
|
notifications (return value plumbing in `bulk/route.ts`).
|
||||||
|
3. **R2-C2** — fix the destructive-red hover on the Restore button
|
||||||
|
(`client-detail-header.tsx`). Trivial CSS fix.
|
||||||
|
|
||||||
|
### Ship this week — HIGH (security/UX with concrete user impact)
|
||||||
|
|
||||||
|
4. **H1** — rate-limit the hard-delete-request endpoints.
|
||||||
|
5. **H3** — collapse "no code" vs "wrong code" into one error message.
|
||||||
|
6. **H7** — three "coming soon" stubs in client/berth tabs.
|
||||||
|
7. **R2-H1** — fix smart-restore's silent `berth_released` no-op (or
|
||||||
|
reclassify as `reversibleWithPrompt`).
|
||||||
|
8. **R2-H2** — add `for update` lock on the smart-archive berth status
|
||||||
|
flip (TOCTOU race).
|
||||||
|
9. **R2-H3** — bulk-archive's wrong-interest fallback — empty-string
|
||||||
|
interestId silently no-ops.
|
||||||
|
10. **R2-H6, R2-H7, R2-H8** — three permission UI-gate misses on
|
||||||
|
bulk actions and the webhook-replay button. ~30 lines total.
|
||||||
|
11. **R2-H10, R2-H12, R2-H13** — frontend swallowed errors + missing
|
||||||
|
invalidation + alert() instead of toast. Small fixes, immediate UX
|
||||||
|
win.
|
||||||
|
12. **R2-H11** — `audit-log-card` `href="#"` mobile back-button trap.
|
||||||
|
13. **R2-H14** — wire 6 missing email-subject overrides through their
|
||||||
|
senders.
|
||||||
|
|
||||||
|
### Next sprint — HIGH/MEDIUM (operational + multi-tenant correctness)
|
||||||
|
|
||||||
|
14. **R2-H4** — wrap external-EOI in a transaction.
|
||||||
|
15. **R2-H5** — bulk-archive idempotency key + treat already-archived as
|
||||||
|
success in bulk.
|
||||||
|
16. **R2-H9** — bulk hard-delete should return `skipped: string[]`.
|
||||||
|
17. **R2-H15, R2-H16** — branding + reminder admin pages save settings
|
||||||
|
nothing reads (silently broken multi-tenancy).
|
||||||
|
18. **H2** — audit-page-view de-dupe (don't spam on every filter change).
|
||||||
|
19. **H4** — synthetic seed needs documented dev-user setup or its own
|
||||||
|
bootstrap script.
|
||||||
|
20. **H5** — Documenso bad-secret rate-limit per IP.
|
||||||
|
21. **R2-M1 through R2-M5** — portal memberships dead-end, company
|
||||||
|
Documents stub, onboarding wizard, backup page, inquiry inbox triage.
|
||||||
|
|
||||||
|
### Backlog — MEDIUM/LOW + remaining items
|
||||||
|
|
||||||
|
22. The remaining MEDIUM/LOW from both rounds — see the dedicated docs.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Headline numbers (combined)
|
||||||
|
|
||||||
|
- **3 CRITICAL** (worker imports, bulk-archive side-effects, restore-button hover)
|
||||||
|
- **22 HIGH** (security + UX with concrete impact)
|
||||||
|
- **~15 MEDIUM** (operational hygiene, multi-tenancy gaps, unfinished features)
|
||||||
|
- **~10 LOW** (cleanup, defensive)
|
||||||
|
|
||||||
|
Round 1 was a manual synthesis after agent-pool stalls; Round 2 was
|
||||||
|
four focused agents with hard finding caps that all completed inside
|
||||||
|
the watchdog window. Every finding is grounded in code references.
|
||||||
278
docs/audit-final-deferred.md
Normal file
278
docs/audit-final-deferred.md
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
# Final audit deferred findings
|
||||||
|
|
||||||
|
> **Status update (audit-v3 round)**: most of the v2 deferred items have
|
||||||
|
> now landed. Items struck through below are completed. The remaining
|
||||||
|
> open items are bigger refactors (custom-fields per-entity routes,
|
||||||
|
> systemSettings PK reconciliation, Documenso v2 voidDocument verification,
|
||||||
|
> partial-vs-composite archived index conversion, storage-proxy port_id
|
||||||
|
> claim, Documenso webhook port_id enforcement, response-shape
|
||||||
|
> standardization, berths.current_pdf_version_id Drizzle FK).
|
||||||
|
|
||||||
|
The pre-merge audit on `feat/berth-recommender` produced ~30 findings. The
|
||||||
|
critical + high-severity items were fixed in-branch. The items below are
|
||||||
|
medium / low severity and deferred to follow-up issues so the merge isn't
|
||||||
|
held up. Each entry is self-contained — pick one off and ship it.
|
||||||
|
|
||||||
|
## Cross-cutting integration
|
||||||
|
|
||||||
|
- **EOI in-app pathway silently swallows missing `Berth Range` AcroForm field**
|
||||||
|
— `src/lib/pdf/fill-eoi-form.ts:93`. `setText(form, 'Berth Range', ...)`
|
||||||
|
is wrapped in a try/catch that succeeds silently when the field is
|
||||||
|
absent. CLAUDE.md already warns ops about needing to add the field to
|
||||||
|
the live Documenso template; this code change would make the deployment
|
||||||
|
gap observable. Fix: when `context.eoiBerthRange` is non-empty AND the
|
||||||
|
field is absent, log at warn level + surface a structured response field.
|
||||||
|
|
||||||
|
- **Email body merge expansion happens after token validation** —
|
||||||
|
`src/lib/services/document-sends.service.ts:399-403`. If a merge value
|
||||||
|
contains a `{{token}}` substring (e.g. a client name like
|
||||||
|
`"Acme {{discount}} Inc."`), the expanded body will contain a token
|
||||||
|
the unresolved-check missed and ships with literal braces. Fix: HTML-
|
||||||
|
escape merge values before expansion, OR run a second
|
||||||
|
`findUnresolvedTokens` against the expanded body.
|
||||||
|
|
||||||
|
- **Filesystem dev-fallback HMAC secret can drift across processes** —
|
||||||
|
`src/lib/storage/filesystem.ts:328-331`. The dev-only fallback derives
|
||||||
|
the HMAC secret from `BETTER_AUTH_SECRET`. Two CRM processes running
|
||||||
|
with different secrets (web vs worker) reject each other's tokens.
|
||||||
|
Fix: assert `BETTER_AUTH_SECRET` is set when filesystem backend is
|
||||||
|
active in non-prod, or document the requirement loudly.
|
||||||
|
|
||||||
|
- **Berth PDF apply path: numeric column nulling silently drops** —
|
||||||
|
`src/lib/services/berth-pdf.service.ts:473-475`. When
|
||||||
|
`Number.isFinite(n)` is false the apply loop `continue`s without
|
||||||
|
pushing to `applied` and without warning. Combined with the
|
||||||
|
"no appliable fields supplied" check (only fires when ALL drop), partial
|
||||||
|
silent drops are invisible. Fix: collect dropped keys and surface them.
|
||||||
|
|
||||||
|
## Multi-tenant isolation hardening
|
||||||
|
|
||||||
|
- **document_sends row stores `interestId` without verifying port match** —
|
||||||
|
`src/lib/services/document-sends.service.ts:422`. Audit-log pollution
|
||||||
|
rather than data exposure (the recipient lookup is port-checked already).
|
||||||
|
Fix: when `recipient.interestId` is set, fetch with
|
||||||
|
`and(eq(interests.id, ...), eq(interests.portId, input.portId))` and
|
||||||
|
throw if missing.
|
||||||
|
|
||||||
|
- **Storage proxy token does not bind to port_id** —
|
||||||
|
`src/lib/storage/filesystem.ts:73-84`. ProxyTokenPayload is `{k, e, n,
|
||||||
|
f?, c?}` with a global HMAC. The current "issuer always checks port
|
||||||
|
first" relies on every issuer being correct in perpetuity. Fix: add a
|
||||||
|
`p` (portId) claim and have the proxy route resolve key→owner row +
|
||||||
|
assert `owner.portId === payload.p` before streaming.
|
||||||
|
|
||||||
|
- **Documenso webhook does not enforce port_id on document lookups** —
|
||||||
|
`src/app/api/webhooks/documenso/route.ts:96-148`. Handlers dispatch by
|
||||||
|
global `documensoId`. If two ports' documents were ever issued the
|
||||||
|
same Documenso ID (replay across staging/prod, forwarded webhook from
|
||||||
|
a foreign instance), the wrong port's interest could be mutated. The
|
||||||
|
per-body `signatureHash` dedup is partial mitigation. Fix: either
|
||||||
|
(a) include the originating Documenso instance/team in the lookup, or
|
||||||
|
(b) verify `documents(documenso_id)` has a unique index port-wide.
|
||||||
|
|
||||||
|
## Recent expense work polish
|
||||||
|
|
||||||
|
- **renderReceiptHeader cursor math drifts after multi-step writes** —
|
||||||
|
`src/lib/services/expense-pdf.service.ts:854`. After
|
||||||
|
`doc.text(...)` with auto-flow, `doc.y` advances. Using `doc.y -
|
||||||
|
headerH + 10` after the rect+stroke block computes against the
|
||||||
|
post-rect position; works only because pdfkit's text-after-rect
|
||||||
|
hasn't moved y yet. Headers may misalign on the first receipt page
|
||||||
|
after a soft page break. Fix: capture `const baseY = doc.y` before
|
||||||
|
drawing the rect and compute all subsequent offsets relative to it.
|
||||||
|
|
||||||
|
## Settings parsing
|
||||||
|
|
||||||
|
- **`loadRecommenderSettings` rejects string-shaped JSONB booleans** —
|
||||||
|
`src/lib/services/berth-recommender.service.ts:116`. Postgres returns
|
||||||
|
JSONB `true/false` as JS booleans, but if an admin saves `"true"`
|
||||||
|
via a UI that wraps the value as a string, `asBool` returns null and
|
||||||
|
the per-port override silently falls through to defaults. Not a
|
||||||
|
security bug; a tuning footgun. Fix: accept `"true"`/`"false"` string
|
||||||
|
forms in `asBool`.
|
||||||
|
|
||||||
|
# Audit-final v2 (post-merge platform-wide pass) deferred findings
|
||||||
|
|
||||||
|
A second comprehensive audit (security, routes, DB, integrations, UI/UX)
|
||||||
|
ran after the merge. The high-impact items landed in commit
|
||||||
|
`fix(audit-final-v2): platform-wide hardening` (or similar). Items below
|
||||||
|
are deferred follow-ups.
|
||||||
|
|
||||||
|
## Routes / API
|
||||||
|
|
||||||
|
- **Saved-views routes lack `withPermission`** —
|
||||||
|
`src/app/api/v1/saved-views/[id]/route.ts:4-5` and
|
||||||
|
`src/app/api/v1/saved-views/route.ts:24`. Convention is
|
||||||
|
`withAuth(withPermission(...))`. Verify the service applies
|
||||||
|
`(ctx.userId, ctx.portId)` ownership filtering, then add either an
|
||||||
|
explicit owner-only comment or wrap with a benign permission gate.
|
||||||
|
|
||||||
|
- **Custom-fields permission resource hardcoded to `clients`** —
|
||||||
|
`src/app/api/v1/custom-fields/[entityId]/route.ts:15,29`. Custom fields
|
||||||
|
attach to client / yacht / interest / berth / company, but the route
|
||||||
|
always checks `clients.view` / `clients.edit`. A user with
|
||||||
|
`companies.view` can read confidential company custom-field values via
|
||||||
|
this endpoint (the service-level `customFieldDefinitions.portId` filter
|
||||||
|
prevents cross-tenant access but not cross-resource within a tenant).
|
||||||
|
Fix: split into per-entity routes, OR resolve `entityType` and gate on
|
||||||
|
the matching permission inline.
|
||||||
|
|
||||||
|
- **`alerts/[id]/acknowledge|dismiss` ungated** —
|
||||||
|
`src/app/api/v1/alerts/[id]/acknowledge/route.ts:6` etc. only `withAuth`,
|
||||||
|
no `withPermission`. Verify the service requires user ownership; if
|
||||||
|
not, gate on `reports.view_dashboard` or similar.
|
||||||
|
|
||||||
|
- **Public POST routes bypass service layer** —
|
||||||
|
`src/app/api/public/interests/route.ts`, `…/website-inquiries/route.ts`,
|
||||||
|
`…/residential-inquiries/route.ts`. These do extensive `tx.insert(...)`
|
||||||
|
with hand-rolled audit logs (`userId: null as unknown as string`).
|
||||||
|
Extract a `publicInterestService.create(...)` so the same code path is
|
||||||
|
unit-testable and port-id discipline is uniform. Verify
|
||||||
|
`audit_logs.user_id` is nullable (the cast pattern signals it is, but
|
||||||
|
enforce in schema if not).
|
||||||
|
|
||||||
|
- **Inconsistent response shapes** — most endpoints return `{ data: ... }`,
|
||||||
|
but `notifications/[notificationId]` returns `{ success: true }`,
|
||||||
|
`website-inquiries` returns `{ id, deduped }`. Document a convention in
|
||||||
|
CLAUDE.md and migrate.
|
||||||
|
|
||||||
|
- **`req.json()` without `parseBody` helper** — admin custom-fields
|
||||||
|
routes use `await req.json(); schema.parse(body)` directly instead of
|
||||||
|
the project's `parseBody(req, schema)` helper. Migrate for uniform
|
||||||
|
400 error shapes.
|
||||||
|
|
||||||
|
## Documenso integration
|
||||||
|
|
||||||
|
- **v2 voidDocument endpoint may not match real API** —
|
||||||
|
`src/lib/services/documenso-client.ts:450-466`. The audit flagged that
|
||||||
|
Documenso 2.x exposes envelope deletion as
|
||||||
|
`POST /api/v2/envelope/delete` with `{ envelopeId }` body, not
|
||||||
|
`DELETE /api/v2/envelope/{id}`. The unit test mocks fetch so it can't
|
||||||
|
catch the real shape. Verify against a live Documenso 2.x instance
|
||||||
|
(`pnpm exec playwright test --project=realapi`) before flipping any
|
||||||
|
port to v2.
|
||||||
|
|
||||||
|
- **Webhook dedup vs per-recipient signed events** —
|
||||||
|
`src/app/api/webhooks/documenso/route.ts:103-110`. The top-level
|
||||||
|
`signatureHash` (sha256 of raw body) blocks exact replays, but a
|
||||||
|
duplicate webhook delivery for a multi-recipient document with a
|
||||||
|
re-encoded body will go through the per-recipient loop. Make
|
||||||
|
the `documentEvents.signatureHash` unique index cover the suffixed values OR add
|
||||||
|
a composite unique index `(documensoDocumentId, recipientEmail, eventType)`.
|
||||||
|
|
||||||
|
- **v1 `placeFields` per-field POST has no retry** —
|
||||||
|
`src/lib/services/documenso-client.ts:374-398`. A single transient 500
|
||||||
|
mid-loop leaves the document with a partial field set. Add 3-attempt
|
||||||
|
exponential backoff on 5xx + voidDocument on final failure.
|
||||||
|
|
||||||
|
## Storage
|
||||||
|
|
||||||
|
- **S3 backend has no startup bucket-exists check** —
|
||||||
|
`src/lib/storage/s3.ts:100-111`. A typo'd bucket name surfaces as a
|
||||||
|
500 inside a user-facing request rather than at boot. Add
|
||||||
|
`await client.bucketExists(bucket)` in `S3Backend.create` with a clear
|
||||||
|
error message.
|
||||||
|
|
||||||
|
- **Storage cache fingerprint includes encrypted secret** —
|
||||||
|
`src/lib/storage/index.ts:158-159`. After a key rotation the old
|
||||||
|
cached client survives until `resetStorageBackendCache()` is called
|
||||||
|
(already called via the settings-write hook). Document the
|
||||||
|
invariant or fingerprint on a content-hash that excludes encrypted
|
||||||
|
material.
|
||||||
|
|
||||||
|
- **Filesystem dev HMAC silent fallback** —
|
||||||
|
`src/lib/storage/filesystem.ts:309-332`. Two dev nodes started with
|
||||||
|
different `BETTER_AUTH_SECRET` derive different secrets and reject
|
||||||
|
each other's tokens. Log a one-line warn at backend boot in non-prod.
|
||||||
|
|
||||||
|
## DB schema
|
||||||
|
|
||||||
|
- **`berths.current_pdf_version_id` lacks Drizzle FK** —
|
||||||
|
`src/lib/db/schema/berths.ts:83`. The FK exists in migration 0030
|
||||||
|
but not in the schema source-of-truth, so `pnpm db:push` against an
|
||||||
|
empty DB skips the constraint. Either add the FK with a deferred
|
||||||
|
declaration or document that `db:push` is unsupported.
|
||||||
|
|
||||||
|
- **Missing indexes on FK columns** — `berthReservations.interestId`,
|
||||||
|
`berthReservations.contractFileId`, `documents.fileId`,
|
||||||
|
`documents.signedFileId`, `documentEvents.signerId`,
|
||||||
|
`documentTemplates.sourceFileId`, `formSubmissions.formTemplateId`,
|
||||||
|
`formSubmissions.clientId`, `documentSends.brochureId`,
|
||||||
|
`documentSends.brochureVersionId`, `documentSends.sentByUserId`. Add
|
||||||
|
`index(...)` declarations to avoid full-scan FK checks on parent
|
||||||
|
delete.
|
||||||
|
|
||||||
|
- **`systemSettings` PK / unique-index drift** —
|
||||||
|
`src/lib/db/schema/system.ts:119-133`. Schema declares only a
|
||||||
|
`uniqueIndex` on `(key, port_id)` but the migration uses `key` as PK.
|
||||||
|
`port_id` is nullable so `(key, port_id)` cannot serve as a PK with
|
||||||
|
default NULLs-not-equal semantics. Reconcile: declare
|
||||||
|
`primaryKey({ columns: [table.key, table.portId] })` (after making
|
||||||
|
`portId` non-null with a sentinel) OR use partial unique indexes for
|
||||||
|
global + per-port settings.
|
||||||
|
|
||||||
|
- **Composite vs partial archived indexes** — many tables use
|
||||||
|
`index('idx_*_archived').on(portId, archivedAt)` when the dominant
|
||||||
|
query is `WHERE port_id = ? AND archived_at IS NULL`. Convert to
|
||||||
|
`index(...).on(portId).where(sql\`archived_at IS NULL\`)` partial
|
||||||
|
indexes for smaller storage + faster planner choice.
|
||||||
|
|
||||||
|
- **`documentSends.sentByUserId` ungated FK** —
|
||||||
|
`src/lib/db/schema/brochures.ts:118` is `notNull()` but has no FK
|
||||||
|
reference. If a user is hard-deleted (rare; we soft-delete), an
|
||||||
|
orphan id remains. Add `.references(() => users.id, { onDelete: 'set null' })`
|
||||||
|
and make the column nullable. Same audit-trail rationale as the
|
||||||
|
other documentSends FK fixes (commit 0035).
|
||||||
|
|
||||||
|
## UI/UX
|
||||||
|
|
||||||
|
- **Storage admin migration mutation lacks toasts** —
|
||||||
|
`src/components/admin/storage-admin-panel.tsx:61-72`. Add `onSuccess`
|
||||||
|
toast with row count + `onError` toast.
|
||||||
|
|
||||||
|
- **Invoice detail send/payment mutations lack error feedback + gates** —
|
||||||
|
`src/components/invoices/invoice-detail.tsx:93-99,152-167`. Add
|
||||||
|
`onError: (e) => toast.error(...)` and wrap mutating buttons in
|
||||||
|
`<PermissionGate resource="invoices" action="send">` /
|
||||||
|
`record_payment`.
|
||||||
|
|
||||||
|
- **Admin user list edit button ungated** —
|
||||||
|
`src/components/admin/users/user-list.tsx:114`. Wrap in
|
||||||
|
`<PermissionGate resource="admin" action="manage_users">`.
|
||||||
|
|
||||||
|
- **Email threads list missing skeleton** —
|
||||||
|
`src/components/email/email-threads-list.tsx:29-45`. Use `<Skeleton>`
|
||||||
|
rows during load + `<EmptyState>` for the empty case.
|
||||||
|
|
||||||
|
- **Scan page mutations swallow OCR errors** —
|
||||||
|
`src/app/(dashboard)/[portSlug]/expenses/scan/page.tsx:67-87`. Add an
|
||||||
|
inline error state for `scanMutation.isError` (the upload-side
|
||||||
|
already does this).
|
||||||
|
|
||||||
|
- **Invoice detail uses `any` for query data** — strict-mode escape
|
||||||
|
hatch. Define a proper response type matching the API contract.
|
||||||
|
|
||||||
|
## Security defense-in-depth
|
||||||
|
|
||||||
|
- **Storage proxy token does not bind to port_id** —
|
||||||
|
`src/lib/storage/filesystem.ts:73-84`. Token's HMAC is global. Fix:
|
||||||
|
add `p` (portId) claim and have the proxy resolve key→owner row +
|
||||||
|
assert `owner.portId === payload.p`.
|
||||||
|
|
||||||
|
- **Documenso webhook does not enforce port_id** —
|
||||||
|
`src/app/api/webhooks/documenso/route.ts:96-148`. Handlers dispatch
|
||||||
|
by global `documensoId`. Verify `documents(documenso_id)` is unique
|
||||||
|
port-wide OR include the originating instance/team in the lookup.
|
||||||
|
|
||||||
|
- **EOI in-app pathway silently swallows missing `Berth Range` field** —
|
||||||
|
`src/lib/pdf/fill-eoi-form.ts:93`. Log warn when
|
||||||
|
`context.eoiBerthRange` is non-empty AND the field is absent so the
|
||||||
|
Documenso template deployment gap is observable.
|
||||||
|
|
||||||
|
- **AI worker has no cost-tracking ledger write** —
|
||||||
|
`src/lib/queue/workers/ai.ts:122-177`. Persist token usage to the
|
||||||
|
`ai_usage` ledger after every call.
|
||||||
|
|
||||||
|
- **Logger redact paths miss nested credentials** —
|
||||||
|
`src/lib/logger.ts:5-19`. Extend redact list to cover
|
||||||
|
`*.headers.authorization`, `**.token`, `secretKeyEncrypted`, etc.
|
||||||
223
docs/audit-frontend-2026-05-06.md
Normal file
223
docs/audit-frontend-2026-05-06.md
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
# Frontend audit — 2026-05-06
|
||||||
|
|
||||||
|
Scope: new archive/restore/hard-delete dialogs, bulk archive wizard, client
|
||||||
|
detail header, audit log inspector, webhook delivery log, client list bulk
|
||||||
|
section. Companion to `docs/audit-comprehensive-2026-05-06.md` (does NOT
|
||||||
|
re-flag the Files-tab / reservations / berth-tab "coming soon" stubs already
|
||||||
|
covered there).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Critical
|
||||||
|
|
||||||
|
### C1 — `client-detail-header` opens restore dialog from the Archive icon for archived clients
|
||||||
|
|
||||||
|
**File:** `src/components/clients/client-detail-header.tsx:174-186`
|
||||||
|
|
||||||
|
**Scenario:** On an archived client the icon button renders `<RotateCcw>`
|
||||||
|
when `isArchived` is true (`isArchived ? <RotateCcw /> : <Archive />` is
|
||||||
|
correct), BUT both states use the same `setArchiveOpen(true)` handler and
|
||||||
|
the conditional below routes `<SmartRestoreDialog>` vs `<SmartArchiveDialog>`
|
||||||
|
off of `isArchived`. That part is fine. The real problem: the destructive
|
||||||
|
hover colour `hover:text-destructive` is applied via
|
||||||
|
`isArchived ? 'hover:text-foreground' : 'hover:text-destructive'` — but the
|
||||||
|
preceding class string already sets `hover:text-foreground` unconditionally,
|
||||||
|
so the conditional is dead and the restore button hovers red the same as
|
||||||
|
archive. Misleading colour signal on a reversible action; users hesitate to
|
||||||
|
click it.
|
||||||
|
|
||||||
|
**Fix:** Drop the always-applied `hover:text-foreground` from the base class
|
||||||
|
list and let the conditional own the hover colour, or just colour the
|
||||||
|
restore icon emerald to differentiate.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## High
|
||||||
|
|
||||||
|
### H1 — `bulk-archive-wizard` lets users skip the reasons step by clicking Continue while preflight is loading then Cancel/reopen
|
||||||
|
|
||||||
|
**File:** `src/components/clients/bulk-archive-wizard.tsx:253-267, 80-107`
|
||||||
|
|
||||||
|
**Scenario:** In the `preflight` stage the Continue button is only disabled
|
||||||
|
when `archivable.length === 0 || preflight.isLoading`. But `archivable` is
|
||||||
|
derived from `items = preflight.data ?? []`. While loading, `archivable` is
|
||||||
|
`[]` so Continue is disabled — good. After load with all-blocked selection,
|
||||||
|
`archivable.length === 0` so still disabled — good. However, the
|
||||||
|
`reasonsByClientId: reasons` payload is sent verbatim, so a user who advances
|
||||||
|
to "reasons", types into one client's box, then uses the carousel back arrow
|
||||||
|
and edits another, can submit reasons for clients NOT in `archivable` (e.g.
|
||||||
|
if the preflight is refetched on stale-time). Reasons for blocked or removed
|
||||||
|
client IDs are forwarded to the API. Minor data-quality issue.
|
||||||
|
|
||||||
|
**Fix:** Filter `reasons` to `archivable` IDs before mutating:
|
||||||
|
`reasonsByClientId: Object.fromEntries(Object.entries(reasons).filter(([id]) => archivable.some(a => a.clientId === id)))`.
|
||||||
|
|
||||||
|
### H2 — `client-list` bulk tag mutation uses `alert()` for partial failures and has no `onError`
|
||||||
|
|
||||||
|
**File:** `src/components/clients/client-list.tsx:88-106`
|
||||||
|
|
||||||
|
**Scenario:** User bulk-adds a tag to 50 clients; backend returns 200 with
|
||||||
|
`{succeeded: 30, failed: 20}` → user sees a native browser `alert()` blocking
|
||||||
|
the page. If the request itself errors (network drop, 500), there is no
|
||||||
|
`onError` so the dialog closes via `onSettled` and the user sees nothing —
|
||||||
|
silent failure. Inconsistent UX vs. every other mutation in this audit which
|
||||||
|
uses `toast`.
|
||||||
|
|
||||||
|
**Fix:** Replace `alert(...)` with `toast.warning(...)`, add an
|
||||||
|
`onError: (err) => toast.error(...)` branch matching the pattern used in
|
||||||
|
`bulk-archive-wizard.tsx` and `bulk-hard-delete-dialog.tsx`.
|
||||||
|
|
||||||
|
### H3 — `webhook-delivery-log` swallows fetch errors silently
|
||||||
|
|
||||||
|
**File:** `src/components/admin/webhooks/webhook-delivery-log.tsx:61-74`
|
||||||
|
|
||||||
|
**Scenario:** Admin opens a webhook detail page while the API is down or the
|
||||||
|
webhook was just deleted. `load()` catches and discards the error
|
||||||
|
(`} catch { /* ignore */ }`). UI shows "Loading deliveries…" forever on the
|
||||||
|
first load, or stays on the last successful page on subsequent loads, with
|
||||||
|
no indication that anything failed. No error state, no toast, no retry.
|
||||||
|
|
||||||
|
**Fix:** Surface errors via `toast.error` and show an inline error state
|
||||||
|
("Couldn't load deliveries — Retry") instead of swallowing.
|
||||||
|
|
||||||
|
### H4 — `audit-log-list` first-page fetch swallows errors and shows no error state
|
||||||
|
|
||||||
|
**File:** `src/components/admin/audit/audit-log-list.tsx:150-175`
|
||||||
|
|
||||||
|
**Scenario:** Filter form is fully interactive, user changes a date — request
|
||||||
|
fires, server 500s. The `try/finally` has no `catch`, so the rejected promise
|
||||||
|
becomes an unhandled rejection. The list shows whatever was previously
|
||||||
|
loaded (or empty state), and the user has no idea their filter didn't apply.
|
||||||
|
Same applies to `loadMore`.
|
||||||
|
|
||||||
|
**Fix:** Add `catch` blocks that set an error state and render an inline
|
||||||
|
error banner above the table, with a Retry button.
|
||||||
|
|
||||||
|
### H5 — `audit-log-card` renders as a link to `href="#"` — clicking jumps the page
|
||||||
|
|
||||||
|
**File:** `src/components/admin/audit/audit-log-card.tsx:96`
|
||||||
|
|
||||||
|
**Scenario:** On mobile / card view the audit log entries become clickable
|
||||||
|
cards with `href="#"`. Tapping any card scrolls the page to top and inserts
|
||||||
|
`#` in the URL (back-button trap). There's no detail view to navigate to.
|
||||||
|
|
||||||
|
**Fix:** Either render a non-link wrapper (button or div) when no detail
|
||||||
|
target exists, or link to a useful destination like
|
||||||
|
`/{portSlug}/{entityType}/{entityId}` when the entity is resolvable.
|
||||||
|
|
||||||
|
### H6 — `smart-archive-dialog` `archiveMutation` doesn't invalidate the dossier or single-client query
|
||||||
|
|
||||||
|
**File:** `src/components/clients/smart-archive-dialog.tsx:197-212`
|
||||||
|
|
||||||
|
**Scenario:** User archives a client successfully. The dialog invalidates
|
||||||
|
`['clients']`, `['berths']`, `['interests']` but NOT
|
||||||
|
`['client-archive-dossier', clientId]` nor `['clients', clientId]`. If the
|
||||||
|
parent screen (e.g. detail page) keeps the client query mounted, the
|
||||||
|
detail header continues to show the client as un-archived until a hard
|
||||||
|
reload. The Restore icon won't appear.
|
||||||
|
|
||||||
|
**Fix:** Add `qc.invalidateQueries({queryKey: ['clients', clientId]})` and
|
||||||
|
`qc.removeQueries({queryKey: ['client-archive-dossier', clientId]})` so a
|
||||||
|
re-open re-fetches a fresh dossier (e.g. if user re-archives after restoring
|
||||||
|
in the same session).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Medium
|
||||||
|
|
||||||
|
### M1 — `smart-archive-dialog` derives `interestId` from a name match against `primaryBerthMooring` — wrong key
|
||||||
|
|
||||||
|
**File:** `src/components/clients/smart-archive-dialog.tsx:158-167`
|
||||||
|
|
||||||
|
**Scenario:** When building per-berth decisions the code does
|
||||||
|
`dossier.interests.find((i) => i.primaryBerthMooring === b.mooringNumber)?.interestId`.
|
||||||
|
Multiple interests can share the same primary mooring (rare, but possible
|
||||||
|
historically), and worse, when no interest has this berth as primary it
|
||||||
|
falls back to `dossier.interests[0]?.interestId` regardless of which berth
|
||||||
|
is being decided. The wrong interest gets credited with the release, which
|
||||||
|
is then audit-logged.
|
||||||
|
|
||||||
|
**Fix:** Have the dossier API return `interestId` per berth row (it already
|
||||||
|
joins `interest_berths`), or look up by membership not by primary flag.
|
||||||
|
|
||||||
|
### M2 — `hard-delete-dialog` doesn't reset state when switching from intent → confirm if request fails midway
|
||||||
|
|
||||||
|
**File:** `src/components/clients/hard-delete-dialog.tsx:39-46, 64-79`
|
||||||
|
|
||||||
|
**Scenario:** User submits hard delete with wrong code → backend returns 400
|
||||||
|
→ toast fires, but the dialog stays on `confirm` stage with the bad code
|
||||||
|
still in the input and no clear cue. If the user then closes (X) and
|
||||||
|
reopens, the `useEffect` resets correctly. But if the email code expired
|
||||||
|
(10 min) and they request a fresh one, there's no "Resend code" button —
|
||||||
|
they must cancel and start over from intent. Minor.
|
||||||
|
|
||||||
|
**Fix:** Add a "Send a new code" link in the confirm stage that calls
|
||||||
|
`requestCode.mutate()` again and clears `code`.
|
||||||
|
|
||||||
|
### M3 — `bulk-hard-delete-dialog` doesn't refetch / invalidate after partial failure shows totals
|
||||||
|
|
||||||
|
**File:** `src/components/clients/bulk-hard-delete-dialog.tsx:64-85`
|
||||||
|
|
||||||
|
**Scenario:** Bulk delete returns `{deletedCount: 7}` for 10 selected; toast
|
||||||
|
warns but `qc.invalidateQueries({queryKey: ['clients']})` is called
|
||||||
|
unconditionally — fine. However, the dialog closes immediately
|
||||||
|
(`onOpenChange(false)`), so the user can't see WHICH 3 failed. The toast
|
||||||
|
just says "see audit log". For a destructive bulk op this is too sparse;
|
||||||
|
users will repeat the action thinking it didn't work.
|
||||||
|
|
||||||
|
**Fix:** Stay open on partial failure and render a list of failed IDs (the
|
||||||
|
API likely already returns per-item results — if not, return them).
|
||||||
|
|
||||||
|
### M4 — `audit-log-list` doesn't validate that `dateFrom <= dateTo`
|
||||||
|
|
||||||
|
**File:** `src/components/admin/audit/audit-log-list.tsx:142-146`
|
||||||
|
|
||||||
|
**Scenario:** User picks From=2026-06-01, To=2026-05-01. Query fires with an
|
||||||
|
empty result range; user sees "No audit log entries found" and assumes
|
||||||
|
their data isn't there. No client-side validation hint.
|
||||||
|
|
||||||
|
**Fix:** Show an inline warning "From date must be before To date" and skip
|
||||||
|
the request when invalid.
|
||||||
|
|
||||||
|
### M5 — `bulk-archive-wizard` `Cancel` during `archiveMutation.isPending` discards mutation tracking
|
||||||
|
|
||||||
|
**File:** `src/components/clients/bulk-archive-wizard.tsx:248-251, 293-307`
|
||||||
|
|
||||||
|
**Scenario:** User clicks "Archive 50" → mutation in flight (10s) → user
|
||||||
|
clicks Cancel. The dialog closes; the mutation continues server-side and
|
||||||
|
its onSuccess fires later, showing a toast for an action the user thought
|
||||||
|
they cancelled. Worse, the dialog is gone so they can't tell which clients
|
||||||
|
got archived.
|
||||||
|
|
||||||
|
**Fix:** Disable Cancel while `archiveMutation.isPending`, or relabel to
|
||||||
|
"Cancel (won't stop in-progress)" and keep the mutation visible.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Low
|
||||||
|
|
||||||
|
### L1 — `audit-log-list` filter row overflows on narrow viewports
|
||||||
|
|
||||||
|
**File:** `src/components/admin/audit/audit-log-list.tsx:321-467`
|
||||||
|
|
||||||
|
**Scenario:** 8 filter controls (`Search` 288px, `Entity` 144px, `Action`
|
||||||
|
176px, `Severity` 128px, `Source` 128px, `User id` 176px, `From` 144px,
|
||||||
|
`To` 144px, total ~1330px) sit in a single `flex-wrap` row. At <1280px
|
||||||
|
viewports they wrap onto multiple lines pushing the table down 200+px;
|
||||||
|
at <640px (mobile) each control wraps onto its own line and the "Clear"
|
||||||
|
button (`ml-auto`) lands on the wrong row.
|
||||||
|
|
||||||
|
**Fix:** Collapse rarely-used filters (User id / Severity / Source) into a
|
||||||
|
"More filters" Popover for sm: viewports.
|
||||||
|
|
||||||
|
### L2 — `audit-log-card` action map missing entries silently fall back to grey "Activity" icon and grey badge
|
||||||
|
|
||||||
|
**File:** `src/components/admin/audit/audit-log-card.tsx:27-44, 46-52`
|
||||||
|
|
||||||
|
**Scenario:** New webhook/cron/job actions are in `audit-log-list.tsx`
|
||||||
|
ACTION_COLORS but absent from `audit-log-card.tsx` ACTION_BADGE_COLORS and
|
||||||
|
ACTION_ACCENT. Card view of these entries looks identical to a generic
|
||||||
|
"unknown" entry — visual loss vs. table view.
|
||||||
|
|
||||||
|
**Fix:** Sync the two maps; consider extracting to a shared module so they
|
||||||
|
can't drift.
|
||||||
405
docs/audit-missing-features-2026-05-06.md
Normal file
405
docs/audit-missing-features-2026-05-06.md
Normal file
@@ -0,0 +1,405 @@
|
|||||||
|
# Missing-Features Audit — 2026-05-06
|
||||||
|
|
||||||
|
Focused pass on **features that look done in the UI but aren't fully
|
||||||
|
wired through the service layer**, plus **admin settings exposed to
|
||||||
|
users that no code reads**. Companion to
|
||||||
|
`docs/audit-comprehensive-2026-05-06.md` — the three "coming soon" stubs
|
||||||
|
already documented there (client Files tab, client reservations history,
|
||||||
|
berth tabs), the import-worker stub, the two interest-form TODOs, and
|
||||||
|
the EOI "Price: TBD" finding are NOT re-flagged here.
|
||||||
|
|
||||||
|
Hard cap: 12 findings. Severity tiers below.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## VISIBLE-BROKEN (admin sees a control, click is a no-op or wrong)
|
||||||
|
|
||||||
|
### V1. 6 of 8 admin-editable email subject overrides are silently ignored at send time
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/components/admin/email-templates-admin.tsx:24-72` (UI)
|
||||||
|
- `src/lib/email/template-catalog.ts:16-25` (catalog of 8 keys)
|
||||||
|
- `src/lib/services/portal-auth.service.ts:120-127, 332-339` (the only
|
||||||
|
consumers of `loadSubjectOverride`)
|
||||||
|
|
||||||
|
The `/admin/email-templates` page lets an admin override the subject
|
||||||
|
line on **eight** transactional templates:
|
||||||
|
`portal_activation`, `portal_reset`, `portal_invite_resend`,
|
||||||
|
`crm_invite`, `inquiry_client_confirmation`,
|
||||||
|
`inquiry_sales_notification`, `residential_inquiry_client_confirmation`,
|
||||||
|
`residential_inquiry_sales_alert`. The save endpoint persists each one
|
||||||
|
to `system_settings` (`email_template_<key>_subject`).
|
||||||
|
|
||||||
|
Only **two** of those eight are ever read at send time —
|
||||||
|
`portal_activation` and `portal_reset` in `portal-auth.service.ts`.
|
||||||
|
A repo-wide search for `loadSubjectOverride` / `settingKeyForSubject`
|
||||||
|
returns no other consumers. The other six templates use their hardcoded
|
||||||
|
subject regardless of the admin override.
|
||||||
|
|
||||||
|
**Impact:** sales/ops teams will customize an inquiry confirmation
|
||||||
|
subject, hit Save, see the "Overridden" badge, and silently ship the
|
||||||
|
default subject to every prospect.
|
||||||
|
|
||||||
|
**Fix:** small per template — call `loadSubjectOverride(portId, key)`
|
||||||
|
in each sender (`crm-invite.service.ts`, the inquiry sender, the
|
||||||
|
residential inquiry sender, the portal-invite-resend path) and pass the
|
||||||
|
result through as the email subject.
|
||||||
|
|
||||||
|
**Scope:** small (5 callsites + tests).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V2. Branding admin (logo / app name / primary color / email header & footer HTML) saves to settings but no code reads them
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/app/(dashboard)/[portSlug]/admin/branding/page.tsx:7-46` — UI
|
||||||
|
with five fields.
|
||||||
|
- `src/lib/services/port-config.ts:240-272` — `getPortBrandingConfig()`
|
||||||
|
resolves the five `branding_*` settings into a typed config.
|
||||||
|
- Repo-wide: `getPortBrandingConfig` has **zero callers** outside its
|
||||||
|
declaration. The five `SETTING_KEYS.branding*` constants are only
|
||||||
|
read inside `getPortBrandingConfig` itself.
|
||||||
|
|
||||||
|
The admin panel is functional end-to-end (write hits the settings API,
|
||||||
|
"Reset to default" works), and the email-templates module hardcodes
|
||||||
|
`s3.portnimara.com/...` for the logo URL plus a fixed table layout.
|
||||||
|
None of the email-rendering helpers (`renderEmail`, the template
|
||||||
|
modules in `src/lib/email/templates/`) call `getPortBrandingConfig`,
|
||||||
|
and the `<BrandedAuthShell>` component sources its logo + colors from
|
||||||
|
constants too.
|
||||||
|
|
||||||
|
**Impact:** every multi-tenant assumption made about branding is
|
||||||
|
broken. A second port wired into this CRM will see Port Nimara's logo
and colors in every transactional email and on the auth pages, even
|
||||||
|
after their admin "configures branding" successfully.
|
||||||
|
|
||||||
|
**Fix:** plumb `getPortBrandingConfig(portId)` through the email
|
||||||
|
renderer (header/footer HTML + primary button color), and through
|
||||||
|
`<BrandedAuthShell>` via a server-fetched prop.
|
||||||
|
|
||||||
|
**Scope:** medium (touches every transactional email + auth shell).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V3. Reminder admin page configures defaults that no service applies
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/app/(dashboard)/[portSlug]/admin/reminders/page.tsx:7-50` — UI
|
||||||
|
for default-enabled, default-days, digest-enabled, digest-time,
|
||||||
|
digest-timezone.
|
||||||
|
- `src/lib/services/port-config.ts:284-306` —
|
||||||
|
`getPortReminderConfig()` defines the schema.
|
||||||
|
- Repo-wide: the keys (`reminder_default_*`, `reminder_digest_*`) and
|
||||||
|
`getPortReminderConfig` have **zero callers**.
|
||||||
|
|
||||||
|
Same pattern as V2. The admin sets "enable reminders by default on new
|
||||||
|
interests" → toggles to true → save succeeds → newly-created interests
|
||||||
|
still default to `reminderEnabled=false`. The digest-time +
|
||||||
|
timezone fields go nowhere — there is no scheduler that batches
|
||||||
|
pending reminders into a daily digest.
|
||||||
|
|
||||||
|
**Impact:** the entire reminder UX is decorative. Sales reps think
|
||||||
|
they configured a daily digest at 09:00 Europe/Warsaw, get
|
||||||
|
fire-as-they-hit notifications instead.
|
||||||
|
|
||||||
|
**Fix:** wire `getPortReminderConfig` into (a) the interest-create
|
||||||
|
service (defaults), (b) the maintenance/notifications worker that
|
||||||
|
fires reminders (digest batching + delivery window). The `digest`
|
||||||
|
behavior didn't exist before this audit — needs a new scheduled job.
|
||||||
|
|
||||||
|
**Scope:** medium (defaults are small, digest job is new code).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V4. Portal dashboard "My Memberships" tile has no link, no destination page, and isn't reachable from nav
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/app/(portal)/portal/dashboard/page.tsx:58-63` — `<PortalCard
|
||||||
|
title="My Memberships" ... icon={Building2} />` — note no `href`
|
||||||
|
prop.
|
||||||
|
- `src/components/portal/portal-nav.tsx:8-15` — six nav entries, no
|
||||||
|
memberships.
|
||||||
|
- Filesystem: `src/app/(portal)/portal/memberships/` does not exist.
|
||||||
|
|
||||||
|
The dashboard shows a count of "memberships" (companies the portal
|
||||||
|
user belongs to) but the tile is non-clickable and there is no
|
||||||
|
`/portal/memberships` route. A user with 3 memberships sees the tile,
|
||||||
|
clicks → nothing happens.
|
||||||
|
|
||||||
|
**Impact:** dead-end on the portal home for any client tied to a
|
||||||
|
company (the residential and yacht-ownership use-cases).
|
||||||
|
|
||||||
|
**Fix:** ship `/portal/memberships/page.tsx` listing the companies
|
||||||
|
returned by the existing `companyMemberships` query (already
|
||||||
|
aggregated in `getPortalDashboard`), and add it to `PortalNav`. Or
|
||||||
|
pull the tile if memberships isn't a portal feature.
|
||||||
|
|
||||||
|
**Scope:** small.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V5. Company detail page Documents tab is a "Coming soon" stub
|
||||||
|
|
||||||
|
**File:** `src/components/companies/company-tabs.tsx:230-234`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: 'documents',
|
||||||
|
label: 'Documents',
|
||||||
|
content: <EmptyState title="Documents" description="Coming soon" />,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
Visible alongside the working Notes / Activity / Addresses / Members
|
||||||
|
tabs on every company detail page. NOT covered by the existing audit
|
||||||
|
doc's H7 (which lists clients, client reservations, and berths).
|
||||||
|
|
||||||
|
**Impact:** the same UX problem H7 calls out for clients.
|
||||||
|
|
||||||
|
**Fix:** mirror what client-Files-tab needs — query `documents` joined
|
||||||
|
to a polymorphic billing-entity = company link, render a list, ship a
|
||||||
|
download button. Or hide the tab.
|
||||||
|
|
||||||
|
**Scope:** small to medium.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## HALF-WIRED (the page works but the surrounding promise overstates it)
|
||||||
|
|
||||||
|
### V6. "Onboarding" admin page is a static checklist, not the wizard the page itself promises
|
||||||
|
|
||||||
|
**File:** `src/app/(dashboard)/[portSlug]/admin/onboarding/page.tsx`
|
||||||
|
|
||||||
|
The page renders 8 stepwise links and explicitly says (lines 71-72,
|
||||||
|
98-110): "The future onboarding wizard will track progress per port…",
|
||||||
|
"What this page will become", "The wizard will record completion per
|
||||||
|
port in `system_settings`, gate the public marketing-site cutover…".
|
||||||
|
|
||||||
|
The admin landing card describes it as the "Initial-setup wizard for
|
||||||
|
fresh ports" — admins clicking through expect a wizard, get a static
|
||||||
|
table of contents.
|
||||||
|
|
||||||
|
**Impact:** the only "fresh port" workflow doesn't exist; cutover
|
||||||
|
gating logic mentioned in the page body is also unimplemented.
|
||||||
|
|
||||||
|
**Fix:** either (a) build the wizard with progress in `system_settings`
plus banner integration, or (b) re-label both this page and the admin
|
||||||
|
landing card to "Setup checklist" so expectations match reality.
|
||||||
|
|
||||||
|
**Scope:** large for the wizard; tiny for the relabel.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V7. Backup & Restore admin page is informational only — admin landing card promises actions
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/app/(dashboard)/[portSlug]/admin/backup/page.tsx`
|
||||||
|
- `src/app/(dashboard)/[portSlug]/admin/page.tsx:148` — landing card
|
||||||
|
description: "Database snapshots and on-demand exports."
|
||||||
|
|
||||||
|
The landing card sells "on-demand exports". The actual page renders a
|
||||||
|
two-card explainer: "Current backup posture" (read-only) and "What
|
||||||
|
this page will become" (the entire interactive surface — list
|
||||||
|
snapshots, "Take backup now" button, per-port logical export, restore
|
||||||
|
preview, GDPR per-client export). None of those exist.
|
||||||
|
|
||||||
|
**Impact:** the "Backup & Restore" tile is functionally a docs page.
|
||||||
|
Compliance officers / users expecting a self-serve GDPR export
|
||||||
|
button have to file a support ticket.
|
||||||
|
|
||||||
|
**Fix:** match the language on the landing card to the page reality
|
||||||
|
("Backup posture" → docs only) until the snapshot/export buttons
|
||||||
|
ship. The maintenance worker already runs `database-backup` (per
|
||||||
|
`docs/audit-comprehensive-2026-05-06.md` C1 — though that worker isn't
|
||||||
|
imported), so wiring "Take backup now" against the existing job is
|
||||||
|
small once C1 is fixed.
|
||||||
|
|
||||||
|
**Scope:** small (doc tweak) or medium (button + per-port export
|
||||||
|
endpoint).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V8. Inquiry inbox is read-only — no "Convert to Client" / "Mark resolved" / "Assign" actions
|
||||||
|
|
||||||
|
**File:** `src/components/admin/inquiry-inbox.tsx` (entire file, 207
|
||||||
|
lines, ends at the View payload toggle)
|
||||||
|
|
||||||
|
The inbox lists website-form submissions (berth_inquiry,
|
||||||
|
residence_inquiry, contact_form) with filter chips and a
|
||||||
|
"View payload" expand. There is no action to:
|
||||||
|
|
||||||
|
- create a client/interest from the submission,
|
||||||
|
- assign the inquiry to a sales rep,
|
||||||
|
- mark it resolved / triaged,
|
||||||
|
- reply directly,
|
||||||
|
- archive or trash the row,
|
||||||
|
- export.
|
||||||
|
|
||||||
|
The `website_submissions` table appears to be permanent — every
|
||||||
|
inquiry ever received remains in the inbox forever, with no triage
|
||||||
|
state. Sales has to manually copy the email into a new client form
|
||||||
|
and back-reference the original submission.
|
||||||
|
|
||||||
|
**Impact:** the inquiry-to-pipeline conversion step isn't supported in
|
||||||
|
the CRM. The marketing-site cutover (per the user's
|
||||||
|
`project_email_ownership_at_cutover.md` memory) will increase volume
|
||||||
|
on this surface and make the missing triage UX painful.
|
||||||
|
|
||||||
|
**Fix:** add a per-submission "Convert" action that prefills the
|
||||||
|
client + interest forms with the payload, plus a `triage_state`
|
||||||
|
column (open / converted / dismissed) and a default filter that hides
|
||||||
|
non-open rows.
|
||||||
|
|
||||||
|
**Scope:** medium.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MOBILE PARITY
|
||||||
|
|
||||||
|
### V9. Mobile More-sheet is missing several real top-nav destinations
|
||||||
|
|
||||||
|
**File:** `src/components/layout/mobile/more-sheet.tsx:38-50`
|
||||||
|
|
||||||
|
`MORE_ITEMS` lists 11 entries. The dashboard route directory has at
|
||||||
|
least these top-level segments not represented anywhere in the mobile
|
||||||
|
bottom-tabs OR more-sheet:
|
||||||
|
|
||||||
|
- `residential` — exists at `/[portSlug]/residential/...`
|
||||||
|
- `notifications` — exists at `/[portSlug]/notifications/...`
|
||||||
|
- `berth-reservations` — exists at `/[portSlug]/berth-reservations/...`
|
||||||
|
- `documents` — exists as a top-level page (separate from the bottom
|
||||||
|
tab `documents`, which IS in mobile-bottom-tabs)
|
||||||
|
- `website-analytics` — exists at `/[portSlug]/website-analytics/...`
|
||||||
|
|
||||||
|
A mobile-only user has no path to any of them. The Documents bottom
|
||||||
|
tab does cover the doc list, but residential is an entire feature
|
||||||
|
domain (per the `(dashboard)/.../residential` directory) with no
|
||||||
|
mobile entry point.
|
||||||
|
|
||||||
|
**Impact:** anyone using the mobile chrome to triage on the go can't
|
||||||
|
reach residential clients/interests or notifications (`alerts`, by
contrast, IS in the sheet).
|
||||||
|
|
||||||
|
**Fix:** add the missing segments to `MORE_ITEMS`. If the grid feels
|
||||||
|
too dense, reorganize into sections.
|
||||||
|
|
||||||
|
**Scope:** small.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V10. Portal has no "Profile" / "Change password" surface
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/components/portal/portal-nav.tsx:8-15` — six tabs, no profile.
|
||||||
|
- Filesystem: no `src/app/(portal)/portal/profile/` directory.
|
||||||
|
|
||||||
|
A portal user who wants to change their email, phone, mailing address,
|
||||||
|
or password has no UI. The portal sign-in flow goes through the
|
||||||
|
better-auth session but the app exposes zero account-management
|
||||||
|
controls. The "Need assistance?" card on the dashboard tells the user
|
||||||
|
to contact the port team — which is the explicit answer for data
|
||||||
|
edits, but does not cover password changes (a security expectation,
|
||||||
|
not a per-port-staff burden).
|
||||||
|
|
||||||
|
**Impact:** every portal user who forgets their password (after
|
||||||
|
already activating) has to use `/portal/forgot-password` even if they
|
||||||
|
remember the old one. There's no proactive password rotation. A user
|
||||||
|
who changes their phone number has to email the port to update it.
|
||||||
|
|
||||||
|
**Fix:** ship `/portal/profile` with at minimum: read-only PII view +
|
||||||
|
"Change password" form (re-uses the existing reset-password endpoint
|
||||||
|
or a new `change-password` endpoint that takes the current pw).
|
||||||
|
Phone/address editing is a longer fix because of the audit-trail
|
||||||
|
implications.
|
||||||
|
|
||||||
|
**Scope:** small for password; medium with PII edits.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### V11. Portal invoices page lists invoices but offers no view/download — even though documents do
|
||||||
|
|
||||||
|
**File:** `src/app/(portal)/portal/invoices/page.tsx:53-99`
|
||||||
|
|
||||||
|
Each invoice row shows number, status, due/paid dates, amount, and a
|
||||||
|
small payment-status caption. There is no link, no PDF view, no
|
||||||
|
download. By contrast, the portal Documents page (peer route) ends
|
||||||
|
each row with a `<DocumentDownloadButton documentId={doc.id} />` that
|
||||||
|
fetches a signed S3 URL.
|
||||||
|
|
||||||
|
Compare to admin/CRM where invoices have a full PDF render flow
|
||||||
|
(invoice service generates the PDF + signed URL).
|
||||||
|
|
||||||
|
**Impact:** a portal user can see they owe money and cannot retrieve
|
||||||
|
the actual invoice document. They have to email the port to ask for a
|
||||||
|
PDF copy.
|
||||||
|
|
||||||
|
**Fix:** add an invoice-PDF endpoint under `/api/portal/invoices/[id]/
|
||||||
|
download` mirroring the documents one, and a download button on each
|
||||||
|
row. The invoice PDF generator already exists (`src/lib/services/
|
||||||
|
invoices.ts`).
|
||||||
|
|
||||||
|
**Scope:** small.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## DEV-NOTES (legitimately staged-for-later, calling out so they're not forgotten)
|
||||||
|
|
||||||
|
### V12. Email-templates admin only edits subject lines — body editing is a documented "next iteration"
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
|
||||||
|
- `src/components/admin/email-templates-admin.tsx:78-79` —
|
||||||
|
"Customize the subject line of transactional emails per port. Body
|
||||||
|
editing is the next iteration; for now the layout and HTML stay
|
||||||
|
locked to the default template."
|
||||||
|
- `src/lib/email/template-catalog.ts:5-9` — same statement in the
|
||||||
|
catalog header.
|
||||||
|
|
||||||
|
The page is honest about the limitation, so this isn't a "broken"
|
||||||
|
finding. But it's a notable shipped-without-the-killer-feature gap:
|
||||||
|
the multi-tenant promise of per-port email customization can't deliver
|
||||||
|
the body changes that ports actually want (logo placement, signature,
|
||||||
|
language). Combined with V2 (branding HTML fragments aren't read at
|
||||||
|
all), there is currently NO way for a non-super-admin per-port admin
|
||||||
|
to customize the email body in any way.
|
||||||
|
|
||||||
|
**Impact:** confined to admin expectations — most ports will assume
|
||||||
|
"Email templates" = "edit the email", click in, see only a subject
|
||||||
|
field, and request the missing body editor.
|
||||||
|
|
||||||
|
**Fix:** scope a body-editing flow that reuses the
|
||||||
|
`merge_fields.ts` token catalog (the validator already exists for
|
||||||
|
document templates) for safety. Until that's built, V2 + this finding
|
||||||
|
together mean a "rebrand the emails" task is single-tenant only.
|
||||||
|
|
||||||
|
**Scope:** large (HTML editor + token validator + per-port override
|
||||||
|
storage + render-side composition).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
12 findings, four severity tiers:
|
||||||
|
|
||||||
|
- **Visible-broken (V1-V5):** five admin/portal controls produce no
|
||||||
|
effect. V1 (email overrides) and V2 (branding) are the highest
|
||||||
|
impact — both silently break the multi-tenant promise.
|
||||||
|
- **Half-wired (V6-V8):** three pages where the surrounding wrapper
|
||||||
|
oversells what's there. V8 (inquiry inbox) is the largest scope.
|
||||||
|
- **Mobile parity (V9-V11):** mobile users can't reach several real
|
||||||
|
features; portal users have no profile/password surface and can't
|
||||||
|
download invoices.
|
||||||
|
- **Dev-notes (V12):** documented limitations called out for the
|
||||||
|
roadmap.
|
||||||
|
|
||||||
|
The two highest-leverage quick wins are **V1** (wire 6 missing
|
||||||
|
template subject overrides — a few hours) and **V11** (portal invoice
|
||||||
|
download — small, fixes a real customer pain point).
|
||||||
266
docs/audit-permissions-2026-05-06.md
Normal file
266
docs/audit-permissions-2026-05-06.md
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
# Per-role permission audit — 2026-05-06
|
||||||
|
|
||||||
|
Focused review of UI/server permission divergence on the new endpoints
|
||||||
|
shipped during the smart-archive / hard-delete / bulk-wizard /
|
||||||
|
external-EOI / webhook-replay work bundle. Skips items already covered
|
||||||
|
in `docs/audit-comprehensive-2026-05-06.md` (audit-log gating H6,
|
||||||
|
residential_partner sidebar nav).
|
||||||
|
|
||||||
|
The pattern hunted for: `<PermissionGate>` (or `usePermissions().can`)
|
||||||
|
on the UI side hides a control under permission **X**, while the
|
||||||
|
matching API route gates on permission **Y** (or doesn't gate at all,
|
||||||
|
or gates strictly — producing 403 toast spam for users who can see the
|
||||||
|
button but can't use it).
|
||||||
|
|
||||||
|
Scope: 8 routes + 5 components + the seed permission matrix. Hard cap
|
||||||
|
of 10 findings, ranked by impact. Critical/High/Medium/Low.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## CRITICAL
|
||||||
|
|
||||||
|
_None._ The four new hard-delete endpoints all gate on
|
||||||
|
`admin.permanently_delete_clients` on both layers (UI hides the button
|
||||||
|
via `<PermissionGate resource="admin" action="permanently_delete_clients">`
|
||||||
|
in `client-detail-header.tsx:162` and via `canHardDelete = can('admin',
|
||||||
|
'permanently_delete_clients')` in `client-list.tsx:53`; the four routes
|
||||||
|
all wrap with `withPermission('admin', 'permanently_delete_clients', …)`).
|
||||||
|
The webhook-replay route gates on `admin.manage_webhooks` — see H1 below
|
||||||
|
for the matching UI gap.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## HIGH
|
||||||
|
|
||||||
|
### H1. Webhook replay button has no UI permission gate (403 toast for non-admins)
|
||||||
|
|
||||||
|
- **UI:** `src/components/admin/webhooks/webhook-delivery-log.tsx:118-131`
|
||||||
|
— the Replay `<Button>` renders for any user who can load the page,
|
||||||
|
with no `<PermissionGate>` wrapper and no `usePermissions().can('admin',
|
||||||
|
'manage_webhooks')` check.
|
||||||
|
- **Server:** `src/app/api/v1/admin/webhooks/[webhookId]/deliveries/[deliveryId]/redeliver/route.ts:15`
|
||||||
|
— `withPermission('admin', 'manage_webhooks', …)`.
|
||||||
|
|
||||||
|
**Divergence:** A `sales_manager` / `sales_agent` / `viewer` who
|
||||||
|
somehow lands on `/admin/webhooks/{id}` (e.g. via a deep link from a
|
||||||
|
shared message) sees enabled Replay buttons. Clicking surfaces a
|
||||||
|
generic 403 toast — the user has no signal that the action is
|
||||||
|
restricted, just that "Replay failed".
|
||||||
|
|
||||||
|
**Fix:** wrap the Replay `<Button>` in
|
||||||
|
`<PermissionGate resource="admin" action="manage_webhooks">…</PermissionGate>`,
|
||||||
|
or skip rendering the entire "Replay" column when
|
||||||
|
`!can('admin', 'manage_webhooks')`. The page-level guard on
|
||||||
|
`/admin/webhooks` should prevent non-admins from reaching the route in
|
||||||
|
the first place, but defense-in-depth is cheap and the toast UX is
|
||||||
|
poor.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### H2. Bulk-archive bulk action exposed to roles without `clients.delete`
|
||||||
|
|
||||||
|
- **UI:** `src/components/clients/client-list.tsx:182-190` — the
|
||||||
|
"Archive" entry in `bulkActions` is unconditionally rendered (only
|
||||||
|
the "Permanently delete" entry checks `canHardDelete`).
|
||||||
|
- **Server:** `src/app/api/v1/clients/bulk/route.ts:40-57` — gates
|
||||||
|
`archive` action on `clients.delete`. Also
|
||||||
|
`src/app/api/v1/clients/bulk-archive-preflight/route.ts:30` —
|
||||||
|
`withPermission('clients', 'delete', …)`.
|
||||||
|
|
||||||
|
**Divergence:** `sales_agent` (`clients.delete:false`,
|
||||||
|
seed-permissions.ts:246) and `viewer` (`clients.delete:false`,
|
||||||
|
seed-permissions.ts:323) both see the Archive bulk action. Selecting
|
||||||
|
clients and pressing it fires the `BulkArchiveWizard`, which calls
|
||||||
|
`bulk-archive-preflight` (returns 403) followed by `bulk` archive
|
||||||
|
(also 403). The wizard surfaces this as an opaque error.
|
||||||
|
|
||||||
|
**Fix:** mirror the `canHardDelete` pattern — compute
|
||||||
|
`const canBulkArchive = can('clients', 'delete');` near
|
||||||
|
`client-list.tsx:53` and conditionally include the Archive entry.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### H3. Bulk add_tag / remove_tag exposed to viewer (clients.edit:false)
|
||||||
|
|
||||||
|
- **UI:** `src/components/clients/client-list.tsx:165-181` — the "Add
|
||||||
|
tag" / "Remove tag" bulk actions render with no permission check.
|
||||||
|
- **Server:** `src/app/api/v1/clients/bulk/route.ts:40-57` — both gate
|
||||||
|
on `clients.edit`.
|
||||||
|
|
||||||
|
**Divergence:** A `viewer` can multi-select rows, click "Add tag" or
|
||||||
|
"Remove tag", pick a tag in the dialog, hit "Apply", and receive a 403. The standalone bulk tag dialog has no inline gating to prevent
|
||||||
|
this.
|
||||||
|
|
||||||
|
**Fix:** the bulk action menu entries should gate on
|
||||||
|
`can('clients', 'edit')`. (Sales agent and above pass; only `viewer`
|
||||||
|
and `residential_partner` see the bug.)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### H4. `client-merge-log.surviving_client_id` enforcement absent from per-row port check on bulk hard-delete
|
||||||
|
|
||||||
|
- **Server:** `src/lib/services/client-hard-delete.service.ts:269-272`
|
||||||
|
|
||||||
|
The bulk preflight loads **every** row in the port
|
||||||
|
(`db.select(...).from(clients).where(eq(clients.portId, args.portId))`)
|
||||||
|
into memory, then validates the requested `clientIds` against that map.
|
||||||
|
That's correct for tenant isolation — a foreign-port id can't appear in
|
||||||
|
the map — but the inner loop at lines 364-389 then re-fetches each
|
||||||
|
client by `(id, portId)` and **silently skips** rows where the second
|
||||||
|
fetch returns nothing (line 377: `if (!c) continue;`). If a client is
|
||||||
|
archived between preflight and execute by another operator, the bulk
|
||||||
|
delete reports `deletedCount` lower than the requested set with no
|
||||||
|
error — the operator has no way to tell which ids were skipped.
|
||||||
|
|
||||||
|
**Divergence (perm-adjacent):** the per-row gate is enforced for
|
||||||
|
tenancy but the failure mode masquerades as success. Combined with
|
||||||
|
the route's all-or-nothing `withPermission` at the top, a
|
||||||
|
`permanently_delete_clients`-bearing operator can quietly under-delete.
|
||||||
|
|
||||||
|
**Fix:** when `c` is null, push the id into a `skipped: string[]`
|
||||||
|
array and return it in the response so the UI can surface "3
|
||||||
|
deleted, 1 skipped (not archived / removed by another user)".
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## MEDIUM
|
||||||
|
|
||||||
|
### M1. `external-eoi` upload allows any role with `documents.upload_signed` regardless of `interests.edit`
|
||||||
|
|
||||||
|
- **UI:** `src/components/interests/interest-detail-header.tsx:382-395`
|
||||||
|
— `<PermissionGate resource="documents" action="upload_signed">`.
|
||||||
|
- **Server:** `src/app/api/v1/interests/[id]/external-eoi/route.ts:8`
|
||||||
|
— `withPermission('documents', 'upload_signed', …)`.
|
||||||
|
|
||||||
|
**Divergence:** UI and server agree on the permission, but the seed
|
||||||
|
matrix has `documents.upload_signed:true` for `sales_agent` (line 264) AND any custom role with that flag — uploading an externally
|
||||||
|
signed EOI mutates the **interest** (it's the operative `signedDocument`
|
||||||
|
that flips the interest into a "signed" state inside
|
||||||
|
`uploadExternallySignedEoi`). The user only needs `documents.upload_signed`,
|
||||||
|
not `interests.edit`. A custom role with `documents.upload_signed:true`
|
||||||
|
|
||||||
|
  and `interests.edit:false` can mutate the interest's effective state.
|
||||||
|
|
||||||
|
**Fix:** add a second gate inside the route handler:
|
||||||
|
`if (!ctx.isSuperAdmin && !ctx.permissions?.interests?.edit) throw new ForbiddenError(...)`.
|
||||||
|
Rationale: signing a doc against an interest is an interest-state
|
||||||
|
change, not just a document upload. Mirror the same check in
|
||||||
|
`<PermissionGate>` (use `<PermissionGate resource="interests" action="edit">`
|
||||||
|
nested inside the `documents.upload_signed` gate).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### M2. `change_stage` UI doesn't expose override checkbox in `InlineStagePicker` — server still accepts override
|
||||||
|
|
||||||
|
- **UI:** `src/components/interests/inline-stage-picker.tsx:52-58` —
|
||||||
|
the inline picker (used in the detail header at
|
||||||
|
`interest-detail-header.tsx:221`) sends only
|
||||||
|
`{ pipelineStage, reason }` and never sets `override:true`. Users
|
||||||
|
with `override_stage` get no UI affordance to actually use the
|
||||||
|
permission from the inline picker; they have to open the modal
|
||||||
|
`InterestStagePicker` (which does expose the checkbox at line 137).
|
||||||
|
Worse, when a user picks a stage that isn't a legal forward
|
||||||
|
transition, the inline picker just shows the toast from the server's
|
||||||
|
`ConflictError` — instead of "you need override; toggle this box".
|
||||||
|
- **Server:** `src/app/api/v1/interests/[id]/stage/route.ts:14-22` —
|
||||||
|
reads `body.override` and re-checks `interests.override_stage`
|
||||||
|
permission.
|
||||||
|
|
||||||
|
**Divergence:** UI and permission map diverge in the affordance, not
|
||||||
|
the gate. End-result: the `override_stage` permission is partially
|
||||||
|
unreachable from the inline picker. Sales managers / agents can
|
||||||
|
override only via the modal picker.
|
||||||
|
|
||||||
|
**Fix:** when the inline picker sees a transition that isn't allowed
|
||||||
|
by `canTransitionStage(currentStage, newStage)`, check
|
||||||
|
`can('interests', 'override_stage')` and either auto-set
|
||||||
|
`override:true` (with a confirmation) or surface a "Use override"
|
||||||
|
secondary action. Keep the inline picker UX; just don't let the
|
||||||
|
override permission be silently inaccessible from the most-used
|
||||||
|
path.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### M3. `sales_agent` granted `interests.override_stage:true` — possible copy-paste from sales_manager
|
||||||
|
|
||||||
|
- **Seed:** `src/lib/db/seed-permissions.ts:253` — `SALES_AGENT_PERMISSIONS.interests.override_stage = true`.
|
||||||
|
|
||||||
|
This is identical to `SALES_MANAGER_PERMISSIONS.interests.override_stage = true`
|
||||||
|
at line 176. The same `sales_agent` block has `delete:false` for
|
||||||
|
clients/interests/yachts/companies/files/etc — all the other
|
||||||
|
"trust-elevated" flags are explicitly stripped from sales_agent. The
|
||||||
|
ability to bypass the pipeline-stage transition table is a meaningful
|
||||||
|
trust elevation: it lets an agent skip prerequisites (e.g. mark an
|
||||||
|
interest as `eoi_signed` without an actual signed doc) which has
|
||||||
|
downstream implications for the public berths feed (`Under Offer`
|
||||||
|
status), the recommender's tier ladder, and the EOI bundle.
|
||||||
|
|
||||||
|
**Divergence:** likely intent vs. permission map. Worth confirming
|
||||||
|
with a product owner; if intentional, leave a code comment. If
|
||||||
|
unintentional, flip to `false`.
|
||||||
|
|
||||||
|
**Fix:** product decision. If demoted, also update
|
||||||
|
`src/components/admin/roles/role-form.tsx → DEFAULT_PERMISSIONS`
|
||||||
|
(noted in the file header at seed-permissions.ts:9) so the UI
|
||||||
|
default for new roles matches.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### M4. `bulk-archive-preflight` returns dossier even when client is in another port (defense-in-depth)
|
||||||
|
|
||||||
|
- **Server:** `src/app/api/v1/clients/bulk-archive-preflight/route.ts:33-62`
|
||||||
|
|
||||||
|
The route loops through `ids` and calls `getClientArchiveDossier(id, ctx.portId)`
|
||||||
|
for each. If a `clientId` belongs to another port, `getClientArchiveDossier`
|
||||||
|
throws and the route catches it (line 52-61) and returns a fallback row
|
||||||
|
with `blockers: ['<error message>']`. This leaks **the existence of an
|
||||||
|
unknown client id** — an attacker enumerating UUIDs can distinguish
|
||||||
|
"client doesn't exist" from "client exists but you can't see it" by
|
||||||
|
parsing the blocker text. The bulk hard-delete route has the same
|
||||||
|
shape but returns `NotFoundError`.
|
||||||
|
|
||||||
|
**Divergence (perm-adjacent):** the preflight route doesn't enforce a
|
||||||
|
per-id port check before falling through to the dossier service, and
|
||||||
|
the catch block leaks the failure mode in the response.
|
||||||
|
|
||||||
|
**Fix:** in the catch block, replace the dossier error message with a
|
||||||
|
generic `'Could not load dossier'` blocker. The operator already
|
||||||
|
selected these ids so they know the count; they don't need the inner
|
||||||
|
error.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LOW
|
||||||
|
|
||||||
|
### L1. `external-eoi` route doesn't enforce `interests.edit` defense-in-depth on the interest port
|
||||||
|
|
||||||
|
- **Server:** `src/app/api/v1/interests/[id]/external-eoi/route.ts:8-14`
|
||||||
|
|
||||||
|
The route receives `interestId` from the URL and passes it +
|
||||||
|
`ctx.portId` into `uploadExternallySignedEoi`. The service is
|
||||||
|
expected to enforce port isolation, but the route itself does no
|
||||||
|
upfront `(interestId, portId)` existence check before reading the
|
||||||
|
multipart body — meaning a cross-port id will fully process the
|
||||||
|
upload (read the file into memory) before the service rejects.
|
||||||
|
|
||||||
|
**Divergence:** not strictly a permission divergence; it's resource
|
||||||
|
waste from missing early port-ownership check. Low because the
|
||||||
|
service-level reject does close the security hole.
|
||||||
|
|
||||||
|
**Fix:** add a one-row `select` on `interests` matching `id` + `portId`
|
||||||
|
before parsing form data, throw `NotFoundError` on miss.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
- 0 critical
|
||||||
|
- 4 high (H1–H4)
|
||||||
|
- 4 medium (M1–M4)
|
||||||
|
- 1 low (L1)
|
||||||
|
|
||||||
|
Top recommendation: H1 (webhook-replay UI gate) is a
|
||||||
|
ten-line fix that closes a 403-toast UX bug. H2 + H3 (bulk-archive +
|
||||||
|
bulk-tag UI gates) are also trivial and remove the same class of bug
|
||||||
|
across the bulk actions menu. M3 (sales_agent override_stage) needs a
|
||||||
|
product decision, not code; flag it before shipping the audit.
|
||||||
220
docs/audit-reliability-2026-05-06.md
Normal file
220
docs/audit-reliability-2026-05-06.md
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
# Reliability audit — 2026-05-06 (focused, post-batch deltas)
|
||||||
|
|
||||||
|
Scope: NEW services from the recent archive/restore/hard-delete/external-EOI batches.
|
||||||
|
Out of scope (already covered in `docs/audit-comprehensive-2026-05-06.md`):
|
||||||
|
worker imports, rate limits, hard-delete error message UX, smart-restore
|
||||||
|
dead reversal applier, bulk hard-delete redis loop, audit log spam.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Critical
|
||||||
|
|
||||||
|
### C1. Bulk archive enqueues zero post-commit side effects
|
||||||
|
|
||||||
|
- **File:** `src/app/api/v1/clients/bulk/route.ts:68-134`
|
||||||
|
- **Scenario:** When the bulk wizard archives 100 clients with high-stakes
|
||||||
|
reasons, `archiveClientWithDecisions` returns `externalCleanups` and
|
||||||
|
`releasedBerths` arrays per-client, but `runBulk` discards the return
|
||||||
|
value. Documenso envelopes that the wizard marked `void_documenso`
|
||||||
|
never get queued, and "next-in-line" notifications never fire. The
|
||||||
|
database is left in `documents.status='cancelled'` with the live
|
||||||
|
Documenso envelope still out for signature — the signer can complete
|
||||||
|
a legally-binding envelope that the CRM thinks is voided.
|
||||||
|
- **Fix:** Make the per-row callback return the result, then loop over
|
||||||
|
`results` after `runBulk` to enqueue Documenso voids and fire
|
||||||
|
next-in-line notifications (mirroring the single-client route).
|
||||||
|
Defaulting `documentDecisions` to `'leave'` (line 113-116) hides the
|
||||||
|
symptom for the bulk wizard but isn't enough — the single-client
|
||||||
|
service can still surface this if the bulk path is ever generalized.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## High
|
||||||
|
|
||||||
|
### H1. Restore wizard silently drops every released berth
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-restore.service.ts:359-372`
|
||||||
|
- **Scenario:** `applyReversal` for `berth_released` is a no-op with a
|
||||||
|
comment saying "v1 leaves the berth available". But the dossier (line
|
||||||
|
122-129) classifies these as `autoReversible` and the UI tells the
|
||||||
|
operator "still available — re-attaching to the restored client". The
|
||||||
|
wizard increments `autoReversed` and the audit log records a
|
||||||
|
successful auto-reverse — but nothing actually happens. Operator
|
||||||
|
thinks restore re-linked their berth; it didn't.
|
||||||
|
- **Fix:** Either (a) actually re-link by persisting the original
|
||||||
|
`interestId` in the `berth_released` decision detail (it's already
|
||||||
|
there, line 211) and re-inserting an `interestBerths` row + flipping
|
||||||
|
the berth status back to `under_offer`, or (b) reclassify these as
|
||||||
|
`reversibleWithPrompt` with copy that says "berth left available —
|
||||||
|
re-add via the interest detail page".
|
||||||
|
|
||||||
|
### H2. Smart-archive berth status update has TOCTOU race
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-archive.service.ts:191-207`
|
||||||
|
- **Scenario:** Berth row is read via `dossier.berths` (read outside the
|
||||||
|
tx) and modified inside the tx without a `for update` lock on
|
||||||
|
`berths`. Two concurrent flows — e.g. operator A archives client X
|
||||||
|
while operator B sells berth A1 to client Y — can race: A reads
|
||||||
|
`berth.status === 'sold' → false`, B's tx commits sold, A's tx then
|
||||||
|
flips it back to `available`. The "still under offer" subselect
|
||||||
|
doesn't catch this because berth.status is the source of truth, not
|
||||||
|
interest_berths.
|
||||||
|
- **Fix:** Add `tx.select(...).from(berths).where(eq(berths.id, d.berthId)).for('update')`
|
||||||
|
before the status flip and re-check `status !== 'sold'` against the
|
||||||
|
locked row.
|
||||||
|
|
||||||
|
### H3. Bulk archive can pick the wrong interest for berth release
|
||||||
|
|
||||||
|
- **File:** `src/app/api/v1/clients/bulk/route.ts:95-103`
|
||||||
|
- **Scenario:** When a client has multiple interests linked to the same
|
||||||
|
berth, the bulk wizard picks `dossier.interests.find((i) =>
|
||||||
|
i.primaryBerthMooring === b.mooringNumber)` and falls back to
|
||||||
|
`dossier.interests[0]?.interestId ?? ''`. The fallback to the
|
||||||
|
first-interest-or-empty-string can hand `archiveClientWithDecisions`
|
||||||
|
an `interestId` that was never linked to that berth — so the
|
||||||
|
`delete from interest_berths where berthId=… and interestId=…`
|
||||||
|
matches zero rows and the link is silently retained. Worse: an empty
|
||||||
|
string `''` reaches the delete, which still matches zero rows but
|
||||||
|
leaves the berth status check believing the link was removed.
|
||||||
|
- **Fix:** Build the berth→interest map from `interestBerthRows` (the
|
||||||
|
authoritative join) rather than guessing by `primaryBerthMooring`,
|
||||||
|
and skip berths with no resolvable interest rather than emitting an
|
||||||
|
empty-string interestId.
|
||||||
|
|
||||||
|
### H4. External EOI runs four DB writes outside a transaction
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/external-eoi.service.ts:67-155`
|
||||||
|
- **Scenario:** `getStorageBackend().put()`, `files.insert`,
|
||||||
|
`documents.insert`, `documentEvents.insert`, and the interests
|
||||||
|
update happen as five independent operations. If any one fails after
|
||||||
|
the storage upload, you're left with an orphan PDF in S3/MinIO and
|
||||||
|
partial DB state. If the documents insert fails after the file
|
||||||
|
insert, the file row points to a storage key with no document
|
||||||
|
referencing it — and the interest never advances.
|
||||||
|
- **Fix:** Wrap files/documents/documentEvents/interests in a single
|
||||||
|
`db.transaction`. Storage upload stays outside (S3 isn't
|
||||||
|
transactional) but on tx failure, schedule a cleanup job that deletes
|
||||||
|
the orphan storage object, or accept the orphan and add a janitor.
|
||||||
|
|
||||||
|
### H5. Bulk wizard double-submit re-archives the same client and racy errors
|
||||||
|
|
||||||
|
- **File:** `src/app/api/v1/clients/bulk/route.ts:68-120` +
|
||||||
|
`src/lib/services/client-archive.service.ts:165-173`
|
||||||
|
- **Scenario:** The single-client `archiveClientWithDecisions` locks
|
||||||
|
the row and throws `ConflictError('Client is already archived')` on
|
||||||
|
re-entry — good. But `runBulk` swallows the error string and returns
|
||||||
|
it as `{ok:false, error:"Client is already archived"}` for that
|
||||||
|
client. If the bulk wizard double-submits (network retry, double
|
||||||
|
click), partial successes from the first request now look like
|
||||||
|
per-client failures in the response, confusing the operator. There's
|
||||||
|
no idempotency key on the bulk submit.
|
||||||
|
- **Fix:** Treat `ConflictError('already archived')` as success in the
|
||||||
|
bulk per-row handler (the desired end state is reached). Or add an
|
||||||
|
idempotency-key header on the bulk endpoint that short-circuits a
|
||||||
|
duplicate request with the cached response.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Medium
|
||||||
|
|
||||||
|
### M1. Hard-delete `clientMergeLog.surviving_client_id` deletes audit history
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-hard-delete.service.ts:209`
|
||||||
|
- **Scenario:** The comment says "merged records remain in the log
|
||||||
|
because mergedClientId has no FK", but the delete is wider than
|
||||||
|
needed: it removes every merge-log row where this client was the
|
||||||
|
survivor. If client X (being deleted) previously absorbed clients
|
||||||
|
A/B/C, the audit trail of those merges is lost on X's deletion. The
|
||||||
|
surviving rows that remain (`mergedClientId = X`) are now
|
||||||
|
inconsistent — they reference a survivor that no longer exists.
|
||||||
|
- **Fix:** Either preserve the survivor rows by setting
|
||||||
|
`surviving_client_id = NULL` (requires column nullable) or keep the
|
||||||
|
current behavior but document it more visibly. At minimum, log the
|
||||||
|
deleted merge-log row count so operators can investigate gaps.
|
||||||
|
|
||||||
|
### M2. Documenso void worker has no max-retry guard for non-404 errors
|
||||||
|
|
||||||
|
- **File:** `src/lib/queue/workers/documents.ts:19-37`
|
||||||
|
- **Scenario:** `voidDocument` throws `CodedError` on non-404 failures
|
||||||
|
(auth error, network blip, Documenso 500). BullMQ retries with
|
||||||
|
backoff, but there's no per-job idempotency check — the second
|
||||||
|
retry hits the same envelope, voidDocument's 404 short-circuit only
|
||||||
|
kicks in if Documenso has actually voided it on the first retry
|
||||||
|
before the API call returned an error. A persistent 401 / 403 will
|
||||||
|
retry forever (until BullMQ exhausts attempts) and the documents row
|
||||||
|
stays `cancelled` in the CRM with the envelope still live in
|
||||||
|
Documenso. The DLQ is mentioned in the comment but the worker
|
||||||
|
doesn't surface a DLQ alert hook.
|
||||||
|
- **Fix:** On exhaustion, write back to `documents` (e.g.
|
||||||
|
`cancellation_failed=true`) and emit an admin notification so the
|
||||||
|
envelope can be voided manually.
|
||||||
|
|
||||||
|
### M3. Next-in-line notification fan-out unhandled rejection
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/next-in-line-notify.service.ts:75-87`
|
||||||
|
- **Scenario:** Each `void createNotification(...)` is a fire-and-forget
|
||||||
|
promise with no `.catch` handler. If `notifications.service`
|
||||||
|
dispatches to a DB that's transiently down, the unhandled rejection
|
||||||
|
will surface in the Node process with no recipient context (the
|
||||||
|
closure captured `userId` is in the stack but pino won't include it
|
||||||
|
unless explicitly logged). Process-level handlers will log it but
|
||||||
|
individual recipients silently lose their notification.
|
||||||
|
- **Fix:** `.catch((err) => logger.warn({err, userId, berthId:
|
||||||
|
input.berthId}, 'next-in-line notification failed'))`.
|
||||||
|
|
||||||
|
### M4. Restore service uses `any` for transaction type
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-restore.service.ts:354-355`
|
||||||
|
- **Scenario:** `applyReversal(tx: any, ...)` defeats Drizzle's type
|
||||||
|
safety. A future schema rename (e.g. `yachts.status` enum change)
|
||||||
|
won't fail at compile time inside this function. Combined with the
|
||||||
|
documented v1 no-op for `berth_released`, the function looks
|
||||||
|
innocuous but carries the most risk.
|
||||||
|
- **Fix:** Use the proper Drizzle tx type — `Parameters<Parameters<typeof
|
||||||
|
db.transaction>[0]>[0]` or a named type alias from
|
||||||
|
`@/lib/db/types.ts` if one exists.
|
||||||
|
|
||||||
|
### M5. interests.changeInterestStage milestones write outside tx
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/interests.service.ts:630-648`
|
||||||
|
- **Scenario:** The override path (and normal path) writes
|
||||||
|
`pipelineStage` in one update and milestone fields
|
||||||
|
(`dateEoiSent`, `dateContractSigned`, etc.) in a second update. If
|
||||||
|
the process crashes between the two, the stage advances but the
|
||||||
|
milestone is never recorded. Funnel/conversion math then under-
|
||||||
|
counts that interest. Over-the-wire this is rare but the audit log
|
||||||
|
fires before the milestone update succeeds, so the audit trail
|
||||||
|
claims a complete transition that's actually half-applied.
|
||||||
|
- **Fix:** Combine both into a single update statement, computing the
|
||||||
|
milestone fields in JS and merging them into the `set({...})` clause.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Low
|
||||||
|
|
||||||
|
### L1. Smart-archive coalesces invoice notes via SQL string concat
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-archive.service.ts:288-291`
|
||||||
|
- **Scenario:** ``notes: sql`coalesce(${invoices.notes}, '') || ${...}` `` embeds `new Date().toISOString()` and the action label inside a
|
||||||
|
parameterized string. The values are bound, so it's not an injection
|
||||||
|
risk, but the `\n[archive ...]` marker is appended unconditionally —
|
||||||
|
re-running the archive on a not-yet-committed client would double
|
||||||
|
the marker. Combined with H5 (no idempotency on bulk), a retry could
|
||||||
|
bloat invoice notes with duplicate markers.
|
||||||
|
- **Fix:** Append only when the marker isn't already present, or rely
|
||||||
|
on the `clients.archivedAt is null` precheck (which already guards
|
||||||
|
re-entry) and accept the duplicate as theoretically impossible.
|
||||||
|
|
||||||
|
### L2. Hard-delete `requestHardDeleteCode` reveals client existence pre-archive
|
||||||
|
|
||||||
|
- **File:** `src/lib/services/client-hard-delete.service.ts:77-85`
|
||||||
|
- **Scenario:** A user without `admin.permanently_delete_clients`
|
||||||
|
shouldn't reach this service, so this is theoretical, but the
|
||||||
|
ConflictError "Client must be archived" leaks the existence of an
|
||||||
|
unarchived client to anyone who can reach the route. The audit doc
|
||||||
|
flagged hard-delete error messages already (out of scope), but this
|
||||||
|
specific error path isn't covered there.
|
||||||
|
- **Fix:** Same as the audit-doc finding for the symmetric path —
|
||||||
|
return a generic `NotFoundError` instead of distinguishing
|
||||||
|
"not found" from "not archived" externally; log the distinction
|
||||||
|
internally only.
|
||||||
147
docs/berth-feature-handoff-prompt.md
Normal file
147
docs/berth-feature-handoff-prompt.md
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
# Handoff prompt for new Claude Code session
|
||||||
|
|
||||||
|
Copy everything below the `---` line into the new chat as your first message.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
I'm continuing work on a comprehensive multi-feature push that was fully designed in a prior session but not yet implemented. The complete plan lives at `docs/berth-recommender-and-pdf-plan.md` (~1030 lines). **Read that file end-to-end before doing anything else — every design decision, schema change, edge case, and confirmed answer to a product question is captured there.** Don't re-litigate decisions; if something seems unclear, the answer is almost certainly in the plan.
|
||||||
|
|
||||||
|
## What the project is
|
||||||
|
|
||||||
|
A multi-tenant marina/port-management CRM at `/Users/matt/Repos/new-pn-crm`. Next.js 15 App Router, React 19, TypeScript strict, Drizzle ORM on Postgres, MinIO for files, BullMQ on Redis, better-auth, shadcn/ui, Tailwind. See `CLAUDE.md` for the conventions.
|
||||||
|
|
||||||
|
## What we're building (high level)
|
||||||
|
|
||||||
|
The plan bundles 8 capabilities into one branch (`feat/berth-recommender`):
|
||||||
|
|
||||||
|
1. **/clients + /interests list-column fix** (the original bug — list views show `-` everywhere because the service didn't join contacts/yachts)
|
||||||
|
2. **Full NocoDB Berths import** + seeding + mooring-number normalization (current CRM has `A-01..E-18`; canonical is `A1..E18`)
|
||||||
|
3. **Schema refactor** to many-to-many `interest_berths` with role flags (`is_primary`, `is_specific_interest`, `is_in_eoi_bundle`)
|
||||||
|
4. **Berth recommender** (SQL ranking, tier ladder, heat scoring, UI panel) — no AI; pure SQL
|
||||||
|
5. **EOI bundle** support (multi-berth EOIs + range formatter for the Documenso PDF: `["A1","A2","A3","B5","B6"]` → `"A1-A3, B5-B6"`)
|
||||||
|
6. **Pluggable storage backend** (s3-compatible OR local filesystem) so admins can run without MinIO if they want
|
||||||
|
7. **Per-berth PDFs** (versioned uploads, OCR-based reverse parser, conflict-resolution diff dialog)
|
||||||
|
8. **Sales send-out emails** (berth PDF + brochure) with full audit + size-aware fallback to download links
|
||||||
|
|
||||||
|
## Phase ordering (from plan §2)
|
||||||
|
|
||||||
|
```
|
||||||
|
Phase 0: Full NocoDB berth import + mooring normalization + 5 new pricing columns
|
||||||
|
Phase 1: /clients + /interests list column fix
|
||||||
|
Phase 2: M:M interest_berths schema refactor + desired dimensions on interests
|
||||||
|
Phase 3: CRM /api/public/berths endpoint + website cutover
|
||||||
|
Phase 4: Recommender SQL + tier ladder + heat + UI panel
|
||||||
|
Phase 5: EOI bundle + range formatter
|
||||||
|
Phase 6a: Pluggable storage backend + migration CLI + admin UI
|
||||||
|
Phase 6b: Per-berth PDF storage (versioned) + reverse parser
|
||||||
|
Phase 7: Sales send-outs + brochure admin + email-from settings
|
||||||
|
Phase 8: CLAUDE.md updates + final validation
|
||||||
|
```
|
||||||
|
|
||||||
|
**Start with Phase 0**.
|
||||||
|
|
||||||
|
## Working tree state at handoff
|
||||||
|
|
||||||
|
- Branch: `main` (you'll create `feat/berth-recommender` from here)
|
||||||
|
- Recent commits (already pushed):
|
||||||
|
- `8699f81 chore(style): codebase em-dash sweep + minor layout polish`
|
||||||
|
- `d62822c fix(migration): NocoDB import safety + dedup helpers + lead-source backfill`
|
||||||
|
- `089f4a6 feat(receipts): upload guide page + scanner head-tag fix`
|
||||||
|
- `77ad10c feat(dashboard): custom date range + KPI port-hydration gate`
|
||||||
|
- `e598cc0 feat(layout): unified Inbox + UserMenu extraction`
|
||||||
|
- `f5772ce feat(analytics): Umami integration with per-port admin settings`
|
||||||
|
- `49d34e0 feat(website-intake): dual-write endpoint + migration chain repair`
|
||||||
|
- Untracked / uncommitted at handoff:
|
||||||
|
- `docs/berth-recommender-and-pdf-plan.md` (the plan — read this first)
|
||||||
|
- `docs/berth-feature-handoff-prompt.md` (this file)
|
||||||
|
- `berth_pdf_example/` (two reference files — see below)
|
||||||
|
- `.env.example` (modified — adds `WEBSITE_INTAKE_SECRET=`; pre-commit hook blocks `.env*` files so user adds this manually)
|
||||||
|
- Dev DB state:
|
||||||
|
- 245 clients (210 with no `nationality_iso` — Phase 1 backfills from primary phone's `value_country`)
|
||||||
|
- 4 test rows in `website_submissions` (from a previous live audit; safe to ignore)
|
||||||
|
- 90 berths with `mooring_number` in `A-01` format (Phase 0 normalizes to `A1`)
|
||||||
|
- vitest: 956 tests passing
|
||||||
|
- tsc: clean (one pre-existing issue in `scripts/smoke-test-redirect.ts` that's unrelated)
|
||||||
|
|
||||||
|
## Reference files
|
||||||
|
|
||||||
|
- `berth_pdf_example/Berth_Spec_Sheet_A1.pdf` (358 KB) — sample per-berth PDF. **0 AcroForm fields** (confirmed via pdf-lib) so OCR with positional heuristics is the primary parser tier; the AcroForm tier is built defensively. Plan §9.2 captures the layout structure.
|
||||||
|
- `berth_pdf_example/Port-Nimara-Brochure-March-2025_5nT92g.pdf` (10.26 MB) — sample brochure. Sized so it ships as an attachment under the 15 MB threshold. Plan §11.1 covers brochure handling.
|
||||||
|
|
||||||
|
## NocoDB access
|
||||||
|
|
||||||
|
You have `mcp__NocoDB_Base_-_Port_Nimara__*` tools available. Tables you'll touch most:
|
||||||
|
|
||||||
|
- `mczgos9hr3oa9qc` — Berths (Phase 0 imports from here; mooring numbers are stored as `A1..E18`)
|
||||||
|
- `mbs9hjauug4eseo` — Interests (the combined client+deal table the old system used)
|
||||||
|
|
||||||
|
## Branch & commit conventions
|
||||||
|
|
||||||
|
- Create the branch: `git checkout -b feat/berth-recommender`
|
||||||
|
- Commit messages match recent history style: `<type>(<scope>): <subject>` lowercase, terse subject, body explains why not what.
|
||||||
|
- **Pre-commit hook blocks any `.env*` file** including `.env.example`. If you need to update `.env.example`, leave it staged and tell the user to commit manually with `--no-verify` (they're aware of this).
|
||||||
|
- **Don't push without explicit user permission.** Commits are fine; pushes need approval.
|
||||||
|
- **Don't run `git rebase`, `git push --force`, or anything destructive without checking.** The branch is solo-owned but the repo's `main` is shared.
|
||||||
|
|
||||||
|
## User communication preferences (from prior session)
|
||||||
|
|
||||||
|
- Direct, no fluff. If something is a bad idea, say so — don't be sycophantic.
|
||||||
|
- When proposing changes, include trade-offs explicitly.
|
||||||
|
- For multi-question decisions, use `AskUserQuestion` rather than long bulleted lists.
|
||||||
|
- Run validation (vitest + tsc) at logical checkpoints. Don't ship a commit with regressions.
|
||||||
|
- The user prefers small focused commits over mega-commits. Within Phase 0 alone there will probably be 2-3 commits (e.g. mooring normalization, schema additions, NocoDB import script).
|
||||||
|
|
||||||
|
## Critical rules (from plan §14)
|
||||||
|
|
||||||
|
Eleven 🔴 critical items requiring tests before their phase ships:
|
||||||
|
|
||||||
|
1. NocoDB mooring collisions → unique constraint + ON CONFLICT
|
||||||
|
2. Non-PDF disguised upload → magic-byte check
|
||||||
|
3. Recipient email typos → pre-send confirmation
|
||||||
|
4. XSS in email body markdown → DOMPurify + payload tests
|
||||||
|
5. SMTP credentials silently failing → loud error + failed `document_sends` row
|
||||||
|
6. Wrong-environment `CRM_PUBLIC_URL` → health-check env match
|
||||||
|
7. Mooring format drift breaking `/berths/A1` URLs → Phase 0 normalization gates Phase 3
|
||||||
|
8. Multi-port isolation in recommender → explicit `port_id` filter + cross-port test
|
||||||
|
9. Permission escalation on SMTP creds → per-port admin only, no rep visibility
|
||||||
|
10. Filesystem backend in multi-node deployment → refuse to start; documented + health-check enforced
|
||||||
|
11. Path traversal via storage key in filesystem mode → strict regex validation + path realpath check
|
||||||
|
|
||||||
|
## Pending items (from plan §9)
|
||||||
|
|
||||||
|
These are non-blocking but worth knowing:
|
||||||
|
|
||||||
|
- Sample brochure already provided (the 10.26 MB file above).
|
||||||
|
- SMTP app password for `sales@portnimara.com` — not yet obtained; expected close to production cutover. Phase 7 ships the admin UI immediately and the credential gets entered when available.
|
||||||
|
- `CRM_PUBLIC_URL` confirmed as `https://crm.portnimara.com` once live; configurable via env.
|
||||||
|
- GDPR cascade behavior for `document_sends` (delete vs. anonymize-PII vs. keep) — left `OPEN` in §14.10, default lean: anonymize-PII. Revisit when Phase 7 schema lands.
|
||||||
|
|
||||||
|
## Scope reminder
|
||||||
|
|
||||||
|
- **No prod data depends on the current CRM schema** — refactors don't need backwards-compatibility shims. But every schema change still ships as a Drizzle migration with `pnpm db:generate`.
|
||||||
|
- **Pluggable storage** rejects Postgres `bytea` as an option (§4.7a). The two backends are s3-compatible (MinIO/AWS/B2/R2/etc.) and local filesystem. Filesystem is single-node only.
|
||||||
|
|
||||||
|
## What to do first
|
||||||
|
|
||||||
|
1. Read `docs/berth-recommender-and-pdf-plan.md` end-to-end. Don't skim. The edge-case audit in §14 alone is critical context.
|
||||||
|
2. Confirm you've understood the plan by stating back the 8-phase outline and the 11 critical items, then ask the user if they want to proceed with Phase 0.
|
||||||
|
3. Once approved, create `feat/berth-recommender` and start Phase 0.
|
||||||
|
|
||||||
|
Phase 0 deliverables (per plan):
|
||||||
|
|
||||||
|
- One commit normalizing existing CRM mooring numbers from `A-01` → `A1` form (via `regexp_replace` migration). Delete the offending `scripts/load-berths-to-port-nimara.ts`.
|
||||||
|
- One commit adding the 5 new pricing columns (`weekly_rate_high_usd`, `weekly_rate_low_usd`, `daily_rate_high_usd`, `daily_rate_low_usd`, `pricing_valid_until`) plus `last_imported_at`. Run `pnpm db:generate`. Verify `meta/_journal.json` prevId chain stays contiguous.
|
||||||
|
- One commit adding `scripts/import-berths-from-nocodb.ts` — the idempotent NocoDB import (handles updates, preserves CRM-side edits via `last_imported_at vs updated_at` check, `pg_advisory_lock`, dry-run flag, etc. per §4.1 and §14.1).
|
||||||
|
- Update `src/lib/db/seed-data.ts` with the imported berth set so fresh installs get them.
|
||||||
|
- Final vitest + tsc validation at the end of Phase 0.
|
||||||
|
|
||||||
|
## Don't
|
||||||
|
|
||||||
|
- Don't push to remote during this session (user will batch the push later).
|
||||||
|
- Don't commit `.env*` files (hook blocks them anyway).
|
||||||
|
- Don't edit `.gitignore` to exclude generated artifacts; the repo's existing ignores are correct.
|
||||||
|
- Don't add documentation files unless the plan asks for them — the plan itself is the doc.
|
||||||
|
- Don't add features not in the plan. If something seems missing, ask.
|
||||||
|
- Don't use AI for the recommender (plan §1 + §13). Pure SQL ranking.
|
||||||
|
|
||||||
|
Once you've read the plan and confirmed understanding, ask me whether to proceed with Phase 0.
|
||||||
1086
docs/berth-recommender-and-pdf-plan.md
Normal file
1086
docs/berth-recommender-and-pdf-plan.md
Normal file
File diff suppressed because it is too large
Load Diff
722
docs/documenso-build-plan.md
Normal file
722
docs/documenso-build-plan.md
Normal file
@@ -0,0 +1,722 @@
|
|||||||
|
# Documenso signing-flow build plan
|
||||||
|
|
||||||
|
Captures every Documenso-related piece that isn't shipped yet, in attack order. A fresh session should be able to pick this up without re-reading the whole conversation.
|
||||||
|
|
||||||
|
**Companion docs:**
|
||||||
|
|
||||||
|
- [docs/documenso-integration-audit.md](./documenso-integration-audit.md) — what's already built, v1/v2 endpoint mapping, nginx CORS block
|
||||||
|
- Old system reference: [client-portal/server/api/eoi/generate-quick-eoi.ts](../client-portal/server/api/eoi/generate-quick-eoi.ts), [client-portal/server/api/webhooks/documenso.post.ts](../client-portal/server/api/webhooks/documenso.post.ts), [client-portal/server/services/documenso-notifications.ts](../client-portal/server/services/documenso-notifications.ts), [Port Nimara/Website/pages/sign/[type]/[token].vue](../../Port%20Nimara/Website/pages/sign/%5Btype%5D/%5Btoken%5D.vue)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Locked design decisions (from user, do NOT re-ask)
|
||||||
|
|
||||||
|
| Q | Decision |
|
||||||
|
| --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
|
| Embedded signing host | `portnimara.com/sign/<role>/<token>` (marketing website hosts the embed page; CRM emits URLs in this format) |
|
||||||
|
| Initial "please sign" email | **Per-port admin setting** `eoi_send_mode`: `auto` = send branded email immediately on generate; `manual` = generate + show URL + Send button |
|
||||||
|
| Contract / Reservation generation | **Upload-and-place-fields per deal only.** EOI is the only template-driven flow. (Resolved Q6 — template-fallback dropped.) |
|
||||||
|
| Reminder cadence | **Manual by default.** Rep clicks "Send reminder" button. Per-doc opt-in for auto-reminders at upload time. (Resolved Q1) |
|
||||||
|
| Document expiration | **Never expire.** No `expiresAt` UI in v1. (Resolved Q2) |
|
||||||
|
| Approver vs CC | **Two concepts**: `APPROVER` = real Documenso recipient that gates signing; `Completion CC` = passive recipient that only receives the signed PDF. (Resolved Q4) |
|
||||||
|
| Witness | **First-class signer role.** Configurable per-document; full reminder/tracking flow. (Resolved Q7) |
|
||||||
|
| Per-port developer label | **Configurable** via `documenso_developer_label` / `documenso_approver_label`. (Resolved Q8 bonus) |
|
||||||
|
| Multi-port template config | All Documenso settings are per-port via `/[portSlug]/admin/documenso` (already wired) |
|
||||||
|
| Documenso API version | Both v1 + v2 supported. Per-port config picks. v1 is prod (1.32) — primary. v2 unlocks embed + envelope |
|
||||||
|
| nginx CORS | User applies manually. Block is in [docs/documenso-integration-audit.md](./documenso-integration-audit.md). Supports multi-origin via `set $cors_origin` regex |
|
||||||
|
| Signer override | **Hybrid** — template docs (EOI) keep template-fixed signers (per-port settings fill the slots). Custom-uploaded docs (contract, reservation) get full per-deal signer customization. |
|
||||||
|
| Multi-berth | EOI keeps existing bundle support. Contract/reservation are custom-uploaded PDFs — no PDF form-fill, just Documenso signature/initials/date fields |
|
||||||
|
| Test mode | Reuse `EMAIL_REDIRECT_TO` env var (already redirects every outbound email + Documenso recipient) |
|
||||||
|
| Regenerate handling | Match old system: 3 retries to delete prior Documenso doc with 2-second wait. **Plus** a confirm modal: "Retain old EOI? (default no)" |
|
||||||
|
| Field placement strategy | **Auto-detect (anchor text scanner) + manual drag-drop UI as safety net.** Auto-detect populates the initial state; rep can drag/delete/reassign before sending. |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## What's already shipped (foundation)
|
||||||
|
|
||||||
|
Files in place; do NOT rebuild:
|
||||||
|
|
||||||
|
- `src/lib/services/port-config.ts` — extended with: `documenso_developer_name/email`, `documenso_approver_name/email`, `eoi_send_mode`, `embedded_signing_host`, `documenso_contract_template_id`, `documenso_reservation_template_id`
|
||||||
|
- `src/app/(dashboard)/[portSlug]/admin/documenso/page.tsx` — admin UI exposes every Documenso knob across 5 cards
|
||||||
|
- `src/lib/email/templates/document-signing.ts` — `signingInvitationEmail`, `signingCompletedEmail`, `signingReminderEmail` with per-port branding
|
||||||
|
- `src/lib/services/document-signing-emails.service.ts` — `sendSigningInvitation`, `sendSigningReminder`, `sendSigningCompleted`. Includes `transformSigningUrl(rawUrl, host, role)` for embed URL wrapping
|
||||||
|
- `src/lib/services/documenso-client.ts` — extended `DocumensoFieldType` to all 11 types: SIGNATURE, FREE_SIGNATURE, INITIALS, DATE, EMAIL, NAME, TEXT, NUMBER, CHECKBOX, DROPDOWN, RADIO. Plus typed `DocumensoTextFieldMeta`/`NumberFieldMeta`/`ChoiceFieldMeta` interfaces and `fieldTypeNeedsMeta(type)` helper
|
||||||
|
- `src/components/interests/interest-eoi-tab.tsx` — EOI workspace with active-doc hero, signing progress, paper-signed upload, history strip
|
||||||
|
- `src/components/interests/interest-contract-tab.tsx` — Contract workspace shell with paper-signed upload + "send for signing" placeholder dialog
|
||||||
|
- `src/components/interests/interest-reservation-tab.tsx` — Reservation workspace shell (clone of Contract)
|
||||||
|
- `src/components/interests/interest-tabs.tsx` — stage-conditional visibility wired
|
||||||
|
|
||||||
|
What works today end-to-end: generate EOI → Documenso template path → manual link sharing (rep copies URL out of UI). What does NOT yet work: auto-send branded invitation, cascading "your turn" emails, custom-doc upload-to-Documenso, embedded signing URL emission to the website, on-completion PDF distribution.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1 — EOI generate flow polish (~3 hours)
|
||||||
|
|
||||||
|
> **Updated for Q1, Q4, Q6, Q8 resolutions.** Adds manual-reminder endpoint, two new per-port label settings, drop of contract/reservation template settings, schema columns for completion CCs + auto-reminder. Also folds in webhook-secret hardening (Risk #7 Option A) and `transformSigningUrl` role mapping (Risk #5 fix).
|
||||||
|
|
||||||
|
**Why first**: Smallest surface area, validates the per-port `eoi_send_mode` setting works end-to-end, gets the cascading-email mental model in place before tackling the bigger pieces.
|
||||||
|
|
||||||
|
### Tasks
|
||||||
|
|
||||||
|
1. **Auto-send wiring**: in `src/components/documents/eoi-generate-dialog.tsx`, after `handleGenerate()` succeeds:
|
||||||
|
- Fetch port's `eoi_send_mode` (already on `getPortDocumensoConfig(portId)`)
|
||||||
|
- If `auto`: server-side already sent the doc to Documenso with `sendEmail: false`. Now call new endpoint `POST /api/v1/documents/[id]/send-invitation` (build it) which:
|
||||||
|
- Looks up the document's signers
|
||||||
|
- Calls `sendSigningInvitation()` for the first signer (the client; signing order 1)
|
||||||
|
- Stores `sent_at` timestamp on the signer row
|
||||||
|
- If `manual`: do nothing. Surface the signing URL in the EOI tab + a "Send invitation" button that hits the same endpoint.
|
||||||
|
|
||||||
|
2. **Regenerate confirm modal**: when EOI tab's "Generate EOI" button is clicked AND a Documenso doc already exists for this interest (`activeDoc !== null`):
|
||||||
|
- Show a `<Dialog>` asking: "There's already an EOI in flight. Regenerating will create a new document and the existing one will be cancelled."
|
||||||
|
- Two buttons: "Cancel" (default), "Regenerate" (destructive)
|
||||||
|
- Below the buttons, a checkbox: "Keep the previous EOI in Documenso (don't delete)" — defaults UNCHECKED
|
||||||
|
- On confirm: if checkbox unchecked, call `voidDocument(oldId, portId)` with 3 retries + 2-second wait between (mirror old system's `generate-quick-eoi.ts` lines 110-162). Then run the normal generate flow.
|
||||||
|
|
||||||
|
3. **Send-invitation endpoint**: new file `src/app/api/v1/documents/[id]/send-invitation/route.ts`:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
POST /api/v1/documents/[id]/send-invitation
|
||||||
|
Body: { recipientId?: string } // optional — defaults to first unsigned recipient
|
||||||
|
```
|
||||||
|
|
||||||
|
- Loads the document + signers
|
||||||
|
- Resolves the target recipient (passed-in or first unsigned in signing order)
|
||||||
|
- Resolves port's documenso config + the recipient's signing URL from the document_signers row
|
||||||
|
- Calls `sendSigningInvitation` from the email service
|
||||||
|
- Updates `document_signers.invited_at` (need to add column — see schema migration below)
|
||||||
|
|
||||||
|
4. **Schema migration**: add `invited_at` and `last_reminder_sent_at` columns to `document_signers`:
|
||||||
|
```sql
|
||||||
|
ALTER TABLE document_signers ADD COLUMN invited_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN last_reminder_sent_at timestamptz;
|
||||||
|
```
|
||||||
|
The webhook handler updates these (Phase 2). Apply via psql then restart dev server (per CLAUDE.md migration note).
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- Setting `eoi_send_mode=auto` in admin → generating an EOI fires off our branded HTML email to the client immediately
|
||||||
|
- Setting `eoi_send_mode=manual` → no email fires; "Send invitation" button in EOI tab hits the endpoint
|
||||||
|
- Clicking Generate when an active EOI exists → confirm dialog with checkbox; default deletes prior doc with retries
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2 — Webhook handler enhancement (~3-4 hours)
|
||||||
|
|
||||||
|
**Why second**: Once invitations are flowing (Phase 1), the webhook needs to track the lifecycle and fire the cascading "your turn" emails as each signer completes. Without this, the system goes silent after the initial invite.
|
||||||
|
|
||||||
|
### Tasks
|
||||||
|
|
||||||
|
1. **Extend `src/app/api/webhooks/documenso/route.ts`** to handle `DOCUMENT_OPENED`, `DOCUMENT_SIGNED`, `DOCUMENT_COMPLETED` (DOCUMENT_OPENED currently ignored).
|
||||||
|
|
||||||
|
2. **For `DOCUMENT_SIGNED`** (fires when one recipient signs, can fire multiple times per doc):
|
||||||
|
- Resolve the (port, document, signer) — existing per-port secret lookup already does this
|
||||||
|
- Update `document_signers.signed_at` for the matching signer
|
||||||
|
- Find the next unsigned signer in signing order
|
||||||
|
- If next signer exists AND we haven't already invited them: call `sendSigningInvitation()` with the next signer + their signing URL + role='developer' (or 'approver' depending on signing order). Mark `document_signers.invited_at` for them.
|
||||||
|
- This is the cascading "your turn" flow that mirrors `client-portal/server/services/documenso-notifications.ts`
|
||||||
|
|
||||||
|
3. **For `DOCUMENT_OPENED`**:
|
||||||
|
- Update `document_signers.opened_at` for the matching recipient (matched by token in payload)
|
||||||
|
- Used for analytics later ("12% of clients open within an hour")
|
||||||
|
|
||||||
|
4. **For `DOCUMENT_COMPLETED`** (fires once when all signers have signed):
|
||||||
|
- Update document `status='completed'`, `completed_at=...`
|
||||||
|
- Download signed PDF: `await downloadSignedPdf(documensoId, portId)` (existing)
|
||||||
|
- Store in storage backend via the file ingestion flow — this creates a `files` row
|
||||||
|
- Update the document row to point at the signed file (`signed_file_id`)
|
||||||
|
- Call `sendSigningCompleted()` with all signers + the signed file's id
|
||||||
|
- Update the linked interest's pipeline stage:
|
||||||
|
- If document type = `eoi` → `eoi_signed`
|
||||||
|
- If document type = `contract` → `contract_signed`
|
||||||
|
- If document type = `reservation_agreement` → leave stage; reservation is post-deal-close anyway
|
||||||
|
|
||||||
|
5. **Recipient-token matching**: webhooks include `payload.recipients[]` with each recipient's `token`. Use the token to match against `document_signers.signing_token` (need to add the column if not already). Old system's webhook does this via email match — fragile when the same email serves multiple roles. Token match is robust.
|
||||||
|
|
||||||
|
6. **Idempotency**: webhook can fire duplicates. Old system's `acquireWebhookLock` + signature comparison pattern is good. Port that logic.
|
||||||
|
|
||||||
|
### Schema migration
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Add fine-grained tracking columns to document_signers
|
||||||
|
ALTER TABLE document_signers ADD COLUMN invited_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN opened_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN last_reminder_sent_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN signing_token text; -- index this
|
||||||
|
|
||||||
|
CREATE INDEX idx_ds_signing_token ON document_signers (signing_token);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- Client signs → developer receives our branded "your turn" email within seconds
|
||||||
|
- Developer signs → approver receives the same
|
||||||
|
- All signed → all three recipients receive the signed PDF as attachment
|
||||||
|
- Interest's pipeline stage advances to `eoi_signed` automatically
|
||||||
|
- Re-firing of duplicate webhooks is no-op
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 3 — Custom document upload-to-Documenso (~6-8 hours)
|
||||||
|
|
||||||
|
**Why third**: Backend foundation for contract + reservation flows. Without this, the "Upload draft for signing" CTA on those tabs is a placeholder.
|
||||||
|
|
||||||
|
### Tasks
|
||||||
|
|
||||||
|
1. **New service** `src/lib/services/custom-document-upload.service.ts`:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export async function uploadDocumentForSigning(args: {
|
||||||
|
interestId: string;
|
||||||
|
portId: string;
|
||||||
|
documentType: 'contract' | 'reservation_agreement';
|
||||||
|
pdfBuffer: Buffer;
|
||||||
|
filename: string;
|
||||||
|
title: string;
|
||||||
|
recipients: Array<{
|
||||||
|
name: string;
|
||||||
|
email: string;
|
||||||
|
role: 'SIGNER' | 'APPROVER' | 'CC';
|
||||||
|
signingOrder: number;
|
||||||
|
}>;
|
||||||
|
fields: DocumensoFieldPlacement[]; // from auto-detect or manual placement
|
||||||
|
}): Promise<{ documentId: string; signingUrls: Record<string, string> }>;
|
||||||
|
```
|
||||||
|
|
||||||
|
Steps:
|
||||||
|
- Convert pdfBuffer → base64
|
||||||
|
- Call `createDocument(title, base64, recipients, portId)` — existing client function
|
||||||
|
- Call `placeFields(docId, fields, portId)` — existing client function (handles v1 + v2)
|
||||||
|
- Call `sendDocument(docId, portId)` — existing
|
||||||
|
- Return doc ID + per-recipient signing URLs
|
||||||
|
- Mirror the timing-safe URL extraction from old system's generate-quick-eoi (recipients[].signingUrl)
|
||||||
|
- Insert a row into our `documents` table with the new doc_id + signers + interest link
|
||||||
|
- If port's `eoi_send_mode === 'auto'`: kick off `sendSigningInvitation()` to first signer
|
||||||
|
|
||||||
|
2. **API endpoint**: `POST /api/v1/interests/[id]/upload-for-signing`
|
||||||
|
- Accepts multipart: `file` (the PDF), `documentType`, `title`, `recipients` (JSON), `fields` (JSON)
|
||||||
|
- Validates: file is PDF (magic-byte check, see berth-pdf flow), recipients ≥ 1, fields ≥ 1
|
||||||
|
- Calls service
|
||||||
|
- Returns 201 with the new document row
|
||||||
|
|
||||||
|
3. **Update Contract + Reservation tab placeholders** to open a real upload dialog (see Phase 4).
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- Endpoint accepts a PDF + recipients + fields and returns a Documenso doc ID
|
||||||
|
- Document appears in the Documents tab with status `sent`
|
||||||
|
- v1 and v2 paths both work (same code path; client chooses based on per-port config)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 4 — Recipient configurator + Field placement UI (~10-14 hours)
|
||||||
|
|
||||||
|
**Why fourth**: This is the BIG visual piece. Don't start until Phase 3 backend is proven via curl.
|
||||||
|
|
||||||
|
### Sub-phase 4a: Recipient configurator (~2-3 hours)
|
||||||
|
|
||||||
|
UI inside a new `<UploadForSigningDialog>` component:
|
||||||
|
|
||||||
|
- File picker (drag-drop + click)
|
||||||
|
- Title input (defaults to filename minus extension)
|
||||||
|
- Recipients list:
|
||||||
|
- Add row → name + email + role (SIGNER/APPROVER/CC) + signing order (number, auto-increments)
|
||||||
|
- Drag to reorder (uses `dnd-kit`, already in deps)
|
||||||
|
- Delete row
|
||||||
|
- Defaults: client (signing order 1) prefilled from interest's linked client; developer + approver prefilled from port settings
|
||||||
|
- "Configure fields →" button advances to sub-phase 4b
|
||||||
|
|
||||||
|
### Sub-phase 4b: PDF rendering (~3-4 hours)
|
||||||
|
|
||||||
|
- Install: `pnpm add react-pdf` (uses pdfjs-dist under the hood; pdfme already pulls pdfjs-dist so no new dep weight)
|
||||||
|
- Render the uploaded PDF page-by-page using `<Document>` + `<Page>` from react-pdf
|
||||||
|
- Page navigation (prev/next, page picker)
|
||||||
|
- Zoom controls (50%, 75%, 100%, 125%, 150%)
|
||||||
|
|
||||||
|
### Sub-phase 4c: Auto-detect scanner (~4-6 hours)
|
||||||
|
|
||||||
|
New file `src/lib/services/document-field-detector.ts`:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export interface DetectedField {
|
||||||
|
type: DocumensoFieldType;
|
||||||
|
pageNumber: number;
|
||||||
|
pageX: number; // 0-100 percent
|
||||||
|
pageY: number;
|
||||||
|
pageWidth: number;
|
||||||
|
pageHeight: number;
|
||||||
|
/** Confidence 0-1 — how sure the scanner is. */
|
||||||
|
confidence: number;
|
||||||
|
/** Original anchor text (for debugging / display). */
|
||||||
|
anchorText?: string;
|
||||||
|
/** Inferred recipient (from nearby labels). null = unassigned. */
|
||||||
|
inferredRecipientLabel?: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function detectFields(pdfBuffer: Buffer): Promise<DetectedField[]>;
|
||||||
|
```
|
||||||
|
|
||||||
|
Implementation:
|
||||||
|
|
||||||
|
- Use `pdfjs-dist` to extract text per page with `getTextContent()` — gives `{str, transform: [a,b,c,d,e,f]}` per text item where `e,f` is position in PDF user space, plus `width/height`
|
||||||
|
- Anchor patterns:
|
||||||
|
- `SIGNATURE`: `/signature[:\s_-]+/i`, `/sign\s*here[:\s_-]*/i`, `/X\s*_{4,}/i`, `/signed\s*by[:\s]+/i`
|
||||||
|
- `INITIALS`: `/initials?[:\s_-]+/i`
|
||||||
|
- `DATE`: `/dated?[:\s_-]+/i`, `/date\s+of\s+signature/i`
|
||||||
|
- `NAME`: `/(printed?\s*)?name[:\s_-]+/i`, `/full\s+name[:\s_-]+/i`
|
||||||
|
- `EMAIL`: `/email[:\s_-]+/i`
|
||||||
|
- Catch-all: `/_{8,}/` → if not preceded by name/email/date keyword, default to TEXT
|
||||||
|
- For each match: place field bounding box immediately AFTER the matched text (offset 5pt right), with type-appropriate width:
|
||||||
|
- SIGNATURE: 150pt × 30pt
|
||||||
|
- INITIALS: 50pt × 30pt
|
||||||
|
- DATE: 80pt × 20pt
|
||||||
|
- NAME: 150pt × 20pt
|
||||||
|
- EMAIL: 200pt × 20pt
|
||||||
|
- TEXT: 200pt × 20pt
|
||||||
|
- Convert to PERCENT (divide by page width/height)
|
||||||
|
- Recipient inference: scan ±100pt of the field for labels like "Buyer", "Seller", "Client", "Developer", "Witness", "Notary". Map to recipient by role.
|
||||||
|
|
||||||
|
### Sub-phase 4d: Drag-drop overlay (~3-4 hours)
|
||||||
|
|
||||||
|
- Overlay absolute-positioned divs on top of the PDF viewer for each field
|
||||||
|
- Each field shows: type icon + recipient color + delete (×) handle + drag affordance
|
||||||
|
- Use `dnd-kit` to enable drag — update `pageX/pageY` in state on drop
|
||||||
|
- Field palette toolbar: 11 buttons (one per Documenso field type) — click to enter "place mode" → next click on the PDF places a new field at that coord
|
||||||
|
- Side panel for selected field:
|
||||||
|
- Type changer (dropdown)
|
||||||
|
- Recipient assignment (dropdown of configured recipients)
|
||||||
|
- Required toggle
|
||||||
|
- Per-type config (TEXT label, NUMBER min/max, CHECKBOX/DROPDOWN/RADIO options) — drives `fieldMeta`
|
||||||
|
- Width/height inputs
|
||||||
|
- Delete button
|
||||||
|
|
||||||
|
### Sub-phase 4e: Send (~1 hour)
|
||||||
|
|
||||||
|
"Send for signing" button:
|
||||||
|
|
||||||
|
- Validates: ≥1 recipient, ≥1 field, every field has a recipient assigned
|
||||||
|
- POSTs to `/api/v1/interests/[id]/upload-for-signing` (Phase 3)
|
||||||
|
- On success, closes dialog and refreshes the Contract/Reservation tab
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- Upload a draft PDF → auto-detect runs → fields appear overlaid in their detected positions
|
||||||
|
- Rep can drag any field to reposition (state updates, persists to backend on send)
|
||||||
|
- Rep can change a field's type, recipient, or metadata via side panel
|
||||||
|
- Rep can add new fields by clicking palette button + clicking on PDF
|
||||||
|
- Rep can delete fields they don't want
|
||||||
|
- Click Send → fields ship to Documenso, signing flow starts, Contract tab shows the active doc
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 5 — Embedded signing URL emission verification (~1-2 hours)
|
||||||
|
|
||||||
|
**Why later**: The Vue page on the marketing website already exists. This phase is a verification + documentation pass, not a code build.
|
||||||
|
|
||||||
|
### Tasks
|
||||||
|
|
||||||
|
1. **Verify URL transformation matches website expectations**:
|
||||||
|
- Website route: `/sign/[type]/[token]` where `type ∈ {client, cc, developer}`
|
||||||
|
- Our `transformSigningUrl()` emits `/sign/<role>/<token>` where role can be `client | developer | approver | witness | other`
|
||||||
|
- Mismatch: website only handles `client | cc | developer`. Our email service may emit `approver` (which the website doesn't route).
|
||||||
|
- **Fix**: either (a) update website's `[type].vue` to accept `approver` (and `witness | other` if needed), OR (b) map our role names to the website's expected names in `transformSigningUrl()`.
|
||||||
|
|
||||||
|
2. **For contract + reservation document types**: the website's `signerMessages` map only covers EOI-specific copy. When a contract goes out for signing and the recipient hits `portnimara.com/sign/client/<token>`, the page would show "Sign Your Expression of Interest" — wrong copy.
|
||||||
|
- **Fix**: add document-type to the URL too: `/sign/<docType>/<role>/<token>`. Update website's signerMessages to be keyed on `(docType, role)`.
|
||||||
|
|
||||||
|
3. **Webhook callback URL**: website POSTs to `client-portal.portnimara.com/api/webhook/document-signed` after signing. The new CRM is at a different domain. Update website's `handleDocumentSigned` to POST to the new CRM's webhook (a thin "client confirmed sign" notification, separate from Documenso's own webhook).
|
||||||
|
|
||||||
|
4. **Apply nginx CORS block** — already documented in [docs/documenso-integration-audit.md](./documenso-integration-audit.md). Apply via ssh when user grants access.
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- Embedded URL points at a working website page that loads the right Documenso embed for any document type / role combo
|
||||||
|
- Post-sign callback updates our document_signers row (redundant with the Documenso webhook but useful as a real-time UI signal)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 6 — Polish & deferred items (~2-3 hours each, do as needed)
|
||||||
|
|
||||||
|
- **`auto` send mode delay**: optional per-port `eoi_send_delay_minutes` setting. When set, the auto-send fires after N minutes (BullMQ scheduled job) so the rep can review + cancel during the window. Default 0 (immediate).
|
||||||
|
- **Audit log entries**: every Documenso-related action (generate, send, remind, cancel, sign-event-received) writes to `audit_logs` with structured metadata. Mostly already there for the existing flow; extend to cover Phase 1-3 additions.
|
||||||
|
- **Per-document customization of email copy**: rep can override the default signing-invitation body before send. New textarea in the upload dialog. Stored as `documents.invitation_message`.
|
||||||
|
- **Document expiration**: Documenso supports `expiresAt`. Surface as a per-document field in the upload dialog.
|
||||||
|
- **Reminder rate-limit display**: surface "next reminder available in X days" on each unsigned signer in the signing-progress UI.
|
||||||
|
- **Failed-webhook recovery UI**: admin page showing webhooks that errored, with a "Replay" button. Old system has the foundation; CRM doesn't.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 7 — Project Director role + RBAC layer (~6-8 hours)
|
||||||
|
|
||||||
|
> **Surfaced from Q8 conversation.** The `developer` signer slot is conceptually the "Project Director" — the person at the port who countersigns deals on behalf of the port. Today every CRM user is either a sales rep or admin; there's no Project Director user role. Attack alongside the Documenso build because (a) the Documenso developer-label setting is meaningless if no user actually has the role, and (b) a few permissions naturally cluster around it.
|
||||||
|
|
||||||
|
### What a Project Director needs (vs sales rep)
|
||||||
|
|
||||||
|
| Capability | Sales rep | Project Director | Admin |
|
||||||
|
| -------------------------------------------------------- | --------- | ---------------- | ----------------------------- |
|
||||||
|
| Generate EOI / contract / reservation | ✓ | ✓ | ✓ |
|
||||||
|
| Approve / sign as the "developer" recipient on Documenso | — | ✓ | — (unless also designated PD) |
|
||||||
|
| View own deals | ✓ | ✓ | ✓ |
|
||||||
|
| View other reps' deals | — | ✓ | ✓ |
|
||||||
|
| View audit logs (read-only) | — | ✓ | ✓ |
|
||||||
|
| Trigger CSV / report exports | — | ✓ | ✓ |
|
||||||
|
| Re-assign deals between reps | — | ✓ | ✓ |
|
||||||
|
| Edit per-port settings | — | — | ✓ |
|
||||||
|
| Manage users + invitations | — | — | ✓ |
|
||||||
|
| Manage Documenso config | — | — | ✓ |
|
||||||
|
|
||||||
|
So Project Director sits between sales rep and admin: read-everywhere + a few action capabilities (re-assign, export, sign-as-PD), but no settings/user management.
|
||||||
|
|
||||||
|
### Tasks
|
||||||
|
|
||||||
|
1. **Add `project_director` to the role enum** in `src/lib/db/schema/users.ts` (or wherever port_roles enum lives). Existing role values (sales, admin, super_admin) stay; this is additive.
|
||||||
|
|
||||||
|
2. **Permission flags**: extend the per-port permissions matrix (`src/lib/auth/permissions.ts` or equivalent) with new flags:
|
||||||
|
- `viewAllDeals` — true for project_director, admin, super_admin
|
||||||
|
- `viewAuditLogs` — true for project_director, admin, super_admin
|
||||||
|
- `exportReports` — true for project_director, admin, super_admin
|
||||||
|
- `reassignDeals` — true for project_director, admin, super_admin
|
||||||
|
- `signAsProjectDirector` — true for project_director only (admin can sign as PD only if also assigned the role on this port)
|
||||||
|
|
||||||
|
These flags get checked in the relevant API handlers via the existing `withPermission()` middleware.
|
||||||
|
|
||||||
|
3. **Documenso developer-slot binding**: per-port admin UI gets a "Project Director user" dropdown alongside the existing developer-name/email free-text inputs. When a real CRM user is selected, the admin UI:
|
||||||
|
- Populates `documenso_developer_name/email` from the user's profile (read-only when bound)
|
||||||
|
- When that user signs an EOI/contract via Documenso, the webhook handler can match by user-email and update the in-CRM signing UI in real time (signer chip turns green for them specifically)
|
||||||
|
- Free-text fallback stays for ports without a CRM-PD user yet
|
||||||
|
|
||||||
|
4. **User invitations + role selection**: extend `src/components/admin/invite-user-dialog.tsx` to surface "Project Director" alongside Sales / Admin as a selectable role at invitation time.
|
||||||
|
|
||||||
|
5. **Audit-log access**: surface a new `/[portSlug]/admin/audit-log` route (or extend the existing one's permission gate) so Project Directors can read but not write. Hide write controls for non-admins.
|
||||||
|
|
||||||
|
6. **Reports page permission gate**: existing `/[portSlug]/reports` (or wherever exports live) checks `exportReports` permission flag instead of admin-only.
|
||||||
|
|
||||||
|
7. **Re-assign deals UI**: add a "Re-assign owner" action on the interest detail page, gated by `reassignDeals`. Writes to `interests.owner_user_id` (or whatever the assigned-rep field is) and audit-logs the change.
|
||||||
|
|
||||||
|
### Schema migration
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Add project_director as a valid role; depends on how roles are stored.
|
||||||
|
-- If port_roles uses an enum:
|
||||||
|
ALTER TYPE port_role ADD VALUE 'project_director';
|
||||||
|
-- Or if it's a text column with check constraint:
|
||||||
|
ALTER TABLE port_roles DROP CONSTRAINT port_roles_role_check;
|
||||||
|
ALTER TABLE port_roles ADD CONSTRAINT port_roles_role_check
|
||||||
|
CHECK (role IN ('sales', 'admin', 'super_admin', 'project_director'));
|
||||||
|
|
||||||
|
-- Optional: link the per-port Documenso developer slot to a real user
|
||||||
|
ALTER TABLE system_settings ADD COLUMN IF NOT EXISTS user_id text REFERENCES users(id) ON DELETE SET NULL;
|
||||||
|
-- (Used for the documenso_developer_user_id setting; null for free-text fallback)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Acceptance criteria
|
||||||
|
|
||||||
|
- A user invited as `project_director` can view all deals across the port (not just their own), read audit logs, trigger exports, and re-assign deals — but cannot edit settings or invite users
|
||||||
|
- Admin can bind a CRM user to the per-port Documenso developer slot; the user's name + email auto-populate in invitations and emails
|
||||||
|
- Non-PD users cannot trigger PD-only actions (server returns 403; UI hides the controls)
|
||||||
|
- Existing sales / admin / super_admin permissions are unchanged
|
||||||
|
|
||||||
|
### Why attack at the same time as the Documenso build
|
||||||
|
|
||||||
|
- Both touch `port-config.ts` and `admin/documenso/page.tsx` — fewer rebases if done in one push
|
||||||
|
- The `documenso_developer_label` setting (Q8 bonus) and the PD-user binding overlap; doing them together avoids re-touching the same admin card twice
|
||||||
|
- The Documenso webhook's per-signer matching benefits from having a real `users.email` to bind against, not just a free-text developer name
|
||||||
|
|
||||||
|
### Out of scope (defer to a later RBAC pass)
|
||||||
|
|
||||||
|
- Custom permission templates (e.g. "PD with no audit-log access")
|
||||||
|
- Per-deal ACLs (sharing a single interest with another rep)
|
||||||
|
- Time-bound role grants
|
||||||
|
- Cross-port role overrides for super_admin
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Risks + decisions (resolved through code review)
|
||||||
|
|
||||||
|
Each entry below was checked against the current code. The original "open question" form is preserved in italics for traceability; the **Decision** is what the next session should implement.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 1. `fieldMeta` on Documenso v1.32
|
||||||
|
|
||||||
|
_Q: Does v1.32 silently ignore unknown properties, or does it reject the request?_
|
||||||
|
|
||||||
|
**Decision: not a risk in current code.** [src/lib/services/documenso-client.ts:491-501](../src/lib/services/documenso-client.ts#L491) shows the v1 path constructs its own body containing only `recipientId, type, pageNumber, pageX/Y/Width/Height` — `fieldMeta` is never sent on v1. The code comment at [line 341-344](../src/lib/services/documenso-client.ts#L341) is misleading — update it. Action for next session: change the comment to "v1 does not receive `fieldMeta` (we never send it). v1 renders TEXT/NUMBER/CHECKBOX/DROPDOWN/RADIO as blank inputs; if the per-port admin chose v1 the field UI should warn 'Configurable field types require Documenso v2'." The placement UI in Phase 4d should disable the meta-config side panel when the resolved port is on v1.
|
||||||
|
|
||||||
|
### 2. PDF dimension extraction (non-A4 contracts)
|
||||||
|
|
||||||
|
_Q: How do we get real page dimensions on the v1 path?_
|
||||||
|
|
||||||
|
**Decision: parse the PDF with pdf-lib in the upload service before calling `placeFields()`.** pdf-lib is already a transitive dep via the EOI form-fill flow ([src/lib/pdf/fill-eoi-form.ts](../src/lib/pdf/fill-eoi-form.ts)). Concrete change for Phase 3:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// In src/lib/services/custom-document-upload.service.ts
|
||||||
|
import { PDFDocument } from 'pdf-lib';
|
||||||
|
const pdfDoc = await PDFDocument.load(pdfBuffer);
|
||||||
|
const pageDims = pdfDoc.getPages().map((p) => {
|
||||||
|
const { width, height } = p.getSize();
|
||||||
|
return { width, height };
|
||||||
|
});
|
||||||
|
// Pass to placeFields as a per-page dimension map override
|
||||||
|
```
|
||||||
|
|
||||||
|
Then extend `placeFields` signature to accept an optional `pageDimensionsOverride?: DocumensoPageDimensions[]` (one entry per page). When provided, the v1 path uses `pageDimensionsOverride[fieldPageIndex]` instead of [`getPageDimensions()`'s A4 default](../src/lib/services/documenso-client.ts#L427). Falls back to A4 when override is missing — keeps the EOI template path (which IS A4) unchanged.
|
||||||
|
|
||||||
|
### 3. Multi-page signature blocks not picked up by auto-detect
|
||||||
|
|
||||||
|
_Q: What's the recovery path if the scanner misses a signature block on the last page?_
|
||||||
|
|
||||||
|
**Decision: not a risk — by design.** Phase 4d's drag-drop overlay + field palette is the explicit fallback. Auto-detect populates initial state; rep MUST be able to add fields manually. The acceptance criterion at the end of Phase 4 already covers this. Demoted from "risk" to "design note": every page must be reachable in the PDF viewer (Phase 4b's page navigation) and the field palette must be enabled even on auto-detected pages.
|
||||||
|
|
||||||
|
### 4. Webhook payload differences v1 vs v2
|
||||||
|
|
||||||
|
_Q: Does our webhook handler decode both v1 and v2 payload shapes correctly?_
|
||||||
|
|
||||||
|
**Decision: partially confirmed; finish the audit in Phase 2.** Confirmed working today:
|
||||||
|
|
||||||
|
- Secret transport: identical (`X-Documenso-Secret` plaintext) — see [route.ts:53](../src/app/api/webhooks/documenso/route.ts#L53)
|
||||||
|
- Event names: both versions send the uppercase Prisma enum (`DOCUMENT_SIGNED`); CLAUDE.md note documents this. The route also normalizes lowercase-dotted variants for forward-compat.
|
||||||
|
- Top-level shape `{ event, payload: { id, ... } }`: same on both versions
|
||||||
|
|
||||||
|
Still unverified (defer to Phase 2 implementation):
|
||||||
|
|
||||||
|
- v2 may rename `payload.id` → `payload.documentId` and `recipient.id` → `recipient.recipientId` (mirrors the API-response rename — see [src/lib/services/documenso-client.ts](../src/lib/services/documenso-client.ts) `normalizeDocument()`). Apply the same dual-field read pattern in the webhook handler: `const docId = payload.documentId ?? payload.id`.
|
||||||
|
- v2 may include `payload.envelopeId` instead of `payload.id` for envelope-level events (DOCUMENT_COMPLETED). Read both.
|
||||||
|
- Recipient token field: v1 uses `recipient.token`; v2 may differ. Phase 2's token-based matching (step 5) needs to handle both.
|
||||||
|
|
||||||
|
Test with a v2 instance during Phase 2; until then keep the per-port API version setting on v1 only.
|
||||||
|
|
||||||
|
### 5. `approver` role → `cc` URL mapping
|
||||||
|
|
||||||
|
_Q: How do we keep the website's signing page (which only routes `client | cc | developer`) working when our `SignerRole` includes `approver | witness | other`?_
|
||||||
|
|
||||||
|
**Decision: confirmed bug in current code; fix in Phase 5.** [Website route validation](../../Port%20Nimara/Website/pages/sign/%5Btype%5D/%5Btoken%5D.vue#L175) explicitly redirects to `/sign/error` for any `signerType` not in `['client', 'cc', 'developer']`. Our [transformSigningUrl()](../src/lib/services/document-signing-emails.service.ts#L106) emits `${host}/sign/${signerRole}/${token}` with the raw `SignerRole` value. Today, an `approver` invite would land on `/sign/error`.
|
||||||
|
|
||||||
|
Concrete fix in `transformSigningUrl()`:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const ROLE_TO_URL_SEGMENT: Record<SignerRole, 'client' | 'cc' | 'developer'> = {
|
||||||
|
client: 'client',
|
||||||
|
developer: 'developer',
|
||||||
|
approver: 'cc', // legacy: approver showed as "EmbeddedSignatureLinkCC"
|
||||||
|
  witness: 'cc', // route through cc page — NOTE: superseded by Q7, which makes witness first-class; change this entry to 'witness' (widening the segment union) and add 'witness' to the website's validated role list in the same change
|
||||||
|
other: 'cc',
|
||||||
|
};
|
||||||
|
const urlRole = ROLE_TO_URL_SEGMENT[signerRole];
|
||||||
|
return `${host}/sign/${urlRole}/${token}`;
|
||||||
|
```
|
||||||
|
|
||||||
|
Two follow-ups for Phase 5:
|
||||||
|
|
||||||
|
- Add the mapping above to `transformSigningUrl()` — DO this in Phase 1 already since Phase 1 fires the first invitation email.
|
||||||
|
- Update website's `signerMessages` (currently EOI-specific) to be keyed on `(documentType, signerType)` so contract+reservation invites get the right copy — see Phase 5 task 2.
|
||||||
|
|
||||||
|
### 6. Storage backend for signed PDFs
|
||||||
|
|
||||||
|
_Q: Does the on-completion download in Phase 2 use the pluggable storage backend?_
|
||||||
|
|
||||||
|
**Decision: confirmed — pattern already established, just follow it.** [`getStorageBackend()`](../src/lib/storage/index.ts) is used by 9 services in the codebase (berth-pdf, brochures, expense-pdf, invoices, gdpr-export, reports, document-templates, document-sends, email-compose). The [`documents` schema](../src/lib/db/schema/documents.ts) already has the `signedFileId` column with index `idx_docs_signed_file_id`. Phase 2 step 4 is just: `const buffer = await downloadSignedPdf(docId, portId); const file = await ingestFile({ buffer, portId, ... }); await db.update(documents).set({ signedFileId: file.id })...`. Demoted from "risk" to "implementation note" inside Phase 2.
|
||||||
|
|
||||||
|
### 7. Cross-port webhook secret collision
|
||||||
|
|
||||||
|
_Q: Can two ports happen to share the same webhook secret?_
|
||||||
|
|
||||||
|
**Decision: real risk — fix at write-time, not schema.** [system_settings](../src/lib/db/schema/system.ts#L137) is unique on `(key, port_id)`, so the same key+port combo is enforced unique, but there's no global uniqueness on the _value_. The [webhook handler](../src/app/api/webhooks/documenso/route.ts#L62) iterates all configured secrets and breaks on first match — if two ports paste the same secret, the second port's webhooks get attributed to the first. Three options, in preference order:
|
||||||
|
|
||||||
|
**Option A (recommended): generate, never paste.** Replace the textbox in [admin/documenso/page.tsx](<../src/app/(dashboard)/[portSlug]/admin/documenso/page.tsx>) for `documenso_webhook_secret` with a "Generate secret" button that calls `crypto.randomBytes(32).toString('base64url')` server-side and writes it. Display once, mask after. Collision probability is negligible. Admin still has a "Regenerate" button for rotation.
|
||||||
|
|
||||||
|
**Option B: warn at write.** Keep the textbox but on PUT to the setting, query `system_settings WHERE key='documenso_webhook_secret' AND value=?` and fail with a 409 if any other port has this value. Cheap, defensive, but exposes that a value exists somewhere.
|
||||||
|
|
||||||
|
**Option C: schema-level enforcement.** Add a partial unique index `CREATE UNIQUE INDEX system_settings_documenso_secret_unique ON system_settings (value) WHERE key = 'documenso_webhook_secret'`. Strongest, but requires careful ordering during port-clone or restore-from-backup operations.
|
||||||
|
|
||||||
|
Pick Option A. Add to Phase 1 as a polish item — small change, eliminates the risk class.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open questions — RESOLVED 2026-05-07
|
||||||
|
|
||||||
|
All 10 questions plus the bonus role-label question have user-locked answers. Implementation must follow these decisions; do not re-litigate.
|
||||||
|
|
||||||
|
### Q1. Reminder cadence — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **Manual reminders by default.** Rep clicks a "Send reminder" button in the EOI/Contract tab. Per-document opt-in: rep can configure auto-reminders on a specific doc at send time (e.g. "remind every 7 days until signed").
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- No port-wide reminder schedule setting needed.
|
||||||
|
- Phase 1 / 2: skip the BullMQ scheduled-reminder job for now. Add a `POST /api/v1/documents/[id]/send-reminder` endpoint that calls `sendSigningReminder()` for the next-pending signer. Track `last_reminder_sent_at` to enforce Documenso's 24h rate limit on the UI ("Next reminder available in X").
|
||||||
|
- Phase 4a (upload dialog): add an optional "Auto-reminder schedule" field — None (default) / Every 3d / Every 7d. When set, store on `documents.auto_reminder_interval_days`; a once-daily worker iterates unsigned documents and fires due reminders.
|
||||||
|
|
||||||
|
### Q2. Document expiration — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **Never expire by default.** No expiration UI in v1. Skip Documenso's `expiresAt` entirely.
|
||||||
|
|
||||||
|
**Reasoning**: link expiration doesn't help the regenerate flow (regen already voids+recreates). Adding the UI is overhead with no immediate user benefit.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 3 `uploadDocumentForSigning`: don't expose `expiresAt`.
|
||||||
|
- Phase 4a recipient configurator: no expiration field.
|
||||||
|
- Phase 6 deferred-items list: drop the "Document expiration" item.
|
||||||
|
|
||||||
|
### Q3. Auto-detect confidence threshold — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **Default ≥0.8 silent / 0.5–0.8 flagged / <0.5 drop**, with the drag-drop overlay (Phase 4d) as the universal fix mechanism — rep can reposition or delete any auto-placed field.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 4c scanner: emit `DetectedField.confidence`; threshold checks live in the UI layer (Phase 4d) so they're easy to tune.
|
||||||
|
- Phase 4d overlay: flagged fields render with a yellow border + "?" badge; rep can click to confirm-as-correct (clears the badge) or drag/delete.
|
||||||
|
|
||||||
|
### Q4. Approver semantics — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **TWO concepts, not one.**
|
||||||
|
|
||||||
|
1. **APPROVER** = real Documenso `APPROVER` recipient. Gates signing flow (e.g. client signs → approver approves → developer signs). Configured per-port (existing `documenso_approver_name/email` settings).
|
||||||
|
2. **Completion CC** = passive recipient. Does NOT participate in signing. Receives only the final signed PDF as attachment when the doc completes. Set per-document by the rep at send time.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 3 `uploadDocumentForSigning` recipients: support `role: 'SIGNER' | 'APPROVER' | 'CC'`. CCs are NOT created as Documenso recipients — they're stored on `documents.completion_cc_emails` (text array) and emailed by our own service when DOCUMENT_COMPLETED webhook fires.
|
||||||
|
- Phase 4a recipient configurator: split into two sections:
|
||||||
|
- **Signing recipients**: name + email + role (Signer / Approver) + signing order
|
||||||
|
- **Copy on completion** (CC): just email addresses, comma-separated
|
||||||
|
- Phase 2 step 4 (on-completion email distribution): include `documents.completion_cc_emails` recipients with the signed PDF. Dedup by email (see Q5).
|
||||||
|
- Schema migration: `ALTER TABLE documents ADD COLUMN completion_cc_emails text[] DEFAULT '{}'::text[];`
|
||||||
|
|
||||||
|
### Q5. On-completion PDF distribution — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **All signing recipients + rep who generated + per-deal CC**, deduplicated by email address.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 2 step 4: build the recipient list as union of (a) all `document_signers` for this doc, (b) the user who created the doc (`documents.createdBy` → `users.email`), (c) `documents.completion_cc_emails`. Lowercase + dedupe before calling `sendSigningCompleted`.
|
||||||
|
- Common case (rep IS the approver): one email, not two.
|
||||||
|
- Per-port distribution list (originally proposed) is NOT needed — the per-deal CC field covers it. If a port wants `legal@portnimara.com` on every deal, the rep types it once per doc; if it's truly always-on, add a port-default later (deferred to Phase 6).
|
||||||
|
|
||||||
|
### Q6. `documenso_contract_template_id` / `documenso_reservation_template_id` — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **DROP both settings. EOI is the only template-driven flow.** Contracts and reservations are custom-uploaded per deal — no template fallback.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Remove `documenso_contract_template_id` and `documenso_reservation_template_id` from `port-config.ts` `SETTING_KEYS` and `PortDocumensoConfig` type.
|
||||||
|
- Remove the corresponding fields from `admin/documenso/page.tsx`. Card title becomes "Templates" with just the EOI template ID field.
|
||||||
|
- Phase 3: contract/reservation tabs go straight into the upload dialog — no `if (templateId) { ... }` branch.
|
||||||
|
- Locked design decisions table at top of this doc: update the "Contract / Reservation generation" row to remove the template-fallback option.
|
||||||
|
|
||||||
|
### Q7. Witness role — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **First-class. Configurable per-document at generation time.** Witness goes through the full invitation/reminder/tracking flow same as any other signer; signs the document attesting to having witnessed.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Keep `witness` in `SignerRole`.
|
||||||
|
- Phase 4a recipient configurator: "Witness" is a selectable role in the role dropdown (alongside Signer / Approver / CC).
|
||||||
|
- Phase 5 website edit: add witness copy to `signerMessages` map ("Witness this signing of…"). Add `witness` to the validated role list at line 175 of `[type]/[token].vue` — currently `['client', 'cc', 'developer']`, becomes `['client', 'cc', 'developer', 'witness']`.
|
||||||
|
- Risk #5 mapping in `transformSigningUrl()`: `witness → 'witness'` (NOT mapped to `cc`). Update the role-to-URL-segment table accordingly.
|
||||||
|
- Witness gets the same reminder/auto-reminder support as any signer — no special-casing.
|
||||||
|
|
||||||
|
### Q8. Multiple developers/approvers per port — RESOLVED (with rename)
|
||||||
|
|
||||||
|
**Decision**: **Stay single per port** for the standard `developer` and `approver` slots. If a port needs more on a custom doc, the rep adds extra signers via the upload-for-signing dialog (Phase 4a recipient configurator).
|
||||||
|
|
||||||
|
**Plus the bonus**: the per-port "developer" label IS configurable via a new `documenso_developer_label` setting (default: "Developer"). Used in email subjects, signer chips, and signing-progress UI. Backend type-name stays `developer` so no schema churn.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Add `documenso_developer_label` and `documenso_approver_label` to `SETTING_KEYS` + `PortDocumensoConfig`.
|
||||||
|
- Admin UI in `documenso/page.tsx` Signers card: each signer card gets a "Display label" input next to name/email.
|
||||||
|
- Email templates in `document-signing.ts`: read the label from the per-port branding config and use it in copy ("Your Project Director, {{name}}, has signed…").
|
||||||
|
- **Open follow-up (out of scope for Documenso build)**: the user mentioned the project-director user MIGHT need different CRM permissions/access from a sales rep (e.g. exclusive audit-log access, more prominent reports). That's a separate RBAC initiative — note it on the audit backlog and don't action here.
|
||||||
|
|
||||||
|
### Q9. Field placement draft persistence — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **No persistence.** If the rep closes the dialog mid-placement, state is lost.
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 4 architecture: keep all placement state in React component state. No localStorage, no DB drafts table.
|
||||||
|
- Add a confirm-close on the dialog if the rep has placed any fields ("Discard placement work?").
|
||||||
|
|
||||||
|
### Q10. Embedded signing host fallback — RESOLVED
|
||||||
|
|
||||||
|
**Decision**: **Send raw Documenso URLs** when host is unset. The Documenso API already returns a working signing URL per recipient (e.g. `https://signatures.portnimara.dev/sign/<token>`); `transformSigningUrl()` returns this raw URL untouched when `embeddedSigningHost` is null/empty (current behaviour, see [document-signing-emails.service.ts:106-117](../src/lib/services/document-signing-emails.service.ts#L106)).
|
||||||
|
|
||||||
|
**Implications**:
|
||||||
|
|
||||||
|
- Phase 1: no behaviour change in `transformSigningUrl()`. The current null-host short-circuit IS the fallback.
|
||||||
|
- Add a banner in the EOI/Contract tab when the port has `embedded_signing_host` unset and at least one outstanding doc: "Signing emails currently link to signatures.portnimara.dev directly. Configure an embedded host in admin for branded signing pages."
|
||||||
|
- No new env var. No blocking-on-send.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schema migration summary (resolved)
|
||||||
|
|
||||||
|
Combining all resolved decisions, the migrations needed are:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Phase 1 (also covers Phase 2's lifecycle tracking)
|
||||||
|
ALTER TABLE document_signers ADD COLUMN invited_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN opened_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN last_reminder_sent_at timestamptz;
|
||||||
|
ALTER TABLE document_signers ADD COLUMN signing_token text;
|
||||||
|
CREATE UNIQUE INDEX idx_ds_signing_token ON document_signers (signing_token); -- UNIQUE suggested: tokens are per-recipient unique in Documenso, and Phase 2's token-based matching assumes a single hit; Postgres permits multiple NULLs for paper-signed rows
|
||||||
|
|
||||||
|
-- Phase 1 / Q4 (completion CCs are per-document)
|
||||||
|
ALTER TABLE documents ADD COLUMN completion_cc_emails text[] DEFAULT '{}'::text[];
|
||||||
|
|
||||||
|
-- Phase 1 / Q1 (auto-reminder opt-in per document)
|
||||||
|
ALTER TABLE documents ADD COLUMN auto_reminder_interval_days integer;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Settings to add / remove (resolved)
|
||||||
|
|
||||||
|
**Add to `SETTING_KEYS` + `PortDocumensoConfig`:**
|
||||||
|
|
||||||
|
- `documenso_developer_label` (text, default "Developer") — Q8 bonus
|
||||||
|
- `documenso_approver_label` (text, default "Approver") — Q8 bonus
|
||||||
|
|
||||||
|
**Remove from `SETTING_KEYS` + `PortDocumensoConfig`:**
|
||||||
|
|
||||||
|
- `documenso_contract_template_id` — Q6
|
||||||
|
- `documenso_reservation_template_id` — Q6
|
||||||
|
|
||||||
|
**Remove from admin UI** (`admin/documenso/page.tsx`):
|
||||||
|
|
||||||
|
- Contract template ID input — Q6
|
||||||
|
- Reservation template ID input — Q6
|
||||||
|
|
||||||
|
**Add to admin UI:**
|
||||||
|
|
||||||
|
- Display-label inputs next to developer + approver name/email pairs — Q8 bonus
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Status**: Plan is now fully resolved. Phase 1 can start without further clarification.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick file reference
|
||||||
|
|
||||||
|
**Existing — modify in place:**
|
||||||
|
|
||||||
|
- `src/lib/services/documenso-client.ts` (extend createDocument for v2; add recipient management functions)
|
||||||
|
- `src/lib/services/port-config.ts` (no changes expected)
|
||||||
|
- `src/lib/email/index.ts` (consider: add raw-Buffer attachment option to skip MinIO round-trip for one-off PDFs)
|
||||||
|
- `src/app/api/webhooks/documenso/route.ts` (Phase 2 — major rewrite)
|
||||||
|
- `src/components/interests/interest-contract-tab.tsx` (replace ComingSoonDialog with UploadForSigningDialog in Phase 4)
|
||||||
|
- `src/components/interests/interest-reservation-tab.tsx` (same)
|
||||||
|
- `src/components/documents/eoi-generate-dialog.tsx` (Phase 1 — add regenerate confirm)
|
||||||
|
|
||||||
|
**New files to create:**
|
||||||
|
|
||||||
|
- `src/lib/services/custom-document-upload.service.ts` (Phase 3)
|
||||||
|
- `src/lib/services/document-field-detector.ts` (Phase 4c)
|
||||||
|
- `src/components/documents/upload-for-signing-dialog.tsx` (Phase 4)
|
||||||
|
- `src/components/documents/pdf-field-canvas.tsx` (Phase 4b/4d)
|
||||||
|
- `src/components/documents/recipient-configurator.tsx` (Phase 4a)
|
||||||
|
- `src/components/documents/field-palette-toolbar.tsx` (Phase 4d)
|
||||||
|
- `src/components/documents/field-config-side-panel.tsx` (Phase 4d)
|
||||||
|
- `src/app/api/v1/documents/[id]/send-invitation/route.ts` (Phase 1)
|
||||||
|
- `src/app/api/v1/interests/[id]/upload-for-signing/route.ts` (Phase 3)
|
||||||
|
- DB migrations for `document_signers.invited_at` etc. (Phase 1, Phase 2)
|
||||||
223
docs/documenso-integration-audit.md
Normal file
223
docs/documenso-integration-audit.md
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
# Documenso integration audit
|
||||||
|
|
||||||
|
Reference for the multi-port Documenso signing pipeline in this CRM. Mirrors the legacy client portal's flow ([generate-quick-eoi.ts](../client-portal/server/api/eoi/generate-quick-eoi.ts), [documeso.ts](../client-portal/server/utils/documeso.ts), [documenso.post.ts](../client-portal/server/api/webhooks/documenso.post.ts), [website /sign/[type]/[token].vue](../../Port%20Nimara/Website/pages/sign/%5Btype%5D/%5Btoken%5D.vue)) but rewired for multi-tenant + better-auth + Drizzle.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Per-port configuration
|
||||||
|
|
||||||
|
All Documenso settings live in `system_settings` keyed by `(key, port_id)` and are read via [`getPortDocumensoConfig(portId)`](../src/lib/services/port-config.ts). Falls back to env vars when no per-port row exists. Surfaced in the admin UI at `/[portSlug]/admin/documenso`.
|
||||||
|
|
||||||
|
| Setting key | Type | Purpose |
|
||||||
|
| ----------------------------------- | --------------------------------- | ----------------------------------------------------------------------------------- |
|
||||||
|
| `documenso_api_url_override` | string | Per-port Documenso instance URL. Falls back to `DOCUMENSO_API_URL` env. |
|
||||||
|
| `documenso_api_key_override` | string | API key. Stored plaintext. |
|
||||||
|
| `documenso_api_version_override` | `'v1' \| 'v2'` | Different ports may run different Documenso versions. |
|
||||||
|
| `documenso_eoi_template_id` | int | Template ID for EOI generation. |
|
||||||
|
| `documenso_client_recipient_id` | int | Template recipient slot — client (signing order 1). |
|
||||||
|
| `documenso_developer_recipient_id` | int | Template recipient slot — developer (signing order 2). |
|
||||||
|
| `documenso_approval_recipient_id` | int | Template recipient slot — approver (signing order 3). |
|
||||||
|
| `documenso_developer_name` | string | Display name for developer signer (legacy hardcoded "David Mizrahi"). |
|
||||||
|
| `documenso_developer_email` | string | Developer signer email. |
|
||||||
|
| `documenso_approver_name` | string | Approver display name. |
|
||||||
|
| `documenso_approver_email` | string | Approver email. |
|
||||||
|
| `documenso_webhook_secret` | string | Per-port webhook secret. Receiver tries each enabled secret with timing-safe equal. |
|
||||||
|
| `eoi_default_pathway` | `'documenso-template' \| 'inapp'` | Which path is used when EOI is generated without explicit choice. |
|
||||||
|
| `eoi_send_mode` | `'auto' \| 'manual'` | Auto = send branded invitation email immediately; manual = rep clicks Send. |
|
||||||
|
| `embedded_signing_host` | string | Public host that wraps Documenso URLs into `{host}/sign/<type>/<token>`. |
|
||||||
|
| `documenso_contract_template_id` | int (optional) | Optional template for sales contracts. Blank = upload-and-place-fields per deal. |
|
||||||
|
| `documenso_reservation_template_id` | int (optional) | Optional template for reservation agreements. Same logic as contract. |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Document type matrix
|
||||||
|
|
||||||
|
| Type | Generation flow | Signers | Field placement |
|
||||||
|
| --------------- | ----------------------------------------------------------------- | ---------------------------------------------- | ----------------------------------------------------- |
|
||||||
|
| **EOI** | Documenso template (`eoi_template_id`) + form-fill values | Static: client, developer, approver (per-port) | Templated — fields baked into Documenso template |
|
||||||
|
| **Contract** | Per-deal upload (drafted custom). Template fallback if configured | Custom per deal — rep specifies | Per-deal placement — default footer-anchored fallback |
|
||||||
|
| **Reservation** | Per-deal upload OR template if configured | Custom per deal | Per-deal placement |
|
||||||
|
|
||||||
|
## Documenso field types
|
||||||
|
|
||||||
|
Custom-uploaded documents (contracts, reservations) need a per-deal field placement step — different documents need different mixes. The CRM exposes the full Documenso-supported field palette so reps can place whatever the document calls for without code changes.
|
||||||
|
|
||||||
|
| Field type | Use case | Needs `fieldMeta`? | What goes in meta |
|
||||||
|
| ---------------- | ------------------------------------------------------- | ------------------ | --------------------------------------------------- |
|
||||||
|
| `SIGNATURE` | Drawn signature — almost every signing flow | No | — |
|
||||||
|
| `FREE_SIGNATURE` | Type-or-draw signature variant | No | — |
|
||||||
|
| `INITIALS` | Per-page initials block | No | — |
|
||||||
|
| `DATE` | Auto-fills the date when the recipient signs | No | — |
|
||||||
|
| `EMAIL` | Auto-fills the recipient's email | No | — |
|
||||||
|
| `NAME` | Auto-fills the recipient's name | No | — |
|
||||||
|
| `TEXT` | Free text input (e.g. address, notes, place of signing) | Yes | `{ text?, label?, required?, readOnly? }` |
|
||||||
|
| `NUMBER` | Numeric input with optional min/max | Yes | `{ numberFormat?, min?, max?, required? }` |
|
||||||
|
| `CHECKBOX` | Boolean / single checkbox | Yes | `{ values: [{ checked, value }], validationRule? }` |
|
||||||
|
| `DROPDOWN` | Pick from a fixed list | Yes | `{ values: [{ value }], defaultValue? }` |
|
||||||
|
| `RADIO` | Mutually-exclusive options | Yes | `{ values: [{ checked, value }] }` |
|
||||||
|
|
||||||
|
Helper: [`fieldTypeNeedsMeta(type)`](../src/lib/services/documenso-client.ts) returns true for the configurable types so the placement UI knows when to surface a config side-panel.
|
||||||
|
|
||||||
|
`fieldMeta` is forwarded verbatim by [`placeFields()`](../src/lib/services/documenso-client.ts) on the v2 path. The v1 path never sends `fieldMeta` — its request body contains only recipient, type, and coordinates — so TEXT/NUMBER/CHECKBOX/DROPDOWN/RADIO fields render as blank inputs on v1. Configurable behaviour (validation, defaults) only fires on v2 instances.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Documenso v1 vs v2 endpoint mapping
|
||||||
|
|
||||||
|
The [`documenso-client.ts`](../src/lib/services/documenso-client.ts) abstracts both. Each function picks v1 or v2 from `getPortDocumensoConfig(portId).apiVersion`.
|
||||||
|
|
||||||
|
| Operation | v1 (1.13–1.32) | v2.x |
|
||||||
|
| ------------------------------- | --------------------------------------------------------------------------- | --------------------------------------------------------- |
|
||||||
|
| Create document from upload | `POST /api/v1/documents` (body: `{ title, document, recipients }`) | `POST /api/v2/envelope` |
|
||||||
|
| Generate document from template | `POST /api/v1/templates/{id}/generate-document` | (template-from-envelope path) |
|
||||||
|
| Send for signing | `POST /api/v1/documents/{id}/send` | `POST /api/v2/envelope/{id}/send` |
|
||||||
|
| Place a field | `POST /api/v1/documents/{id}/fields` (PIXEL coords, one at a time) | `POST /api/v2/envelope/field/create-many` (PERCENT, bulk) |
|
||||||
|
| Get document state | `GET /api/v1/documents/{id}` | `GET /api/v2/envelope/{id}` |
|
||||||
|
| Send reminder to one recipient | `POST /api/v1/documents/{id}/recipients/{rid}/remind` | `POST /api/v2/envelope/{id}/recipient/{rid}/remind` |
|
||||||
|
| Download finalized PDF | `GET /api/v1/documents/{id}/download` → `{ downloadUrl }` then GET that URL | `GET /api/v2/envelope/{id}/download` (same shape) |
|
||||||
|
| Cancel / void | `DELETE /api/v1/documents/{id}` | `DELETE /api/v2/envelope/{id}` |
|
||||||
|
| Healthcheck | `GET /api/v1/health` | (v1 path used) |
|
||||||
|
|
||||||
|
**Field key rename in v2 responses**: `id` → `documentId` and recipient `id` → `recipientId`. Our [`normalizeDocument()`](../src/lib/services/documenso-client.ts) handles both shapes.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Signing-flow lifecycle
|
||||||
|
|
||||||
|
```
|
||||||
|
[rep clicks Generate] (CRM)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
buildEoiContext(interestId, portId) service
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
generateAndSign(templateId, ctx, signers) creates Documenso doc
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
POST /documents/{id}/send {sendEmail:false} Documenso starts the chain;
|
||||||
|
it does NOT email signers
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
extract signing URLs from response service
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
transformSigningUrl(url, host, role) wrap as {host}/sign/<role>/<token>
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
if eoi_send_mode === 'auto':
|
||||||
|
sendSigningInvitation(client) our branded HTML email goes out
|
||||||
|
else:
|
||||||
|
UI shows the URL + Send button rep dispatches manually
|
||||||
|
```
|
||||||
|
|
||||||
|
When the client signs:
|
||||||
|
|
||||||
|
```
|
||||||
|
Documenso fires DOCUMENT_SIGNED webhook ──► /api/webhooks/documenso
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
verify x-documenso-secret (per-port lookup)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
update document_signers row: status='signed', signedAt=...
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
if next signer in chain has not been notified:
|
||||||
|
sendSigningInvitation(developer) cascading "your turn" email
|
||||||
|
```
|
||||||
|
|
||||||
|
When the document reaches fully-signed:
|
||||||
|
|
||||||
|
```
|
||||||
|
Documenso fires DOCUMENT_COMPLETED webhook
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
download signed PDF from Documenso
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
store in storage backend → creates files row
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
update document: status='completed', completedAt=...
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
sendSigningCompleted([client, developer, approver], pdfFileId)
|
||||||
|
all parties get the signed PDF
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
update interest: pipelineStage='eoi_signed' (or contract_signed, etc)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Embedded signing on the marketing website
|
||||||
|
|
||||||
|
The CRM emits signing URLs in the form `{embeddedSigningHost}/sign/<role>/<token>`. The marketing website ([Port Nimara/Website/pages/sign/[type]/[token].vue](../../Port%20Nimara/Website/pages/sign/%5Btype%5D/%5Btoken%5D.vue)) hosts the page, embeds Documenso via `@documenso/embed-vue`'s `<EmbedSignDocument>`, and POSTs back to the CRM webhook on completion.
|
||||||
|
|
||||||
|
For the embed to work, the Documenso instance MUST send `Access-Control-Allow-Origin` headers permitting the website origin.
|
||||||
|
|
||||||
|
### nginx CORS block to apply on `signatures.portnimara.dev`
|
||||||
|
|
||||||
|
Add to the relevant `server { ... }` block:
|
||||||
|
|
||||||
|
```nginx
|
||||||
|
location / {
|
||||||
|
# CORS for embedded signing — allow the marketing-website origin
|
||||||
|
# to load the Documenso signing iframe.
|
||||||
|
add_header 'Access-Control-Allow-Origin' 'https://portnimara.com' always;
|
||||||
|
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
|
||||||
|
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization' always;
|
||||||
|
add_header 'Access-Control-Allow-Credentials' 'true' always;
|
||||||
|
|
||||||
|
# Preflight
|
||||||
|
if ($request_method = 'OPTIONS') {
|
||||||
|
add_header 'Access-Control-Allow-Origin' 'https://portnimara.com' always;
|
||||||
|
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
|
||||||
|
add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization' always;
|
||||||
|
add_header 'Access-Control-Max-Age' 1728000;
|
||||||
|
        add_header 'Content-Type' 'text/plain; charset=UTF-8';
|
||||||
|
add_header 'Content-Length' 0;
|
||||||
|
return 204;
|
||||||
|
}
|
||||||
|
|
||||||
|
# ... your existing proxy_pass block to Documenso
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
To support multiple website origins (e.g. Port Amador hosted on a different domain), use a regex:
|
||||||
|
|
||||||
|
```nginx
|
||||||
|
set $cors_origin "";
|
||||||
|
if ($http_origin ~* "^https://(portnimara\.com|portamador\.com)$") {
|
||||||
|
set $cors_origin $http_origin;
|
||||||
|
}
|
||||||
|
add_header 'Access-Control-Allow-Origin' $cors_origin always;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## What's deferred vs landed in this build
|
||||||
|
|
||||||
|
**Landed:**
|
||||||
|
|
||||||
|
- Per-port admin settings — every Documenso config knob is exposed at `/admin/documenso`
|
||||||
|
- Branded invitation, completion, and reminder email templates
|
||||||
|
- `transformSigningUrl()` for `{host}/sign/<role>/<token>` URL wrapping
|
||||||
|
- Documenso v1 + v2 dual-version client (existing)
|
||||||
|
- Webhook handler with timing-safe per-port secret resolution (existing)
|
||||||
|
- Contract + Reservation tab UI shells with paper-signed upload + "send for signing" placeholder
|
||||||
|
- Stage-conditional tab visibility for EOI / Contract / Reservation
|
||||||
|
|
||||||
|
**Deferred (separate sessions):**
|
||||||
|
|
||||||
|
- Custom document upload-to-Documenso service for contract/reservation (POST PDF → place fields → send). The tabs currently surface a "coming soon" dialog.
|
||||||
|
- Recipient + signing order configurator UI (rep specifies signers per deal for custom-uploaded docs).
|
||||||
|
- Drag-and-drop field placement UI on uploaded PDF previews. The fallback when this lands will be `computeDefaultSignatureLayout()` (footer-anchored fields).
|
||||||
|
- Webhook handler enhancements to track per-signer `sent_at`/`opened_at`/`signed_at` and trigger the cascading "your turn" branded emails. Currently the webhook just updates document status.
|
||||||
|
- Auto-store signed PDFs in storage backend and trigger `sendSigningCompleted()` on `DOCUMENT_COMPLETED`. Old system has this; needs porting.
|
||||||
|
|
||||||
|
**Manual ops work for you:**
|
||||||
|
|
||||||
|
- Apply the nginx CORS block above on your prod Documenso instance.
|
||||||
|
- Decide whether to upgrade prod Documenso to v2 (would unlock cleaner field placement + better envelope semantics).
|
||||||
|
- Configure each port's developer/approver names and template IDs at `/[portSlug]/admin/documenso`.
|
||||||
@@ -19,19 +19,24 @@ The template exposes eight text fields (`formValues` keys) and two boolean check
|
|||||||
|
|
||||||
## Field mapping
|
## Field mapping
|
||||||
|
|
||||||
|
The legacy template (Documenso template `8`, configured in production) auto-fills exactly the fields below. All eight text fields + two booleans are populated by `buildDocumensoPayload()` from the resolved `EoiContext`. Anything else on the form (signature, date, terms acknowledgment) is filled in by the client inside Documenso.
|
||||||
|
|
||||||
| Documenso key | Type | Legacy source | New `EoiContext` path | Notes |
|
| Documenso key | Type | Legacy source | New `EoiContext` path | Notes |
|
||||||
| -------------- | ------- | --------------------------- | ----------------------------------------------------- | ------------------------------------------------------------------------- |
|
| -------------- | ------- | --------------------------- | ----------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| `Name` | text | `interest['Full Name']` | `context.client.fullName` | The interest's point-of-contact client (billing signer). |
|
| `Name` | text | `interest['Full Name']` | `context.client.fullName` | The interest's point-of-contact client (billing signer). |
|
||||||
| `Email` | text | `interest['Email Address']` | `context.client.primaryEmail` | Primary email contact from `client_contacts`. |
|
| `Email` | text | `interest['Email Address']` | `context.client.primaryEmail` | Primary email contact from `client_contacts`. |
|
||||||
| `Address` | text | `interest['Address']` | concat `context.client.address.{street,city,country}` | Concatenate street, city, country with `', '`. Empty if address is null. |
|
| `Address` | text | `interest['Address']` | concat `context.client.address.{street,city,country}` | Concatenate street, city, country with `', '`. Empty if address is null. |
|
||||||
| `Yacht Name` | text | `interest['Yacht Name']` | `context.yacht.name` | Yacht is now a first-class row; pulled via `interest.yachtId`. |
|
| `Yacht Name` | text | `interest['Yacht Name']` | `context.yacht.name` | Yacht is now a first-class row; pulled via `interest.yachtId`. Empty string when no yacht is linked yet. |
|
||||||
| `Length` | text | `interest['Length']` | `context.yacht.lengthFt` | Send as string. Documenso doesn't enforce numeric format. |
|
| `Length` | text | `interest['Length']` | `context.yacht.lengthFt` | Boat dimension. Send as string. Documenso doesn't enforce numeric format. Empty string when not applicable. |
|
||||||
| `Width` | text | `interest['Width']` | `context.yacht.widthFt` | Same. |
|
| `Width` | text | `interest['Width']` | `context.yacht.widthFt` | Same. |
|
||||||
| `Draft` | text | `interest['Depth']` | `context.yacht.draftFt` | Legacy field was named "Depth" in NocoDB; Documenso key is "Draft". |
|
| `Draft` | text | `interest['Depth']` | `context.yacht.draftFt` | Legacy field was named "Depth" in NocoDB; Documenso key is "Draft". |
|
||||||
| `Berth Number` | text | `berthNumbers` (joined) | `context.berth.mooringNumber` | One berth per reservation. Multi-berth case was multi-interest in legacy. |
|
| `Berth Number` | text | `berthNumbers` (joined) | `context.berth.mooringNumber` | The interest's PRIMARY berth (resolved via `interest_berths.is_primary=true`). Empty string when no primary set. |
|
||||||
|
| `Berth Range` | text | (new) | `context.eoiBerthRange` | **NEW IN PHASE 5** — compact range string for multi-berth EOIs (e.g. `"A1-A3, B5-B7"`) covering every junction row marked `is_in_eoi_bundle=true`. Empty string when the bundle is empty. **The live Documenso template (id `8`) does NOT yet have this field. Add a `Berth Range` text field to the template before multi-berth EOIs render the range; until then Documenso silently drops the value and only `Berth Number` (the primary mooring) renders.** |
|
||||||
| `Lease_10` | boolean | hardcoded `false` | `false` | Hardcoded — legacy flow defaults to Purchase (not Lease). |
|
| `Lease_10` | boolean | hardcoded `false` | `false` | Hardcoded — legacy flow defaults to Purchase (not Lease). |
|
||||||
| `Purchase` | boolean | hardcoded `true` | `true` | Hardcoded — legacy flow defaults to Purchase. |
|
| `Purchase` | boolean | hardcoded `true` | `true` | Hardcoded — legacy flow defaults to Purchase. |
|
||||||
|
|
||||||
|
**Backwards-compatibility guarantee**: every legacy `formValues` key is still emitted with the same name and type. The only addition is `Berth Range` (Phase 5). Documenso silently ignores unknown formValues keys, so old templates that don't have `Berth Range` will simply not render it — single-berth EOIs continue to work identically. No template changes are required for legacy use.
|
||||||
|
|
||||||
## Document `meta` fields (non-`formValues`)
|
## Document `meta` fields (non-`formValues`)
|
||||||
|
|
||||||
| Documenso key | Type | Legacy source | New source |
|
| Documenso key | Type | Legacy source | New source |
|
||||||
|
|||||||
188
docs/error-handling.md
Normal file
188
docs/error-handling.md
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
# Error handling
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Every authenticated request runs inside an `AsyncLocalStorage` frame
|
||||||
|
that carries a `requestId` (UUID) plus the resolved `portId` / `userId`
|
||||||
|
/ HTTP method / path / start time. The id surfaces:
|
||||||
|
|
||||||
|
- as `X-Request-Id` on every response header (success or failure)
|
||||||
|
- inside every pino log line emitted during the request
|
||||||
|
- in the JSON error body returned to the client (`requestId` field)
|
||||||
|
- as the primary key of the `error_events` row written when a 5xx fires
|
||||||
|
|
||||||
|
A user who hits a failure can copy the **Reference ID** from the toast
|
||||||
|
and a super admin can paste it into `/<port>/admin/errors/<requestId>`
|
||||||
|
to see the full request context, sanitized body, error stack, and a
|
||||||
|
heuristic "likely culprit" hint.
|
||||||
|
|
||||||
|
## Throwing errors from a service
|
||||||
|
|
||||||
|
Use `CodedError` with a registered code:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { CodedError } from '@/lib/errors';
|
||||||
|
|
||||||
|
if (!hasReceipts && !ack) {
|
||||||
|
throw new CodedError('EXPENSES_RECEIPT_REQUIRED');
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The code drives:
|
||||||
|
|
||||||
|
- the HTTP status (defined in `src/lib/error-codes.ts`)
|
||||||
|
- the **plain-text user-facing message** (no jargon — written for the
|
||||||
|
rep on the phone with a customer)
|
||||||
|
- the stable identifier the user can quote to support
|
||||||
|
|
||||||
|
For more verbose internal context — admin-only — use `internalMessage`:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
throw new CodedError('CROSS_PORT_LINK_REJECTED', {
|
||||||
|
internalMessage: `interest ${a.id} (port ${a.portId}) ↔ berth ${b.id} (port ${b.portId})`,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
The `internalMessage` lands in the `error_events` row and the admin
|
||||||
|
inspector but **never** reaches the client.
|
||||||
|
|
||||||
|
## Adding a new error code
|
||||||
|
|
||||||
|
1. Open `src/lib/error-codes.ts`.
|
||||||
|
2. Add an entry to the `ERROR_CODES` map. Convention: `DOMAIN_REASON`
|
||||||
|
in SCREAMING_SNAKE_CASE.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
FOO_INVALID_BAR: {
|
||||||
|
status: 400,
|
||||||
|
userMessage: 'That bar value is no good. Please try another.',
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Use it: `throw new CodedError('FOO_INVALID_BAR')`.
|
||||||
|
4. The code, status, and message are now contractually stable —
|
||||||
|
never rename a code once it has shipped. Documentation, UI, and
|
||||||
|
external integrations may pin to it.
|
||||||
|
|
||||||
|
## Plain-text message guidelines
|
||||||
|
|
||||||
|
User-facing messages should:
|
||||||
|
|
||||||
|
- Avoid internal jargon (no "constraint violation", "FK", "row lock").
|
||||||
|
- Be written for a rep on the phone with a customer.
|
||||||
|
- Include the suggested next action when natural ("Ask an admin if you
|
||||||
|
think you should").
|
||||||
|
- Not include any technical detail that doesn't help the user — the
|
||||||
|
request id + error code carry that.
|
||||||
|
|
||||||
|
Verbose technical detail belongs in `internalMessage` (admin-only).
|
||||||
|
|
||||||
|
## Client side
|
||||||
|
|
||||||
|
In a `useMutation`, render errors with the shared helper:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { toastError } from '@/lib/api/toast-error';
|
||||||
|
|
||||||
|
const mutation = useMutation({
|
||||||
|
mutationFn: () => apiFetch('/api/v1/foo', { method: 'POST', body: { ... } }),
|
||||||
|
onSuccess: () => { ... },
|
||||||
|
onError: (err) => toastError(err),
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
The toast renders three lines:
|
||||||
|
|
||||||
|
```
|
||||||
|
{plain-text message}
|
||||||
|
|
||||||
|
Error code: EXPENSES_RECEIPT_REQUIRED
|
||||||
|
Reference ID: 8f3c-ab12-… [Copy ID]
|
||||||
|
```
|
||||||
|
|
||||||
|
The "Copy ID" action puts the request id on the clipboard so the
|
||||||
|
user can paste it into a support ticket.
|
||||||
|
|
||||||
|
## Admin inspector
|
||||||
|
|
||||||
|
`/<port>/admin/errors` lists captured 5xx errors:
|
||||||
|
|
||||||
|
- Status badge + method + path
|
||||||
|
- "Likely culprit" badge (heuristic — Postgres SQLSTATE, error name,
|
||||||
|
stack-path patterns, message keywords)
|
||||||
|
- Truncated error name + message
|
||||||
|
- Timestamp + reference id
|
||||||
|
|
||||||
|
Click any row for `/<port>/admin/errors/<requestId>` which shows:
|
||||||
|
|
||||||
|
- Request shape (method / path / when / duration / port / user / IP / UA)
|
||||||
|
- Likely culprit + plain-English hint + subsystem tag
|
||||||
|
- Full error name, message, stack head (first 4 KB)
|
||||||
|
- Sanitized request body excerpt (max 1 KB; sensitive keys redacted)
|
||||||
|
- Raw metadata (Postgres SQLSTATE codes, internalMessage, etc.)
|
||||||
|
|
||||||
|
Permission: `admin.view_audit_log`. Super admins see every port's
|
||||||
|
errors; regular admins are scoped to their active port.
|
||||||
|
|
||||||
|
## What gets persisted
|
||||||
|
|
||||||
|
| Status | error_events row? | Toast shows code? |
|
||||||
|
| ------ | ----------------- | ----------------- |
|
||||||
|
| 4xx | No | Yes |
|
||||||
|
| 5xx | **Yes** | Yes |
|
||||||
|
|
||||||
|
4xx errors are user-action mistakes (validation, not-found, permission
|
||||||
|
denied). They're visible in the audit log but not the error inspector
|
||||||
|
— that table is reserved for platform faults.
|
||||||
|
|
||||||
|
5xx errors hit the `errorEvents` table via `captureErrorEvent` inside
|
||||||
|
`errorResponse`, which:
|
||||||
|
|
||||||
|
1. Reads the request context from ALS.
|
||||||
|
2. Sanitizes + truncates the body (1 KB cap, sensitive keys redacted).
|
||||||
|
3. Pulls Postgres `code` / `severity` / `cause.code` if the underlying
|
||||||
|
error is a `postgres` driver error.
|
||||||
|
4. Truncates the stack to 4 KB.
|
||||||
|
5. Inserts one row keyed on `requestId` with `ON CONFLICT DO NOTHING`.
|
||||||
|
|
||||||
|
Failure to persist NEVER throws — the user is already getting an
|
||||||
|
error response; we don't want a logging-pipeline failure to mask it.
|
||||||
|
|
||||||
|
## Likely-culprit classifier
|
||||||
|
|
||||||
|
`src/lib/error-classifier.ts` runs four passes against an
|
||||||
|
`error_events` row, first match wins:
|
||||||
|
|
||||||
|
1. **Postgres SQLSTATE** (from `metadata.code`): 23502 NOT NULL,
|
||||||
|
23503 FK, 23505 unique, 23514 CHECK, 42703 schema drift, 42P01
|
||||||
|
missing table, 40001 serialization, 53300 connection limit, …
|
||||||
|
2. **Error class name**: `AbortError`, `TimeoutError`, `FetchError`,
|
||||||
|
`ZodError`.
|
||||||
|
3. **Stack path**: `/lib/storage/`, `/lib/email/`, `documenso`,
|
||||||
|
`openai|claude`, `/queue/workers/`.
|
||||||
|
4. **Message free-text**: `econnrefused`, `rate limit`, `timeout`,
|
||||||
|
`unauthorized|invalid api key`.
|
||||||
|
|
||||||
|
Returns `null` when nothing matches; the inspector renders
|
||||||
|
"Uncategorized" in that case. Adding a new heuristic is a one-line
|
||||||
|
edit to the relevant array.
|
||||||
|
|
||||||
|
## Pruning
|
||||||
|
|
||||||
|
`error_events` rows are dropped after 90 days by the maintenance
|
||||||
|
worker (TODO: confirm the worker has the deletion path; if not, add
|
||||||
|
a periodic job that runs `DELETE FROM error_events WHERE created_at <
|
||||||
|
now() - interval '90 days'`).
|
||||||
|
|
||||||
|
## Migration path for legacy throws
|
||||||
|
|
||||||
|
Existing `NotFoundError` / `ForbiddenError` / `ConflictError` /
|
||||||
|
`ValidationError` / `RateLimitError` still work — the user-facing
|
||||||
|
messages on these classes have been rewritten to plain-text defaults.
|
||||||
|
|
||||||
|
Migration to `CodedError` happens opportunistically: when touching a
|
||||||
|
service to fix something else, swap the throw site for a registered
|
||||||
|
code.
|
||||||
|
|
||||||
|
A follow-up audit pass should walk `git grep "throw new ValidationError"`
|
||||||
|
and migrate the user-impactful ones to specific codes.
|
||||||
123
docs/operations/outbound-comms-safety.md
Normal file
123
docs/operations/outbound-comms-safety.md
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
# Outbound communications safety net
|
||||||
|
|
||||||
|
**Last reviewed:** 2026-05-03
|
||||||
|
**Owner:** matt@portnimara.com
|
||||||
|
|
||||||
|
This doc enumerates every channel through which the CRM can produce
|
||||||
|
outbound communication (email, document signing, webhooks) and describes
|
||||||
|
how each channel respects the `EMAIL_REDIRECT_TO` env var. The goal: a
|
||||||
|
single environment flip pauses **all** outbound traffic, so a production
|
||||||
|
data import, dedup migration dry-run, or staging environment can run
|
||||||
|
against real data without anyone getting paged or spammed.
|
||||||
|
|
||||||
|
> **Single env switch:** when `EMAIL_REDIRECT_TO` is set to an address,
|
||||||
|
> all outbound communication is rerouted there or short-circuited. Unset
|
||||||
|
> it in production.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Channels
|
||||||
|
|
||||||
|
### 1. Direct email (`sendEmail`)
|
||||||
|
|
||||||
|
**Path:** `src/lib/email/index.ts` → `sendEmail()` → nodemailer SMTP transport.
|
||||||
|
|
||||||
|
**Safety:** YES — covered.
|
||||||
|
|
||||||
|
When `EMAIL_REDIRECT_TO` is set, `sendEmail()` rewrites the `to` header
|
||||||
|
to the redirect address and prefixes the subject with
|
||||||
|
`[redirected from <orig>]`. The original recipient is logged.
|
||||||
|
|
||||||
|
**Call sites** (all flow through `sendEmail`, so all are covered):
|
||||||
|
|
||||||
|
- `src/lib/services/portal-auth.service.ts` — portal activation + reset
|
||||||
|
- `src/lib/services/crm-invite.service.ts` — CRM user invitations
|
||||||
|
- `src/lib/services/document-templates.ts` — template-generated PDFs sent
|
||||||
|
as attachments (the PDF body is generated locally; the email itself
|
||||||
|
goes through SMTP)
|
||||||
|
- `src/lib/services/email-compose.service.ts` — ad-hoc emails composed
|
||||||
|
in the in-app UI
|
||||||
|
- `src/lib/services/gdpr-export.service.ts` — GDPR export delivery
|
||||||
|
|
||||||
|
### 2. Documenso e-signature recipients
|
||||||
|
|
||||||
|
**Path:** `src/lib/services/documenso-client.ts` → `createDocument()` /
|
||||||
|
`generateDocumentFromTemplate()` → Documenso REST API.
|
||||||
|
|
||||||
|
**Safety:** YES — covered as of 2026-05-03.
|
||||||
|
|
||||||
|
Documenso's own server sends the signing-request email on our behalf.
|
||||||
|
We can't intercept that at the SMTP layer because it's external. The
|
||||||
|
fix is at the REST-call boundary: when `EMAIL_REDIRECT_TO` is set,
|
||||||
|
`createDocument` rewrites every recipient's email to the redirect
|
||||||
|
address and prefixes the recipient name with `(was: <orig email>)` so
|
||||||
|
the doc is still traceable to its intended recipient.
|
||||||
|
`generateDocumentFromTemplate` does the same for both shapes the
|
||||||
|
template-generate endpoint accepts (v1.13 `formValues.*Email` keys and
|
||||||
|
v2.x `recipients` array).
|
||||||
|
|
||||||
|
The redirect happens **before** the API call, so even if Documenso has
|
||||||
|
its own retry logic the original email never leaves our process.
|
||||||
|
|
||||||
|
### 3. Webhooks (outbound to user-configured URLs)
|
||||||
|
|
||||||
|
**Path:** `src/lib/queue/workers/webhooks.ts` → BullMQ job → `fetch(webhook.url, ...)`.
|
||||||
|
|
||||||
|
**Safety:** YES — covered as of 2026-05-03.
|
||||||
|
|
||||||
|
When `EMAIL_REDIRECT_TO` is set, the webhook worker short-circuits
|
||||||
|
before the HTTP call. The delivery row is marked `dead_letter` with a
|
||||||
|
human-readable reason so it's still visible in the deliveries listing.
|
||||||
|
The SSRF guard remains in place independently.
|
||||||
|
|
||||||
|
### 4. WhatsApp / phone deep-links
|
||||||
|
|
||||||
|
**Path:** `<a href="https://wa.me/...">` and `<a href="tel:...">` in
|
||||||
|
client / interest detail headers.
|
||||||
|
|
||||||
|
**Safety:** N/A — user-initiated only.
|
||||||
|
|
||||||
|
These are deep links the user explicitly clicks. No automated dispatch.
|
||||||
|
A deep link click opens the user's WhatsApp / phone app, which is the
|
||||||
|
intended interaction. No safety net needed.
|
||||||
|
|
||||||
|
### 5. SMS
|
||||||
|
|
||||||
|
Not implemented. The `interests.preferredContactMethod` enum includes
|
||||||
|
`'sms'` as a value but no sending path exists. If/when SMS is added (e.g.
|
||||||
|
via Twilio), the new send function should respect `EMAIL_REDIRECT_TO`
|
||||||
|
the same way `sendEmail` does — log the original number, drop the
|
||||||
|
message, or reroute to a configurable `SMS_REDIRECT_TO` env.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verification checklist before importing real data
|
||||||
|
|
||||||
|
- [ ] `.env` has `EMAIL_REDIRECT_TO=<my-address>` set.
|
||||||
|
- [ ] Restart dev server (or worker) so the new env is picked up — env
|
||||||
|
vars are read at import time in some paths.
|
||||||
|
- [ ] Send a test email via `pnpm tsx scripts/dev-trigger-portal-invite.ts`
|
||||||
|
or similar. Confirm subject is prefixed with `[redirected from ...]`.
|
||||||
|
- [ ] Trigger an EOI send through the UI (any client). Confirm Documenso
|
||||||
|
shows the redirect address as recipient (not the real client email).
|
||||||
|
- [ ] If any webhooks are configured, trigger an event that fires one and
|
||||||
|
confirm the delivery is recorded as `dead_letter` with the
|
||||||
|
"EMAIL_REDIRECT_TO is set" reason.
|
||||||
|
- [ ] Run the NocoDB migration `--dry-run` to count clients/interests; the
|
||||||
|
`--apply` step is what creates real records but emails/webhooks are
|
||||||
|
still gated by the redirect env.
|
||||||
|
|
||||||
|
## Production cutover
|
||||||
|
|
||||||
|
When ready to go live:
|
||||||
|
|
||||||
|
1. Run a final dry-run of the data migration with `EMAIL_REDIRECT_TO` set
|
||||||
|
to a sandbox address.
|
||||||
|
2. Verify the snapshot looks right (counts, client coverage).
|
||||||
|
3. Unset `EMAIL_REDIRECT_TO` in the production env.
|
||||||
|
4. Restart the app + worker.
|
||||||
|
5. Run the migration with `--apply`. From this point forward, real
|
||||||
|
recipients will receive real comms.
|
||||||
|
|
||||||
|
If you ever need to re-pause outbound (e.g. handling a security incident,
|
||||||
|
re-importing on top of existing data), set `EMAIL_REDIRECT_TO` again.
|
||||||
564
docs/superpowers/specs/2026-05-03-dedup-and-migration-design.md
Normal file
564
docs/superpowers/specs/2026-05-03-dedup-and-migration-design.md
Normal file
@@ -0,0 +1,564 @@
|
|||||||
|
# Client Deduplication and NocoDB Migration Design
|
||||||
|
|
||||||
|
**Status**: Design draft 2026-05-03 — pending approval.
|
||||||
|
**Plan decomposition**: Three implementation plans stack from this design — (P1) normalization + dedup core library; (P2) admin settings + at-create + interest-level guards (runtime); (P3) NocoDB migration script + review queue UI. P1 unblocks P2 and P3.
|
||||||
|
**Branch base**: stacks on `feat/mobile-foundation` once it merges to `main`.
|
||||||
|
**Out of scope**: live merge of two clients across ports (cross-tenant), automated AI-judged matches, profile-photo / face-match dedup, web-of-trust referrer relationships.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Background
|
||||||
|
|
||||||
|
### 1.1 Why this exists
|
||||||
|
|
||||||
|
The legacy CRM lives in a NocoDB base whose `Interests` table conflates _the human_ with _the deal_. A row contains `Full Name`, `Email Address`, `Phone Number`, `Address`, `Place of Residence` _and_ the sales-pipeline state for one specific berth. A single human pursuing two berths becomes two rows with semi-duplicated personal data. A 2026-05-03 read-only audit confirmed:
|
||||||
|
|
||||||
|
- **252 Interests rows** in NocoDB, against an estimated ~190–200 unique humans (~20–25% duplication rate).
|
||||||
|
- **35 Residential Interests rows** in a parallel residential pathway with the same conflation.
|
||||||
|
- **64 Website Interest Submissions + 47 Website Contact Form Submissions + 1 EOI Supplemental Form** as inbound capture surfaces.
|
||||||
|
- **No Clients table.** The conflated structure is structural, not accidental.
|
||||||
|
|
||||||
|
The new CRM (`src/lib/db/schema/clients.ts`) splits this into `clients` (people) ↔ `interests` (deals), with `clientContacts` (multi-channel), `clientAddresses` (multi-address), and a pre-existing `clientMergeLog` table that anticipates merge with undo. The design has been ready; what's missing is (a) a normalization + matching library, (b) the at-create / at-import surfaces that use it, and (c) the migration of the existing 252+35 records.
|
||||||
|
|
||||||
|
### 1.2 Real duplicate patterns observed in the live data
|
||||||
|
|
||||||
|
Sampled 200 of the 252 NocoDB Interests rows. Confirmed duplicate clusters fall into six patterns:
|
||||||
|
|
||||||
|
| Pattern | Example rows | Signature |
|
||||||
|
| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------- |
|
||||||
|
| **A. Pure double-submit** | Deepak Ramchandani #624/#625; John Lynch #716/#725 | All fields identical; created same day |
|
||||||
|
| **B. Phone format variance** | Howard Wiarda #236/#536 (`574-274-0548` vs `+15742740548`); Christophe Zasso #701/#702 (`0651381036` vs `0033651381036`) | Same email, normalize-equal phone |
|
||||||
|
| **C. Name capitalization** | Nicolas Ruiz #681/#682/#683; Jean-Charles Miege/MIEGE #37/#163; John Farmer/FARMER #35/#161 | Same email or empty; surname case differs |
|
||||||
|
| **D. Name shortening** | Chris vs Christopher Allen #700/#534; Emma c vs Emma Cauchefer #661/#673 | Same email + phone; given-name truncated |
|
||||||
|
| **E. Resubmit with typo** | Christopher Camazou #649/#650 (phone last 4 digits typo); Gianfranco Di Constanzo/Costanzo #585/#336 (surname typo, **different yacht** — should be ONE client + TWO interests) | Score-on-everything-else high, one field has small-edit-distance noise |
|
||||||
|
| **F. Hard cases** | Etiennette Clamouze #188/#717 (same name, different country phone + email); Bruno Joyerot #18 with email belonging to Bruce Hearn #19 (couple sharing contact) | Cannot resolve without a human |
|
||||||
|
|
||||||
|
This dataset will be the fixture for the dedup library's tests — every pattern above must be either auto-detected or flagged for review, and the false-positive bar must be high enough that Pattern F doesn't get force-merged.
|
||||||
|
|
||||||
|
### 1.3 Dirty data inventory
|
||||||
|
|
||||||
|
The migration normalizer must survive these real values from production:
|
||||||
|
|
||||||
|
**Phone fields**: `+1-264-235-8840\r` (with carriage return), `'+1.214.603.4235` (apostrophe + dots), `0677580750/0690511494` (two numbers in one field), `00447956657022` (00 prefix), `+447000000000` (placeholder all-zeros), `+4901637039672` (impossible — stripped 0 + country prefix), various unprefixed local formats, dashed US numbers without country code.
|
||||||
|
|
||||||
|
**Email fields**: mixed case rampant (`Arthur@laser-align.com` vs `arthur@laser-align.com`); ALL-CAPS local parts; trailing whitespace.
|
||||||
|
|
||||||
|
**Name fields**: ALL-CAPS surnames mixed with title-case given names; embedded `\n` and `\r`; double spaces; lowercase-only entries; slash-with-company variants (`Daniel Wainstein / 7 Knots, LLC`, `Bruno Joyerot / SAS TIKI`); placeholder `Mr DADER`, `TBC`.
|
||||||
|
|
||||||
|
**Place of Residence (free text)**: `Saint barthelemy`, `St Barth`, `Saint-Barthélemy` (same place, three forms); `anguilla`, `United States `, `USA`, `Kansas City` (city without country), `Sag Harbor Y` (typo).
|
||||||
|
|
||||||
|
### 1.4 Existing battle-tested algorithm
|
||||||
|
|
||||||
|
`client-portal/server/utils/duplicate-detection.ts` already implements blocking + weighted-rules dedup against this same NocoDB. It runs in production today. We **port it forward** (don't reinvent), then add: soundex/metaphone for surname matching, compounded-confidence when multiple rules match, and negative evidence (same email + different country phone reduces confidence).
|
||||||
|
|
||||||
|
### 1.5 Why the website is no longer the source of new dirty data
|
||||||
|
|
||||||
|
The website forms (`website/components/pn/specific/website/{berths-item,register,form}/form.vue`) use `<v-phone-input>` with a country picker (`prefer-countries: ['US', 'GB', 'DE', 'FR']`) and `[(value) => !!value || 'Phone number is required']` validation. Output is E.164-shaped. The 252 dirty rows are legacy — pre-form-redesign submissions, sales-rep manual entries, and external CSV imports. Future inbound is clean.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Approach
|
||||||
|
|
||||||
|
Three artifacts, layered:
|
||||||
|
|
||||||
|
1. **A pure-logic normalization + matching library** at `src/lib/dedup/`. JSX-free, vitest-native (proven pattern: `realtime-invalidation-core.ts`). Tested against the dirty-data fixture corpus drawn from §1.2.
|
||||||
|
2. **Three runtime surfaces** that use the library: at-create suggestion in client/interest forms; interest-level same-berth guard; admin review queue powered by a nightly background scoring job.
|
||||||
|
3. **A one-shot migration script** that pulls NocoDB → normalizes → dedupes → writes new schema → produces a CSV report with auto-merge log + flagged-for-review pile.
|
||||||
|
|
||||||
|
**Configurability via admin settings** (`system_settings` per port) so the team can tune sensitivity without code changes. Defaults err on the safe side — a flagged review is cheaper than a wrongly-merged record.
|
||||||
|
|
||||||
|
**Reversibility**: every merge writes a `client_merge_log` row containing the loser's full pre-state JSON. A 7-day undo window lets a wrong merge be reversed without engineering involvement. After 7 days the snapshot is purged for GDPR; merges become permanent.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Normalization library
|
||||||
|
|
||||||
|
Lives at `src/lib/dedup/normalize.ts`. Pure functions, no DB, vitest-tested. Used by the dedup algorithm AND by all create-paths so what gets stored is already normalized.
|
||||||
|
|
||||||
|
### 3.1 `normalizeName(raw: string)`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export function normalizeName(raw: string): {
|
||||||
|
display: string; // human-readable, kept for UI
|
||||||
|
normalized: string; // for matching
|
||||||
|
surnameToken?: string; // for surname-based blocking
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
- Trim leading/trailing whitespace
|
||||||
|
- Replace `\r`, `\n`, tabs with single space
|
||||||
|
- Collapse consecutive whitespace to single space
|
||||||
|
- Smart title-case: keep particles (`van`, `de`, `del`, `O'`, `di`, `le`, `da`) lowercase except as first token
|
||||||
|
- `display` preserves user's intent (slash-with-company stays intact)
|
||||||
|
- `normalized` is `display.toLowerCase()` for comparison
|
||||||
|
- `surnameToken` is the last non-particle token for blocking
|
||||||
|
|
||||||
|
### 3.2 `normalizeEmail(raw: string)`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export function normalizeEmail(raw: string): string | null;
|
||||||
|
```
|
||||||
|
|
||||||
|
- Trim + lowercase
|
||||||
|
- Validate via `zod.email()` schema
|
||||||
|
- Returns `null` for empty / invalid (caller decides what to do)
|
||||||
|
- **Does NOT strip plus-aliases** (`user+tag@domain.com`) — for two reasons: plus-tagged addresses can be genuinely distinct mailboxes, and collapsing them could let a bad actor force a false merge. Compare by the full localpart.
|
||||||
|
|
||||||
|
### 3.3 `normalizePhone(raw: string, defaultCountry: string)`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export function normalizePhone(
|
||||||
|
raw: string,
|
||||||
|
defaultCountry: string,
|
||||||
|
): {
|
||||||
|
e164: string | null; // canonical, e.g. '+15742740548'
|
||||||
|
country: string | null; // ISO-3166-1 alpha-2
|
||||||
|
display: string | null; // user-facing pretty
|
||||||
|
flagged?: 'multi_number' | 'placeholder' | 'unparseable';
|
||||||
|
} | null;
|
||||||
|
```
|
||||||
|
|
||||||
|
Pipeline:
|
||||||
|
|
||||||
|
1. Strip `\r`, `\n`, tabs, single quotes, dots, dashes, parens, spaces
|
||||||
|
2. If contains `/` or `;` or `,` → flag `multi_number`, take first segment
|
||||||
|
3. If matches `^\+\d{1,3}0{6,}$` (e.g., `+447000000000` — country code followed by all zeros) → flag `placeholder`, return null
|
||||||
|
4. If starts with `00` → replace with `+`
|
||||||
|
5. If starts with `+` → parse as E.164
|
||||||
|
6. Else if `defaultCountry` provided → parse against that country
|
||||||
|
7. Else return null (caller's problem)
|
||||||
|
|
||||||
|
Backed by `libphonenumber-js` (likely already in deps via `tests/integration/factories.ts` usage; if not, we will add it). The hostile cases above all need explicit handling — a naïve regex won't survive.
|
||||||
|
|
||||||
|
### 3.4 `resolveCountry(text: string)`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export function resolveCountry(text: string): {
|
||||||
|
iso: string | null; // ISO-3166-1 alpha-2
|
||||||
|
confidence: 'exact' | 'fuzzy' | 'city' | null;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Reuses `src/lib/i18n/countries.ts`. Pipeline:
|
||||||
|
|
||||||
|
1. Lowercase + strip diacritics
|
||||||
|
2. Exact match against country names (any locale we ship)
|
||||||
|
3. City fallback — small in-package mapping for high-frequency cities seen in legacy data (`Sag Harbor → US`, `Kansas City → US`, `St Barth → BL`, etc.)
|
||||||
|
4. Fuzzy match (Levenshtein ≤ 2 against canonical English names). Order: exact → city → fuzzy.
|
||||||
|
|
||||||
|
The mapping is opinionated and small (~30 entries covering the actual values seen in the 252-row dataset). Anything that fails to resolve returns `null` and lands in the migration's flagged pile.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Dedup algorithm
|
||||||
|
|
||||||
|
Lives at `src/lib/dedup/find-matches.ts`. Pure function. Vitest-tested against the §1.2 cluster fixtures.
|
||||||
|
|
||||||
|
### 4.1 Public API
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export interface MatchCandidate {
|
||||||
|
id: string;
|
||||||
|
fullName: string | null;
|
||||||
|
emails: string[]; // already normalized
|
||||||
|
phonesE164: string[]; // already normalized E.164
|
||||||
|
countryIso: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MatchResult {
|
||||||
|
candidate: MatchCandidate;
|
||||||
|
score: number; // 0–100
|
||||||
|
reasons: string[]; // human-readable, e.g. ["email match", "phone match"]
|
||||||
|
confidence: 'high' | 'medium' | 'low';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function findClientMatches(
|
||||||
|
input: MatchCandidate,
|
||||||
|
pool: MatchCandidate[],
|
||||||
|
thresholds: DedupThresholds,
|
||||||
|
): MatchResult[];
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2 Scoring rules (compound)
|
||||||
|
|
||||||
|
Each rule produces a score addition. **Compounding**: when two strong rules match (e.g., email AND phone), the result is ~95+ rather than max(60, 50). Negative evidence subtracts.
|
||||||
|
|
||||||
|
| Rule | Score | Notes |
|
||||||
|
| --------------------------------------------------------------- | ----- | ------------------------------------------------------ |
|
||||||
|
| Exact email match (case-insensitive, normalized) | +60 | One match suffices |
|
||||||
|
| Exact phone E.164 match (≥ 8 significant digits) | +50 | Excludes placeholder all-zeros |
|
||||||
|
| Exact normalized full-name match | +20 | Many "John Smith"s exist |
|
||||||
|
| Surname soundex match + given-name fuzzy match (Lev ≤ 1) | +15 | Catches `Constanzo/Costanzo`, `Christophe/Christopher` |
|
||||||
|
| Same address (normalized fuzzy ≥ 0.8) | +10 | Bonus signal |
|
||||||
|
| **Negative**: Same email but different country code on phone | −15 | Suggests spouse / coworker / shared inbox |
|
||||||
|
| **Negative**: Same name but DIFFERENT email AND DIFFERENT phone | −20 | Two distinct people with the same name |
|
||||||
|
|
||||||
|
### 4.3 Confidence tiers (post-compound)
|
||||||
|
|
||||||
|
- **score ≥ 90 — `high`** — email AND phone match, or email + name + address. The create form blocks and suggests "Use existing." Auto-link on public-form submit by default.
|
||||||
|
- **score 50–89 — `medium`** — single strong signal (email or phone alone), or email + same-name + different country (Etiennette case). Soft-warn but allow.
|
||||||
|
- **score < 50 — `low`** — weak signals only. Don't surface in UI; only relevant in background-job review queue.
|
||||||
|
|
||||||
|
### 4.4 Blocking strategy
|
||||||
|
|
||||||
|
For O(n) scan over a pool of N existing clients, build three lookup maps once per scan:
|
||||||
|
|
||||||
|
- `byEmail: Map<string, MatchCandidate[]>` — keyed by normalized email
|
||||||
|
- `byPhoneE164: Map<string, MatchCandidate[]>` — keyed by E.164
|
||||||
|
- `bySurnameToken: Map<string, MatchCandidate[]>` — keyed by `normalizeName(...).surnameToken`
|
||||||
|
|
||||||
|
For an incoming `MatchCandidate`, the candidate set to compare is the union of pool entries reachable through any of its emails/phones/surname-token. Typically 0–5 candidates per query, regardless of N.
|
||||||
|
|
||||||
|
### 4.5 Performance budget
|
||||||
|
|
||||||
|
For migration: 252 rows compared pairwise once — at most 252 × 251 / 2 ≈ 31.6k comparisons even without blocking, far fewer with it. A few seconds either way.
|
||||||
|
|
||||||
|
For runtime at-create: incoming candidate against existing pool of N clients per port. Expected pool size at maturity: 1k–10k. With blocking: <10 comparisons, <1ms target. No DB query needed beyond the initial pool fetch (which itself uses the indexed columns).
|
||||||
|
|
||||||
|
For background nightly job: full pairwise within port, blocked. 10k clients → ~50k pairwise checks per port → <30s. Fine for a nightly cron.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Configurable thresholds (admin settings)
|
||||||
|
|
||||||
|
New rows in `system_settings` per port. Default values err safe (more confirmation, less auto-action).
|
||||||
|
|
||||||
|
| Key | Default | Effect |
|
||||||
|
| ------------------------------ | ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||||
|
| `dedup_block_create_threshold` | `90` | Score above which the client-create form interrupts: "Use existing client?" |
|
||||||
|
| `dedup_soft_warn_threshold` | `50` | Score above which a soft-warn panel surfaces below the form |
|
||||||
|
| `dedup_review_queue_threshold` | `40` | Background job lands pairs ≥ this score in `/admin/duplicates` |
|
||||||
|
| `dedup_public_form_auto_link` | `true` | When a public-form submission scores ≥ block-threshold against existing client, attach the new interest to that client without prompting. **Safe**: no merge, just attaching a deal. |
|
||||||
|
| `dedup_auto_merge_threshold` | `null` (disabled) | If non-null, merges happen automatically at this threshold without human confirmation. Recommend leaving null until the team is comfortable; `95` is a reasonable cautious value. |
|
||||||
|
| `dedup_undo_window_days` | `7` | How long the loser's pre-state JSON is retained for merge-undo. After this, the snapshot is purged (GDPR) and merges are permanent. |
|
||||||
|
|
||||||
|
Each setting is a row in `system_settings`. UI surface in `/[portSlug]/admin/dedup` (a new admin page) with an "Advanced" toggle to expose the thresholds and brief explanations.
|
||||||
|
|
||||||
|
If the sales team complains the safer mode is too click-heavy, an admin flips `dedup_auto_merge_threshold` to `95` without any code change.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Merge service contract
|
||||||
|
|
||||||
|
### 6.1 Data flow
|
||||||
|
|
||||||
|
`mergeClients(winnerId, loserId, fieldChoices, ctx)` does, in a single transaction:
|
||||||
|
|
||||||
|
1. **Snapshot loser** — full row + all attached `clientContacts`, `clientAddresses`, `clientNotes`, `clientTags`, plus a count of dependent rows about to be moved (interests, yacht-memberships, etc.). Stored as `mergeDetails` JSONB in `clientMergeLog`.
|
||||||
|
2. **Reattach** — every row pointing at `loserId` updates to point at `winnerId`:
|
||||||
|
- `interests.clientId`
|
||||||
|
- `clientContacts.clientId` — with conflict handling: if winner already has the same email, keep winner's; flag the duplicate for the user
|
||||||
|
- `clientAddresses.clientId` — same conflict handling
|
||||||
|
- `clientNotes.clientId` — preserve `authorId` + `createdAt` (never overwrite)
|
||||||
|
- `clientTags.clientId`
|
||||||
|
- `clientYachtMembership.clientId` (or whatever the table is called)
|
||||||
|
- `auditLogs.entityId` — annotate, don't move (audit truth)
|
||||||
|
3. **Apply fieldChoices** — for each field where the user picked the loser's value, copy that into the winner row.
|
||||||
|
4. **Soft-archive loser** — `loser.archivedAt = now()`, `loser.mergedIntoClientId = winnerId`. Row stays in DB so the merge is reversible.
|
||||||
|
5. **Write `clientMergeLog`** — `{ winnerId, loserId, mergedBy, mergedAt, mergeDetails: <snapshot>, fieldChoices }`.
|
||||||
|
6. **Audit log** — top-level `auditLogs` row: `{ action: 'merge', entityType: 'client', entityId: winnerId, metadata: { loserId, score, reasons } }`.
|
||||||
|
|
||||||
|
### 6.2 Schema additions (migration)
|
||||||
|
|
||||||
|
`clients` table gets a new column:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
mergedIntoClientId: text('merged_into_client_id').references(() => clients.id),
|
||||||
|
```
|
||||||
|
|
||||||
|
The existing `clientMergeLog` table is reused. Add a partial index for the undo-window query:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE INDEX idx_cml_recent ON client_merge_log (port_id, created_at DESC) WHERE created_at > NOW() - INTERVAL '7 days';
|
||||||
|
```
|
||||||
|
|
||||||
|
A daily maintenance job (using the existing `maintenance-cleanup.test.ts` infrastructure) purges `mergeDetails` JSONB older than `dedup_undo_window_days` setting.
|
||||||
|
|
||||||
|
### 6.3 Undo
|
||||||
|
|
||||||
|
`unmergeClients(mergeLogId, ctx)`:
|
||||||
|
|
||||||
|
1. Within the undo window, look up the snapshot
|
||||||
|
2. Restore loser: clear `archivedAt`, `mergedIntoClientId`
|
||||||
|
3. Restore loser's contacts/addresses/notes/tags from snapshot
|
||||||
|
4. Detach reattached rows: `interests` etc. that were touching `winnerId` and originally belonged to loser go back. The snapshot stores the original `(rowType, rowId)` list explicitly so this is deterministic.
|
||||||
|
5. Mark log row `undoneAt = now()`, `undoneBy = userId`
|
||||||
|
|
||||||
|
After 7 days the snapshot is gone and unmerge returns `410 Gone`.
|
||||||
|
|
||||||
|
### 6.4 Concurrency
|
||||||
|
|
||||||
|
Both merge and unmerge wrap in a single transaction with `SELECT … FOR UPDATE` on `clients.id` of both winner and loser. A second merge attempt against the same loser sees `mergedIntoClientId` already set and refuses (clear error: "Already merged into …").
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Runtime surfaces
|
||||||
|
|
||||||
|
### 7.1 Layer 1 — At-create suggestion
|
||||||
|
|
||||||
|
In `ClientForm` (and the public `register` form once that hits the new system):
|
||||||
|
|
||||||
|
- Debounced 300ms after email or phone field changes
|
||||||
|
- Calls `findClientMatches` against current port's clients
|
||||||
|
- Renders top-1 match if score ≥ `dedup_soft_warn_threshold`:
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────┐
|
||||||
|
│ This looks like an existing client │
|
||||||
|
│ ML Marcus Laurent │
|
||||||
|
│ marcus@… +33 6 12 34 56 78 │
|
||||||
|
│ 2 interests · last 9d ago │
|
||||||
|
│ [ Use this client ] [ Create new ] │
|
||||||
|
└─────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
- "Use this client" → form switches to "create new interest under existing client" mode (preserves whatever other fields the user typed)
|
||||||
|
- "Create new" → audit-log `dedup_override` with the candidate's id and reasons (so we have data on false positives)
|
||||||
|
|
||||||
|
### 7.2 Layer 2 — Interest-level same-berth guard
|
||||||
|
|
||||||
|
Cheap one-liner in `createInterest` service:
|
||||||
|
|
||||||
|
- Check `(clientId, berthId)` against existing non-archived interests
|
||||||
|
- If hit, throw `BerthDuplicateError` with the existing interest details
|
||||||
|
- UI catches and prompts: "Update existing or create separate?"
|
||||||
|
|
||||||
|
This is NOT the same as client-level dedup. Same client legitimately can pursue the same berth a second time after it falls through. But the prompt-before-create catches the accidental double-submit case.
|
||||||
|
|
||||||
|
### 7.3 Layer 3 — Background scoring + review queue
|
||||||
|
|
||||||
|
- A nightly cron (using existing BullMQ infrastructure — search for `scheduled-tasks` in repo) runs `findClientMatches` over each port's full client pool
|
||||||
|
- Pairs scoring ≥ `dedup_review_queue_threshold` land in a `client_merge_candidates` table:
|
||||||
|
```ts
|
||||||
|
export const clientMergeCandidates = pgTable('client_merge_candidates', {
|
||||||
|
id: text('id').primaryKey()...,
|
||||||
|
portId: text('port_id').notNull()...,
|
||||||
|
clientAId: text('client_a_id').notNull()...,
|
||||||
|
clientBId: text('client_b_id').notNull()...,
|
||||||
|
score: integer('score').notNull(),
|
||||||
|
reasons: jsonb('reasons').notNull(),
|
||||||
|
status: text('status').notNull().default('pending'), // pending | dismissed | merged
|
||||||
|
createdAt: timestamp('created_at')...,
|
||||||
|
resolvedAt: timestamp('resolved_at'),
|
||||||
|
resolvedBy: text('resolved_by'),
|
||||||
|
})
|
||||||
|
```
|
||||||
|
- `/[portSlug]/admin/duplicates` lists pending candidates sorted by score desc, with `[Review →]` opening a side-by-side merge dialog
|
||||||
|
- Dismissing a candidate marks it `status=dismissed` so the job doesn't re-surface the same pair tomorrow (a future score increase re-creates it).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. NocoDB → new system field mapping
|
||||||
|
|
||||||
|
This is the explicit mapping the migration script applies. One NocoDB Interest row produces multiple new rows.
|
||||||
|
|
||||||
|
### 8.1 Top-level transform
|
||||||
|
|
||||||
|
```
|
||||||
|
NocoDB Interests row
|
||||||
|
─→ 0–1 client (deduped against existing pool)
|
||||||
|
─→ 0–1 client_address
|
||||||
|
─→ 0–2 client_contacts (email, phone)
|
||||||
|
─→ exactly 1 interest
|
||||||
|
─→ 0–1 yacht (when Yacht Name present and not "TBC"/"Na"/empty placeholders)
|
||||||
|
─→ 0–1 document (when documensoID present)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 8.2 Field map
|
||||||
|
|
||||||
|
| NocoDB field | Target | Transform |
|
||||||
|
| ----------------------------------------------------------------- | ------------------------------------------------------------------ | ---------------------------------------------------------------------------- |
|
||||||
|
| `Full Name` | `clients.fullName` | `normalizeName().display` |
|
||||||
|
| `Email Address` | `clientContacts(channel='email', value=...)` | `normalizeEmail()` |
|
||||||
|
| `Phone Number` | `clientContacts(channel='phone', valueE164=..., valueCountry=...)` | `normalizePhone(raw, defaultCountry)` |
|
||||||
|
| `Address` | `clientAddresses.streetAddress` (LongText preserved) | trim |
|
||||||
|
| `Place of Residence` | `clientAddresses.countryIso` AND `clients.nationalityIso` | `resolveCountry()` |
|
||||||
|
| `Contact Method Preferred` | `clients.preferredContactMethod` | lowercase, mapped: Email→email, Phone→phone |
|
||||||
|
| `Source` | `clients.source` | mapped: portal→website, Form→website, External→manual; null → manual |
|
||||||
|
| `Date Added` | `interests.createdAt` (fallback to NocoDB `Created At` then now) | parse: try `DD-MM-YYYY`, then `YYYY-MM-DD`, then ISO |
|
||||||
|
| `Sales Process Level` | `interests.pipelineStage` | see §8.3 |
|
||||||
|
| `Lead Category` | `interests.leadCategory` | General→general_interest, Friends and Family→general_interest with tag |
|
||||||
|
| `Berth` (FK) | `interests.berthId` | resolve via `Berths` table by `Mooring Number` |
|
||||||
|
| `Berth Size Desired` | `interests.notes` (appended) | preserve |
|
||||||
|
| `Yacht Name`, `Length`, `Width`, `Depth` | `yachts.name`, `lengthM`, `widthM`, `draughtM` | skip if name in {`TBC`, `Na`, ``, null}; ft→m via `\* 0.3048` |
|
||||||
|
| `EOI Status` | `interests.eoiStatus` | Awaiting Further Details→pending; Waiting for Signatures→sent; Signed→signed |
|
||||||
|
| `Deposit 10% Status` | `interests.depositStatus` | Pending→pending; Received→received |
|
||||||
|
| `Contract Status` | `interests.contractStatus` | Pending→pending; 40% Received→partial; Complete→complete |
|
||||||
|
| `EOI Time Sent` | `interests.dateEoiSent` | parse |
|
||||||
|
| `clientSignTime` / `developerSignTime` / `all_signed_notified_at` | `interests.dateEoiSigned` (use latest) | parse |
|
||||||
|
| `Time LOI Sent` | `interests.dateContractSent` | parse |
|
||||||
|
| `Internal Notes` + `Extra Comments` | `clientNotes` (one row, system author) | concatenate with section markers |
|
||||||
|
| `documensoID` | `documents.documensoId` (when present, type='eoi') | preserve |
|
||||||
|
| `Signature Link Client/CC/Developer`, `EmbeddedSignature*` | `documents.signers[]` | one row per non-null signer |
|
||||||
|
| `reminder_enabled`, `last_reminder_sent`, etc. | `interests.reminderEnabled`, `interests.reminderLastFired` | parse, default true |
|
||||||
|
|
||||||
|
### 8.3 Sales-stage mapping (8 → 9)
|
||||||
|
|
||||||
|
| NocoDB | New (PIPELINE_STAGES) |
|
||||||
|
| ------------------------------- | ------------------------------------------------------------------------ |
|
||||||
|
| General Qualified Interest | `open` |
|
||||||
|
| Specific Qualified Interest | `details_sent` |
|
||||||
|
| EOI and NDA Sent | `eoi_sent` |
|
||||||
|
| Signed EOI and NDA | `eoi_signed` |
|
||||||
|
| Made Reservation | `deposit_10pct` |
|
||||||
|
| Contract Negotiation | `contract_sent` |
|
||||||
|
| Contract Negotiations Finalized | `contract_sent` (with audit-note: legacy "negotiations finalized") |
|
||||||
|
| Contract Signed | `contract_signed` (or `completed` when deposit + contract both complete) |
|
||||||
|
|
||||||
|
### 8.4 Other tables
|
||||||
|
|
||||||
|
- **Residential Interests** (35 rows) — same shape as Interests but maps to `residentialClients` + `residentialInterests`. Smaller and cleaner. Same dedup runs within this pool independently.
|
||||||
|
- **Website - Interest Submissions** (64 rows) — these are **inbound capture, not yet a client**. Treat as if each row is a fresh public-form submission today: run dedup against the migrated client pool. Auto-link if `dedup_public_form_auto_link` setting allows.
|
||||||
|
- **Website - Contact Form Submissions** (47 rows) — sparse data (just name + email + interest type). Skip migration; export as CSV for manual triage. Not the source of truth for any deal.
|
||||||
|
- **Website - Berth EOI Details Supplements** (1 row) — single record, preserved as a one-off attached to the matching Interest.
|
||||||
|
- **Newsletter Sending** (69 rows) — out of scope; that's a marketing surface, not CRM.
|
||||||
|
- **Interests Backup, Interests copy** — historical artifacts. Skipped by default. A `--include-backups` flag attaches them as audit-note entries on the corresponding live Interest if the user wants the history.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Migration script
|
||||||
|
|
||||||
|
Located at `scripts/migrate-from-nocodb.ts`. Idempotent: safe to re-run. Three main flags:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ pnpm tsx scripts/migrate-from-nocodb.ts --dry-run [--port-slug X]
|
||||||
|
Pulls everything, transforms, runs dedup, writes CSV report to .migration/<timestamp>/. No DB writes.
|
||||||
|
|
||||||
|
$ pnpm tsx scripts/migrate-from-nocodb.ts --apply --report .migration/<timestamp>/
|
||||||
|
Reads the report, performs the writes the dry-run promised. Refuses if the source data has changed since the report was generated (hash mismatch).
|
||||||
|
|
||||||
|
$ pnpm tsx scripts/migrate-from-nocodb.ts --rollback --apply-id <id>
|
||||||
|
Reads the apply log, undoes the writes (only valid within the undo window).
|
||||||
|
```
|
||||||
|
|
||||||
|
Reuses the `client-portal/server/utils/nocodb.ts` adapter for the NocoDB API client (no need to rebuild). Writes to the new system via Drizzle (re-using the existing services like `createClient`, `createInterest`, etc., so all the same validation runs).
|
||||||
|
|
||||||
|
### 9.1 Dry-run report format
|
||||||
|
|
||||||
|
`.migration/<timestamp>/report.csv`:
|
||||||
|
|
||||||
|
```csv
|
||||||
|
op,reason,nocodb_row_id,target_table,target_value,confidence,manual_review_required
|
||||||
|
create_client,new,624,clients.fullName,Deepak Ramchandani,N/A,false
|
||||||
|
create_contact,new,624,clientContacts.email,dannyrams8888@gmail.com,N/A,false
|
||||||
|
create_contact,new,624,clientContacts.phone,+17215868888,N/A,false
|
||||||
|
create_interest,new,624,interests.berthId,a1b2c3...,N/A,false
|
||||||
|
auto_link,score=98 (email+phone),625,clients.id,<existing client UUID from row 624>,high,false
|
||||||
|
flag_for_review,score=72 (same name diff country),188,client.id,<existing client UUID from row 717>,medium,true
|
||||||
|
country_unresolved,fallback to AI (port country),198,clientAddresses.countryIso,AI,low,true
|
||||||
|
phone_unparseable,placeholder all-zeros,641,clientContacts.phone,<skipped>,N/A,true
|
||||||
|
```
|
||||||
|
|
||||||
|
Plus `.migration/<timestamp>/summary.md`:
|
||||||
|
|
||||||
|
```
|
||||||
|
# Migration Dry-Run — 2026-05-03 14:23 UTC
|
||||||
|
|
||||||
|
NocoDB: 252 Interests + 35 Residences + 64 Website Submissions
|
||||||
|
Outcome: 198 clients, 287 interests (incl. residences), 91 yachts, 412 contacts
|
||||||
|
|
||||||
|
Auto-linked (high confidence, no human action needed):
|
||||||
|
- Nicolas Ruiz: rows 681,682,683 → 1 client + 3 interests
|
||||||
|
- John Lynch: rows 716,725 → 1 client + 2 interests
|
||||||
|
- Deepak Ramchandani: rows 624,625 → 1 client + 2 interests
|
||||||
|
- [12 more]
|
||||||
|
|
||||||
|
Flagged for manual review (medium confidence):
|
||||||
|
- Etiennette Clamouze (rows 188,717): same name, different country phone + email
|
||||||
|
- Bruno Joyerot #18 + Bruce Hearn #19: shared household contact
|
||||||
|
- [4 more]
|
||||||
|
|
||||||
|
Country resolution failed for 7 rows. All defaulted to port country (AI). Review:
|
||||||
|
- Row 239: "Sag Harbor Y" → AI (likely US)
|
||||||
|
- [6 more]
|
||||||
|
|
||||||
|
Phone parsing failed for 3 rows. All flagged, no contact created:
|
||||||
|
- Row 178: empty
|
||||||
|
- Row 641: placeholder "+447000000000"
|
||||||
|
- Row 175: empty
|
||||||
|
|
||||||
|
Run `--apply` to commit these changes.
|
||||||
|
```
|
||||||
|
|
||||||
|
### 9.2 Apply phase
|
||||||
|
|
||||||
|
`--apply` reads the report, re-fetches the source rows (via NocoDB MCP / API), recomputes the hash, fails fast if NocoDB changed since dry-run. Then performs the writes within a single PostgreSQL transaction per port (commit at end). On any error mid-transaction, full rollback.
|
||||||
|
|
||||||
|
After successful apply, an `apply_id` is generated and an audit-log row written. The `apply_id` is the handle used for `--rollback`.
|
||||||
|
|
||||||
|
### 9.3 Idempotency
|
||||||
|
|
||||||
|
The script tracks NocoDB row IDs in a `migration_source_links` table:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
export const migrationSourceLinks = pgTable('migration_source_links', {
|
||||||
|
id: text('id').primaryKey()...,
|
||||||
|
sourceSystem: text('source_system').notNull(), // 'nocodb_interests' | 'nocodb_residences' | …
|
||||||
|
sourceId: text('source_id').notNull(), // NocoDB row id as string
|
||||||
|
targetEntityType: text('target_entity_type').notNull(), // client | interest | yacht | …
|
||||||
|
targetEntityId: text('target_entity_id').notNull(),
|
||||||
|
appliedAt: timestamp('applied_at')...,
|
||||||
|
appliedBy: text('applied_by'),
|
||||||
|
}, (table) => [
|
||||||
|
uniqueIndex('idx_msl_source').on(table.sourceSystem, table.sourceId, table.targetEntityType),
|
||||||
|
]);
|
||||||
|
```
|
||||||
|
|
||||||
|
Re-running `--apply` against the same report skips rows already in this table. Useful for partial-failure resumption.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Test plan
|
||||||
|
|
||||||
|
### 10.1 Library-level (vitest unit)
|
||||||
|
|
||||||
|
- `tests/unit/dedup/normalize.test.ts` — every dirty-data pattern from §1.3 has a fixture asserting the expected normalized output.
|
||||||
|
- `tests/unit/dedup/find-matches.test.ts` — every duplicate cluster from §1.2 has a fixture asserting score + confidence tier. Hard cases (Pattern F) assert "medium" not "high" — false-positive guard.
|
||||||
|
|
||||||
|
### 10.2 Service-level (vitest integration)
|
||||||
|
|
||||||
|
- `tests/integration/dedup/client-merge.test.ts` — merge service exercised: full reattach, clientMergeLog written, undo within window restores, undo after window returns 410, concurrent merge of same loser fails the second.
|
||||||
|
- `tests/integration/dedup/at-create-suggestion.test.ts` — `findClientMatches` against a seeded pool returns expected matches + reasons.
|
||||||
|
|
||||||
|
### 10.3 Migration script (vitest integration with NocoDB mock)
|
||||||
|
|
||||||
|
- `tests/integration/dedup/migration-dry-run.test.ts` — feed the script a fixture NocoDB dump (the 252 rows, frozen as a JSON snapshot in fixtures), assert the resulting CSV matches a golden file. Catch any future regression in the transform pipeline.
|
||||||
|
- `tests/integration/dedup/migration-apply.test.ts` — apply the dry-run output to a clean test DB, assert all expected rows exist, assert idempotency (re-apply is a no-op).
|
||||||
|
|
||||||
|
### 10.4 E2E (Playwright)
|
||||||
|
|
||||||
|
- `tests/e2e/smoke/30-dedup-create.spec.ts` — type into ClientForm with an email matching seeded client; assert suggestion card appears; click "Use this client"; assert form switches to interest-create mode.
|
||||||
|
- `tests/e2e/smoke/31-admin-duplicates.spec.ts` — admin views review queue, opens a candidate, side-by-side merge UI works, merge succeeds, undo within window works.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 11. Rollback plan
|
||||||
|
|
||||||
|
Three layers of safety, ordered by reversibility:
|
||||||
|
|
||||||
|
1. **Per-merge undo** — admin clicks Undo on a wrongly-merged pair, system rolls back from `clientMergeLog` snapshot. 7-day window. No engineering needed.
|
||||||
|
2. **Migration `--rollback` flag** — entire migration apply is reversed via the `apply_id` and `migration_source_links` table. Useful in the first 24h after `--apply`. Engineering-supervised.
|
||||||
|
3. **DB restore from backup** — the existing `docs/ops/backup-runbook.md` covers this. Last resort if both above are blocked.
|
||||||
|
|
||||||
|
Pre-migration, take a hot backup of the new DB (`pg_dump`). Pre-merge in production (before any human-facing surface ships), the `dedup_auto_merge_threshold` defaults to `null` so no automatic merges happen — every merge is human-confirmed.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 12. Open items
|
||||||
|
|
||||||
|
- **Soundex vs metaphone** — Soundex is simpler but English-leaning. Metaphone handles non-English surnames better (the dataset has French, German, Italian, Slavic names). Default to metaphone via the `natural` package; revisit if it adds significant install size.
|
||||||
|
- **Cross-port dedup** — not in scope. Each port's clients are deduped within that port. A future "shared address book" feature would need its own design.
|
||||||
|
- **Profile photo / face match** — out of scope.
|
||||||
|
- **AI-assisted match resolution** — out of scope. The Layer-3 review queue is human-only.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Implementation sequence
|
||||||
|
|
||||||
|
P1 (this design's library) → P2 (runtime surfaces) → P3 (migration). Each is a separate plan / PR.
|
||||||
|
|
||||||
|
**P1 deliverables**: `src/lib/dedup/{normalize,find-matches}.ts` + tests. No UI changes. No DB changes (except indexed lookups added to existing `clientContacts`). ~1.5 days.
|
||||||
|
|
||||||
|
**P2 deliverables**: at-create suggestion in `ClientForm` + interest-level guard in `createInterest` service + admin settings UI for thresholds + `clientMergeCandidates` table + nightly job + admin review queue page + merge service + side-by-side merge UI. ~5–7 days.
|
||||||
|
|
||||||
|
**P3 deliverables**: `scripts/migrate-from-nocodb.ts` + `migration_source_links` table + dry-run + apply + rollback. CSV report format frozen against fixture. ~3 days, including fixture creation from the live NocoDB snapshot.
|
||||||
|
|
||||||
|
Total: ~10–12 engineering days from approval. Can be split across three PRs landing independently — each is testable in isolation and the runtime surfaces (P2) work even without P3 being run.
|
||||||
@@ -1,5 +1,55 @@
|
|||||||
import type { NextConfig } from 'next';
|
import type { NextConfig } from 'next';
|
||||||
|
|
||||||
|
const isProd = process.env.NODE_ENV === 'production';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Security headers applied to every response. Per audit-pass-#3 finding:
|
||||||
|
* the previous config emitted no CSP, X-Frame-Options, HSTS, or
|
||||||
|
* X-Content-Type-Options — the app was open to clickjacking + MIME
|
||||||
|
* sniffing.
|
||||||
|
*
|
||||||
|
* CSP notes:
|
||||||
|
* - 'unsafe-inline' on style-src is required by Tailwind's runtime
|
||||||
|
* style injection and Radix; revisit when Tailwind v4 ships a
|
||||||
|
* nonce story.
|
||||||
|
* - 'unsafe-eval' on script-src is dev-only — Next dev uses eval for
|
||||||
|
* HMR. Production drops it.
|
||||||
|
* - connect-src allows ws/wss for Socket.IO and https: for outgoing
|
||||||
|
* fetches; tighten in prod via per-port branding URLs once we move
|
||||||
|
* the s3 image references into a known allowlist.
|
||||||
|
* - img-src https: is wide because port branding pulls from
|
||||||
|
* s3.portnimara.com plus per-port image URLs configured at runtime.
|
||||||
|
*/
|
||||||
|
// Dev-only allow-list: react-grab (the in-page click-to-source devtool)
|
||||||
|
// is fetched from unpkg, so script/style/connect must allow it. Strip
|
||||||
|
// these entries in prod via the conditional below.
|
||||||
|
const devScriptHosts = isProd ? '' : ' http://unpkg.com https://unpkg.com';
|
||||||
|
const devConnectHosts = isProd ? '' : ' http://unpkg.com https://unpkg.com';
|
||||||
|
|
||||||
|
const csp = [
|
||||||
|
"default-src 'self'",
|
||||||
|
`script-src 'self' 'unsafe-inline'${isProd ? '' : " 'unsafe-eval'"}${devScriptHosts}`,
|
||||||
|
"style-src 'self' 'unsafe-inline'",
|
||||||
|
"img-src 'self' data: blob: https:",
|
||||||
|
"font-src 'self' data:",
|
||||||
|
`connect-src 'self' ws: wss: https:${devConnectHosts}`,
|
||||||
|
"frame-ancestors 'none'",
|
||||||
|
"base-uri 'self'",
|
||||||
|
"form-action 'self'",
|
||||||
|
"object-src 'none'",
|
||||||
|
].join('; ');
|
||||||
|
|
||||||
|
const securityHeaders = [
|
||||||
|
{ key: 'Content-Security-Policy', value: csp },
|
||||||
|
{ key: 'X-Frame-Options', value: 'DENY' },
|
||||||
|
{ key: 'X-Content-Type-Options', value: 'nosniff' },
|
||||||
|
{ key: 'Referrer-Policy', value: 'strict-origin-when-cross-origin' },
|
||||||
|
{ key: 'Permissions-Policy', value: 'camera=(self), microphone=(), geolocation=()' },
|
||||||
|
...(isProd
|
||||||
|
? [{ key: 'Strict-Transport-Security', value: 'max-age=31536000; includeSubDomains' }]
|
||||||
|
: []),
|
||||||
|
];
|
||||||
|
|
||||||
const nextConfig: NextConfig = {
|
const nextConfig: NextConfig = {
|
||||||
output: 'standalone',
|
output: 'standalone',
|
||||||
serverExternalPackages: [
|
serverExternalPackages: [
|
||||||
@@ -24,6 +74,14 @@ const nextConfig: NextConfig = {
|
|||||||
// process.cwd() requires the file to be traced explicitly.
|
// process.cwd() requires the file to be traced explicitly.
|
||||||
'/api/v1/document-templates/**': ['./assets/eoi-template.pdf'],
|
'/api/v1/document-templates/**': ['./assets/eoi-template.pdf'],
|
||||||
},
|
},
|
||||||
|
async headers() {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
source: '/:path*',
|
||||||
|
headers: securityHeaders,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
export default nextConfig;
|
export default nextConfig;
|
||||||
|
|||||||
@@ -4,6 +4,10 @@ proxy_set_header X-Real-IP $remote_addr;
|
|||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header Connection "";
|
proxy_set_header Connection "";
|
||||||
|
# Defense-in-depth for CVE-2025-29927: strip the header attackers use to
|
||||||
|
# skip Next.js middleware. Patched in next>=15.2.3, but neutralizing the
|
||||||
|
# input at the edge means a future regression cannot reopen the bypass.
|
||||||
|
proxy_set_header X-Middleware-Subrequest "";
|
||||||
proxy_cache_bypass $http_upgrade;
|
proxy_cache_bypass $http_upgrade;
|
||||||
proxy_read_timeout 60s;
|
proxy_read_timeout 60s;
|
||||||
proxy_send_timeout 60s;
|
proxy_send_timeout 60s;
|
||||||
|
|||||||
14
package.json
14
package.json
@@ -2,6 +2,7 @@
|
|||||||
"name": "port-nimara-crm",
|
"name": "port-nimara-crm",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
|
"packageManager": "pnpm@10.33.2",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "next dev",
|
"dev": "next dev",
|
||||||
"build": "next build && pnpm build:server",
|
"build": "next build && pnpm build:server",
|
||||||
@@ -14,6 +15,11 @@
|
|||||||
"db:push": "drizzle-kit push",
|
"db:push": "drizzle-kit push",
|
||||||
"db:studio": "drizzle-kit studio",
|
"db:studio": "drizzle-kit studio",
|
||||||
"db:seed": "tsx src/lib/db/seed.ts",
|
"db:seed": "tsx src/lib/db/seed.ts",
|
||||||
|
"db:seed:realistic": "tsx src/lib/db/seed.ts",
|
||||||
|
"db:seed:synthetic": "tsx src/lib/db/seed-synthetic.ts",
|
||||||
|
"db:reset": "tsx scripts/db-reset.ts --confirm",
|
||||||
|
"db:reseed:realistic": "pnpm db:reset && pnpm db:seed:realistic",
|
||||||
|
"db:reseed:synthetic": "pnpm db:reset && pnpm db:seed:synthetic",
|
||||||
"test:e2e": "playwright test",
|
"test:e2e": "playwright test",
|
||||||
"test:e2e:smoke": "playwright test --project=smoke",
|
"test:e2e:smoke": "playwright test --project=smoke",
|
||||||
"test:e2e:exhaustive": "playwright test --project=exhaustive",
|
"test:e2e:exhaustive": "playwright test --project=exhaustive",
|
||||||
@@ -52,6 +58,7 @@
|
|||||||
"@tanstack/react-query": "^5.62.0",
|
"@tanstack/react-query": "^5.62.0",
|
||||||
"@tanstack/react-query-devtools": "^5.62.0",
|
"@tanstack/react-query-devtools": "^5.62.0",
|
||||||
"@tanstack/react-table": "^8.21.3",
|
"@tanstack/react-table": "^8.21.3",
|
||||||
|
"@types/pdfkit": "^0.17.6",
|
||||||
"archiver": "^7.0.1",
|
"archiver": "^7.0.1",
|
||||||
"better-auth": "^1.2.0",
|
"better-auth": "^1.2.0",
|
||||||
"bullmq": "^5.25.0",
|
"bullmq": "^5.25.0",
|
||||||
@@ -68,19 +75,22 @@
|
|||||||
"lucide-react": "^0.460.0",
|
"lucide-react": "^0.460.0",
|
||||||
"mailparser": "^3.9.4",
|
"mailparser": "^3.9.4",
|
||||||
"minio": "^8.0.0",
|
"minio": "^8.0.0",
|
||||||
"next": "15.1.0",
|
"next": "15.2.9",
|
||||||
"next-themes": "^0.4.0",
|
"next-themes": "^0.4.0",
|
||||||
"nodemailer": "^6.9.0",
|
"nodemailer": "^6.9.0",
|
||||||
"openai": "^6.27.0",
|
"openai": "^6.27.0",
|
||||||
"pdf-lib": "^1.17.1",
|
"pdf-lib": "^1.17.1",
|
||||||
|
"pdfkit": "^0.18.0",
|
||||||
"pino": "^9.5.0",
|
"pino": "^9.5.0",
|
||||||
"pino-pretty": "^13.0.0",
|
"pino-pretty": "^13.0.0",
|
||||||
"postgres": "^3.4.0",
|
"postgres": "^3.4.0",
|
||||||
"react": "^19.0.0",
|
"react": "^19.0.0",
|
||||||
"react-day-picker": "^9.14.0",
|
"react-day-picker": "^9.14.0",
|
||||||
"react-dom": "^19.0.0",
|
"react-dom": "^19.0.0",
|
||||||
|
"react-easy-crop": "^5.5.7",
|
||||||
"react-hook-form": "^7.54.0",
|
"react-hook-form": "^7.54.0",
|
||||||
"recharts": "^3.8.0",
|
"recharts": "^3.8.0",
|
||||||
|
"sharp": "^0.34.5",
|
||||||
"socket.io": "^4.8.0",
|
"socket.io": "^4.8.0",
|
||||||
"socket.io-client": "^4.8.0",
|
"socket.io-client": "^4.8.0",
|
||||||
"sonner": "^1.7.0",
|
"sonner": "^1.7.0",
|
||||||
@@ -107,7 +117,7 @@
|
|||||||
"drizzle-kit": "^0.30.0",
|
"drizzle-kit": "^0.30.0",
|
||||||
"esbuild": "^0.25.0",
|
"esbuild": "^0.25.0",
|
||||||
"eslint": "^9.0.0",
|
"eslint": "^9.0.0",
|
||||||
"eslint-config-next": "15.1.0",
|
"eslint-config-next": "15.2.9",
|
||||||
"eslint-config-prettier": "^9.1.0",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
"husky": "^9.1.0",
|
"husky": "^9.1.0",
|
||||||
"lint-staged": "^15.2.0",
|
"lint-staged": "^15.2.0",
|
||||||
|
|||||||
485
pnpm-lock.yaml
generated
485
pnpm-lock.yaml
generated
@@ -101,12 +101,15 @@ importers:
|
|||||||
'@tanstack/react-table':
|
'@tanstack/react-table':
|
||||||
specifier: ^8.21.3
|
specifier: ^8.21.3
|
||||||
version: 8.21.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
version: 8.21.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||||
|
'@types/pdfkit':
|
||||||
|
specifier: ^0.17.6
|
||||||
|
version: 0.17.6
|
||||||
archiver:
|
archiver:
|
||||||
specifier: ^7.0.1
|
specifier: ^7.0.1
|
||||||
version: 7.0.1
|
version: 7.0.1
|
||||||
better-auth:
|
better-auth:
|
||||||
specifier: ^1.2.0
|
specifier: ^1.2.0
|
||||||
version: 1.5.5(drizzle-kit@0.30.6)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))(mongodb@7.1.0(socks@2.8.7))(next@15.1.0(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)))
|
version: 1.5.5(drizzle-kit@0.30.6)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))(mongodb@7.1.0(socks@2.8.7))(next@15.2.9(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)))
|
||||||
bullmq:
|
bullmq:
|
||||||
specifier: ^5.25.0
|
specifier: ^5.25.0
|
||||||
version: 5.71.0
|
version: 5.71.0
|
||||||
@@ -150,8 +153,8 @@ importers:
|
|||||||
specifier: ^8.0.0
|
specifier: ^8.0.0
|
||||||
version: 8.0.7
|
version: 8.0.7
|
||||||
next:
|
next:
|
||||||
specifier: 15.1.0
|
specifier: 15.2.9
|
||||||
version: 15.1.0(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
version: 15.2.9(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||||
next-themes:
|
next-themes:
|
||||||
specifier: ^0.4.0
|
specifier: ^0.4.0
|
||||||
version: 0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
version: 0.4.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||||
@@ -164,6 +167,9 @@ importers:
|
|||||||
pdf-lib:
|
pdf-lib:
|
||||||
specifier: ^1.17.1
|
specifier: ^1.17.1
|
||||||
version: 1.17.1
|
version: 1.17.1
|
||||||
|
pdfkit:
|
||||||
|
specifier: ^0.18.0
|
||||||
|
version: 0.18.0
|
||||||
pino:
|
pino:
|
||||||
specifier: ^9.5.0
|
specifier: ^9.5.0
|
||||||
version: 9.14.0
|
version: 9.14.0
|
||||||
@@ -182,12 +188,18 @@ importers:
|
|||||||
react-dom:
|
react-dom:
|
||||||
specifier: ^19.0.0
|
specifier: ^19.0.0
|
||||||
version: 19.2.4(react@19.2.4)
|
version: 19.2.4(react@19.2.4)
|
||||||
|
react-easy-crop:
|
||||||
|
specifier: ^5.5.7
|
||||||
|
version: 5.5.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||||
react-hook-form:
|
react-hook-form:
|
||||||
specifier: ^7.54.0
|
specifier: ^7.54.0
|
||||||
version: 7.71.2(react@19.2.4)
|
version: 7.71.2(react@19.2.4)
|
||||||
recharts:
|
recharts:
|
||||||
specifier: ^3.8.0
|
specifier: ^3.8.0
|
||||||
version: 3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@18.3.1)(react@19.2.4)(redux@5.0.1)
|
version: 3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@18.3.1)(react@19.2.4)(redux@5.0.1)
|
||||||
|
sharp:
|
||||||
|
specifier: ^0.34.5
|
||||||
|
version: 0.34.5
|
||||||
socket.io:
|
socket.io:
|
||||||
specifier: ^4.8.0
|
specifier: ^4.8.0
|
||||||
version: 4.8.3
|
version: 4.8.3
|
||||||
@@ -262,8 +274,8 @@ importers:
|
|||||||
specifier: ^9.0.0
|
specifier: ^9.0.0
|
||||||
version: 9.39.4(jiti@1.21.7)
|
version: 9.39.4(jiti@1.21.7)
|
||||||
eslint-config-next:
|
eslint-config-next:
|
||||||
specifier: 15.1.0
|
specifier: 15.2.9
|
||||||
version: 15.1.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
version: 15.2.9(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
||||||
eslint-config-prettier:
|
eslint-config-prettier:
|
||||||
specifier: ^9.1.0
|
specifier: ^9.1.0
|
||||||
version: 9.1.2(eslint@9.39.4(jiti@1.21.7))
|
version: 9.1.2(eslint@9.39.4(jiti@1.21.7))
|
||||||
@@ -1153,64 +1165,138 @@ packages:
|
|||||||
resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==}
|
resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==}
|
||||||
engines: {node: '>=18.18'}
|
engines: {node: '>=18.18'}
|
||||||
|
|
||||||
|
'@img/colour@1.1.0':
|
||||||
|
resolution: {integrity: sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ==}
|
||||||
|
engines: {node: '>=18'}
|
||||||
|
|
||||||
'@img/sharp-darwin-arm64@0.33.5':
|
'@img/sharp-darwin-arm64@0.33.5':
|
||||||
resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==}
|
resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
|
'@img/sharp-darwin-arm64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
'@img/sharp-darwin-x64@0.33.5':
|
'@img/sharp-darwin-x64@0.33.5':
|
||||||
resolution: {integrity: sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==}
|
resolution: {integrity: sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
|
'@img/sharp-darwin-x64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
'@img/sharp-libvips-darwin-arm64@1.0.4':
|
'@img/sharp-libvips-darwin-arm64@1.0.4':
|
||||||
resolution: {integrity: sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==}
|
resolution: {integrity: sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-darwin-arm64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
'@img/sharp-libvips-darwin-x64@1.0.4':
|
'@img/sharp-libvips-darwin-x64@1.0.4':
|
||||||
resolution: {integrity: sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==}
|
resolution: {integrity: sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-darwin-x64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-arm64@1.0.4':
|
'@img/sharp-libvips-linux-arm64@1.0.4':
|
||||||
resolution: {integrity: sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==}
|
resolution: {integrity: sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-arm64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-arm@1.0.5':
|
'@img/sharp-libvips-linux-arm@1.0.5':
|
||||||
resolution: {integrity: sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==}
|
resolution: {integrity: sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==}
|
||||||
cpu: [arm]
|
cpu: [arm]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-arm@1.2.4':
|
||||||
|
resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==}
|
||||||
|
cpu: [arm]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-ppc64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==}
|
||||||
|
cpu: [ppc64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-riscv64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==}
|
||||||
|
cpu: [riscv64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-s390x@1.0.4':
|
'@img/sharp-libvips-linux-s390x@1.0.4':
|
||||||
resolution: {integrity: sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==}
|
resolution: {integrity: sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==}
|
||||||
cpu: [s390x]
|
cpu: [s390x]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-s390x@1.2.4':
|
||||||
|
resolution: {integrity: sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==}
|
||||||
|
cpu: [s390x]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-x64@1.0.4':
|
'@img/sharp-libvips-linux-x64@1.0.4':
|
||||||
resolution: {integrity: sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==}
|
resolution: {integrity: sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-x64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-libvips-linuxmusl-arm64@1.0.4':
|
'@img/sharp-libvips-linuxmusl-arm64@1.0.4':
|
||||||
resolution: {integrity: sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==}
|
resolution: {integrity: sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linuxmusl-arm64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [musl]
|
||||||
|
|
||||||
'@img/sharp-libvips-linuxmusl-x64@1.0.4':
|
'@img/sharp-libvips-linuxmusl-x64@1.0.4':
|
||||||
resolution: {integrity: sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==}
|
resolution: {integrity: sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linuxmusl-x64@1.2.4':
|
||||||
|
resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [musl]
|
||||||
|
|
||||||
'@img/sharp-linux-arm64@0.33.5':
|
'@img/sharp-linux-arm64@0.33.5':
|
||||||
resolution: {integrity: sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==}
|
resolution: {integrity: sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1218,6 +1304,13 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-arm64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-linux-arm@0.33.5':
|
'@img/sharp-linux-arm@0.33.5':
|
||||||
resolution: {integrity: sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==}
|
resolution: {integrity: sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1225,6 +1318,27 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-arm@0.34.5':
|
||||||
|
resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [arm]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-ppc64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [ppc64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-riscv64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [riscv64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-linux-s390x@0.33.5':
|
'@img/sharp-linux-s390x@0.33.5':
|
||||||
resolution: {integrity: sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==}
|
resolution: {integrity: sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1232,6 +1346,13 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-s390x@0.34.5':
|
||||||
|
resolution: {integrity: sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [s390x]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-linux-x64@0.33.5':
|
'@img/sharp-linux-x64@0.33.5':
|
||||||
resolution: {integrity: sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==}
|
resolution: {integrity: sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1239,6 +1360,13 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
|
'@img/sharp-linux-x64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [glibc]
|
||||||
|
|
||||||
'@img/sharp-linuxmusl-arm64@0.33.5':
|
'@img/sharp-linuxmusl-arm64@0.33.5':
|
||||||
resolution: {integrity: sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==}
|
resolution: {integrity: sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1246,6 +1374,13 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
|
'@img/sharp-linuxmusl-arm64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [musl]
|
||||||
|
|
||||||
'@img/sharp-linuxmusl-x64@0.33.5':
|
'@img/sharp-linuxmusl-x64@0.33.5':
|
||||||
resolution: {integrity: sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==}
|
resolution: {integrity: sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
@@ -1253,23 +1388,53 @@ packages:
|
|||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
|
'@img/sharp-linuxmusl-x64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [linux]
|
||||||
|
libc: [musl]
|
||||||
|
|
||||||
'@img/sharp-wasm32@0.33.5':
|
'@img/sharp-wasm32@0.33.5':
|
||||||
resolution: {integrity: sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==}
|
resolution: {integrity: sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
cpu: [wasm32]
|
cpu: [wasm32]
|
||||||
|
|
||||||
|
'@img/sharp-wasm32@0.34.5':
|
||||||
|
resolution: {integrity: sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [wasm32]
|
||||||
|
|
||||||
|
'@img/sharp-win32-arm64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [win32]
|
||||||
|
|
||||||
'@img/sharp-win32-ia32@0.33.5':
|
'@img/sharp-win32-ia32@0.33.5':
|
||||||
resolution: {integrity: sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==}
|
resolution: {integrity: sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
cpu: [ia32]
|
cpu: [ia32]
|
||||||
os: [win32]
|
os: [win32]
|
||||||
|
|
||||||
|
'@img/sharp-win32-ia32@0.34.5':
|
||||||
|
resolution: {integrity: sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [ia32]
|
||||||
|
os: [win32]
|
||||||
|
|
||||||
'@img/sharp-win32-x64@0.33.5':
|
'@img/sharp-win32-x64@0.33.5':
|
||||||
resolution: {integrity: sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==}
|
resolution: {integrity: sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [win32]
|
os: [win32]
|
||||||
|
|
||||||
|
'@img/sharp-win32-x64@0.34.5':
|
||||||
|
resolution: {integrity: sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [win32]
|
||||||
|
|
||||||
'@ioredis/commands@1.5.0':
|
'@ioredis/commands@1.5.0':
|
||||||
resolution: {integrity: sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow==}
|
resolution: {integrity: sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow==}
|
||||||
|
|
||||||
@@ -1332,68 +1497,76 @@ packages:
|
|||||||
'@napi-rs/wasm-runtime@1.1.1':
|
'@napi-rs/wasm-runtime@1.1.1':
|
||||||
resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==}
|
resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==}
|
||||||
|
|
||||||
'@next/env@15.1.0':
|
'@next/env@15.2.9':
|
||||||
resolution: {integrity: sha512-UcCO481cROsqJuszPPXJnb7GGuLq617ve4xuAyyNG4VSSocJNtMU5Fsx+Lp6mlN8c7W58aZLc5y6D/2xNmaK+w==}
|
resolution: {integrity: sha512-0JJ6OlIb1kZiAbY/Hi5XHb2ZT7B5/l8CyGX3GxtTY8LNl1Inm9EU8PnCtVzUR8N2Si3a1pX02PbKBlDcsHNvUQ==}
|
||||||
|
|
||||||
'@next/eslint-plugin-next@15.1.0':
|
'@next/eslint-plugin-next@15.2.9':
|
||||||
resolution: {integrity: sha512-+jPT0h+nelBT6HC9ZCHGc7DgGVy04cv4shYdAe6tKlEbjQUtwU3LzQhzbDHQyY2m6g39m6B0kOFVuLGBrxxbGg==}
|
resolution: {integrity: sha512-AgCS3+FYsSU4aHcmL+FutRWIJ52x9v/etDT+1ttWXEJILn3yo9ALp9lGgC6REtsj1/uPAsLFUh1uvs4LxW2KvQ==}
|
||||||
|
|
||||||
'@next/swc-darwin-arm64@15.1.0':
|
'@next/swc-darwin-arm64@15.2.5':
|
||||||
resolution: {integrity: sha512-ZU8d7xxpX14uIaFC3nsr4L++5ZS/AkWDm1PzPO6gD9xWhFkOj2hzSbSIxoncsnlJXB1CbLOfGVN4Zk9tg83PUw==}
|
resolution: {integrity: sha512-4OimvVlFTbgzPdA0kh8A1ih6FN9pQkL4nPXGqemEYgk+e7eQhsst/p35siNNqA49eQA6bvKZ1ASsDtu9gtXuog==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
'@next/swc-darwin-x64@15.1.0':
|
'@next/swc-darwin-x64@15.2.5':
|
||||||
resolution: {integrity: sha512-DQ3RiUoW2XC9FcSM4ffpfndq1EsLV0fj0/UY33i7eklW5akPUCo6OX2qkcLXZ3jyPdo4sf2flwAED3AAq3Om2Q==}
|
resolution: {integrity: sha512-ohzRaE9YbGt1ctE0um+UGYIDkkOxHV44kEcHzLqQigoRLaiMtZzGrA11AJh2Lu0lv51XeiY1ZkUvkThjkVNBMA==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [darwin]
|
os: [darwin]
|
||||||
|
|
||||||
'@next/swc-linux-arm64-gnu@15.1.0':
|
'@next/swc-linux-arm64-gnu@15.2.5':
|
||||||
resolution: {integrity: sha512-M+vhTovRS2F//LMx9KtxbkWk627l5Q7AqXWWWrfIzNIaUFiz2/NkOFkxCFyNyGACi5YbA8aekzCLtbDyfF/v5Q==}
|
resolution: {integrity: sha512-FMSdxSUt5bVXqqOoZCc/Seg4LQep9w/fXTazr/EkpXW2Eu4IFI9FD7zBDlID8TJIybmvKk7mhd9s+2XWxz4flA==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
'@next/swc-linux-arm64-musl@15.1.0':
|
'@next/swc-linux-arm64-musl@15.2.5':
|
||||||
resolution: {integrity: sha512-Qn6vOuwaTCx3pNwygpSGtdIu0TfS1KiaYLYXLH5zq1scoTXdwYfdZtwvJTpB1WrLgiQE2Ne2kt8MZok3HlFqmg==}
|
resolution: {integrity: sha512-4ZNKmuEiW5hRKkGp2HWwZ+JrvK4DQLgf8YDaqtZyn7NYdl0cHfatvlnLFSWUayx9yFAUagIgRGRk8pFxS8Qniw==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
'@next/swc-linux-x64-gnu@15.1.0':
|
'@next/swc-linux-x64-gnu@15.2.5':
|
||||||
resolution: {integrity: sha512-yeNh9ofMqzOZ5yTOk+2rwncBzucc6a1lyqtg8xZv0rH5znyjxHOWsoUtSq4cUTeeBIiXXX51QOOe+VoCjdXJRw==}
|
resolution: {integrity: sha512-bE6lHQ9GXIf3gCDE53u2pTl99RPZW5V1GLHSRMJ5l/oB/MT+cohu9uwnCK7QUph2xIOu2a6+27kL0REa/kqwZw==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [glibc]
|
libc: [glibc]
|
||||||
|
|
||||||
'@next/swc-linux-x64-musl@15.1.0':
|
'@next/swc-linux-x64-musl@15.2.5':
|
||||||
resolution: {integrity: sha512-t9IfNkHQs/uKgPoyEtU912MG6a1j7Had37cSUyLTKx9MnUpjj+ZDKw9OyqTI9OwIIv0wmkr1pkZy+3T5pxhJPg==}
|
resolution: {integrity: sha512-y7EeQuSkQbTAkCEQnJXm1asRUuGSWAchGJ3c+Qtxh8LVjXleZast8Mn/rL7tZOm7o35QeIpIcid6ufG7EVTTcA==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [linux]
|
os: [linux]
|
||||||
libc: [musl]
|
libc: [musl]
|
||||||
|
|
||||||
'@next/swc-win32-arm64-msvc@15.1.0':
|
'@next/swc-win32-arm64-msvc@15.2.5':
|
||||||
resolution: {integrity: sha512-WEAoHyG14t5sTavZa1c6BnOIEukll9iqFRTavqRVPfYmfegOAd5MaZfXgOGG6kGo1RduyGdTHD4+YZQSdsNZXg==}
|
resolution: {integrity: sha512-gQMz0yA8/dskZM2Xyiq2FRShxSrsJNha40Ob/M2n2+JGRrZ0JwTVjLdvtN6vCxuq4ByhOd4a9qEf60hApNR2gQ==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [arm64]
|
cpu: [arm64]
|
||||||
os: [win32]
|
os: [win32]
|
||||||
|
|
||||||
'@next/swc-win32-x64-msvc@15.1.0':
|
'@next/swc-win32-x64-msvc@15.2.5':
|
||||||
resolution: {integrity: sha512-J1YdKuJv9xcixzXR24Dv+4SaDKc2jj31IVUEMdO5xJivMTXuE6MAdIi4qPjSymHuFG8O5wbfWKnhJUcHHpj5CA==}
|
resolution: {integrity: sha512-tBDNVUcI7U03+3oMvJ11zrtVin5p0NctiuKmTGyaTIEAVj9Q77xukLXGXRnWxKRIIdFG4OTA2rUVGZDYOwgmAA==}
|
||||||
engines: {node: '>= 10'}
|
engines: {node: '>= 10'}
|
||||||
cpu: [x64]
|
cpu: [x64]
|
||||||
os: [win32]
|
os: [win32]
|
||||||
|
|
||||||
|
'@noble/ciphers@1.3.0':
|
||||||
|
resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==}
|
||||||
|
engines: {node: ^14.21.3 || >=16}
|
||||||
|
|
||||||
'@noble/ciphers@2.1.1':
|
'@noble/ciphers@2.1.1':
|
||||||
resolution: {integrity: sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw==}
|
resolution: {integrity: sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw==}
|
||||||
engines: {node: '>= 20.19.0'}
|
engines: {node: '>= 20.19.0'}
|
||||||
|
|
||||||
|
'@noble/hashes@1.8.0':
|
||||||
|
resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==}
|
||||||
|
engines: {node: ^14.21.3 || >=16}
|
||||||
|
|
||||||
'@noble/hashes@2.0.1':
|
'@noble/hashes@2.0.1':
|
||||||
resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==}
|
resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==}
|
||||||
engines: {node: '>= 20.19.0'}
|
engines: {node: '>= 20.19.0'}
|
||||||
@@ -2328,6 +2501,9 @@ packages:
|
|||||||
'@types/nodemailer@6.4.23':
|
'@types/nodemailer@6.4.23':
|
||||||
resolution: {integrity: sha512-aFV3/NsYFLSx9mbb5gtirBSXJnAlrusoKNuPbxsASWc7vrKLmIrTQRpdcxNcSFL3VW2A2XpeLEavwb2qMi6nlQ==}
|
resolution: {integrity: sha512-aFV3/NsYFLSx9mbb5gtirBSXJnAlrusoKNuPbxsASWc7vrKLmIrTQRpdcxNcSFL3VW2A2XpeLEavwb2qMi6nlQ==}
|
||||||
|
|
||||||
|
'@types/pdfkit@0.17.6':
|
||||||
|
resolution: {integrity: sha512-tIwzxk2uWKp0Cq9JIluQXJid77lYhF52EsIOwhsMF4iWLA6YneoBR1xVKYYdAysHuepUB0OX4tdwMiUDdGKmig==}
|
||||||
|
|
||||||
'@types/react-dom@19.2.3':
|
'@types/react-dom@19.2.3':
|
||||||
resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==}
|
resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==}
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
@@ -2776,6 +2952,10 @@ packages:
|
|||||||
bare-url@2.4.2:
|
bare-url@2.4.2:
|
||||||
resolution: {integrity: sha512-/9a2j4ac6ckpmAHvod/ob7x439OAHst/drc2Clnq+reRYd/ovddwcF4LfoxHyNk5AuGBnPg+HqFjmE/Zpq6v0A==}
|
resolution: {integrity: sha512-/9a2j4ac6ckpmAHvod/ob7x439OAHst/drc2Clnq+reRYd/ovddwcF4LfoxHyNk5AuGBnPg+HqFjmE/Zpq6v0A==}
|
||||||
|
|
||||||
|
base64-js@0.0.8:
|
||||||
|
resolution: {integrity: sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw==}
|
||||||
|
engines: {node: '>= 0.4'}
|
||||||
|
|
||||||
base64-js@1.5.1:
|
base64-js@1.5.1:
|
||||||
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
|
||||||
|
|
||||||
@@ -2893,6 +3073,9 @@ packages:
|
|||||||
browser-or-node@2.1.1:
|
browser-or-node@2.1.1:
|
||||||
resolution: {integrity: sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==}
|
resolution: {integrity: sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==}
|
||||||
|
|
||||||
|
browserify-zlib@0.2.0:
|
||||||
|
resolution: {integrity: sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==}
|
||||||
|
|
||||||
browserslist@4.28.1:
|
browserslist@4.28.1:
|
||||||
resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==}
|
resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==}
|
||||||
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
|
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
|
||||||
@@ -3489,8 +3672,8 @@ packages:
|
|||||||
resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
|
resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
|
||||||
engines: {node: '>=10'}
|
engines: {node: '>=10'}
|
||||||
|
|
||||||
eslint-config-next@15.1.0:
|
eslint-config-next@15.2.9:
|
||||||
resolution: {integrity: sha512-gADO+nKVseGso3DtOrYX9H7TxB/MuX7AUYhMlvQMqLYvUWu4HrOQuU7cC1HW74tHIqkAvXdwgAz3TCbczzSEXw==}
|
resolution: {integrity: sha512-MWpGYzLdkJ38OF1g1R4wQe9GVvoinCyIeYofITHh5D3FmHuIOgeWAK46M+iUYrIG1cJNX0HPh5fHpjmuC3dnrw==}
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
eslint: ^7.23.0 || ^8.0.0 || ^9.0.0
|
eslint: ^7.23.0 || ^8.0.0 || ^9.0.0
|
||||||
typescript: '>=3.3.1'
|
typescript: '>=3.3.1'
|
||||||
@@ -4134,6 +4317,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==}
|
resolution: {integrity: sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==}
|
||||||
engines: {node: '>=14'}
|
engines: {node: '>=14'}
|
||||||
|
|
||||||
|
js-md5@0.8.3:
|
||||||
|
resolution: {integrity: sha512-qR0HB5uP6wCuRMrWPTrkMaev7MJZwJuuw4fnwAzRgP4J4/F8RwtodOKpGp4XpqsLBFzzgqIO42efFAyz2Et6KQ==}
|
||||||
|
|
||||||
js-tokens@10.0.0:
|
js-tokens@10.0.0:
|
||||||
resolution: {integrity: sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q==}
|
resolution: {integrity: sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q==}
|
||||||
|
|
||||||
@@ -4286,6 +4472,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==}
|
resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==}
|
||||||
engines: {node: '>=14'}
|
engines: {node: '>=14'}
|
||||||
|
|
||||||
|
linebreak@1.1.0:
|
||||||
|
resolution: {integrity: sha512-MHp03UImeVhB7XZtjd0E4n6+3xr5Dq/9xI/5FptGk5FrbDR3zagPa2DS6U8ks/3HjbKWG9Q1M2ufOzxV2qLYSQ==}
|
||||||
|
|
||||||
lines-and-columns@1.2.4:
|
lines-and-columns@1.2.4:
|
||||||
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
|
||||||
|
|
||||||
@@ -4491,10 +4680,9 @@ packages:
|
|||||||
react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
||||||
react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
|
||||||
|
|
||||||
next@15.1.0:
|
next@15.2.9:
|
||||||
resolution: {integrity: sha512-QKhzt6Y8rgLNlj30izdMbxAwjHMFANnLwDwZ+WQh5sMhyt4lEBqDK9QpvWHtIM4rINKPoJ8aiRZKg5ULSybVHw==}
|
resolution: {integrity: sha512-jXEBIPi+kIkMe5KI4okvGIWvot9hyiDz2fT4OqxxsSeZTA6zhSwrQkJwTE3GmQ1HQlolcQjTNMjHMvc8hhog7g==}
|
||||||
engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
|
engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
|
||||||
deprecated: This version has a security vulnerability. Please upgrade to a patched version. See https://nextjs.org/blog/CVE-2025-66478 for more details.
|
|
||||||
hasBin: true
|
hasBin: true
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
'@opentelemetry/api': ^1.1.0
|
'@opentelemetry/api': ^1.1.0
|
||||||
@@ -4551,6 +4739,9 @@ packages:
|
|||||||
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
|
||||||
engines: {node: '>=0.10.0'}
|
engines: {node: '>=0.10.0'}
|
||||||
|
|
||||||
|
normalize-wheel@1.0.1:
|
||||||
|
resolution: {integrity: sha512-1OnlAPZ3zgrk8B91HyRj+eVv+kS5u+Z0SCsak6Xil/kmgEia50ga7zfkumayonZrImffAxPU/5WcyGhzetHNPA==}
|
||||||
|
|
||||||
notepack.io@3.0.1:
|
notepack.io@3.0.1:
|
||||||
resolution: {integrity: sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg==}
|
resolution: {integrity: sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg==}
|
||||||
|
|
||||||
@@ -4696,6 +4887,9 @@ packages:
|
|||||||
pdf-lib@1.17.1:
|
pdf-lib@1.17.1:
|
||||||
resolution: {integrity: sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw==}
|
resolution: {integrity: sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw==}
|
||||||
|
|
||||||
|
pdfkit@0.18.0:
|
||||||
|
resolution: {integrity: sha512-NvUwSDZ0eYEzqAiWwVQkRkjYUkZ48kcsHuCO31ykqPPIVkwoSDjDGiwIgHHNtsiwls3z3P/zy4q00hl2chg2Ug==}
|
||||||
|
|
||||||
peberminta@0.9.0:
|
peberminta@0.9.0:
|
||||||
resolution: {integrity: sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ==}
|
resolution: {integrity: sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ==}
|
||||||
|
|
||||||
@@ -4757,6 +4951,9 @@ packages:
|
|||||||
engines: {node: '>=18'}
|
engines: {node: '>=18'}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
|
png-js@1.1.0:
|
||||||
|
resolution: {integrity: sha512-PM/uYGzGdNSzqeOgly68+6wKQDL1SY0a/N+OEa/+br6LnHWOAJB0Npiamnodfq3jd2LS/i2fMeOKSAILjA+m5Q==}
|
||||||
|
|
||||||
possible-typed-array-names@1.1.0:
|
possible-typed-array-names@1.1.0:
|
||||||
resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==}
|
resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
@@ -5126,6 +5323,12 @@ packages:
|
|||||||
peerDependencies:
|
peerDependencies:
|
||||||
react: ^19.2.4
|
react: ^19.2.4
|
||||||
|
|
||||||
|
react-easy-crop@5.5.7:
|
||||||
|
resolution: {integrity: sha512-kYo4NtMeXFQB7h1U+h5yhUkE46WQbQdq7if54uDlbMdZHdRgNehfvaFrXnFw5NR1PNoUOJIfTwLnWmEx/MaZnA==}
|
||||||
|
peerDependencies:
|
||||||
|
react: '>=16.4.0'
|
||||||
|
react-dom: '>=16.4.0'
|
||||||
|
|
||||||
react-fast-compare@3.2.2:
|
react-fast-compare@3.2.2:
|
||||||
resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==}
|
resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==}
|
||||||
|
|
||||||
@@ -5386,6 +5589,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==}
|
resolution: {integrity: sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==}
|
||||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
|
||||||
|
sharp@0.34.5:
|
||||||
|
resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==}
|
||||||
|
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||||
|
|
||||||
shebang-command@2.0.0:
|
shebang-command@2.0.0:
|
||||||
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
|
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@@ -6648,81 +6855,177 @@ snapshots:
|
|||||||
|
|
||||||
'@humanwhocodes/retry@0.4.3': {}
|
'@humanwhocodes/retry@0.4.3': {}
|
||||||
|
|
||||||
|
'@img/colour@1.1.0': {}
|
||||||
|
|
||||||
'@img/sharp-darwin-arm64@0.33.5':
|
'@img/sharp-darwin-arm64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-darwin-arm64': 1.0.4
|
'@img/sharp-libvips-darwin-arm64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-darwin-arm64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-darwin-arm64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-darwin-x64@0.33.5':
|
'@img/sharp-darwin-x64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-darwin-x64': 1.0.4
|
'@img/sharp-libvips-darwin-x64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-darwin-x64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-darwin-x64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-darwin-arm64@1.0.4':
|
'@img/sharp-libvips-darwin-arm64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-darwin-arm64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-darwin-x64@1.0.4':
|
'@img/sharp-libvips-darwin-x64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-darwin-x64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-arm64@1.0.4':
|
'@img/sharp-libvips-linux-arm64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-arm64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-arm@1.0.5':
|
'@img/sharp-libvips-linux-arm@1.0.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-arm@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-ppc64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-riscv64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-s390x@1.0.4':
|
'@img/sharp-libvips-linux-s390x@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-s390x@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linux-x64@1.0.4':
|
'@img/sharp-libvips-linux-x64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linux-x64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linuxmusl-arm64@1.0.4':
|
'@img/sharp-libvips-linuxmusl-arm64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linuxmusl-arm64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-libvips-linuxmusl-x64@1.0.4':
|
'@img/sharp-libvips-linuxmusl-x64@1.0.4':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-libvips-linuxmusl-x64@1.2.4':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linux-arm64@0.33.5':
|
'@img/sharp-linux-arm64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linux-arm64': 1.0.4
|
'@img/sharp-libvips-linux-arm64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-arm64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-arm64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linux-arm@0.33.5':
|
'@img/sharp-linux-arm@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linux-arm': 1.0.5
|
'@img/sharp-libvips-linux-arm': 1.0.5
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-arm@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-arm': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-ppc64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-ppc64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-riscv64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-riscv64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linux-s390x@0.33.5':
|
'@img/sharp-linux-s390x@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linux-s390x': 1.0.4
|
'@img/sharp-libvips-linux-s390x': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-s390x@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-s390x': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linux-x64@0.33.5':
|
'@img/sharp-linux-x64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linux-x64': 1.0.4
|
'@img/sharp-libvips-linux-x64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linux-x64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linux-x64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linuxmusl-arm64@0.33.5':
|
'@img/sharp-linuxmusl-arm64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linuxmusl-arm64': 1.0.4
|
'@img/sharp-libvips-linuxmusl-arm64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linuxmusl-arm64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linuxmusl-arm64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-linuxmusl-x64@0.33.5':
|
'@img/sharp-linuxmusl-x64@0.33.5':
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@img/sharp-libvips-linuxmusl-x64': 1.0.4
|
'@img/sharp-libvips-linuxmusl-x64': 1.0.4
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-linuxmusl-x64@0.34.5':
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-libvips-linuxmusl-x64': 1.2.4
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-wasm32@0.33.5':
|
'@img/sharp-wasm32@0.33.5':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@emnapi/runtime': 1.9.0
|
'@emnapi/runtime': 1.9.0
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-wasm32@0.34.5':
|
||||||
|
dependencies:
|
||||||
|
'@emnapi/runtime': 1.9.0
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-win32-arm64@0.34.5':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-win32-ia32@0.33.5':
|
'@img/sharp-win32-ia32@0.33.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-win32-ia32@0.34.5':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@img/sharp-win32-x64@0.33.5':
|
'@img/sharp-win32-x64@0.33.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@img/sharp-win32-x64@0.34.5':
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@ioredis/commands@1.5.0': {}
|
'@ioredis/commands@1.5.0': {}
|
||||||
|
|
||||||
'@ioredis/commands@1.5.1': {}
|
'@ioredis/commands@1.5.1': {}
|
||||||
@@ -6786,38 +7089,42 @@ snapshots:
|
|||||||
'@tybys/wasm-util': 0.10.1
|
'@tybys/wasm-util': 0.10.1
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/env@15.1.0': {}
|
'@next/env@15.2.9': {}
|
||||||
|
|
||||||
'@next/eslint-plugin-next@15.1.0':
|
'@next/eslint-plugin-next@15.2.9':
|
||||||
dependencies:
|
dependencies:
|
||||||
fast-glob: 3.3.1
|
fast-glob: 3.3.1
|
||||||
|
|
||||||
'@next/swc-darwin-arm64@15.1.0':
|
'@next/swc-darwin-arm64@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-darwin-x64@15.1.0':
|
'@next/swc-darwin-x64@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-linux-arm64-gnu@15.1.0':
|
'@next/swc-linux-arm64-gnu@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-linux-arm64-musl@15.1.0':
|
'@next/swc-linux-arm64-musl@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-linux-x64-gnu@15.1.0':
|
'@next/swc-linux-x64-gnu@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-linux-x64-musl@15.1.0':
|
'@next/swc-linux-x64-musl@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-win32-arm64-msvc@15.1.0':
|
'@next/swc-win32-arm64-msvc@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@next/swc-win32-x64-msvc@15.1.0':
|
'@next/swc-win32-x64-msvc@15.2.5':
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
'@noble/ciphers@1.3.0': {}
|
||||||
|
|
||||||
'@noble/ciphers@2.1.1': {}
|
'@noble/ciphers@2.1.1': {}
|
||||||
|
|
||||||
|
'@noble/hashes@1.8.0': {}
|
||||||
|
|
||||||
'@noble/hashes@2.0.1': {}
|
'@noble/hashes@2.0.1': {}
|
||||||
|
|
||||||
'@nodelib/fs.scandir@2.1.5':
|
'@nodelib/fs.scandir@2.1.5':
|
||||||
@@ -7747,6 +8054,10 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
'@types/node': 22.19.15
|
'@types/node': 22.19.15
|
||||||
|
|
||||||
|
'@types/pdfkit@0.17.6':
|
||||||
|
dependencies:
|
||||||
|
'@types/node': 22.19.15
|
||||||
|
|
||||||
'@types/react-dom@19.2.3(@types/react@19.2.14)':
|
'@types/react-dom@19.2.3(@types/react@19.2.14)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@types/react': 19.2.14
|
'@types/react': 19.2.14
|
||||||
@@ -8283,13 +8594,15 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
bare-path: 3.0.0
|
bare-path: 3.0.0
|
||||||
|
|
||||||
|
base64-js@0.0.8: {}
|
||||||
|
|
||||||
base64-js@1.5.1: {}
|
base64-js@1.5.1: {}
|
||||||
|
|
||||||
base64id@2.0.0: {}
|
base64id@2.0.0: {}
|
||||||
|
|
||||||
baseline-browser-mapping@2.10.8: {}
|
baseline-browser-mapping@2.10.8: {}
|
||||||
|
|
||||||
better-auth@1.5.5(drizzle-kit@0.30.6)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))(mongodb@7.1.0(socks@2.8.7))(next@15.1.0(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))):
|
better-auth@1.5.5(drizzle-kit@0.30.6)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))(mongodb@7.1.0(socks@2.8.7))(next@15.2.9(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))):
|
||||||
dependencies:
|
dependencies:
|
||||||
'@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@3.25.76))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1)
|
'@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@3.25.76))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1)
|
||||||
'@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@3.25.76))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))
|
'@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@3.25.76))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4))
|
||||||
@@ -8312,7 +8625,7 @@ snapshots:
|
|||||||
drizzle-kit: 0.30.6
|
drizzle-kit: 0.30.6
|
||||||
drizzle-orm: 0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4)
|
drizzle-orm: 0.38.4(@types/react@19.2.14)(kysely@0.28.11)(postgres@3.4.8)(react@19.2.4)
|
||||||
mongodb: 7.1.0(socks@2.8.7)
|
mongodb: 7.1.0(socks@2.8.7)
|
||||||
next: 15.1.0(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
next: 15.2.9(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||||
react: 19.2.4
|
react: 19.2.4
|
||||||
react-dom: 19.2.4(react@19.2.4)
|
react-dom: 19.2.4(react@19.2.4)
|
||||||
vitest: 4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))
|
vitest: 4.1.0(@types/node@22.19.15)(vite@8.0.0(@types/node@22.19.15)(esbuild@0.25.12)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))
|
||||||
@@ -8363,6 +8676,10 @@ snapshots:
|
|||||||
|
|
||||||
browser-or-node@2.1.1: {}
|
browser-or-node@2.1.1: {}
|
||||||
|
|
||||||
|
browserify-zlib@0.2.0:
|
||||||
|
dependencies:
|
||||||
|
pako: 1.0.11
|
||||||
|
|
||||||
browserslist@4.28.1:
|
browserslist@4.28.1:
|
||||||
dependencies:
|
dependencies:
|
||||||
baseline-browser-mapping: 2.10.8
|
baseline-browser-mapping: 2.10.8
|
||||||
@@ -9022,9 +9339,9 @@ snapshots:
|
|||||||
|
|
||||||
escape-string-regexp@4.0.0: {}
|
escape-string-regexp@4.0.0: {}
|
||||||
|
|
||||||
eslint-config-next@15.1.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3):
|
eslint-config-next@15.2.9(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3):
|
||||||
dependencies:
|
dependencies:
|
||||||
'@next/eslint-plugin-next': 15.1.0
|
'@next/eslint-plugin-next': 15.2.9
|
||||||
'@rushstack/eslint-patch': 1.16.1
|
'@rushstack/eslint-patch': 1.16.1
|
||||||
'@typescript-eslint/eslint-plugin': 8.57.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
'@typescript-eslint/eslint-plugin': 8.57.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
||||||
'@typescript-eslint/parser': 8.57.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
'@typescript-eslint/parser': 8.57.0(eslint@9.39.4(jiti@1.21.7))(typescript@5.9.3)
|
||||||
@@ -9774,6 +10091,8 @@ snapshots:
|
|||||||
|
|
||||||
js-cookie@3.0.5: {}
|
js-cookie@3.0.5: {}
|
||||||
|
|
||||||
|
js-md5@0.8.3: {}
|
||||||
|
|
||||||
js-tokens@10.0.0: {}
|
js-tokens@10.0.0: {}
|
||||||
|
|
||||||
js-tokens@4.0.0: {}
|
js-tokens@4.0.0: {}
|
||||||
@@ -9894,6 +10213,11 @@ snapshots:
|
|||||||
|
|
||||||
lilconfig@3.1.3: {}
|
lilconfig@3.1.3: {}
|
||||||
|
|
||||||
|
linebreak@1.1.0:
|
||||||
|
dependencies:
|
||||||
|
base64-js: 0.0.8
|
||||||
|
unicode-trie: 2.0.0
|
||||||
|
|
||||||
lines-and-columns@1.2.4: {}
|
lines-and-columns@1.2.4: {}
|
||||||
|
|
||||||
linkify-it@5.0.0:
|
linkify-it@5.0.0:
|
||||||
@@ -10103,9 +10427,9 @@ snapshots:
|
|||||||
react: 19.2.4
|
react: 19.2.4
|
||||||
react-dom: 19.2.4(react@19.2.4)
|
react-dom: 19.2.4(react@19.2.4)
|
||||||
|
|
||||||
next@15.1.0(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
|
next@15.2.9(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
|
||||||
dependencies:
|
dependencies:
|
||||||
'@next/env': 15.1.0
|
'@next/env': 15.2.9
|
||||||
'@swc/counter': 0.1.3
|
'@swc/counter': 0.1.3
|
||||||
'@swc/helpers': 0.5.15
|
'@swc/helpers': 0.5.15
|
||||||
busboy: 1.6.0
|
busboy: 1.6.0
|
||||||
@@ -10115,14 +10439,14 @@ snapshots:
|
|||||||
react-dom: 19.2.4(react@19.2.4)
|
react-dom: 19.2.4(react@19.2.4)
|
||||||
styled-jsx: 5.1.6(react@19.2.4)
|
styled-jsx: 5.1.6(react@19.2.4)
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
'@next/swc-darwin-arm64': 15.1.0
|
'@next/swc-darwin-arm64': 15.2.5
|
||||||
'@next/swc-darwin-x64': 15.1.0
|
'@next/swc-darwin-x64': 15.2.5
|
||||||
'@next/swc-linux-arm64-gnu': 15.1.0
|
'@next/swc-linux-arm64-gnu': 15.2.5
|
||||||
'@next/swc-linux-arm64-musl': 15.1.0
|
'@next/swc-linux-arm64-musl': 15.2.5
|
||||||
'@next/swc-linux-x64-gnu': 15.1.0
|
'@next/swc-linux-x64-gnu': 15.2.5
|
||||||
'@next/swc-linux-x64-musl': 15.1.0
|
'@next/swc-linux-x64-musl': 15.2.5
|
||||||
'@next/swc-win32-arm64-msvc': 15.1.0
|
'@next/swc-win32-arm64-msvc': 15.2.5
|
||||||
'@next/swc-win32-x64-msvc': 15.1.0
|
'@next/swc-win32-x64-msvc': 15.2.5
|
||||||
'@playwright/test': 1.58.2
|
'@playwright/test': 1.58.2
|
||||||
sharp: 0.33.5
|
sharp: 0.33.5
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
@@ -10159,6 +10483,8 @@ snapshots:
|
|||||||
|
|
||||||
normalize-path@3.0.0: {}
|
normalize-path@3.0.0: {}
|
||||||
|
|
||||||
|
normalize-wheel@1.0.1: {}
|
||||||
|
|
||||||
notepack.io@3.0.1: {}
|
notepack.io@3.0.1: {}
|
||||||
|
|
||||||
npm-run-path@5.3.0:
|
npm-run-path@5.3.0:
|
||||||
@@ -10308,6 +10634,15 @@ snapshots:
|
|||||||
pako: 1.0.11
|
pako: 1.0.11
|
||||||
tslib: 1.14.1
|
tslib: 1.14.1
|
||||||
|
|
||||||
|
pdfkit@0.18.0:
|
||||||
|
dependencies:
|
||||||
|
'@noble/ciphers': 1.3.0
|
||||||
|
'@noble/hashes': 1.8.0
|
||||||
|
fontkit: 2.0.4
|
||||||
|
js-md5: 0.8.3
|
||||||
|
linebreak: 1.1.0
|
||||||
|
png-js: 1.1.0
|
||||||
|
|
||||||
peberminta@0.9.0: {}
|
peberminta@0.9.0: {}
|
||||||
|
|
||||||
performance-now@2.1.0: {}
|
performance-now@2.1.0: {}
|
||||||
@@ -10386,6 +10721,10 @@ snapshots:
|
|||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
fsevents: 2.3.2
|
fsevents: 2.3.2
|
||||||
|
|
||||||
|
png-js@1.1.0:
|
||||||
|
dependencies:
|
||||||
|
browserify-zlib: 0.2.0
|
||||||
|
|
||||||
possible-typed-array-names@1.1.0: {}
|
possible-typed-array-names@1.1.0: {}
|
||||||
|
|
||||||
postcss-import@15.1.0(postcss@8.5.8):
|
postcss-import@15.1.0(postcss@8.5.8):
|
||||||
@@ -10863,6 +11202,13 @@ snapshots:
|
|||||||
react: 19.2.4
|
react: 19.2.4
|
||||||
scheduler: 0.27.0
|
scheduler: 0.27.0
|
||||||
|
|
||||||
|
react-easy-crop@5.5.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
|
||||||
|
dependencies:
|
||||||
|
normalize-wheel: 1.0.1
|
||||||
|
react: 19.2.4
|
||||||
|
react-dom: 19.2.4(react@19.2.4)
|
||||||
|
tslib: 2.8.1
|
||||||
|
|
||||||
react-fast-compare@3.2.2: {}
|
react-fast-compare@3.2.2: {}
|
||||||
|
|
||||||
react-grab@0.1.32(react@19.2.4):
|
react-grab@0.1.32(react@19.2.4):
|
||||||
@@ -11175,6 +11521,37 @@ snapshots:
|
|||||||
'@img/sharp-win32-x64': 0.33.5
|
'@img/sharp-win32-x64': 0.33.5
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
sharp@0.34.5:
|
||||||
|
dependencies:
|
||||||
|
'@img/colour': 1.1.0
|
||||||
|
detect-libc: 2.1.2
|
||||||
|
semver: 7.7.4
|
||||||
|
optionalDependencies:
|
||||||
|
'@img/sharp-darwin-arm64': 0.34.5
|
||||||
|
'@img/sharp-darwin-x64': 0.34.5
|
||||||
|
'@img/sharp-libvips-darwin-arm64': 1.2.4
|
||||||
|
'@img/sharp-libvips-darwin-x64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-arm': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-arm64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-ppc64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-riscv64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-s390x': 1.2.4
|
||||||
|
'@img/sharp-libvips-linux-x64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linuxmusl-arm64': 1.2.4
|
||||||
|
'@img/sharp-libvips-linuxmusl-x64': 1.2.4
|
||||||
|
'@img/sharp-linux-arm': 0.34.5
|
||||||
|
'@img/sharp-linux-arm64': 0.34.5
|
||||||
|
'@img/sharp-linux-ppc64': 0.34.5
|
||||||
|
'@img/sharp-linux-riscv64': 0.34.5
|
||||||
|
'@img/sharp-linux-s390x': 0.34.5
|
||||||
|
'@img/sharp-linux-x64': 0.34.5
|
||||||
|
'@img/sharp-linuxmusl-arm64': 0.34.5
|
||||||
|
'@img/sharp-linuxmusl-x64': 0.34.5
|
||||||
|
'@img/sharp-wasm32': 0.34.5
|
||||||
|
'@img/sharp-win32-arm64': 0.34.5
|
||||||
|
'@img/sharp-win32-ia32': 0.34.5
|
||||||
|
'@img/sharp-win32-x64': 0.34.5
|
||||||
|
|
||||||
shebang-command@2.0.0:
|
shebang-command@2.0.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
shebang-regex: 3.0.0
|
shebang-regex: 3.0.0
|
||||||
|
|||||||
30
public/manifest.json
Normal file
30
public/manifest.json
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"name": "Port Nimara CRM",
|
||||||
|
"short_name": "Port Nimara",
|
||||||
|
"description": "Marina/port management CRM",
|
||||||
|
"start_url": "/",
|
||||||
|
"display": "standalone",
|
||||||
|
"background_color": "#f2f2f2",
|
||||||
|
"theme_color": "#0f172a",
|
||||||
|
"orientation": "any",
|
||||||
|
"icons": [
|
||||||
|
{
|
||||||
|
"src": "/icon-192.png",
|
||||||
|
"sizes": "192x192",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": "/icon-512.png",
|
||||||
|
"sizes": "512x512",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "any"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": "/icon-512-maskable.png",
|
||||||
|
"sizes": "512x512",
|
||||||
|
"type": "image/png",
|
||||||
|
"purpose": "maskable"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
135
scripts/backfill-legacy-lead-source.ts
Normal file
135
scripts/backfill-legacy-lead-source.ts
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
/**
|
||||||
|
* One-shot: backfill `interests.source` for legacy NocoDB-imported rows.
|
||||||
|
*
|
||||||
|
* Why this exists: the legacy NocoDB Interests table left the `Source`
|
||||||
|
* column null for ~95 % of rows. The migration mapped null → null, so the
|
||||||
|
* Lead Source Attribution chart shows them as "Unspecified". Per the
|
||||||
|
* operator's best knowledge, almost all of those legacy rows came in
|
||||||
|
* through the website (web form / portal) — the few that didn't are the
|
||||||
|
* ones that already carry an explicit `Source` value (Form / portal /
|
||||||
|
* External). Defaulting null → 'website' is therefore the closest
|
||||||
|
* truth we can reconstruct without per-row sales notes review.
|
||||||
|
*
|
||||||
|
* Idempotent: only updates rows where `source IS NULL` AND the row has a
|
||||||
|
* `migration_source_links` entry tying it back to the legacy NocoDB import,
|
||||||
|
* so net-new manually-created interests with null source aren't touched.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* pnpm tsx scripts/backfill-legacy-lead-source.ts --port-slug port-nimara [--dry-run]
|
||||||
|
*/
|
||||||
|
import 'dotenv/config';
|
||||||
|
import { eq, and, isNull, inArray } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { ports } from '@/lib/db/schema/ports';
|
||||||
|
import { interests } from '@/lib/db/schema/interests';
|
||||||
|
import { migrationSourceLinks } from '@/lib/db/schema/migration';
|
||||||
|
|
||||||
|
interface CliArgs {
|
||||||
|
portSlug: string | null;
|
||||||
|
dryRun: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseArgs(argv: string[]): CliArgs {
|
||||||
|
const args: CliArgs = { portSlug: null, dryRun: false };
|
||||||
|
for (let i = 0; i < argv.length; i += 1) {
|
||||||
|
const a = argv[i]!;
|
||||||
|
if (a === '--port-slug') args.portSlug = argv[++i] ?? null;
|
||||||
|
else if (a === '--dry-run') args.dryRun = true;
|
||||||
|
else if (a === '-h' || a === '--help') {
|
||||||
|
console.log(
|
||||||
|
'Usage: pnpm tsx scripts/backfill-legacy-lead-source.ts --port-slug <slug> [--dry-run]',
|
||||||
|
);
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!args.portSlug) {
|
||||||
|
console.error('Missing required --port-slug');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const args = parseArgs(process.argv.slice(2));
|
||||||
|
|
||||||
|
const [port] = await db
|
||||||
|
.select({ id: ports.id, name: ports.name })
|
||||||
|
.from(ports)
|
||||||
|
.where(eq(ports.slug, args.portSlug!))
|
||||||
|
.limit(1);
|
||||||
|
if (!port) {
|
||||||
|
console.error(`No port found with slug "${args.portSlug}"`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
console.log(`[backfill] target: ${port.name} (${port.id})`);
|
||||||
|
|
||||||
|
// Pull every interest id this port owns that has a NULL source.
|
||||||
|
const candidateInterests = await db
|
||||||
|
.select({ id: interests.id })
|
||||||
|
.from(interests)
|
||||||
|
.where(and(eq(interests.portId, port.id), isNull(interests.source)));
|
||||||
|
|
||||||
|
console.log(`[backfill] interests with NULL source in this port: ${candidateInterests.length}`);
|
||||||
|
|
||||||
|
if (candidateInterests.length === 0) {
|
||||||
|
console.log('Nothing to backfill.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter to ONLY those that came in via the legacy migration — preserves
|
||||||
|
// null on net-new rows where the operator hasn't picked a source yet.
|
||||||
|
const candidateIds = candidateInterests.map((r) => r.id);
|
||||||
|
const legacyLinks = await db
|
||||||
|
.select({ targetEntityId: migrationSourceLinks.targetEntityId })
|
||||||
|
.from(migrationSourceLinks)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(migrationSourceLinks.sourceSystem, 'nocodb_interests'),
|
||||||
|
eq(migrationSourceLinks.targetEntityType, 'interest'),
|
||||||
|
inArray(migrationSourceLinks.targetEntityId, candidateIds),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const legacyIds = new Set(legacyLinks.map((l) => l.targetEntityId));
|
||||||
|
const toUpdate = candidateIds.filter((id) => legacyIds.has(id));
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[backfill] of those, ${toUpdate.length} are legacy migration rows (will set source='website')`,
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`[backfill] ${candidateInterests.length - toUpdate.length} are net-new rows (left untouched)`,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (args.dryRun) {
|
||||||
|
console.log('[backfill] --dry-run set; no writes.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toUpdate.length === 0) {
|
||||||
|
console.log('Nothing to write.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update in chunks of 500 to keep query size sane.
|
||||||
|
const CHUNK = 500;
|
||||||
|
let updated = 0;
|
||||||
|
for (let i = 0; i < toUpdate.length; i += CHUNK) {
|
||||||
|
const chunk = toUpdate.slice(i, i + CHUNK);
|
||||||
|
// Belt-and-suspenders: re-assert `source IS NULL` in the WHERE so
|
||||||
|
// a concurrent process that set source on one of these rows
|
||||||
|
// between SELECT and UPDATE doesn't get its value clobbered.
|
||||||
|
const result = await db
|
||||||
|
.update(interests)
|
||||||
|
.set({ source: 'website' })
|
||||||
|
.where(and(inArray(interests.id, chunk), isNull(interests.source)))
|
||||||
|
.returning({ id: interests.id });
|
||||||
|
updated += result.length;
|
||||||
|
}
|
||||||
|
console.log(`[backfill] updated ${updated} rows.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('FATAL', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
144
scripts/backfill-phone-e164.ts
Normal file
144
scripts/backfill-phone-e164.ts
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
/**
|
||||||
|
* Backfill `client_contacts.value_e164` from `value` for phone / whatsapp
|
||||||
|
* contacts where it's null or empty.
|
||||||
|
*
|
||||||
|
* The legacy seed (and pre-normalization production data) stored phone
|
||||||
|
* numbers in `value` as free text — "+33 4 93 00 0002" — but `value_e164`
|
||||||
|
* is what every UI surface and dedup matcher reads. This script runs the
|
||||||
|
* raw `value` through libphonenumber-js (via the script-safe wrapper to
|
||||||
|
* avoid the Node 25 metadata-loader bug) and writes the canonical E.164
|
||||||
|
* form back.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* pnpm tsx scripts/backfill-phone-e164.ts # dry-run report
|
||||||
|
* pnpm tsx scripts/backfill-phone-e164.ts --apply # actually write
|
||||||
|
*
|
||||||
|
* The dry-run report prints, for each unparseable row, the contact id +
|
||||||
|
* raw value so you can hand-clean before re-running.
|
||||||
|
*/
|
||||||
|
import 'dotenv/config';
|
||||||
|
import { and, eq, inArray, isNull, or, sql } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { clientContacts } from '@/lib/db/schema/clients';
|
||||||
|
import { parsePhoneScriptSafe } from '@/lib/dedup/phone-parse';
|
||||||
|
import type { CountryCode } from '@/lib/i18n/countries';
|
||||||
|
|
||||||
|
const APPLY = process.argv.includes('--apply');
|
||||||
|
|
||||||
|
interface PhoneRow {
|
||||||
|
id: string;
|
||||||
|
channel: string;
|
||||||
|
value: string | null;
|
||||||
|
valueCountry: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
console.log(`Phone E.164 backfill — ${APPLY ? 'APPLY MODE' : 'dry-run'}`);
|
||||||
|
console.log('');
|
||||||
|
|
||||||
|
// Find candidate rows: phone or whatsapp contacts with a `value` set but
|
||||||
|
// `value_e164` null/empty.
|
||||||
|
const rows: PhoneRow[] = await db
|
||||||
|
.select({
|
||||||
|
id: clientContacts.id,
|
||||||
|
channel: clientContacts.channel,
|
||||||
|
value: clientContacts.value,
|
||||||
|
valueCountry: clientContacts.valueCountry,
|
||||||
|
})
|
||||||
|
.from(clientContacts)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
inArray(clientContacts.channel, ['phone', 'whatsapp']),
|
||||||
|
or(isNull(clientContacts.valueE164), eq(clientContacts.valueE164, '')),
|
||||||
|
sql`${clientContacts.value} IS NOT NULL AND ${clientContacts.value} <> ''`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(` found ${rows.length} candidate rows`);
|
||||||
|
|
||||||
|
let parsedFull = 0;
|
||||||
|
let parsedE164Only = 0;
|
||||||
|
let unparseable = 0;
|
||||||
|
const updates: Array<{
|
||||||
|
id: string;
|
||||||
|
valueE164: string;
|
||||||
|
valueCountry: CountryCode | null;
|
||||||
|
}> = [];
|
||||||
|
const fails: Array<{ id: string; value: string; reason: string }> = [];
|
||||||
|
|
||||||
|
for (const row of rows) {
|
||||||
|
if (!row.value) continue;
|
||||||
|
const defaultCountry = (row.valueCountry as CountryCode | null) ?? undefined;
|
||||||
|
const parsed1 = parsePhoneScriptSafe(row.value, defaultCountry);
|
||||||
|
|
||||||
|
if (parsed1.e164 && parsed1.country) {
|
||||||
|
// Both e164 + country resolved — best case.
|
||||||
|
updates.push({ id: row.id, valueE164: parsed1.e164, valueCountry: parsed1.country });
|
||||||
|
parsedFull++;
|
||||||
|
} else if (parsed1.e164) {
|
||||||
|
// E.164 came back but country didn't (e.g. UK +44 7700 900xxx
|
||||||
|
// fictional/reserved range — libphonenumber returns the e164 form
|
||||||
|
// but refuses to assign a country). Still safe to write — the e164
|
||||||
|
// is canonical. Country stays null.
|
||||||
|
updates.push({
|
||||||
|
id: row.id,
|
||||||
|
valueE164: parsed1.e164,
|
||||||
|
valueCountry: (row.valueCountry as CountryCode | null) ?? null,
|
||||||
|
});
|
||||||
|
parsedE164Only++;
|
||||||
|
} else {
|
||||||
|
fails.push({
|
||||||
|
id: row.id,
|
||||||
|
value: row.value,
|
||||||
|
reason: row.value.trim().startsWith('+')
|
||||||
|
? 'has + prefix but parse failed'
|
||||||
|
: 'no leading + and no country hint',
|
||||||
|
});
|
||||||
|
unparseable++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('');
|
||||||
|
console.log(' ✓ parsed cleanly (e164 + country)', parsedFull);
|
||||||
|
console.log(' ✓ parsed e164 only (no country) ', parsedE164Only);
|
||||||
|
console.log(' ✗ unparseable ', unparseable);
|
||||||
|
console.log('');
|
||||||
|
|
||||||
|
if (fails.length > 0) {
|
||||||
|
console.log('Failures (first 10):');
|
||||||
|
for (const f of fails.slice(0, 10)) {
|
||||||
|
console.log(` [${f.id}] "${f.value}" — ${f.reason}`);
|
||||||
|
}
|
||||||
|
console.log('');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!APPLY) {
|
||||||
|
console.log('Dry-run only. Re-run with --apply to write the updates.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (updates.length === 0) {
|
||||||
|
console.log('No updates to write.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Writing ${updates.length} updates...`);
|
||||||
|
|
||||||
|
for (const u of updates) {
|
||||||
|
await db
|
||||||
|
.update(clientContacts)
|
||||||
|
.set({
|
||||||
|
valueE164: u.valueE164,
|
||||||
|
valueCountry: u.valueCountry,
|
||||||
|
})
|
||||||
|
.where(eq(clientContacts.id, u.id));
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✓ wrote ${updates.length} rows`);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
97
scripts/db-reset.ts
Normal file
97
scripts/db-reset.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
/**
|
||||||
|
* Wipe all data from the database, preserving schema + drizzle migration
|
||||||
|
* history. Run before swapping seed fixtures.
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/db-reset.ts (refuses without --confirm)
|
||||||
|
* pnpm tsx scripts/db-reset.ts --confirm
|
||||||
|
*
|
||||||
|
* Truncates every table in the `public` schema except the drizzle
|
||||||
|
 * migration tracker, then resets sequences. The wipe is issued as a single
 * TRUNCATE … CASCADE statement, so it is atomic — a failure leaves the data untouched.
|
||||||
|
*
|
||||||
|
* Refuses to run when DATABASE_URL points at anything that doesn't look
|
||||||
|
* like a local/dev host. Override with --i-know-what-im-doing.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import 'dotenv/config';
|
||||||
|
import postgres from 'postgres';
|
||||||
|
|
||||||
|
const url: string = process.env.DATABASE_URL ?? '';
|
||||||
|
if (!url) {
|
||||||
|
console.error('DATABASE_URL is not set; aborting.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const args = new Set(process.argv.slice(2));
|
||||||
|
if (!args.has('--confirm')) {
|
||||||
|
console.error('Refusing to wipe without --confirm');
|
||||||
|
console.error('Run again as: pnpm tsx scripts/db-reset.ts --confirm');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Best-effort safety: refuse for anything that doesn't look like a local DB.
|
||||||
|
function looksLocal(u: string): boolean {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(u);
|
||||||
|
return (
|
||||||
|
parsed.hostname === 'localhost' ||
|
||||||
|
parsed.hostname === '127.0.0.1' ||
|
||||||
|
parsed.hostname === '::1' ||
|
||||||
|
parsed.hostname.endsWith('.local') ||
|
||||||
|
parsed.hostname.endsWith('.internal') ||
|
||||||
|
parsed.hostname === 'host.docker.internal' ||
|
||||||
|
// Docker compose service names commonly used here
|
||||||
|
parsed.hostname === 'postgres' ||
|
||||||
|
parsed.hostname === 'db'
|
||||||
|
);
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!looksLocal(url) && !args.has('--i-know-what-im-doing')) {
|
||||||
|
console.error(
|
||||||
|
`DATABASE_URL host doesn't look local. Refusing to wipe a remote DB without --i-know-what-im-doing.`,
|
||||||
|
);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const sql = postgres(url, { max: 1 });
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
console.log('Resetting database...');
|
||||||
|
console.log(` url: ${url.replace(/:[^:@]*@/, ':***@')}`);
|
||||||
|
|
||||||
|
const tables = await sql<{ tablename: string }[]>`
|
||||||
|
SELECT tablename FROM pg_tables
|
||||||
|
WHERE schemaname = 'public'
|
||||||
|
AND tablename NOT LIKE 'drizzle_%'
|
||||||
|
AND tablename != '__drizzle_migrations'
|
||||||
|
`;
|
||||||
|
|
||||||
|
if (tables.length === 0) {
|
||||||
|
console.log(' no user tables found, nothing to do.');
|
||||||
|
await sql.end();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Single TRUNCATE … CASCADE is faster than per-table loops and handles
|
||||||
|
// FK ordering for us. Quote table names defensively.
|
||||||
|
const tableList = tables.map((t) => `"public"."${t.tablename}"`).join(', ');
|
||||||
|
|
||||||
|
console.log(` truncating ${tables.length} tables...`);
|
||||||
|
await sql.unsafe(`TRUNCATE ${tableList} RESTART IDENTITY CASCADE`);
|
||||||
|
console.log(' done.');
|
||||||
|
|
||||||
|
await sql.end();
|
||||||
|
console.log('');
|
||||||
|
console.log('Database reset complete. Run a seed script next:');
|
||||||
|
console.log(' pnpm db:seed # realistic NocoDB-shaped fixture');
|
||||||
|
console.log(' pnpm db:seed:synthetic # one client per pipeline stage');
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch(async (err) => {
|
||||||
|
console.error('Reset failed:', err);
|
||||||
|
await sql.end().catch(() => undefined);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
83
scripts/dev-open-browser.ts
Normal file
83
scripts/dev-open-browser.ts
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
/**
|
||||||
|
* Launch a headed Chromium with NO viewport override so it adopts the
|
||||||
|
* host monitor's natural size — useful when you want to drive the CRM
|
||||||
|
* manually and have full-screen real estate.
|
||||||
|
*
|
||||||
|
* Pre-fills the login form for the synthetic admin (admin@portnimara.test
|
||||||
|
* / SuperAdmin12345!) but does not submit; press Enter when ready.
|
||||||
|
*
|
||||||
|
* The script keeps running until the browser window is closed by the
|
||||||
|
* user or until you Ctrl-C.
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/dev-open-browser.ts # super_admin
|
||||||
|
* pnpm tsx scripts/dev-open-browser.ts sales_agent
|
||||||
|
* pnpm tsx scripts/dev-open-browser.ts viewer
|
||||||
|
* pnpm tsx scripts/dev-open-browser.ts --no-prefill
|
||||||
|
*/
|
||||||
|
|
||||||
|
import 'dotenv/config';
|
||||||
|
// @playwright/test re-exports the same chromium driver and is already
|
||||||
|
// installed as a dev dep; using it avoids needing to add the standalone
|
||||||
|
// `playwright` package as a separate dependency.
|
||||||
|
import { chromium } from '@playwright/test';
|
||||||
|
|
||||||
|
const USERS: Record<string, { email: string; password: string }> = {
|
||||||
|
super_admin: { email: 'admin@portnimara.test', password: 'SuperAdmin12345!' },
|
||||||
|
sales_agent: { email: 'agent@portnimara.test', password: 'SalesAgent12345!' },
|
||||||
|
viewer: { email: 'viewer@portnimara.test', password: 'ViewerUser12345!' },
|
||||||
|
};
|
||||||
|
|
||||||
|
const BASE_URL = process.env.DEV_BASE_URL ?? 'http://localhost:3000';
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const noPrefill = args.includes('--no-prefill');
|
||||||
|
const role =
|
||||||
|
args.find((a) => !a.startsWith('--')) && USERS[args.find((a) => !a.startsWith('--'))!]
|
||||||
|
? args.find((a) => !a.startsWith('--'))!
|
||||||
|
: 'super_admin';
|
||||||
|
const user = USERS[role]!;
|
||||||
|
|
||||||
|
console.log(`Launching headed Chromium → ${BASE_URL}`);
|
||||||
|
console.log(` role: ${role} (${user.email})`);
|
||||||
|
|
||||||
|
const browser = await chromium.launch({
|
||||||
|
headless: false,
|
||||||
|
args: ['--start-maximized'],
|
||||||
|
});
|
||||||
|
|
||||||
|
// viewport: null lets the page fill the OS window. Combined with
|
||||||
|
// --start-maximized this matches the host monitor's natural size.
|
||||||
|
const context = await browser.newContext({ viewport: null });
|
||||||
|
const page = await context.newPage();
|
||||||
|
|
||||||
|
await page.goto(`${BASE_URL}/login`);
|
||||||
|
|
||||||
|
if (!noPrefill) {
|
||||||
|
try {
|
||||||
|
await page.waitForSelector('#email', { timeout: 5000 });
|
||||||
|
await page.fill('#email', user.email);
|
||||||
|
await page.fill('#password', user.password);
|
||||||
|
console.log(' Login form pre-filled — press Enter in the browser to submit.');
|
||||||
|
} catch {
|
||||||
|
console.log(' Could not find login form (page may have redirected).');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('');
|
||||||
|
console.log("Browser is open. Close it when you're done; the script will exit.");
|
||||||
|
console.log('Or Ctrl-C here to force-quit.');
|
||||||
|
|
||||||
|
// Keep the process alive until the browser window is closed.
|
||||||
|
await new Promise<void>((resolve) => {
|
||||||
|
browser.on('disconnected', () => resolve());
|
||||||
|
});
|
||||||
|
|
||||||
|
await browser.close().catch(() => undefined);
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('Open-browser failed:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
52
scripts/dev-recommender-smoke.ts
Normal file
52
scripts/dev-recommender-smoke.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
/**
|
||||||
|
* Dev-only smoke check for the berth recommender. Resolves the first
|
||||||
|
* port-nimara interest (with desired dims set) and prints the top-N
|
||||||
|
* recommendations.
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/dev-recommender-smoke.ts
|
||||||
|
*/
|
||||||
|
import 'dotenv/config';
|
||||||
|
import { eq, isNotNull, and } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { ports } from '@/lib/db/schema/ports';
|
||||||
|
import { interests } from '@/lib/db/schema/interests';
|
||||||
|
import { recommendBerths } from '@/lib/services/berth-recommender.service';
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const [port] = await db
|
||||||
|
.select({ id: ports.id })
|
||||||
|
.from(ports)
|
||||||
|
.where(eq(ports.slug, 'port-nimara'))
|
||||||
|
.limit(1);
|
||||||
|
if (!port) throw new Error('port-nimara not found');
|
||||||
|
|
||||||
|
const [interest] = await db
|
||||||
|
.select({ id: interests.id })
|
||||||
|
.from(interests)
|
||||||
|
.where(and(eq(interests.portId, port.id), isNotNull(interests.desiredLengthFt)))
|
||||||
|
.limit(1);
|
||||||
|
if (!interest) throw new Error('No interest with desired dims set');
|
||||||
|
|
||||||
|
console.log(`> Recommending berths for interest ${interest.id} on port ${port.id}…`);
|
||||||
|
const recs = await recommendBerths({
|
||||||
|
interestId: interest.id,
|
||||||
|
portId: port.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`> ${recs.length} recommendations:`);
|
||||||
|
for (const r of recs) {
|
||||||
|
console.log(
|
||||||
|
` ${r.mooringNumber.padEnd(5)} tier=${r.tier} fit=${r.fitScore} ` +
|
||||||
|
`${r.lengthFt}×${r.widthFt}×${r.draftFt} ft buf=${r.sizeBufferPct}% ` +
|
||||||
|
`${r.reasons.dimensional}; ${r.reasons.pipeline}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main()
|
||||||
|
.then(() => process.exit(0))
|
||||||
|
.catch((err) => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
409
scripts/import-berths-from-nocodb.ts
Normal file
409
scripts/import-berths-from-nocodb.ts
Normal file
@@ -0,0 +1,409 @@
|
|||||||
|
/**
|
||||||
|
* Idempotent NocoDB Berths → CRM `berths` import.
|
||||||
|
*
|
||||||
|
* Re-running picks up NocoDB additions/edits without clobbering CRM-side
|
||||||
|
* overrides: rows where `updated_at > last_imported_at` are treated as
|
||||||
|
* human-edited and skipped (use `--force` to override). Map Data JSON
|
||||||
|
* is validated and upserted into `berth_map_data` as a separate step.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* pnpm tsx scripts/import-berths-from-nocodb.ts --dry-run [--port-slug port-nimara]
|
||||||
|
* pnpm tsx scripts/import-berths-from-nocodb.ts --apply [--port-slug port-nimara]
|
||||||
|
* pnpm tsx scripts/import-berths-from-nocodb.ts --apply --force
|
||||||
|
* pnpm tsx scripts/import-berths-from-nocodb.ts --apply --update-snapshot
|
||||||
|
*
|
||||||
|
* Edge cases mitigated (see plan §14.1):
|
||||||
|
* - Mooring collisions : unique (port_id, mooring_number) on the table.
|
||||||
|
* - Concurrent runs : pg_advisory_xact_lock on a stable key.
|
||||||
|
* - Numeric-with-units : parseDecimalWithUnit() strips trailing units.
|
||||||
|
* - Metric drift : NocoDB metric formula columns are ignored;
|
||||||
|
* metric values are recomputed from imperial.
|
||||||
|
* - Map Data shape : zod-validated; failures are skipped silently
|
||||||
|
* rather than aborting the whole import.
|
||||||
|
* - Status enum : NocoDB display strings → CRM snake_case.
|
||||||
|
* - NocoDB row deleted : reported as "orphaned in CRM"; not auto-deleted.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import 'dotenv/config';
|
||||||
|
import { eq, sql } from 'drizzle-orm';
|
||||||
|
import { promises as fs } from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { ports } from '@/lib/db/schema/ports';
|
||||||
|
import { berths, berthMapData } from '@/lib/db/schema/berths';
|
||||||
|
import { fetchAllRows, loadNocoDbConfig, NOCO_TABLES } from '@/lib/dedup/nocodb-source';
|
||||||
|
import {
|
||||||
|
buildPlan,
|
||||||
|
mapRow,
|
||||||
|
type Action,
|
||||||
|
type ImportedBerth,
|
||||||
|
type PlanEntry,
|
||||||
|
type ExistingBerthRow,
|
||||||
|
} from '@/lib/services/berth-import';
|
||||||
|
|
||||||
|
// ─── CLI ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
interface CliArgs {
|
||||||
|
dryRun: boolean;
|
||||||
|
apply: boolean;
|
||||||
|
portSlug: string;
|
||||||
|
force: boolean;
|
||||||
|
updateSnapshot: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseArgs(argv: string[]): CliArgs {
|
||||||
|
const args: CliArgs = {
|
||||||
|
dryRun: false,
|
||||||
|
apply: false,
|
||||||
|
portSlug: 'port-nimara',
|
||||||
|
force: false,
|
||||||
|
updateSnapshot: false,
|
||||||
|
};
|
||||||
|
for (let i = 0; i < argv.length; i += 1) {
|
||||||
|
const a = argv[i]!;
|
||||||
|
if (a === '--dry-run') args.dryRun = true;
|
||||||
|
else if (a === '--apply') args.apply = true;
|
||||||
|
else if (a === '--port-slug') args.portSlug = argv[++i] ?? 'port-nimara';
|
||||||
|
else if (a === '--force') args.force = true;
|
||||||
|
else if (a === '--update-snapshot') args.updateSnapshot = true;
|
||||||
|
else if (a === '-h' || a === '--help') {
|
||||||
|
printHelp();
|
||||||
|
process.exit(0);
|
||||||
|
} else {
|
||||||
|
console.error(`Unknown argument: ${a}`);
|
||||||
|
printHelp();
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!args.dryRun && !args.apply) {
|
||||||
|
console.error('Must specify either --dry-run or --apply.');
|
||||||
|
printHelp();
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
|
function printHelp(): void {
|
||||||
|
console.log(`Usage:
|
||||||
|
pnpm tsx scripts/import-berths-from-nocodb.ts --dry-run [--port-slug <slug>]
|
||||||
|
pnpm tsx scripts/import-berths-from-nocodb.ts --apply [--port-slug <slug>] [--force] [--update-snapshot]
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
--dry-run Read NocoDB + diff vs CRM. No writes.
|
||||||
|
--apply Apply the plan to the DB.
|
||||||
|
--port-slug <slug> Target port slug (default: port-nimara).
|
||||||
|
--force Overwrite rows where CRM updated_at > last_imported_at.
|
||||||
|
--update-snapshot Rewrite src/lib/db/seed-data/berths.json after apply.
|
||||||
|
-h, --help Show this help.
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Stable advisory lock key ───────────────────────────────────────────────
|
||||||
|
// 64-bit BIGINT - first 4 bytes spell "BRTH" so it's grep-able in pg_locks.
|
||||||
|
const BERTH_IMPORT_LOCK_KEY = 0x4252544800000001n;
|
||||||
|
|
||||||
|
// ─── Apply ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
interface ApplyResult {
|
||||||
|
inserted: number;
|
||||||
|
updated: number;
|
||||||
|
skipped: number;
|
||||||
|
mapDataWritten: number;
|
||||||
|
warnings: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
async function apply(
|
||||||
|
portId: string,
|
||||||
|
plan: PlanEntry[],
|
||||||
|
orphans: ExistingBerthRow[],
|
||||||
|
importedAt: Date,
|
||||||
|
): Promise<ApplyResult> {
|
||||||
|
const result: ApplyResult = {
|
||||||
|
inserted: 0,
|
||||||
|
updated: 0,
|
||||||
|
skipped: 0,
|
||||||
|
mapDataWritten: 0,
|
||||||
|
warnings: [],
|
||||||
|
};
|
||||||
|
for (const orphan of orphans) {
|
||||||
|
result.warnings.push(
|
||||||
|
`Orphan: CRM has mooring="${orphan.mooringNumber}" but NocoDB no longer does (id=${orphan.id})`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await db.transaction(async (tx) => {
|
||||||
|
// Stable lock so two simultaneous --apply runs serialize.
|
||||||
|
await tx.execute(sql`SELECT pg_advisory_xact_lock(${BERTH_IMPORT_LOCK_KEY})`);
|
||||||
|
|
||||||
|
for (const entry of plan) {
|
||||||
|
if (entry.action === 'skip-edited' || entry.action === 'noop') {
|
||||||
|
result.skipped += 1;
|
||||||
|
result.warnings.push(`Skipped ${entry.imported.mooringNumber}: ${entry.reason ?? 'no-op'}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const i = entry.imported;
|
||||||
|
const n = i.numerics;
|
||||||
|
const baseValues = {
|
||||||
|
portId,
|
||||||
|
mooringNumber: i.mooringNumber,
|
||||||
|
area: i.area,
|
||||||
|
status: i.status,
|
||||||
|
lengthFt: n.lengthFt != null ? String(n.lengthFt) : null,
|
||||||
|
widthFt: n.widthFt != null ? String(n.widthFt) : null,
|
||||||
|
draftFt: n.draftFt != null ? String(n.draftFt) : null,
|
||||||
|
lengthM: n.lengthM != null ? String(n.lengthM) : null,
|
||||||
|
widthM: n.widthM != null ? String(n.widthM) : null,
|
||||||
|
draftM: n.draftM != null ? String(n.draftM) : null,
|
||||||
|
widthIsMinimum: i.widthIsMinimum,
|
||||||
|
nominalBoatSize: n.nominalBoatSize != null ? String(n.nominalBoatSize) : null,
|
||||||
|
nominalBoatSizeM: n.nominalBoatSizeM != null ? String(n.nominalBoatSizeM) : null,
|
||||||
|
waterDepth: n.waterDepth != null ? String(n.waterDepth) : null,
|
||||||
|
waterDepthM: n.waterDepthM != null ? String(n.waterDepthM) : null,
|
||||||
|
waterDepthIsMinimum: i.waterDepthIsMinimum,
|
||||||
|
sidePontoon: i.sidePontoon,
|
||||||
|
powerCapacity: n.powerCapacity != null ? String(n.powerCapacity) : null,
|
||||||
|
voltage: n.voltage != null ? String(n.voltage) : null,
|
||||||
|
mooringType: i.mooringType,
|
||||||
|
cleatType: i.cleatType,
|
||||||
|
cleatCapacity: i.cleatCapacity,
|
||||||
|
bollardType: i.bollardType,
|
||||||
|
bollardCapacity: i.bollardCapacity,
|
||||||
|
access: i.access,
|
||||||
|
price: n.price != null ? String(n.price) : null,
|
||||||
|
priceCurrency: 'USD' as const,
|
||||||
|
bowFacing: i.bowFacing,
|
||||||
|
berthApproved: i.berthApproved,
|
||||||
|
statusOverrideMode: i.statusOverrideMode,
|
||||||
|
lastImportedAt: importedAt,
|
||||||
|
updatedAt: importedAt,
|
||||||
|
};
|
||||||
|
|
||||||
|
let berthId: string;
|
||||||
|
if (entry.action === 'insert') {
|
||||||
|
const [inserted] = await tx
|
||||||
|
.insert(berths)
|
||||||
|
.values({ ...baseValues, tenureType: 'permanent' })
|
||||||
|
.returning({ id: berths.id });
|
||||||
|
berthId = inserted!.id;
|
||||||
|
result.inserted += 1;
|
||||||
|
} else {
|
||||||
|
await tx.update(berths).set(baseValues).where(eq(berths.id, entry.existing!.id));
|
||||||
|
berthId = entry.existing!.id;
|
||||||
|
result.updated += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (i.mapData) {
|
||||||
|
const mapValues = {
|
||||||
|
berthId,
|
||||||
|
svgPath: i.mapData.path ?? null,
|
||||||
|
x: i.mapData.x != null ? String(i.mapData.x) : null,
|
||||||
|
y: i.mapData.y != null ? String(i.mapData.y) : null,
|
||||||
|
transform: i.mapData.transform ?? null,
|
||||||
|
fontSize: i.mapData.fontSize != null ? String(i.mapData.fontSize) : null,
|
||||||
|
updatedAt: importedAt,
|
||||||
|
};
|
||||||
|
await tx
|
||||||
|
.insert(berthMapData)
|
||||||
|
.values(mapValues)
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: berthMapData.berthId,
|
||||||
|
set: {
|
||||||
|
svgPath: mapValues.svgPath,
|
||||||
|
x: mapValues.x,
|
||||||
|
y: mapValues.y,
|
||||||
|
transform: mapValues.transform,
|
||||||
|
fontSize: mapValues.fontSize,
|
||||||
|
updatedAt: importedAt,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
result.mapDataWritten += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Snapshot writer (for seed-data refresh) ────────────────────────────────
|
||||||
|
|
||||||
|
async function writeSnapshot(imported: ImportedBerth[]): Promise<string> {
|
||||||
|
// Ordering: idx 0..4 available (small), 5..9 under_offer (medium),
|
||||||
|
// 10..11 sold (large), then everything else by mooring number. The
|
||||||
|
// first 12 indexes feed `seed-data.ts` interest/reservation stubs.
|
||||||
|
const sortByLength = (a: ImportedBerth, b: ImportedBerth) =>
|
||||||
|
(a.numerics.lengthFt ?? 0) - (b.numerics.lengthFt ?? 0);
|
||||||
|
const available = imported
|
||||||
|
.filter((b) => b.status === 'available')
|
||||||
|
.sort(sortByLength)
|
||||||
|
.slice(0, 5);
|
||||||
|
const underOffer = imported
|
||||||
|
.filter((b) => b.status === 'under_offer')
|
||||||
|
.sort(sortByLength)
|
||||||
|
.slice(0, 5);
|
||||||
|
const sold = imported
|
||||||
|
.filter((b) => b.status === 'sold')
|
||||||
|
.sort((a, b) => -sortByLength(a, b))
|
||||||
|
.slice(0, 2);
|
||||||
|
const featured = new Set([...available, ...underOffer, ...sold].map((b) => b.mooringNumber));
|
||||||
|
const rest = imported
|
||||||
|
.filter((b) => !featured.has(b.mooringNumber))
|
||||||
|
.sort((a, b) => a.mooringNumber.localeCompare(b.mooringNumber, 'en', { numeric: true }));
|
||||||
|
const ordered = [...available, ...underOffer, ...sold, ...rest];
|
||||||
|
|
||||||
|
const payload = ordered.map((b) => ({
|
||||||
|
legacyId: b.legacyId,
|
||||||
|
mooringNumber: b.mooringNumber,
|
||||||
|
area: b.area,
|
||||||
|
status: b.status,
|
||||||
|
lengthFt: b.numerics.lengthFt,
|
||||||
|
widthFt: b.numerics.widthFt,
|
||||||
|
draftFt: b.numerics.draftFt,
|
||||||
|
lengthM: b.numerics.lengthM,
|
||||||
|
widthM: b.numerics.widthM,
|
||||||
|
draftM: b.numerics.draftM,
|
||||||
|
widthIsMinimum: b.widthIsMinimum,
|
||||||
|
nominalBoatSize: b.numerics.nominalBoatSize,
|
||||||
|
nominalBoatSizeM: b.numerics.nominalBoatSizeM,
|
||||||
|
waterDepth: b.numerics.waterDepth,
|
||||||
|
waterDepthM: b.numerics.waterDepthM,
|
||||||
|
waterDepthIsMinimum: b.waterDepthIsMinimum,
|
||||||
|
sidePontoon: b.sidePontoon,
|
||||||
|
powerCapacity: b.numerics.powerCapacity,
|
||||||
|
voltage: b.numerics.voltage,
|
||||||
|
mooringType: b.mooringType,
|
||||||
|
cleatType: b.cleatType,
|
||||||
|
cleatCapacity: b.cleatCapacity,
|
||||||
|
bollardType: b.bollardType,
|
||||||
|
bollardCapacity: b.bollardCapacity,
|
||||||
|
access: b.access,
|
||||||
|
price: b.numerics.price,
|
||||||
|
bowFacing: b.bowFacing,
|
||||||
|
berthApproved: b.berthApproved,
|
||||||
|
statusOverrideMode: b.statusOverrideMode,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const target = path.resolve(process.cwd(), 'src/lib/db/seed-data/berths.json');
|
||||||
|
await fs.writeFile(target, JSON.stringify(payload, null, 2) + '\n', 'utf8');
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Main ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Sync NocoDB Berths into the CRM `berths` table for one port.
 *
 * Flow: fetch all NocoDB rows → map/validate (rows without a Mooring
 * Number are skipped) → de-dup by mooring number → diff against the
 * existing CRM rows via buildPlan() → print the plan → optionally
 * refresh the seed-JSON snapshot → apply (unless --dry-run).
 *
 * Exits 1 when the requested port slug does not exist.
 */
async function main(): Promise<void> {
  const args = parseArgs(process.argv.slice(2));
  const config = loadNocoDbConfig();

  // Resolve the target port row; bail out if the slug is unknown.
  const [port] = await db
    .select({ id: ports.id, slug: ports.slug })
    .from(ports)
    .where(eq(ports.slug, args.portSlug))
    .limit(1);
  if (!port) {
    console.error(`No port found with slug "${args.portSlug}".`);
    process.exit(1);
  }

  console.log(`> Fetching NocoDB Berths…`);
  const rows = await fetchAllRows(NOCO_TABLES.berths, config);
  console.log(` fetched ${rows.length} rows from NocoDB`);

  // Map raw NocoDB rows into ImportedBerth records; count the rows
  // mapRow() rejects so the operator sees how much was dropped.
  const imported: ImportedBerth[] = [];
  let skippedMalformed = 0;
  for (const r of rows) {
    const m = mapRow(r);
    if (m) imported.push(m);
    else skippedMalformed += 1;
  }
  if (skippedMalformed > 0) {
    console.warn(` ${skippedMalformed} rows skipped (missing Mooring Number)`);
  }

  // De-dup against any same-mooring twins surfacing from NocoDB
  // (defensive — the Berths table is keyed on Mooring Number in NocoDB).
  const seen = new Set<string>();
  const dedup: ImportedBerth[] = [];
  for (const b of imported) {
    if (seen.has(b.mooringNumber)) {
      console.warn(` duplicate mooring "${b.mooringNumber}" in NocoDB — keeping first`);
      continue;
    }
    seen.add(b.mooringNumber);
    dedup.push(b);
  }

  console.log(`> Reading current CRM berths for port "${port.slug}"…`);
  const existingRows = await db
    .select({
      id: berths.id,
      mooringNumber: berths.mooringNumber,
      updatedAt: berths.updatedAt,
      lastImportedAt: berths.lastImportedAt,
    })
    .from(berths)
    .where(eq(berths.portId, port.id));
  console.log(` ${existingRows.length} existing rows`);

  const existingByMooring = new Map(existingRows.map((r) => [r.mooringNumber, r]));
  const { plan, orphans } = buildPlan(dedup, existingByMooring, args.force);

  // Tally planned actions for the summary printed below.
  const counts = plan.reduce(
    (acc, e) => {
      acc[e.action] += 1;
      return acc;
    },
    { insert: 0, update: 0, 'skip-edited': 0, noop: 0 } as Record<Action, number>,
  );

  console.log(`> Plan:`);
  console.log(` insert : ${counts.insert}`);
  console.log(` update : ${counts.update}`);
  console.log(` skip-edited : ${counts['skip-edited']}`);
  console.log(` no-op : ${counts.noop}`);
  console.log(` orphans (CRM): ${counts === undefined ? 0 : orphans.length}`);

  if (counts['skip-edited'] > 0) {
    console.log(` ↳ Skipped (CRM-edited; pass --force to overwrite):`);
    for (const e of plan.filter((p) => p.action === 'skip-edited').slice(0, 10)) {
      console.log(` - ${e.imported.mooringNumber} ${e.reason}`);
    }
    if (counts['skip-edited'] > 10) console.log(` …and ${counts['skip-edited'] - 10} more`);
  }
  if (orphans.length > 0) {
    console.log(` ↳ Orphans (in CRM but missing from NocoDB):`);
    for (const o of orphans.slice(0, 10)) console.log(` - ${o.mooringNumber}`);
    if (orphans.length > 10) console.log(` …and ${orphans.length - 10} more`);
  }

  // Snapshot write is independent of DB writes — even in --dry-run mode
  // a rep may want to refresh the seed JSON to capture the latest NocoDB
  // shape without committing to the DB import. The original gate dropped
  // this silently when --dry-run was passed; audit caught it.
  if (args.updateSnapshot) {
    const written = await writeSnapshot(dedup);
    console.log(`> Wrote ${dedup.length} rows to ${path.relative(process.cwd(), written)}`);
  }

  if (args.dryRun) {
    console.log(`\n[dry-run] no DB writes performed.`);
    return;
  }

  console.log(`> Applying…`);
  const result = await apply(port.id, plan, orphans, new Date());
  console.log(` inserted : ${result.inserted}`);
  console.log(` updated : ${result.updated}`);
  console.log(` skipped : ${result.skipped}`);
  console.log(` map data writes : ${result.mapDataWritten}`);
  if (result.warnings.length) {
    console.log(` warnings :`);
    for (const w of result.warnings.slice(0, 20)) console.log(` - ${w}`);
    if (result.warnings.length > 20) console.log(` …and ${result.warnings.length - 20} more`);
  }
}
|
||||||
|
|
||||||
|
main()
|
||||||
|
.then(() => process.exit(0))
|
||||||
|
.catch((err: unknown) => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
251
scripts/migrate-from-nocodb.ts
Normal file
251
scripts/migrate-from-nocodb.ts
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
/**
|
||||||
|
* One-shot migration: legacy NocoDB Interests → new client/interest split.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/migrate-from-nocodb.ts --dry-run
|
||||||
|
* Pulls the live NocoDB base, runs the transform + dedup pipeline,
|
||||||
|
* writes a report to .migration/<timestamp>/. NO database writes.
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/migrate-from-nocodb.ts --dry-run --port-slug port-nimara
|
||||||
|
* Same, but tags the planned writes with the named port (matters for
|
||||||
|
* the apply phase — every client/interest belongs to one port).
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/migrate-from-nocodb.ts --apply --port-slug port-nimara
|
||||||
|
* Re-fetches NocoDB, re-transforms, then writes the planned rows
|
||||||
|
* into the target port via the idempotent `migration_source_links`
|
||||||
|
* ledger. Re-runs are safe — already-imported source IDs are skipped.
|
||||||
|
* REQUIRES `EMAIL_REDIRECT_TO` to be set in env (safety net) unless
|
||||||
|
* `--unsafe-skip-redirect-check` is also passed.
|
||||||
|
*
|
||||||
|
* Design reference: docs/superpowers/specs/2026-05-03-dedup-and-migration-design.md §9.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import 'dotenv/config';
|
||||||
|
import { randomUUID } from 'node:crypto';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
import { eq } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { ports } from '@/lib/db/schema/ports';
|
||||||
|
import { applyPlan } from '@/lib/dedup/migration-apply';
|
||||||
|
import { fetchSnapshot, loadNocoDbConfig } from '@/lib/dedup/nocodb-source';
|
||||||
|
import { transformSnapshot } from '@/lib/dedup/migration-transform';
|
||||||
|
import { resolveReportPaths, writeReport } from '@/lib/dedup/migration-report';
|
||||||
|
|
||||||
|
/** Parsed command-line options for this script (see printHelp for flag docs). */
interface CliArgs {
  dryRun: boolean; // --dry-run: fetch + transform + report only, no DB writes
  apply: boolean; // --apply: write the planned rows to the DB
  portSlug: string | null; // --port-slug <slug>; null → first available port
  reportDir: string | null; // --report <dir>: previously-generated report dir
  unsafeSkipRedirectCheck: boolean; // --unsafe-skip-redirect-check: bypass EMAIL_REDIRECT_TO gate
}
|
||||||
|
|
||||||
|
function parseArgs(argv: string[]): CliArgs {
|
||||||
|
const args: CliArgs = {
|
||||||
|
dryRun: false,
|
||||||
|
apply: false,
|
||||||
|
portSlug: null,
|
||||||
|
reportDir: null,
|
||||||
|
unsafeSkipRedirectCheck: false,
|
||||||
|
};
|
||||||
|
for (let i = 0; i < argv.length; i += 1) {
|
||||||
|
const a = argv[i]!;
|
||||||
|
if (a === '--dry-run') args.dryRun = true;
|
||||||
|
else if (a === '--apply') args.apply = true;
|
||||||
|
else if (a === '--port-slug') args.portSlug = argv[++i] ?? null;
|
||||||
|
else if (a === '--report') args.reportDir = argv[++i] ?? null;
|
||||||
|
else if (a === '--unsafe-skip-redirect-check') args.unsafeSkipRedirectCheck = true;
|
||||||
|
else if (a === '-h' || a === '--help') {
|
||||||
|
printHelp();
|
||||||
|
process.exit(0);
|
||||||
|
} else {
|
||||||
|
console.error(`Unknown argument: ${a}`);
|
||||||
|
printHelp();
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Print usage text for this script to stdout. Does not exit. */
function printHelp(): void {
  console.log(`Usage:
  pnpm tsx scripts/migrate-from-nocodb.ts --dry-run [--port-slug <slug>]
      Pulls NocoDB → transforms → writes report to .migration/<timestamp>/.
      No database writes.

  pnpm tsx scripts/migrate-from-nocodb.ts --apply --port-slug <slug>
      Re-fetches NocoDB, re-transforms, writes via migration_source_links
      ledger. Idempotent — safe to re-run. Requires EMAIL_REDIRECT_TO set
      (unless --unsafe-skip-redirect-check is also passed).

Flags:
  --dry-run                      Read NocoDB, write report only.
  --apply                        Actually write rows to the DB.
  --port-slug <slug>             Port slug to attach to all imported
                                 entities. Defaults to the first
                                 available port if omitted.
  --report <dir>                 Path to a previously-generated report
                                 dir (only used by --apply).
  --unsafe-skip-redirect-check   Skip the EMAIL_REDIRECT_TO precondition
                                 check. Only use in production cutover.
  -h, --help                     Show this help.
`);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve the target port: use the slug if provided, otherwise the first
|
||||||
|
* port found. Errors out cleanly if the slug doesn't match any port.
|
||||||
|
*/
|
||||||
|
async function resolvePort(slug: string | null): Promise<{ id: string; slug: string }> {
|
||||||
|
if (slug) {
|
||||||
|
const [p] = await db
|
||||||
|
.select({ id: ports.id, slug: ports.slug })
|
||||||
|
.from(ports)
|
||||||
|
.where(eq(ports.slug, slug))
|
||||||
|
.limit(1);
|
||||||
|
if (!p) {
|
||||||
|
console.error(`No port found with slug "${slug}".`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
return { id: p.id, slug: p.slug };
|
||||||
|
}
|
||||||
|
const [first] = await db.select({ id: ports.id, slug: ports.slug }).from(ports).limit(1);
|
||||||
|
if (!first) {
|
||||||
|
console.error('No ports exist in the target DB. Seed at least one port before applying.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
return { id: first.id, slug: first.slug };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Orchestrate the migration: validate flags → EMAIL_REDIRECT_TO safety
 * gate → fetch + transform the NocoDB snapshot → write the report →
 * print the plan summary → (if --apply) resolve the target port and
 * write rows via applyPlan().
 *
 * Exit codes: 1 for argument errors, 2 when the safety gate blocks an
 * --apply run.
 */
async function main(): Promise<void> {
  const args = parseArgs(process.argv.slice(2));

  if (!args.dryRun && !args.apply) {
    console.error('Must specify --dry-run or --apply');
    printHelp();
    process.exit(1);
  }

  // Safety gate: --apply must run with EMAIL_REDIRECT_TO set, unless the
  // operator explicitly opts out (production cutover).
  if (args.apply && !process.env.EMAIL_REDIRECT_TO && !args.unsafeSkipRedirectCheck) {
    console.error(
      '--apply requires EMAIL_REDIRECT_TO to be set in the environment as a safety net.',
    );
    console.error('See docs/operations/outbound-comms-safety.md for the rationale.');
    console.error(
      'If you are running the production cutover and have read that doc, add ' +
        '--unsafe-skip-redirect-check to override.',
    );
    process.exit(2);
  }

  // ── Fetch + transform (shared by dry-run and apply) ──────────────────────

  console.log('[migrate] Loading NocoDB config…');
  const config = loadNocoDbConfig();
  console.log(`[migrate] Source: ${config.url}`);

  console.log('[migrate] Fetching snapshot from NocoDB…');
  const start = Date.now();
  const snapshot = await fetchSnapshot(config);
  const elapsed = ((Date.now() - start) / 1000).toFixed(1);
  console.log(
    `[migrate] Snapshot fetched in ${elapsed}s — ${snapshot.interests.length} interests, ${snapshot.residentialInterests.length} residential, ${snapshot.berths.length} berths.`,
  );

  console.log('[migrate] Running transform + dedup pipeline…');
  const plan = transformSnapshot(snapshot);

  // Resolve output paths relative to the worktree root.
  const scriptDir = path.dirname(fileURLToPath(import.meta.url));
  const repoRoot = path.resolve(scriptDir, '..');
  const generatedAt = new Date().toISOString();
  const paths = resolveReportPaths(repoRoot);

  console.log(`[migrate] Writing report to ${paths.rootDir}…`);
  await writeReport(paths, plan, generatedAt);

  // ── Plan summary ─────────────────────────────────────────────────────────
  const s = plan.stats;
  console.log('');
  console.log('=== Migration Plan Summary ===');
  console.log(
    ` Input: ${s.inputInterestRows} interests, ${s.inputResidentialRows} residential interests`,
  );
  console.log(` Output: ${s.outputClients} clients, ${s.outputInterests} interests`);
  console.log(` ${s.outputContacts} contacts, ${s.outputAddresses} addresses`);
  console.log(
    ` ${s.outputDocuments} EOI documents, ${s.outputDocumentSigners} signers`,
  );
  console.log(
    ` ${s.outputResidentialClients} residential clients (with default-stage interests)`,
  );
  console.log(
    ` Dedup: ${s.autoLinkedClusters} auto-linked clusters, ${s.needsReviewPairs} pairs flagged for review`,
  );
  console.log(` Quality: ${s.flaggedRows} rows flagged (see report.csv)`);
  console.log('');
  console.log(` Full report: ${paths.summaryPath}`);

  if (args.dryRun) {
    console.log('');
    console.log('Dry-run complete. Re-run with --apply to write rows.');
    return;
  }

  // ── Apply path ───────────────────────────────────────────────────────────

  const port = await resolvePort(args.portSlug);
  const applyId = randomUUID();

  console.log('');
  console.log(`[migrate] Applying to port "${port.slug}" (id=${port.id})`);
  console.log(`[migrate] Apply id: ${applyId}`);
  console.log('[migrate] Inserting…');

  const applyStart = Date.now();
  const result = await applyPlan(plan, { port, applyId });
  const applyElapsed = ((Date.now() - applyStart) / 1000).toFixed(1);

  console.log('');
  console.log('=== Apply Result ===');
  console.log(` Time: ${applyElapsed}s`);
  console.log(
    ` Clients: ${result.clientsInserted} inserted, ${result.clientsSkipped} already linked`,
  );
  console.log(` Contacts: ${result.contactsInserted} inserted`);
  console.log(` Addresses: ${result.addressesInserted} inserted`);
  console.log(` Yachts: ${result.yachtsInserted} inserted`);
  console.log(
    ` Interests: ${result.interestsInserted} inserted, ${result.interestsSkipped} already linked`,
  );
  console.log(
    ` Documents: ${result.documentsInserted} inserted, ${result.documentsSkipped} already linked`,
  );
  console.log(` Signers: ${result.documentSignersInserted} inserted`);
  console.log(
    ` Res-Clt: ${result.residentialClientsInserted} inserted, ${result.residentialClientsSkipped} already linked`,
  );
  console.log(` Res-Int: ${result.residentialInterestsInserted} inserted`);

  if (result.warnings.length > 0) {
    console.log('');
    console.log('Warnings:');
    for (const w of result.warnings.slice(0, 20)) {
      console.log(` - ${w}`);
    }
    if (result.warnings.length > 20) {
      console.log(` … ${result.warnings.length - 20} more`);
    }
  }
  console.log('');
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('[migrate] Fatal error:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
29
scripts/migrate-storage.ts
Normal file
29
scripts/migrate-storage.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
/**
|
||||||
|
* Storage backend migration CLI — see §4.7a + §14.9a of
|
||||||
|
* docs/berth-recommender-and-pdf-plan.md.
|
||||||
|
*
|
||||||
|
* pnpm tsx scripts/migrate-storage.ts --from s3 --to filesystem [--dry-run]
|
||||||
|
* pnpm tsx scripts/migrate-storage.ts --from filesystem --to s3
|
||||||
|
*
|
||||||
|
* The actual migration logic lives in `src/lib/storage/migrate.ts` so the
|
||||||
|
* admin UI's "Switch backend" button can run the exact same code path. This
|
||||||
|
* file is a thin CLI wrapper.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { logger } from '@/lib/logger';
|
||||||
|
import { parseArgs, runMigration } from '@/lib/storage/migrate';
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
const args = parseArgs(process.argv.slice(2));
|
||||||
|
logger.info({ args }, 'Starting storage migration');
|
||||||
|
const result = await runMigration(args);
|
||||||
|
logger.info({ result }, 'Storage migration complete');
|
||||||
|
console.log(JSON.stringify(result, null, 2));
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
logger.error({ err }, 'Storage migration failed');
|
||||||
|
console.error(err);
|
||||||
|
process.exit(2);
|
||||||
|
});
|
||||||
106
scripts/smoke-test-redirect.ts
Normal file
106
scripts/smoke-test-redirect.ts
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
/**
|
||||||
|
* Live smoke test for EMAIL_REDIRECT_TO.
|
||||||
|
*
|
||||||
|
* Actually calls `sendEmail()` (the centralized helper used by every
|
||||||
|
* outbound email path in the app) with a fake real-client address. The
|
||||||
|
* SMTP transporter is monkey-patched to capture the message instead of
|
||||||
|
* actually delivering it, so this is safe to run anywhere.
|
||||||
|
*
|
||||||
|
* Prints the captured `to` + `subject` so the operator can see with their
|
||||||
|
* own eyes that the redirect happened. Exits non-zero if the redirect
|
||||||
|
* failed for any reason.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* pnpm tsx scripts/smoke-test-redirect.ts
|
||||||
|
*/
|
||||||
|
import 'dotenv/config';
|
||||||
|
|
||||||
|
/**
 * Run the live smoke test: patch nodemailer's createTransport to capture
 * instead of deliver, send one message through the app's sendEmail()
 * helper, then assert the outbound "to" equals EMAIL_REDIRECT_TO and the
 * subject carries the `[redirected from <orig>]` prefix. Exits 0 on
 * PASS, 1 on FAIL (or when EMAIL_REDIRECT_TO is unset).
 */
async function main() {
  const expectedRedirect = process.env.EMAIL_REDIRECT_TO;
  if (!expectedRedirect) {
    console.error('FAIL: EMAIL_REDIRECT_TO is not set in env. Set it before running this test.');
    process.exit(1);
  }

  console.log(`[smoke] EMAIL_REDIRECT_TO = ${expectedRedirect}`);
  console.log('');

  // Monkey-patch nodemailer's createTransport so we capture the call
  // without actually delivering. This is the same pattern the unit
  // tests use, but at the live import-time level so we're testing the
  // exact code path that runs in production.
  const nodemailer = await import('nodemailer');
  const captured: Array<{ to: unknown; subject: unknown; from: unknown }> = [];
  const originalCreateTransport = nodemailer.default.createTransport;
  nodemailer.default.createTransport = (() => ({
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    sendMail: async (msg: any) => {
      captured.push({ to: msg.to, subject: msg.subject, from: msg.from });
      return { messageId: '<smoke@test>', accepted: [msg.to], rejected: [] };
    },
  })) as unknown as typeof nodemailer.default.createTransport;

  // Now import sendEmail (gets the patched transporter).
  // NOTE: the dynamic import MUST happen after the patch above, so the
  // module binds to the stubbed transport.
  const { sendEmail } = await import('@/lib/email');

  const realClientEmail = 'real-client-DO-NOT-EMAIL@example.test';
  const realSubject = 'Important: Your contract is ready';

  console.log('[smoke] calling sendEmail(...) with:');
  console.log(` to: ${realClientEmail}`);
  console.log(` subject: "${realSubject}"`);
  console.log('');

  await sendEmail(realClientEmail, realSubject, '<p>Body unused for this smoke.</p>');

  // Restore the original transport (be a good citizen).
  nodemailer.default.createTransport = originalCreateTransport;

  console.log('[smoke] captured outbound message:');
  console.log(` to: ${captured[0]?.to}`);
  console.log(` subject: "${captured[0]?.subject}"`);
  console.log(` from: ${captured[0]?.from}`);
  console.log('');

  // Assertions
  let pass = true;

  if (captured.length !== 1) {
    console.error(`FAIL: expected exactly 1 sendMail call, got ${captured.length}`);
    pass = false;
  }

  if (captured[0]?.to !== expectedRedirect) {
    console.error(
      `FAIL: outbound "to" was "${captured[0]?.to}", expected the redirect address "${expectedRedirect}"`,
    );
    pass = false;
  }

  if (
    typeof captured[0]?.subject !== 'string' ||
    !captured[0].subject.startsWith(`[redirected from ${realClientEmail}]`)
  ) {
    console.error(
      `FAIL: subject did not get the [redirected from <orig>] prefix. Got: "${captured[0]?.subject}"`,
    );
    pass = false;
  }

  if (pass) {
    console.log('PASS: EMAIL_REDIRECT_TO is intercepting outbound email correctly.');
    console.log(
      ' The "to" header matches the redirect, and the original recipient is preserved in the subject.',
    );
    process.exit(0);
  } else {
    console.error('');
    console.error('Smoke test FAILED. Do not import production data until this is fixed.');
    process.exit(1);
  }
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('FATAL:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
42
scripts/test-currency-api.ts
Normal file
42
scripts/test-currency-api.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* Quick verification: live Frankfurter API → DB upsert → getRate read.
|
||||||
|
* Run with `pnpm tsx scripts/test-currency-api.ts`.
|
||||||
|
*/
|
||||||
|
import { refreshRates, getRate, convert } from '@/lib/services/currency';
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
console.log('1. Fetching live rates from Frankfurter…');
|
||||||
|
await refreshRates();
|
||||||
|
|
||||||
|
console.log('2. Reading round-trip rates from DB:');
|
||||||
|
const usdEur = await getRate('USD', 'EUR');
|
||||||
|
const eurUsd = await getRate('EUR', 'USD');
|
||||||
|
const usdGbp = await getRate('USD', 'GBP');
|
||||||
|
const eurGbp = await getRate('EUR', 'GBP');
|
||||||
|
const usdUsd = await getRate('USD', 'USD');
|
||||||
|
|
||||||
|
console.log(` USD→EUR: ${usdEur}`);
|
||||||
|
console.log(` EUR→USD: ${eurUsd}`);
|
||||||
|
console.log(` USD→GBP: ${usdGbp}`);
|
||||||
|
console.log(` EUR→GBP: ${eurGbp ?? '(no direct row, expected)'}`);
|
||||||
|
console.log(` USD→USD: ${usdUsd}`);
|
||||||
|
|
||||||
|
console.log('3. Convert sample amounts:');
|
||||||
|
const c1 = await convert(1000, 'USD', 'EUR');
|
||||||
|
console.log(` $1000 → ${c1?.result} EUR @ ${c1?.rate}`);
|
||||||
|
const c2 = await convert(500, 'EUR', 'USD');
|
||||||
|
console.log(` €500 → $${c2?.result} @ ${c2?.rate}`);
|
||||||
|
|
||||||
|
// Sanity: EUR→USD should be ≈ 1 / (USD→EUR), within rounding
|
||||||
|
if (usdEur && eurUsd) {
|
||||||
|
const drift = Math.abs(eurUsd - 1 / usdEur);
|
||||||
|
console.log(`4. Inverse-rate drift: ${drift.toFixed(6)} (≤0.001 = healthy)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('Currency test failed:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
135
src/app/(dashboard)/[portSlug]/admin/ai/page.tsx
Normal file
135
src/app/(dashboard)/[portSlug]/admin/ai/page.tsx
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
import Link from 'next/link';
|
||||||
|
import { Bot, Receipt, FileText, Brain, ExternalLink } from 'lucide-react';
|
||||||
|
|
||||||
|
import {
|
||||||
|
SettingsFormCard,
|
||||||
|
type SettingFieldDef,
|
||||||
|
} from '@/components/admin/shared/settings-form-card';
|
||||||
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
|
||||||
|
// Fields for the port-wide "Master controls" card: the global AI kill
// switch plus a soft monthly token budget.
const MASTER_FIELDS: SettingFieldDef[] = [
  {
    key: 'ai_enabled',
    label: 'AI features enabled',
    description:
      'Master switch. When OFF, every AI surface (receipt OCR fallback, berth-PDF AI parse, future embedding-driven recommendations) is bypassed. Provider keys stay configured but unused.',
    type: 'boolean',
    defaultValue: true,
  },
  {
    key: 'ai_monthly_token_cap',
    label: 'Monthly token cap (this port)',
    description:
      'Soft cap on total AI tokens consumed per calendar month across every feature. When exceeded, AI features fall back to non-AI paths and surface a banner. Set 0 for no cap.',
    type: 'number',
    defaultValue: 0, // 0 = uncapped, per the description above
  },
];
|
||||||
|
|
||||||
|
// Fields for the "Provider credentials" card: shared API key and the
// fallback model used when a feature does not specify its own.
const PROVIDER_FIELDS: SettingFieldDef[] = [
  {
    key: 'openai_api_key',
    label: 'OpenAI API key',
    description:
      'Used by Receipt OCR fallback and (future) berth-PDF AI parse. Stored AES-encrypted at rest; the field shows blank after save.',
    type: 'password', // rendered masked; see description re: encryption at rest
    placeholder: 'sk-…',
    defaultValue: '',
  },
  {
    key: 'openai_default_model',
    label: 'Default OpenAI model',
    description: 'Used when a feature does not specify an explicit model.',
    type: 'select',
    defaultValue: 'gpt-4o-mini',
    options: [
      { value: 'gpt-4o-mini', label: 'gpt-4o-mini — cheap, fast, vision-capable' },
      { value: 'gpt-4o', label: 'gpt-4o — full-strength multimodal' },
      { value: 'gpt-4-turbo', label: 'gpt-4-turbo — legacy text reasoning' },
    ],
  },
];
|
||||||
|
|
||||||
|
/** One card in the "Per-feature settings" grid below. */
interface FeatureLink {
  href: string; // relative route to the feature's admin page
  icon: typeof Bot; // lucide icon component rendered on the card
  title: string;
  description: string;
}
|
||||||
|
|
||||||
|
// Cards linking to each AI-using feature's own admin page. Routes are
// relative ('../…') so they resolve under the current port segment.
const FEATURE_LINKS: FeatureLink[] = [
  {
    href: '../ocr',
    icon: Receipt,
    title: 'Receipt OCR settings',
    description:
      'Provider, model, and confidence thresholds for the receipt scanner. AI fallback only runs when the on-device parser is uncertain.',
  },
  {
    href: '../berth-pdf-parser',
    icon: FileText,
    title: 'Berth PDF parser',
    description:
      'Three-tier AcroForm → OCR → AI pipeline. The AI pass costs tokens; reps invoke it manually when OCR confidence is low.',
  },
  {
    href: '../recommender',
    icon: Brain,
    title: 'Berth recommender',
    description:
      'Rule-based today; future versions will optionally use embeddings for soft preference matching. AI use is gated by the master switch above.',
  },
];
|
||||||
|
|
||||||
|
/**
 * Admin page: central hub for AI configuration. Renders the master
 * controls card, the shared provider-credentials card, and a grid of
 * links out to each feature-specific settings page.
 */
export default function AiAdminPage() {
  return (
    <div className="space-y-6">
      <PageHeader
        title="AI configuration"
        description="One place to manage every AI-using feature. Provider credentials and the master AI switch live here; per-feature thresholds remain in their dedicated pages, linked below."
        eyebrow="ADMIN"
      />

      <SettingsFormCard
        title="Master controls"
        description="Hard kill switch + budget guardrails covering every AI surface in this port."
        fields={MASTER_FIELDS}
      />

      <SettingsFormCard
        title="Provider credentials"
        description="Shared API keys used by AI-enabled features. Per-feature pages can override the model on a feature-by-feature basis."
        fields={PROVIDER_FIELDS}
      />

      <Card>
        <CardHeader>
          <CardTitle className="text-base flex items-center gap-2">
            <Bot className="h-4 w-4" /> Per-feature settings
          </CardTitle>
          <CardDescription>
            Feature-specific tuning lives on each feature's admin page. They all read the
            master switch + provider credentials configured above.
          </CardDescription>
        </CardHeader>
        <CardContent className="grid gap-3 sm:grid-cols-2 lg:grid-cols-3">
          {FEATURE_LINKS.map((f) => (
            <Link
              key={f.href}
              href={f.href as never}
              className="rounded-md border bg-card p-3 hover:border-primary transition-colors block"
            >
              <div className="flex items-center gap-2 text-sm font-medium">
                <f.icon className="h-4 w-4 text-muted-foreground" />
                {f.title}
                <ExternalLink className="ml-auto h-3 w-3 opacity-50" />
              </div>
              <p className="mt-1 text-xs text-muted-foreground">{f.description}</p>
            </Link>
          ))}
        </CardContent>
      </Card>
    </div>
  );
}
|
||||||
@@ -1,15 +1,15 @@
|
|||||||
|
import { BackupAdminPanel } from '@/components/admin/backup-admin-panel';
|
||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
|
||||||
export default function BackupManagementPage() {
|
export default function BackupManagementPage() {
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div className="space-y-6">
|
||||||
<PageHeader title="Backup Management" description="Manage system backups and restoration" />
|
<PageHeader
|
||||||
<div className="flex flex-col items-center justify-center rounded-lg border border-dashed p-12">
|
title="Backup & Restore"
|
||||||
<p className="text-lg font-medium text-muted-foreground">Coming in Layer 4</p>
|
eyebrow="ADMIN"
|
||||||
<p className="text-sm text-muted-foreground">
|
description="Trigger ad-hoc database snapshots, browse the history, and download a .dump file for offline restore."
|
||||||
This feature will be implemented in the next phase.
|
/>
|
||||||
</p>
|
<BackupAdminPanel />
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,28 @@ import {
|
|||||||
} from '@/components/admin/shared/settings-form-card';
|
} from '@/components/admin/shared/settings-form-card';
|
||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
|
||||||
|
const DEFAULT_EMAIL_HEADER_HTML = `<!-- Optional pre-body header -->
|
||||||
|
<table role="presentation" width="100%" cellpadding="0" cellspacing="0" style="border-collapse:collapse;">
|
||||||
|
<tr>
|
||||||
|
<td align="center" style="padding:16px 0;">
|
||||||
|
<a href="https://example.com" style="text-decoration:none;color:#1e293b;font-family:Arial,sans-serif;font-size:14px;font-weight:600;">
|
||||||
|
Your brand name
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>`;
|
||||||
|
|
||||||
|
const DEFAULT_EMAIL_FOOTER_HTML = `<!-- Optional sub-body footer -->
|
||||||
|
<table role="presentation" width="100%" cellpadding="0" cellspacing="0" style="border-collapse:collapse;">
|
||||||
|
<tr>
|
||||||
|
<td align="center" style="padding:24px 0;color:#64748b;font-family:Arial,sans-serif;font-size:12px;">
|
||||||
|
© ${new Date().getFullYear()} Your Company ·
|
||||||
|
<a href="https://example.com" style="color:#64748b;">Visit our website</a> ·
|
||||||
|
<a href="mailto:hello@example.com" style="color:#64748b;">hello@example.com</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>`;
|
||||||
|
|
||||||
const FIELDS: SettingFieldDef[] = [
|
const FIELDS: SettingFieldDef[] = [
|
||||||
{
|
{
|
||||||
key: 'branding_app_name',
|
key: 'branding_app_name',
|
||||||
@@ -15,11 +37,11 @@ const FIELDS: SettingFieldDef[] = [
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'branding_logo_url',
|
key: 'branding_logo_url',
|
||||||
label: 'Logo URL',
|
label: 'Logo',
|
||||||
description:
|
description:
|
||||||
'Public HTTPS URL of the logo used in email headers and the branded auth shell. Recommended size: 240×80 PNG with transparent background.',
|
'Used in email headers and the branded auth shell. Recommended: square PNG with transparent background.',
|
||||||
type: 'string',
|
type: 'image-upload',
|
||||||
placeholder: 'https://example.com/logo.png',
|
imageAspect: 1,
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -32,9 +54,11 @@ const FIELDS: SettingFieldDef[] = [
|
|||||||
{
|
{
|
||||||
key: 'branding_email_header_html',
|
key: 'branding_email_header_html',
|
||||||
label: 'Email header HTML',
|
label: 'Email header HTML',
|
||||||
description: 'Optional HTML rendered above each email body. Leave blank to use the default.',
|
description:
|
||||||
|
'Optional HTML rendered above each email body. Leave blank to use the default. Tap "Insert default" to start from the baseline template.',
|
||||||
type: 'html',
|
type: 'html',
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
|
defaultTemplate: DEFAULT_EMAIL_HEADER_HTML,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: 'branding_email_footer_html',
|
key: 'branding_email_footer_html',
|
||||||
@@ -42,6 +66,7 @@ const FIELDS: SettingFieldDef[] = [
|
|||||||
description: 'Optional HTML rendered at the very bottom of each email (above the signature).',
|
description: 'Optional HTML rendered at the very bottom of each email (above the signature).',
|
||||||
type: 'html',
|
type: 'html',
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
|
defaultTemplate: DEFAULT_EMAIL_FOOTER_HTML,
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|||||||
21
src/app/(dashboard)/[portSlug]/admin/brochures/page.tsx
Normal file
21
src/app/(dashboard)/[portSlug]/admin/brochures/page.tsx
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
import { BrochuresAdminPanel } from '@/components/admin/brochures-admin-panel';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Per-port admin page for managing brochures (Phase 7 §5.8).
|
||||||
|
*
|
||||||
|
* Lists brochures, lets per-port admins upload new versions via direct-to-
|
||||||
|
* storage presigned URLs (so the 20MB+ file never traverses Next.js's
|
||||||
|
* body-size limit — see §11.1), and toggle the default flag.
|
||||||
|
*/
|
||||||
|
export default function BrochuresAdminPage() {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<PageHeader
|
||||||
|
title="Brochures"
|
||||||
|
description="Port-wide marketing PDFs available to the sales send-out flow. The default brochure is the one /clients picker pre-selects."
|
||||||
|
/>
|
||||||
|
<BrochuresAdminPanel />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -21,6 +21,55 @@ const API_FIELDS: SettingFieldDef[] = [
|
|||||||
type: 'password',
|
type: 'password',
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
key: 'documenso_api_version_override',
|
||||||
|
label: 'API version',
|
||||||
|
description:
|
||||||
|
'Which Documenso REST API to call against this port. v1 supports Documenso 1.x (per-field PIXEL placement, /api/v1/templates and /api/v1/documents). v2 unlocks the envelope/embed endpoints introduced in Documenso 2.x. Use the test-connection button below after switching to confirm the chosen version actually works against this port’s instance.',
|
||||||
|
type: 'select',
|
||||||
|
options: [
|
||||||
|
{ value: 'v1', label: 'v1 — Documenso 1.x (legacy stable)' },
|
||||||
|
{ value: 'v2', label: 'v2 — Documenso 2.x (envelope + embedded signing)' },
|
||||||
|
],
|
||||||
|
defaultValue: 'v1',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const SIGNER_FIELDS: SettingFieldDef[] = [
|
||||||
|
{
|
||||||
|
key: 'documenso_developer_name',
|
||||||
|
label: 'Developer signer — name',
|
||||||
|
description:
|
||||||
|
'The party who signs after the client (typically the marina developer or owner). Used as the static "developer" recipient in templated documents (EOI). Was hardcoded as "David Mizrahi" in the legacy single-tenant system.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'David Mizrahi',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'documenso_developer_email',
|
||||||
|
label: 'Developer signer — email',
|
||||||
|
description: 'Email used to send the developer signing request via Documenso.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'dm@portnimara.com',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'documenso_approver_name',
|
||||||
|
label: 'Approver — name',
|
||||||
|
description:
|
||||||
|
'The final approver who signs after the developer (typically a sales/legal lead). Was hardcoded as "Abbie May" in the legacy system.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'Abbie May',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'documenso_approver_email',
|
||||||
|
label: 'Approver — email',
|
||||||
|
description: 'Email used to route the final approval signing request.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'sales@portnimara.com',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
const EOI_FIELDS: SettingFieldDef[] = [
|
const EOI_FIELDS: SettingFieldDef[] = [
|
||||||
@@ -44,6 +93,51 @@ const EOI_FIELDS: SettingFieldDef[] = [
|
|||||||
],
|
],
|
||||||
defaultValue: 'documenso-template',
|
defaultValue: 'documenso-template',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
key: 'eoi_send_mode',
|
||||||
|
label: 'Initial signing-invitation email behaviour',
|
||||||
|
description:
|
||||||
|
'Auto = the system sends our branded "please sign" email immediately when an EOI/contract/reservation is generated. Manual = the document is generated and the signing URL appears in the UI; a rep clicks "Send invitation" to dispatch. Auto is the lower-friction option for high-volume teams; manual lets reps review before sending. Applies to all document types, not just EOI.',
|
||||||
|
type: 'select',
|
||||||
|
options: [
|
||||||
|
{ value: 'manual', label: 'Manual (rep clicks Send after generation)' },
|
||||||
|
{ value: 'auto', label: 'Auto (send branded email on generate)' },
|
||||||
|
],
|
||||||
|
defaultValue: 'manual',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const CONTRACT_RESERVATION_FIELDS: SettingFieldDef[] = [
|
||||||
|
{
|
||||||
|
key: 'documenso_contract_template_id',
|
||||||
|
label: 'Contract Documenso template ID (optional)',
|
||||||
|
description:
|
||||||
|
'Numeric template ID for sales contract generation. Leave blank to use the per-deal upload-and-place-fields flow instead (the typical path for contracts, since they are usually drafted custom per client).',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: '',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'documenso_reservation_template_id',
|
||||||
|
label: 'Reservation agreement Documenso template ID (optional)',
|
||||||
|
description:
|
||||||
|
'Numeric template ID for reservation agreements. Same logic — leave blank to upload per deal.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: '',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const EMBED_FIELDS: SettingFieldDef[] = [
|
||||||
|
{
|
||||||
|
key: 'embedded_signing_host',
|
||||||
|
label: 'Embedded signing host',
|
||||||
|
description:
|
||||||
|
"Origin of the public site that hosts the embedded Documenso signing pages. Outbound emails wrap raw Documenso signing URLs into {host}/sign/<type>/<token> so clients sign on your branded page rather than Documenso's domain. Leave blank to fall back to the app URL. Marketing-website pattern: https://portnimara.com",
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'https://portnimara.com',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
export default function DocumensoSettingsPage() {
|
export default function DocumensoSettingsPage() {
|
||||||
@@ -51,7 +145,7 @@ export default function DocumensoSettingsPage() {
|
|||||||
<div className="space-y-6">
|
<div className="space-y-6">
|
||||||
<PageHeader
|
<PageHeader
|
||||||
title="Documenso & EOI"
|
title="Documenso & EOI"
|
||||||
description="API credentials and default EOI generation pathway. Use the test-connection button to verify a saved configuration before relying on it."
|
description="API credentials, signer identities, and document generation behaviour. Use the test-connection button to verify a saved configuration before relying on it."
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<SettingsFormCard
|
<SettingsFormCard
|
||||||
@@ -61,11 +155,29 @@ export default function DocumensoSettingsPage() {
|
|||||||
extra={<DocumensoTestButton />}
|
extra={<DocumensoTestButton />}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
<SettingsFormCard
|
||||||
|
title="Signers (developer + approver)"
|
||||||
|
description="Identity of the static signers in your Documenso templates. The client is always pulled from the interest's linked client record; these values fill the developer (signing order 2) and approver (signing order 3) slots."
|
||||||
|
fields={SIGNER_FIELDS}
|
||||||
|
/>
|
||||||
|
|
||||||
<SettingsFormCard
|
<SettingsFormCard
|
||||||
title="EOI generation"
|
title="EOI generation"
|
||||||
description="Default pathway and template used when an interest's EOI is generated."
|
description="Default pathway, template, and email behaviour when an interest's EOI is generated."
|
||||||
fields={EOI_FIELDS}
|
fields={EOI_FIELDS}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
<SettingsFormCard
|
||||||
|
title="Contract & reservation templates (optional)"
|
||||||
|
description="Most ports leave these blank because contracts/reservations are drafted per deal and uploaded for signing. Set a template ID only if you have a standardised contract/reservation Documenso template."
|
||||||
|
fields={CONTRACT_RESERVATION_FIELDS}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<SettingsFormCard
|
||||||
|
title="Embedded signing"
|
||||||
|
description="Where the public-facing branded signing pages live. The CRM rewrites Documenso signing URLs to point here when sending invitation and reminder emails."
|
||||||
|
fields={EMBED_FIELDS}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
5
src/app/(dashboard)/[portSlug]/admin/duplicates/page.tsx
Normal file
5
src/app/(dashboard)/[portSlug]/admin/duplicates/page.tsx
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import { DuplicatesReviewQueue } from '@/components/admin/duplicates/duplicates-review-queue';
|
||||||
|
|
||||||
|
export default function DuplicatesAdminPage() {
|
||||||
|
return <DuplicatesReviewQueue />;
|
||||||
|
}
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
import { EmailTemplatesAdmin } from '@/components/admin/email-templates-admin';
|
||||||
|
|
||||||
|
export default function EmailTemplatesPage() {
|
||||||
|
return <EmailTemplatesAdmin />;
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import {
|
|||||||
type SettingFieldDef,
|
type SettingFieldDef,
|
||||||
} from '@/components/admin/shared/settings-form-card';
|
} from '@/components/admin/shared/settings-form-card';
|
||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
import { SalesEmailConfigCard } from '@/components/admin/sales-email-config-card';
|
||||||
|
|
||||||
const FIELDS: SettingFieldDef[] = [
|
const FIELDS: SettingFieldDef[] = [
|
||||||
{
|
{
|
||||||
@@ -34,7 +35,7 @@ const FIELDS: SettingFieldDef[] = [
|
|||||||
label: 'Default signature (HTML)',
|
label: 'Default signature (HTML)',
|
||||||
description: 'Appended to the bottom of system-generated emails.',
|
description: 'Appended to the bottom of system-generated emails.',
|
||||||
type: 'html',
|
type: 'html',
|
||||||
placeholder: '<p>—<br>The Port Nimara team</p>',
|
placeholder: '<p>-<br>The Port Nimara team</p>',
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -71,7 +72,7 @@ const FIELDS: SettingFieldDef[] = [
|
|||||||
{
|
{
|
||||||
key: 'smtp_pass_override',
|
key: 'smtp_pass_override',
|
||||||
label: 'SMTP password override',
|
label: 'SMTP password override',
|
||||||
description: 'Optional. Stored in plain text — only set when overriding env credentials.',
|
description: 'Optional. Stored in plain text - only set when overriding env credentials.',
|
||||||
type: 'password',
|
type: 'password',
|
||||||
defaultValue: '',
|
defaultValue: '',
|
||||||
},
|
},
|
||||||
@@ -94,6 +95,7 @@ export default function EmailSettingsPage() {
|
|||||||
description="Optional per-port SMTP credentials. Leave blank to use the global env defaults."
|
description="Optional per-port SMTP credentials. Leave blank to use the global env defaults."
|
||||||
fields={FIELDS.slice(5)}
|
fields={FIELDS.slice(5)}
|
||||||
/>
|
/>
|
||||||
|
<SalesEmailConfigCard />
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
246
src/app/(dashboard)/[portSlug]/admin/errors/[requestId]/page.tsx
Normal file
246
src/app/(dashboard)/[portSlug]/admin/errors/[requestId]/page.tsx
Normal file
@@ -0,0 +1,246 @@
|
|||||||
|
'use client';
|
||||||
|
|
||||||
|
import Link from 'next/link';
|
||||||
|
import { useParams } from 'next/navigation';
|
||||||
|
import { useQuery } from '@tanstack/react-query';
|
||||||
|
import { format } from 'date-fns';
|
||||||
|
import { ArrowLeft, Copy, Wrench } from 'lucide-react';
|
||||||
|
import { toast } from 'sonner';
|
||||||
|
|
||||||
|
import type { Route } from 'next';
|
||||||
|
|
||||||
|
import { Badge } from '@/components/ui/badge';
|
||||||
|
import { ERROR_CODES, isErrorCode } from '@/lib/error-codes';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
import { Skeleton } from '@/components/ui/skeleton';
|
||||||
|
import { apiFetch } from '@/lib/api/client';
|
||||||
|
import type { ErrorEvent } from '@/lib/db/schema/system';
|
||||||
|
import type { LikelyCulprit } from '@/lib/error-classifier';
|
||||||
|
|
||||||
|
interface DetailResponse {
|
||||||
|
data: ErrorEvent & { likelyCulprit: LikelyCulprit | null };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detail view for a single captured error. Shows everything an admin
|
||||||
|
* needs to triage:
|
||||||
|
*
|
||||||
|
* - Request shape: method, path, status, duration, who fired it
|
||||||
|
* - Error: name, message, full stack head, (sanitized) request body
|
||||||
|
* - Likely-culprit hint: heuristic-driven plain-English root-cause
|
||||||
|
* - Raw metadata: pg SQLSTATE codes, internal-message debug strings
|
||||||
|
*/
|
||||||
|
export default function ErrorEventDetailPage() {
|
||||||
|
const params = useParams<{ portSlug: string; requestId: string }>();
|
||||||
|
const portSlug = params?.portSlug ?? '';
|
||||||
|
const requestId = params?.requestId ?? '';
|
||||||
|
|
||||||
|
const query = useQuery<DetailResponse>({
|
||||||
|
queryKey: ['admin', 'error-events', requestId],
|
||||||
|
queryFn: () => apiFetch<DetailResponse>(`/api/v1/admin/error-events/${requestId}`),
|
||||||
|
enabled: Boolean(requestId),
|
||||||
|
});
|
||||||
|
|
||||||
|
function copy(text: string, label: string) {
|
||||||
|
if (typeof navigator === 'undefined' || !navigator.clipboard) return;
|
||||||
|
void navigator.clipboard.writeText(text);
|
||||||
|
toast.success(`${label} copied`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (query.isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="space-y-3">
|
||||||
|
<Skeleton className="h-8 w-48" />
|
||||||
|
<Skeleton className="h-32 w-full" />
|
||||||
|
<Skeleton className="h-64 w-full" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const event = query.data?.data;
|
||||||
|
if (!event) {
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="py-12 text-center text-sm text-muted-foreground">
|
||||||
|
Error event not found. It may have been pruned or you may not have access.
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div>
|
||||||
|
<Button variant="ghost" size="sm" asChild>
|
||||||
|
<Link href={`/${portSlug}/admin/errors` as Route}>
|
||||||
|
<ArrowLeft className="mr-1.5 h-4 w-4" />
|
||||||
|
Back to error list
|
||||||
|
</Link>
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-2 flex-wrap">
|
||||||
|
<h1 className="text-2xl font-bold">Error {requestId.slice(0, 8)}…</h1>
|
||||||
|
<Badge
|
||||||
|
variant="outline"
|
||||||
|
className={
|
||||||
|
event.statusCode >= 500
|
||||||
|
? 'border-destructive/40 text-destructive'
|
||||||
|
: 'border-amber-300 text-amber-800'
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{event.statusCode}
|
||||||
|
</Badge>
|
||||||
|
{event.likelyCulprit && (
|
||||||
|
<Badge variant="secondary" className="gap-1">
|
||||||
|
<Wrench className="h-3 w-3" />
|
||||||
|
{event.likelyCulprit.label}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
<Button size="sm" variant="ghost" onClick={() => copy(requestId, 'Reference ID')}>
|
||||||
|
<Copy className="mr-1.5 h-3 w-3" />
|
||||||
|
Copy ID
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{event.likelyCulprit && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium flex items-center gap-2">
|
||||||
|
<Wrench className="h-4 w-4" /> Likely culprit
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="text-sm">
|
||||||
|
<p className="font-medium">{event.likelyCulprit.label}</p>
|
||||||
|
<p className="text-muted-foreground mt-1">{event.likelyCulprit.hint}</p>
|
||||||
|
<p className="text-xs text-muted-foreground mt-2">
|
||||||
|
Subsystem: <code className="font-mono">{event.likelyCulprit.subsystem}</code>
|
||||||
|
</p>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* If the captured error has a registered code on its metadata,
|
||||||
|
* surface the canonical user-facing message + status from the
|
||||||
|
* registry so the admin can compare what the user saw to what
|
||||||
|
* the system actually did. */}
|
||||||
|
{(() => {
|
||||||
|
const meta = (event.metadata ?? {}) as Record<string, unknown>;
|
||||||
|
const code = typeof meta.code === 'string' ? meta.code : null;
|
||||||
|
if (!code || !isErrorCode(code)) return null;
|
||||||
|
const def = ERROR_CODES[code];
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium">Error code</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-1 text-sm">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Badge variant="outline">{def.status}</Badge>
|
||||||
|
<code className="font-mono text-xs font-semibold">{code}</code>
|
||||||
|
</div>
|
||||||
|
<p className="mt-2">{def.userMessage}</p>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
Compare to the message the user saw in their toast.{' '}
|
||||||
|
<Link
|
||||||
|
href={`/${portSlug}/admin/errors/codes` as Route}
|
||||||
|
className="text-primary hover:underline"
|
||||||
|
>
|
||||||
|
All codes →
|
||||||
|
</Link>
|
||||||
|
</p>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
})()}
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium">Request</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="grid grid-cols-1 md:grid-cols-2 gap-3 text-sm">
|
||||||
|
<KV label="Method" value={event.method} />
|
||||||
|
<KV label="Path" value={event.path} mono />
|
||||||
|
<KV label="When" value={format(new Date(event.createdAt), 'PPpp')} />
|
||||||
|
<KV label="Duration" value={event.durationMs ? `${event.durationMs} ms` : '—'} />
|
||||||
|
<KV label="Port" value={event.portId ?? '(none)'} mono />
|
||||||
|
<KV label="User" value={event.userId ?? '(none)'} mono />
|
||||||
|
<KV label="IP" value={event.ipAddress ?? '—'} mono />
|
||||||
|
<KV label="User agent" value={event.userAgent ?? '—'} />
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium">Error</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-3 text-sm">
|
||||||
|
<KV label="Name" value={event.errorName ?? '—'} mono />
|
||||||
|
<div>
|
||||||
|
<p className="text-xs text-muted-foreground">Message</p>
|
||||||
|
<p className="mt-0.5 font-mono whitespace-pre-wrap break-words">
|
||||||
|
{event.errorMessage ?? '—'}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{event.errorStack && (
|
||||||
|
<div>
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<p className="text-xs text-muted-foreground">Stack (truncated)</p>
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost"
|
||||||
|
onClick={() => copy(event.errorStack ?? '', 'Stack')}
|
||||||
|
>
|
||||||
|
<Copy className="mr-1.5 h-3 w-3" /> Copy
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
<pre className="mt-1 max-h-96 overflow-auto rounded bg-muted p-2 text-xs font-mono whitespace-pre-wrap break-words">
|
||||||
|
{event.errorStack}
|
||||||
|
</pre>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{event.requestBodyExcerpt && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium">
|
||||||
|
Request body (sanitized, max 1 KB)
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<pre className="max-h-64 overflow-auto rounded bg-muted p-2 text-xs font-mono whitespace-pre-wrap break-words">
|
||||||
|
{event.requestBodyExcerpt}
|
||||||
|
</pre>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{event.metadata !== null &&
|
||||||
|
typeof event.metadata === 'object' &&
|
||||||
|
Object.keys(event.metadata as Record<string, unknown>).length > 0 && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium">Metadata</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<pre className="overflow-auto rounded bg-muted p-2 text-xs font-mono">
|
||||||
|
{JSON.stringify(event.metadata, null, 2)}
|
||||||
|
</pre>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function KV({ label, value, mono }: { label: string; value: string | null; mono?: boolean }) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<p className="text-xs text-muted-foreground">{label}</p>
|
||||||
|
<p className={`mt-0.5 ${mono ? 'font-mono text-xs' : ''}`}>{value ?? '—'}</p>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
134
src/app/(dashboard)/[portSlug]/admin/errors/codes/page.tsx
Normal file
134
src/app/(dashboard)/[portSlug]/admin/errors/codes/page.tsx
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
'use client';
|
||||||
|
|
||||||
|
import { useState, useMemo } from 'react';
|
||||||
|
import Link from 'next/link';
|
||||||
|
import { useParams } from 'next/navigation';
|
||||||
|
import { ArrowLeft, BookOpen, Search } from 'lucide-react';
|
||||||
|
|
||||||
|
import type { Route } from 'next';
|
||||||
|
|
||||||
|
import { Badge } from '@/components/ui/badge';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
import { Input } from '@/components/ui/input';
|
||||||
|
import { ERROR_CODES } from '@/lib/error-codes';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Error-code reference page surfaced inside the admin section so an
|
||||||
|
* admin investigating a captured error_events row can flip to this
|
||||||
|
* tab, look up the code the user reported, and read the canonical
|
||||||
|
* plain-language meaning + status code without leaving the app.
|
||||||
|
*
|
||||||
|
* Pulls directly from `src/lib/error-codes.ts` so it stays in sync
|
||||||
|
* automatically — adding an entry to the registry adds a row here.
|
||||||
|
*/
|
||||||
|
export default function ErrorCodeReferencePage() {
|
||||||
|
const params = useParams<{ portSlug: string }>();
|
||||||
|
const portSlug = params?.portSlug ?? '';
|
||||||
|
const [search, setSearch] = useState('');
|
||||||
|
|
||||||
|
const entries = useMemo(() => {
|
||||||
|
const all = Object.entries(ERROR_CODES) as Array<
|
||||||
|
[string, (typeof ERROR_CODES)[keyof typeof ERROR_CODES]]
|
||||||
|
>;
|
||||||
|
if (!search.trim()) return all;
|
||||||
|
const q = search.trim().toLowerCase();
|
||||||
|
return all.filter(
|
||||||
|
([code, def]) => code.toLowerCase().includes(q) || def.userMessage.toLowerCase().includes(q),
|
||||||
|
);
|
||||||
|
}, [search]);
|
||||||
|
|
||||||
|
// Group by domain prefix (the part before the first underscore) so
|
||||||
|
// the table reads naturally — Expenses, Berths, Storage, etc.
|
||||||
|
const grouped = useMemo(() => {
|
||||||
|
const groups = new Map<string, typeof entries>();
|
||||||
|
for (const entry of entries) {
|
||||||
|
const prefix = entry[0].split('_')[0] ?? 'OTHER';
|
||||||
|
const bucket = groups.get(prefix) ?? [];
|
||||||
|
bucket.push(entry);
|
||||||
|
groups.set(prefix, bucket);
|
||||||
|
}
|
||||||
|
return [...groups.entries()].sort(([a], [b]) => a.localeCompare(b));
|
||||||
|
}, [entries]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Button variant="ghost" size="sm" asChild>
|
||||||
|
<Link href={`/${portSlug}/admin/errors` as Route}>
|
||||||
|
<ArrowLeft className="mr-1.5 h-4 w-4" />
|
||||||
|
Back to error inspector
|
||||||
|
</Link>
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-start justify-between gap-4 flex-wrap">
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold flex items-center gap-2">
|
||||||
|
<BookOpen className="h-5 w-5" /> Error code reference
|
||||||
|
</h1>
|
||||||
|
<p className="text-muted-foreground text-sm mt-1">
|
||||||
|
Every error code the platform can return, with its HTTP status and the plain-language
|
||||||
|
message a user sees. Codes are stable identifiers — once shipped, they never get
|
||||||
|
renamed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="relative max-w-md">
|
||||||
|
<Search className="pointer-events-none absolute left-2 top-1/2 -translate-y-1/2 h-4 w-4 text-muted-foreground" />
|
||||||
|
<Input
|
||||||
|
placeholder="Search code or message…"
|
||||||
|
value={search}
|
||||||
|
onChange={(e) => setSearch(e.target.value)}
|
||||||
|
className="pl-8"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{grouped.length === 0 ? (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="py-12 text-center text-sm text-muted-foreground">
|
||||||
|
No codes match "{search}".
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
) : (
|
||||||
|
<div className="space-y-4">
|
||||||
|
{grouped.map(([prefix, items]) => (
|
||||||
|
<Card key={prefix}>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium uppercase tracking-wider text-muted-foreground">
|
||||||
|
{prefix}
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="divide-y">
|
||||||
|
{items.map(([code, def]) => (
|
||||||
|
<div key={code} className="flex items-start gap-3 py-3 first:pt-0 last:pb-0">
|
||||||
|
<Badge
|
||||||
|
variant="outline"
|
||||||
|
className={
|
||||||
|
def.status >= 500
|
||||||
|
? 'border-destructive/40 text-destructive'
|
||||||
|
: def.status >= 400
|
||||||
|
? 'border-amber-300 text-amber-800'
|
||||||
|
: 'border-muted'
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{def.status}
|
||||||
|
</Badge>
|
||||||
|
<div className="flex-1 min-w-0">
|
||||||
|
<p className="font-mono text-xs font-semibold">{code}</p>
|
||||||
|
<p className="text-sm mt-0.5">{def.userMessage}</p>
|
||||||
|
{'hint' in def && typeof def.hint === 'string' && (
|
||||||
|
<p className="text-xs text-muted-foreground mt-0.5">{def.hint}</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
157
src/app/(dashboard)/[portSlug]/admin/errors/page.tsx
Normal file
157
src/app/(dashboard)/[portSlug]/admin/errors/page.tsx
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
'use client';
|
||||||
|
|
||||||
|
import { useState } from 'react';
|
||||||
|
import Link from 'next/link';
|
||||||
|
import { useParams } from 'next/navigation';
|
||||||
|
import { useQuery } from '@tanstack/react-query';
|
||||||
|
import { format, formatDistanceToNow } from 'date-fns';
|
||||||
|
import { AlertTriangle, BookOpen, Search, Wrench } from 'lucide-react';
|
||||||
|
|
||||||
|
import type { Route } from 'next';
|
||||||
|
|
||||||
|
import { Badge } from '@/components/ui/badge';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
import { Input } from '@/components/ui/input';
|
||||||
|
import { Skeleton } from '@/components/ui/skeleton';
|
||||||
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
import { EmptyState } from '@/components/shared/empty-state';
|
||||||
|
import { apiFetch } from '@/lib/api/client';
|
||||||
|
import { classifyError } from '@/lib/error-classifier';
|
||||||
|
import type { ErrorEvent } from '@/lib/db/schema/system';
|
||||||
|
|
||||||
|
interface ListResponse {
|
||||||
|
data: ErrorEvent[];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Super-admin error inspector.
|
||||||
|
*
|
||||||
|
* Shows the most recent captured 5xx errors with: when, where (HTTP
|
||||||
|
* method + path), what (error name + message), and a heuristic
|
||||||
|
* "likely culprit" badge driven by `classifyError`. Click into any
|
||||||
|
* row for the full stack + body excerpt + raw metadata.
|
||||||
|
*/
|
||||||
|
export default function AdminErrorsPage() {
|
||||||
|
const params = useParams<{ portSlug: string }>();
|
||||||
|
const portSlug = params?.portSlug ?? '';
|
||||||
|
const [statusFilter, setStatusFilter] = useState('');
|
||||||
|
|
||||||
|
const query = useQuery<ListResponse>({
|
||||||
|
queryKey: ['admin', 'error-events', { statusFilter }],
|
||||||
|
queryFn: () => {
|
||||||
|
const search = new URLSearchParams();
|
||||||
|
if (statusFilter) search.set('statusCode', statusFilter);
|
||||||
|
return apiFetch<ListResponse>(
|
||||||
|
`/api/v1/admin/error-events${search.toString() ? `?${search.toString()}` : ''}`,
|
||||||
|
);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const events = query.data?.data ?? [];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<PageHeader
|
||||||
|
title="Error inspector"
|
||||||
|
description="Captured 5xx errors. Click any row for the full stack, request body excerpt, and likely culprit."
|
||||||
|
actions={
|
||||||
|
<Button variant="outline" size="sm" asChild>
|
||||||
|
<Link href={`/${portSlug}/admin/errors/codes` as Route}>
|
||||||
|
<BookOpen className="mr-1.5 h-4 w-4" />
|
||||||
|
Code reference
|
||||||
|
</Link>
|
||||||
|
</Button>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-sm font-medium flex items-center gap-2">
|
||||||
|
<Search className="h-4 w-4" /> Filters
|
||||||
|
</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="flex flex-wrap items-end gap-3">
|
||||||
|
<div className="space-y-1">
|
||||||
|
<label className="text-xs text-muted-foreground" htmlFor="status">
|
||||||
|
Status code
|
||||||
|
</label>
|
||||||
|
<Input
|
||||||
|
id="status"
|
||||||
|
placeholder="e.g. 500"
|
||||||
|
value={statusFilter}
|
||||||
|
onChange={(e) => setStatusFilter(e.target.value.replace(/\D/g, ''))}
|
||||||
|
className="h-8 w-32"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
{statusFilter && (
|
||||||
|
<Button variant="ghost" size="sm" className="h-8" onClick={() => setStatusFilter('')}>
|
||||||
|
Clear
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{query.isLoading ? (
|
||||||
|
<div className="space-y-2">
|
||||||
|
{Array.from({ length: 5 }).map((_, i) => (
|
||||||
|
<Skeleton key={i} className="h-14 w-full" />
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
) : events.length === 0 ? (
|
||||||
|
<EmptyState
|
||||||
|
icon={AlertTriangle}
|
||||||
|
title="No captured errors"
|
||||||
|
description="Nothing has hit a 5xx in the selected window. That's a good thing."
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<div className="rounded-lg border divide-y">
|
||||||
|
{events.map((event) => {
|
||||||
|
const culprit = classifyError(event);
|
||||||
|
return (
|
||||||
|
<Link
|
||||||
|
key={event.requestId}
|
||||||
|
href={`/${portSlug}/admin/errors/${event.requestId}` as Route}
|
||||||
|
className="flex items-start gap-3 p-3 hover:bg-muted/40"
|
||||||
|
>
|
||||||
|
<div className="flex-1 min-w-0">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Badge
|
||||||
|
variant="outline"
|
||||||
|
className={
|
||||||
|
event.statusCode >= 500
|
||||||
|
? 'border-destructive/40 text-destructive'
|
||||||
|
: 'border-amber-300 text-amber-800'
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{event.statusCode}
|
||||||
|
</Badge>
|
||||||
|
<span className="text-xs font-mono uppercase text-muted-foreground">
|
||||||
|
{event.method}
|
||||||
|
</span>
|
||||||
|
<span className="text-sm font-medium truncate">{event.path}</span>
|
||||||
|
{culprit && (
|
||||||
|
<Badge variant="secondary" className="gap-1 text-xs">
|
||||||
|
<Wrench className="h-3 w-3" />
|
||||||
|
{culprit.label}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground truncate mt-0.5">
|
||||||
|
{event.errorName ? `${event.errorName}: ` : ''}
|
||||||
|
{event.errorMessage ?? '(no message)'}
|
||||||
|
</p>
|
||||||
|
<p className="text-xs text-muted-foreground mt-0.5">
|
||||||
|
{formatDistanceToNow(new Date(event.createdAt), { addSuffix: true })} ·{' '}
|
||||||
|
{format(new Date(event.createdAt), 'MMM d HH:mm:ss')} · ID{' '}
|
||||||
|
<code className="font-mono">{event.requestId.slice(0, 12)}…</code>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</Link>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -1,14 +1,75 @@
|
|||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
|
||||||
export default function DataImportPage() {
|
export default function DataImportPage() {
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div>
|
||||||
<PageHeader title="Data Import" description="Import data from external sources" />
|
<PageHeader
|
||||||
<div className="flex flex-col items-center justify-center rounded-lg border border-dashed p-12">
|
title="Data import"
|
||||||
<p className="text-lg font-medium text-muted-foreground">Coming in Layer 4</p>
|
description="What you can import today and what an in-app importer will look like."
|
||||||
<p className="text-sm text-muted-foreground">
|
/>
|
||||||
This feature will be implemented in the next phase.
|
|
||||||
|
<div className="grid gap-4 mt-6 lg:grid-cols-2">
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>Available imports today</CardTitle>
|
||||||
|
<CardDescription>Run from the command line until the UI catches up.</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-3 text-sm">
|
||||||
|
<div>
|
||||||
|
<p>
|
||||||
|
<strong>Berths from NocoDB:</strong>
|
||||||
</p>
|
</p>
|
||||||
|
<pre className="bg-muted/40 rounded-md p-2 text-xs mt-1 overflow-auto">
|
||||||
|
pnpm tsx scripts/import-berths-from-nocodb.ts --apply --port-slug port-nimara
|
||||||
|
</pre>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
|
Idempotent. Skips rows where <code>updated_at > last_imported_at</code> unless
|
||||||
|
you pass <code>--force</code>. Add <code>--update-snapshot</code> to also rewrite{' '}
|
||||||
|
<code>src/lib/db/seed-data/berths.json</code>.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<p>
|
||||||
|
<strong>Storage backend migration:</strong>
|
||||||
|
</p>
|
||||||
|
<pre className="bg-muted/40 rounded-md p-2 text-xs mt-1 overflow-auto">
|
||||||
|
pnpm tsx scripts/migrate-storage.ts
|
||||||
|
</pre>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
|
Run after switching <code>system_settings.storage_backend</code> in System Settings.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<p>
|
||||||
|
<strong>Seed (rebuild dev fixtures):</strong>
|
||||||
|
</p>
|
||||||
|
<pre className="bg-muted/40 rounded-md p-2 text-xs mt-1 overflow-auto">
|
||||||
|
pnpm db:seed
|
||||||
|
</pre>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>What this page will become</CardTitle>
|
||||||
|
<CardDescription>Planned UI for self-serve imports.</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-2 text-sm">
|
||||||
|
<ul className="list-disc pl-5 space-y-1">
|
||||||
|
<li>Drag-and-drop CSV / XLSX upload with column-mapping UI.</li>
|
||||||
|
<li>Dry-run preview that shows new vs. matched-existing rows before commit.</li>
|
||||||
|
<li>Conflict-resolution choices (skip, update, dedup-by-email) per import type.</li>
|
||||||
|
<li>Per-port import history with rollback.</li>
|
||||||
|
<li>Templates for clients, yachts, companies, berths, reservations, expenses.</li>
|
||||||
|
</ul>
|
||||||
|
<p className="text-xs text-muted-foreground pt-2">
|
||||||
|
Imports run against the BullMQ <code>import</code> queue (concurrency 1) so partial
|
||||||
|
failures don’t leave the database half-loaded.
|
||||||
|
</p>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
5
src/app/(dashboard)/[portSlug]/admin/inquiries/page.tsx
Normal file
5
src/app/(dashboard)/[portSlug]/admin/inquiries/page.tsx
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import { InquiryInbox } from '@/components/admin/inquiry-inbox';
|
||||||
|
|
||||||
|
export default function InquiriesPage() {
|
||||||
|
return <InquiryInbox />;
|
||||||
|
}
|
||||||
36
src/app/(dashboard)/[portSlug]/admin/layout.tsx
Normal file
36
src/app/(dashboard)/[portSlug]/admin/layout.tsx
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import { redirect } from 'next/navigation';
|
||||||
|
import { headers } from 'next/headers';
|
||||||
|
import { eq } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { auth } from '@/lib/auth';
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { userProfiles } from '@/lib/db/schema/users';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Guard: only super-admins (isSuperAdmin === true in user_profiles) may access
|
||||||
|
* any page under /[portSlug]/admin. Everyone else is redirected to their dashboard.
|
||||||
|
*/
|
||||||
|
export default async function AdminLayout({
|
||||||
|
children,
|
||||||
|
params,
|
||||||
|
}: {
|
||||||
|
children: React.ReactNode;
|
||||||
|
params: Promise<{ portSlug: string }>;
|
||||||
|
}) {
|
||||||
|
const { portSlug } = await params;
|
||||||
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
|
|
||||||
|
if (!session?.user) {
|
||||||
|
redirect('/login');
|
||||||
|
}
|
||||||
|
|
||||||
|
const profile = await db.query.userProfiles.findFirst({
|
||||||
|
where: eq(userProfiles.userId, session.user.id),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!profile?.isSuperAdmin) {
|
||||||
|
redirect(`/${portSlug}/dashboard`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return <>{children}</>;
|
||||||
|
}
|
||||||
@@ -1,15 +1,14 @@
|
|||||||
|
import { OnboardingChecklist } from '@/components/admin/onboarding-checklist';
|
||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
|
||||||
export default function OnboardingPage() {
|
export default function OnboardingPage() {
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div>
|
||||||
<PageHeader title="Onboarding" description="Guided setup for new port configurations" />
|
<PageHeader
|
||||||
<div className="flex flex-col items-center justify-center rounded-lg border border-dashed p-12">
|
title="Port onboarding"
|
||||||
<p className="text-lg font-medium text-muted-foreground">Coming in Layer 4</p>
|
description="Bring a new port live. Each step links to the right admin page; checks update automatically once you've configured the underlying setting."
|
||||||
<p className="text-sm text-muted-foreground">
|
/>
|
||||||
This feature will be implemented in the next phase.
|
<OnboardingChecklist />
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import {
|
|||||||
Database,
|
Database,
|
||||||
FileText,
|
FileText,
|
||||||
HardDrive,
|
HardDrive,
|
||||||
|
Inbox,
|
||||||
Key,
|
Key,
|
||||||
LayoutDashboard,
|
LayoutDashboard,
|
||||||
Mail,
|
Mail,
|
||||||
@@ -16,7 +17,9 @@ import {
|
|||||||
Tag,
|
Tag,
|
||||||
Upload,
|
Upload,
|
||||||
Users,
|
Users,
|
||||||
|
UsersRound,
|
||||||
Webhook,
|
Webhook,
|
||||||
|
Globe,
|
||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
|
|
||||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
@@ -29,7 +32,17 @@ interface AdminSection {
|
|||||||
icon: typeof Settings;
|
icon: typeof Settings;
|
||||||
}
|
}
|
||||||
|
|
||||||
const SECTIONS: AdminSection[] = [
|
interface AdminGroup {
|
||||||
|
title: string;
|
||||||
|
description: string;
|
||||||
|
sections: AdminSection[];
|
||||||
|
}
|
||||||
|
|
||||||
|
const GROUPS: AdminGroup[] = [
|
||||||
|
{
|
||||||
|
title: 'Access',
|
||||||
|
description: 'Who can sign in and what they can do once they do.',
|
||||||
|
sections: [
|
||||||
{
|
{
|
||||||
href: 'users',
|
href: 'users',
|
||||||
label: 'Users',
|
label: 'Users',
|
||||||
@@ -48,12 +61,12 @@ const SECTIONS: AdminSection[] = [
|
|||||||
description: 'Default permission sets and per-port role overrides.',
|
description: 'Default permission sets and per-port role overrides.',
|
||||||
icon: Shield,
|
icon: Shield,
|
||||||
},
|
},
|
||||||
{
|
],
|
||||||
href: 'audit',
|
|
||||||
label: 'Audit Log',
|
|
||||||
description: 'Searchable log of every authenticated mutation in the system.',
|
|
||||||
icon: ScrollText,
|
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
title: 'Configuration',
|
||||||
|
description: 'Branding, integrations, and per-port settings.',
|
||||||
|
sections: [
|
||||||
{
|
{
|
||||||
href: 'email',
|
href: 'email',
|
||||||
label: 'Email Settings',
|
label: 'Email Settings',
|
||||||
@@ -90,6 +103,12 @@ const SECTIONS: AdminSection[] = [
|
|||||||
description: 'Outgoing webhook subscriptions, secrets, and delivery log.',
|
description: 'Outgoing webhook subscriptions, secrets, and delivery log.',
|
||||||
icon: Webhook,
|
icon: Webhook,
|
||||||
},
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Content',
|
||||||
|
description: 'Forms, templates, and labels that users see.',
|
||||||
|
sections: [
|
||||||
{
|
{
|
||||||
href: 'forms',
|
href: 'forms',
|
||||||
label: 'Forms',
|
label: 'Forms',
|
||||||
@@ -102,6 +121,12 @@ const SECTIONS: AdminSection[] = [
|
|||||||
description: 'PDF + email templates with merge-field placeholders.',
|
description: 'PDF + email templates with merge-field placeholders.',
|
||||||
icon: FileText,
|
icon: FileText,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
href: 'email-templates',
|
||||||
|
label: 'Email Templates',
|
||||||
|
description: 'Customize subject lines for transactional emails (portal, inquiry, invite).',
|
||||||
|
icon: Mail,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
href: 'tags',
|
href: 'tags',
|
||||||
label: 'Tags',
|
label: 'Tags',
|
||||||
@@ -114,6 +139,49 @@ const SECTIONS: AdminSection[] = [
|
|||||||
description: 'Tenant-defined fields for clients, yachts, and reservations.',
|
description: 'Tenant-defined fields for clients, yachts, and reservations.',
|
||||||
icon: Key,
|
icon: Key,
|
||||||
},
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Data Quality',
|
||||||
|
description: 'Cleanup, imports, and the audit trail.',
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
href: 'inquiries',
|
||||||
|
label: 'Inquiry Inbox',
|
||||||
|
description:
|
||||||
|
'Submissions captured from the public marketing site (berth, residence, contact).',
|
||||||
|
icon: Inbox,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
href: 'sends',
|
||||||
|
label: 'Send Log',
|
||||||
|
description: 'Brochure and per-berth PDF sends, with delivery failures surfaced for retry.',
|
||||||
|
icon: Mail,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
href: 'duplicates',
|
||||||
|
label: 'Duplicates',
|
||||||
|
description: 'Review queue of suspected duplicate clients flagged by the dedup engine.',
|
||||||
|
icon: UsersRound,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
href: 'import',
|
||||||
|
label: 'Bulk Import',
|
||||||
|
description: 'CSV-driven imports for clients, yachts, and reservations.',
|
||||||
|
icon: Upload,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
href: 'audit',
|
||||||
|
label: 'Audit Log',
|
||||||
|
description: 'Searchable log of every authenticated mutation in the system.',
|
||||||
|
icon: ScrollText,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Operations',
|
||||||
|
description: 'Health checks and disaster recovery.',
|
||||||
|
sections: [
|
||||||
{
|
{
|
||||||
href: 'reports',
|
href: 'reports',
|
||||||
label: 'Reports',
|
label: 'Reports',
|
||||||
@@ -126,18 +194,25 @@ const SECTIONS: AdminSection[] = [
|
|||||||
description: 'BullMQ queue health, throughput, and retry diagnostics.',
|
description: 'BullMQ queue health, throughput, and retry diagnostics.',
|
||||||
icon: Database,
|
icon: Database,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
href: 'import',
|
|
||||||
label: 'Bulk Import',
|
|
||||||
description: 'CSV-driven imports for clients, yachts, and reservations.',
|
|
||||||
icon: Upload,
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
href: 'backup',
|
href: 'backup',
|
||||||
label: 'Backup & Restore',
|
label: 'Backup & Restore',
|
||||||
description: 'Database snapshots and on-demand exports.',
|
description: 'Backup posture + retention policy (read-only).',
|
||||||
icon: HardDrive,
|
icon: HardDrive,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
href: 'storage',
|
||||||
|
label: 'Storage Backend',
|
||||||
|
description:
|
||||||
|
'Choose between S3-compatible object store or local filesystem; migrate between them.',
|
||||||
|
icon: HardDrive,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Tenancy',
|
||||||
|
description: 'Multi-port and multi-install scaffolding.',
|
||||||
|
sections: [
|
||||||
{
|
{
|
||||||
href: 'ports',
|
href: 'ports',
|
||||||
label: 'Ports',
|
label: 'Ports',
|
||||||
@@ -146,16 +221,44 @@ const SECTIONS: AdminSection[] = [
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
href: 'onboarding',
|
href: 'onboarding',
|
||||||
label: 'Onboarding',
|
label: 'Onboarding checklist',
|
||||||
description: 'Initial-setup wizard for fresh ports.',
|
description: 'Setup checklist for fresh ports (read-only references).',
|
||||||
icon: LayoutDashboard,
|
icon: LayoutDashboard,
|
||||||
},
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: 'Integrations',
|
||||||
|
description: 'Third-party providers wired into the app.',
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
href: 'ai',
|
||||||
|
label: 'AI configuration',
|
||||||
|
description:
|
||||||
|
'Master switch + provider credentials shared by every AI surface (OCR, berth-PDF parser, future recommender embeddings).',
|
||||||
|
icon: ScrollText,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
href: 'ocr',
|
href: 'ocr',
|
||||||
label: 'Receipt OCR',
|
label: 'Receipt OCR (per-feature)',
|
||||||
description: 'Configure the AI provider used by the mobile receipt scanner.',
|
description: 'Provider, model, and confidence thresholds for the receipt scanner.',
|
||||||
icon: ScrollText,
|
icon: ScrollText,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
href: 'website-analytics',
|
||||||
|
label: 'Website analytics (Umami)',
|
||||||
|
description: 'Per-port Umami URL, API token, and Website ID.',
|
||||||
|
icon: Globe,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
href: 'residential-stages',
|
||||||
|
label: 'Residential pipeline stages',
|
||||||
|
description:
|
||||||
|
'Configure stages residential interests flow through. Removing a stage with active interests prompts for reassignment.',
|
||||||
|
icon: ScrollText,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
export default async function AdminLandingPage({
|
export default async function AdminLandingPage({
|
||||||
@@ -165,13 +268,21 @@ export default async function AdminLandingPage({
|
|||||||
}) {
|
}) {
|
||||||
const { portSlug } = await params;
|
const { portSlug } = await params;
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div className="space-y-8">
|
||||||
<PageHeader
|
<PageHeader
|
||||||
title="Administration"
|
title="Administration"
|
||||||
description="Per-port configuration and system administration. Each card below opens a dedicated settings page."
|
description="Per-port configuration and system administration. Each card below opens a dedicated settings page."
|
||||||
/>
|
/>
|
||||||
|
{GROUPS.map((group) => (
|
||||||
|
<section key={group.title} className="space-y-3">
|
||||||
|
<div>
|
||||||
|
<h2 className="text-xs font-semibold uppercase tracking-wider text-muted-foreground">
|
||||||
|
{group.title}
|
||||||
|
</h2>
|
||||||
|
<p className="text-xs text-muted-foreground/80">{group.description}</p>
|
||||||
|
</div>
|
||||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-4">
|
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||||
{SECTIONS.map((s) => {
|
{group.sections.map((s) => {
|
||||||
const Icon = s.icon;
|
const Icon = s.icon;
|
||||||
return (
|
return (
|
||||||
<Link
|
<Link
|
||||||
@@ -195,6 +306,8 @@ export default async function AdminLandingPage({
|
|||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
</div>
|
</div>
|
||||||
|
</section>
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,9 +44,8 @@ const DIGEST_FIELDS: SettingFieldDef[] = [
|
|||||||
{
|
{
|
||||||
key: 'reminder_digest_timezone',
|
key: 'reminder_digest_timezone',
|
||||||
label: 'Digest timezone',
|
label: 'Digest timezone',
|
||||||
description: 'IANA timezone name used to interpret the delivery time (e.g. Europe/Warsaw).',
|
description: 'IANA timezone name used to interpret the delivery time.',
|
||||||
type: 'string',
|
type: 'timezone',
|
||||||
placeholder: 'Europe/Warsaw',
|
|
||||||
defaultValue: 'Europe/Warsaw',
|
defaultValue: 'Europe/Warsaw',
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|||||||
@@ -1,18 +1,5 @@
|
|||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { ReportsDashboard } from '@/components/admin/reports-dashboard';
|
||||||
|
|
||||||
export default function ScheduledReportsPage() {
|
export default function AdminReportsPage() {
|
||||||
return (
|
return <ReportsDashboard />;
|
||||||
<div className="space-y-6">
|
|
||||||
<PageHeader
|
|
||||||
title="Scheduled Reports"
|
|
||||||
description="Configure and manage automated report delivery"
|
|
||||||
/>
|
|
||||||
<div className="flex flex-col items-center justify-center rounded-lg border border-dashed p-12">
|
|
||||||
<p className="text-lg font-medium text-muted-foreground">Coming in Layer 3</p>
|
|
||||||
<p className="text-sm text-muted-foreground">
|
|
||||||
This feature will be implemented in the next phase.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,15 @@
|
|||||||
|
import { ResidentialStagesAdmin } from '@/components/admin/residential-stages-admin';
|
||||||
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
|
||||||
|
export default function ResidentialStagesPage() {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<PageHeader
|
||||||
|
title="Residential pipeline stages"
|
||||||
|
eyebrow="ADMIN"
|
||||||
|
description="Configure the stages residential interests flow through. Removing a stage that still has interests prompts you to reassign them before saving."
|
||||||
|
/>
|
||||||
|
<ResidentialStagesAdmin />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
5
src/app/(dashboard)/[portSlug]/admin/sends/page.tsx
Normal file
5
src/app/(dashboard)/[portSlug]/admin/sends/page.tsx
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import { SendsLog } from '@/components/admin/sends-log';
|
||||||
|
|
||||||
|
export default function SendsPage() {
|
||||||
|
return <SendsLog />;
|
||||||
|
}
|
||||||
7
src/app/(dashboard)/[portSlug]/admin/storage/page.tsx
Normal file
7
src/app/(dashboard)/[portSlug]/admin/storage/page.tsx
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import { StorageAdminPanel } from '@/components/admin/storage-admin-panel';
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic';
|
||||||
|
|
||||||
|
export default function StorageAdminPage() {
|
||||||
|
return <StorageAdminPanel />;
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { useCallback, useEffect, useState } from 'react';
|
import { useCallback, useEffect, useState } from 'react';
|
||||||
|
import { toast } from 'sonner';
|
||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
import { PageHeader } from '@/components/shared/page-header';
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
import { Badge } from '@/components/ui/badge';
|
import { Badge } from '@/components/ui/badge';
|
||||||
@@ -47,8 +48,8 @@ export default function WebhooksPage() {
|
|||||||
try {
|
try {
|
||||||
const result = await apiFetch<{ data: Webhook[] }>('/api/v1/admin/webhooks');
|
const result = await apiFetch<{ data: Webhook[] }>('/api/v1/admin/webhooks');
|
||||||
setWebhooks(result.data);
|
setWebhooks(result.data);
|
||||||
} catch {
|
} catch (err) {
|
||||||
// ignore
|
toast.error(err instanceof Error ? err.message : 'Failed to load webhooks');
|
||||||
} finally {
|
} finally {
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
}
|
}
|
||||||
@@ -63,9 +64,10 @@ export default function WebhooksPage() {
|
|||||||
try {
|
try {
|
||||||
await apiFetch(`/api/v1/admin/webhooks/${deleteTarget.id}`, { method: 'DELETE' });
|
await apiFetch(`/api/v1/admin/webhooks/${deleteTarget.id}`, { method: 'DELETE' });
|
||||||
setDeleteTarget(null);
|
setDeleteTarget(null);
|
||||||
|
toast.success('Webhook deleted');
|
||||||
void loadWebhooks();
|
void loadWebhooks();
|
||||||
} catch {
|
} catch (err) {
|
||||||
// ignore
|
toast.error(err instanceof Error ? err.message : 'Failed to delete webhook');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -78,8 +80,8 @@ export default function WebhooksPage() {
|
|||||||
);
|
);
|
||||||
setNewSecret({ webhookId, secret: result.data.secret, masked: result.data.secretMasked });
|
setNewSecret({ webhookId, secret: result.data.secret, masked: result.data.secretMasked });
|
||||||
void loadWebhooks();
|
void loadWebhooks();
|
||||||
} catch {
|
} catch (err) {
|
||||||
// ignore
|
toast.error(err instanceof Error ? err.message : 'Failed to regenerate secret');
|
||||||
} finally {
|
} finally {
|
||||||
setRegenerating(null);
|
setRegenerating(null);
|
||||||
}
|
}
|
||||||
@@ -91,9 +93,10 @@ export default function WebhooksPage() {
|
|||||||
method: 'PATCH',
|
method: 'PATCH',
|
||||||
body: { isActive: !webhook.isActive },
|
body: { isActive: !webhook.isActive },
|
||||||
});
|
});
|
||||||
|
toast.success(webhook.isActive ? 'Webhook disabled' : 'Webhook enabled');
|
||||||
void loadWebhooks();
|
void loadWebhooks();
|
||||||
} catch {
|
} catch (err) {
|
||||||
// ignore
|
toast.error(err instanceof Error ? err.message : 'Failed to toggle webhook');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,74 @@
|
|||||||
|
import {
|
||||||
|
SettingsFormCard,
|
||||||
|
type SettingFieldDef,
|
||||||
|
} from '@/components/admin/shared/settings-form-card';
|
||||||
|
import { UmamiTestButton } from '@/components/admin/website-analytics/umami-test-button';
|
||||||
|
import { PageHeader } from '@/components/shared/page-header';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Per-port Umami credentials. We deliberately keep all three values
|
||||||
|
* port-scoped (per the operator decision) so different ports can point at
|
||||||
|
* different Umami instances if needed. The /website-analytics dashboard
|
||||||
|
* page reads these settings via the umami.service layer at request time.
|
||||||
|
*/
|
||||||
|
const FIELDS: SettingFieldDef[] = [
|
||||||
|
{
|
||||||
|
key: 'umami_api_url',
|
||||||
|
label: 'Umami API URL',
|
||||||
|
description:
|
||||||
|
'Base URL of the Umami instance, e.g. https://analytics.portnimara.com (no trailing slash, no /api).',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'https://analytics.portnimara.com',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'umami_api_token',
|
||||||
|
label: 'API token',
|
||||||
|
description:
|
||||||
|
'Long-lived API token if your Umami install supports one (Umami Cloud or v2 self-hosted with API keys enabled). Leave blank if you only have username/password - the service falls back to the JWT login flow using the credentials below. Stored in plain text in system_settings.',
|
||||||
|
type: 'password',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'umami_username',
|
||||||
|
label: 'Username',
|
||||||
|
description: 'Self-hosted JWT fallback. Only used if API token is blank.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: 'admin',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'umami_password',
|
||||||
|
label: 'Password',
|
||||||
|
description: 'Self-hosted JWT fallback. Only used if API token is blank.',
|
||||||
|
type: 'password',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'umami_website_id',
|
||||||
|
label: 'Website ID',
|
||||||
|
description:
|
||||||
|
'UUID of this port’s website inside Umami. Find it in Umami → Settings → Websites → Edit → Website ID.',
|
||||||
|
type: 'string',
|
||||||
|
placeholder: '00000000-0000-0000-0000-000000000000',
|
||||||
|
defaultValue: '',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
export default function WebsiteAnalyticsSettingsPage() {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<PageHeader
|
||||||
|
title="Website analytics (Umami)"
|
||||||
|
description="Connect this port to its Umami website to display traffic, top pages, referrers, and conversion data on the Website Analytics dashboard."
|
||||||
|
/>
|
||||||
|
|
||||||
|
<SettingsFormCard
|
||||||
|
title="Umami connection"
|
||||||
|
description="Per-port credentials. Each port can point at its own Umami instance; or share one instance with different website IDs."
|
||||||
|
fields={FIELDS}
|
||||||
|
extra={<UmamiTestButton />}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
import { BerthReservationsList } from '@/components/reservations/berth-reservations-list';
|
||||||
|
|
||||||
|
export default function BerthReservationsPage() {
|
||||||
|
return <BerthReservationsList />;
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { BerthDetail } from '@/components/berths/berth-detail';
|
import { BerthDetail } from '@/components/berths/berth-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
|
|
||||||
interface BerthPageProps {
|
interface BerthPageProps {
|
||||||
params: Promise<{ portSlug: string; berthId: string }>;
|
params: Promise<{ portSlug: string; berthId: string }>;
|
||||||
@@ -6,5 +7,10 @@ interface BerthPageProps {
|
|||||||
|
|
||||||
export default async function BerthPage({ params }: BerthPageProps) {
|
export default async function BerthPage({ params }: BerthPageProps) {
|
||||||
const { berthId } = await params;
|
const { berthId } = await params;
|
||||||
return <BerthDetail berthId={berthId} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="berth" id={berthId} />
|
||||||
|
<BerthDetail berthId={berthId} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,41 @@
|
|||||||
|
import { Skeleton } from '@/components/ui/skeleton';
|
||||||
|
import { CardSkeleton } from '@/components/shared/loading-skeleton';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Route-level loading UI for the client detail page. Renders while the
|
||||||
|
* server component resolves the session and the client component bootstraps
|
||||||
|
* its initial query - replaces the previous empty-header flash on direct
|
||||||
|
* URL visits.
|
||||||
|
*/
|
||||||
|
export default function Loading() {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header strip - title, badges, action buttons */}
|
||||||
|
<div className="rounded-xl border border-border bg-card px-5 py-4 shadow-sm space-y-3">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<Skeleton className="h-7 w-56" />
|
||||||
|
<Skeleton className="h-5 w-16 rounded-full" />
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
<Skeleton className="h-9 w-20 rounded-md" />
|
||||||
|
<Skeleton className="h-9 w-20 rounded-md" />
|
||||||
|
<Skeleton className="h-9 w-24 rounded-md" />
|
||||||
|
<Skeleton className="h-9 w-32 rounded-md" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tab strip */}
|
||||||
|
<div className="flex gap-2 border-b border-border pb-1">
|
||||||
|
{Array.from({ length: 8 }).map((_, i) => (
|
||||||
|
<Skeleton key={i} className="h-8 w-20 rounded-md" />
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Two-column overview */}
|
||||||
|
<div className="grid grid-cols-1 gap-6 md:grid-cols-2">
|
||||||
|
<CardSkeleton />
|
||||||
|
<CardSkeleton />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { ClientDetail } from '@/components/clients/client-detail';
|
import { ClientDetail } from '@/components/clients/client-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
import { auth } from '@/lib/auth';
|
import { auth } from '@/lib/auth';
|
||||||
import { headers } from 'next/headers';
|
import { headers } from 'next/headers';
|
||||||
|
|
||||||
@@ -12,5 +13,10 @@ export default async function ClientDetailPage({ params }: ClientDetailPageProps
|
|||||||
const session = await auth.api.getSession({ headers: await headers() });
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
const currentUserId = session?.user?.id;
|
const currentUserId = session?.user?.id;
|
||||||
|
|
||||||
return <ClientDetail clientId={clientId} currentUserId={currentUserId} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="client" id={clientId} />
|
||||||
|
<ClientDetail clientId={clientId} currentUserId={currentUserId} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { CompanyDetail } from '@/components/companies/company-detail';
|
import { CompanyDetail } from '@/components/companies/company-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
import { auth } from '@/lib/auth';
|
import { auth } from '@/lib/auth';
|
||||||
import { headers } from 'next/headers';
|
import { headers } from 'next/headers';
|
||||||
|
|
||||||
@@ -12,5 +13,10 @@ export default async function CompanyDetailPage({ params }: CompanyDetailPagePro
|
|||||||
const session = await auth.api.getSession({ headers: await headers() });
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
const currentUserId = session?.user?.id;
|
const currentUserId = session?.user?.id;
|
||||||
|
|
||||||
return <CompanyDetail companyId={companyId} currentUserId={currentUserId} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="company" id={companyId} />
|
||||||
|
<CompanyDetail companyId={companyId} currentUserId={currentUserId} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { DocumentDetail } from '@/components/documents/document-detail';
|
import { DocumentDetail } from '@/components/documents/document-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
|
|
||||||
interface PageProps {
|
interface PageProps {
|
||||||
params: Promise<{ portSlug: string; id: string }>;
|
params: Promise<{ portSlug: string; id: string }>;
|
||||||
@@ -6,5 +7,10 @@ interface PageProps {
|
|||||||
|
|
||||||
export default async function DocumentDetailPage({ params }: PageProps) {
|
export default async function DocumentDetailPage({ params }: PageProps) {
|
||||||
const { portSlug, id } = await params;
|
const { portSlug, id } = await params;
|
||||||
return <DocumentDetail documentId={id} portSlug={portSlug} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="document" id={id} />
|
||||||
|
<DocumentDetail documentId={id} portSlug={portSlug} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { useParams, useRouter } from 'next/navigation';
|
|||||||
|
|
||||||
import { ExpenseDetail } from '@/components/expenses/expense-detail';
|
import { ExpenseDetail } from '@/components/expenses/expense-detail';
|
||||||
import { ExpenseFormDialog } from '@/components/expenses/expense-form-dialog';
|
import { ExpenseFormDialog } from '@/components/expenses/expense-form-dialog';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
import { useQuery } from '@tanstack/react-query';
|
import { useQuery } from '@tanstack/react-query';
|
||||||
import { apiFetch } from '@/lib/api/client';
|
import { apiFetch } from '@/lib/api/client';
|
||||||
import type { ExpenseRow } from '@/components/expenses/expense-columns';
|
import type { ExpenseRow } from '@/components/expenses/expense-columns';
|
||||||
@@ -22,6 +23,7 @@ export default function ExpenseDetailPage() {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="max-w-3xl mx-auto">
|
<div className="max-w-3xl mx-auto">
|
||||||
|
<TrackEntityView type="expense" id={params.id} />
|
||||||
<ExpenseDetail
|
<ExpenseDetail
|
||||||
expenseId={params.id}
|
expenseId={params.id}
|
||||||
onEdit={() => setEditOpen(true)}
|
onEdit={() => setEditOpen(true)}
|
||||||
@@ -29,11 +31,7 @@ export default function ExpenseDetailPage() {
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{data?.data && (
|
{data?.data && (
|
||||||
<ExpenseFormDialog
|
<ExpenseFormDialog open={editOpen} onOpenChange={setEditOpen} expense={data.data} />
|
||||||
open={editOpen}
|
|
||||||
onOpenChange={setEditOpen}
|
|
||||||
expense={data.data}
|
|
||||||
/>
|
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
import { useEffect, useRef, useState } from 'react';
|
import { useEffect, useRef, useState } from 'react';
|
||||||
import { useParams, useRouter } from 'next/navigation';
|
import { useParams, useRouter } from 'next/navigation';
|
||||||
import { useMutation } from '@tanstack/react-query';
|
import { useMutation } from '@tanstack/react-query';
|
||||||
import { Camera, Loader2, ScanLine, Upload } from 'lucide-react';
|
import { Camera, Loader2, ScanLine, Upload, X } from 'lucide-react';
|
||||||
|
|
||||||
import { useMobileChrome } from '@/components/layout/mobile/mobile-layout-provider';
|
import { useMobileChrome } from '@/components/layout/mobile/mobile-layout-provider';
|
||||||
|
|
||||||
@@ -30,6 +30,11 @@ interface ScanResult {
|
|||||||
confidence: number;
|
confidence: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface UploadedFileMeta {
|
||||||
|
id: string;
|
||||||
|
filename: string;
|
||||||
|
}
|
||||||
|
|
||||||
export default function ScanReceiptPage() {
|
export default function ScanReceiptPage() {
|
||||||
const params = useParams<{ portSlug: string }>();
|
const params = useParams<{ portSlug: string }>();
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
@@ -38,6 +43,13 @@ export default function ScanReceiptPage() {
|
|||||||
const cameraInputRef = useRef<HTMLInputElement>(null);
|
const cameraInputRef = useRef<HTMLInputElement>(null);
|
||||||
const [scanResult, setScanResult] = useState<ScanResult | null>(null);
|
const [scanResult, setScanResult] = useState<ScanResult | null>(null);
|
||||||
const [previewUrl, setPreviewUrl] = useState<string | null>(null);
|
const [previewUrl, setPreviewUrl] = useState<string | null>(null);
|
||||||
|
// After OCR succeeds we also upload the receipt to /api/v1/files/upload
|
||||||
|
// so the expense links to the actual image. The legacy scanner skipped
|
||||||
|
// this step and saved expenses without their receipt — which silently
|
||||||
|
// disqualified them from parent-company reimbursement (the warning the
|
||||||
|
// PDF export now surfaces).
|
||||||
|
const [uploadedFile, setUploadedFile] = useState<UploadedFileMeta | null>(null);
|
||||||
|
const [pendingFile, setPendingFile] = useState<File | null>(null);
|
||||||
|
|
||||||
const { setChrome } = useMobileChrome();
|
const { setChrome } = useMobileChrome();
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -74,6 +86,29 @@ export default function ScanReceiptPage() {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Uploads the receipt image to /api/v1/files/upload (category=receipt)
|
||||||
|
// so the new expense row can link to it via receiptFileIds. Runs in
|
||||||
|
// parallel with the OCR scan so the rep can keep editing fields while
|
||||||
|
// the upload completes.
|
||||||
|
const uploadMutation = useMutation({
|
||||||
|
mutationFn: async (file: File): Promise<UploadedFileMeta> => {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('file', file);
|
||||||
|
formData.append('category', 'receipt');
|
||||||
|
const res = await fetch('/api/v1/files/upload', {
|
||||||
|
method: 'POST',
|
||||||
|
body: formData,
|
||||||
|
credentials: 'include',
|
||||||
|
});
|
||||||
|
if (!res.ok) throw new Error('Receipt upload failed');
|
||||||
|
const json = (await res.json()) as { data: { id: string; filename: string } };
|
||||||
|
return { id: json.data.id, filename: json.data.filename };
|
||||||
|
},
|
||||||
|
onSuccess: (meta) => {
|
||||||
|
setUploadedFile(meta);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const saveMutation = useMutation({
|
const saveMutation = useMutation({
|
||||||
mutationFn: () =>
|
mutationFn: () =>
|
||||||
apiFetch('/api/v1/expenses', {
|
apiFetch('/api/v1/expenses', {
|
||||||
@@ -85,6 +120,9 @@ export default function ScanReceiptPage() {
|
|||||||
category: category || undefined,
|
category: category || undefined,
|
||||||
expenseDate: date ? new Date(date) : new Date(),
|
expenseDate: date ? new Date(date) : new Date(),
|
||||||
paymentStatus: 'unpaid',
|
paymentStatus: 'unpaid',
|
||||||
|
receiptFileIds: uploadedFile ? [uploadedFile.id] : undefined,
|
||||||
|
// The scanner path always has a receipt (we wouldn't have OCR'd
|
||||||
|
// it otherwise), so we never need the no-receipt flag here.
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
onSuccess: () => {
|
onSuccess: () => {
|
||||||
@@ -95,12 +133,32 @@ export default function ScanReceiptPage() {
|
|||||||
function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
|
function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
|
||||||
const file = e.target.files?.[0];
|
const file = e.target.files?.[0];
|
||||||
if (!file) return;
|
if (!file) return;
|
||||||
|
setPendingFile(file);
|
||||||
const url = URL.createObjectURL(file);
|
const url = URL.createObjectURL(file);
|
||||||
setPreviewUrl(url);
|
setPreviewUrl(url);
|
||||||
|
// Kick off OCR scan + storage upload concurrently. The two are
|
||||||
|
// independent server calls and the rep is staring at the preview
|
||||||
|
// while both run.
|
||||||
scanMutation.mutate(file);
|
scanMutation.mutate(file);
|
||||||
|
uploadMutation.mutate(file);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function handleClearReceipt() {
|
||||||
|
if (previewUrl) URL.revokeObjectURL(previewUrl);
|
||||||
|
setPreviewUrl(null);
|
||||||
|
setUploadedFile(null);
|
||||||
|
setPendingFile(null);
|
||||||
|
setScanResult(null);
|
||||||
|
// Reset in-flight mutations so a late onSuccess doesn't repopulate
|
||||||
|
// the form against an already-cleared UI (audit finding: stale
|
||||||
|
// receipt could land on the next Save).
|
||||||
|
scanMutation.reset();
|
||||||
|
uploadMutation.reset();
|
||||||
|
if (fileInputRef.current) fileInputRef.current.value = '';
|
||||||
|
if (cameraInputRef.current) cameraInputRef.current.value = '';
|
||||||
|
}
|
||||||
|
void pendingFile;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="max-w-2xl mx-auto space-y-6">
|
<div className="max-w-2xl mx-auto space-y-6">
|
||||||
<div className="hidden sm:block">
|
<div className="hidden sm:block">
|
||||||
@@ -119,18 +177,45 @@ export default function ScanReceiptPage() {
|
|||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
{previewUrl ? (
|
{previewUrl ? (
|
||||||
<div
|
<div className="space-y-2">
|
||||||
className="border-2 border-dashed rounded-lg p-4 text-center cursor-pointer hover:bg-muted/50 transition-colors"
|
<div className="relative border-2 border-dashed rounded-lg p-4 text-center bg-muted/20">
|
||||||
onClick={() => fileInputRef.current?.click()}
|
|
||||||
>
|
|
||||||
<img
|
<img
|
||||||
src={previewUrl}
|
src={previewUrl}
|
||||||
alt="Receipt preview"
|
alt="Receipt preview"
|
||||||
className="max-h-64 mx-auto rounded object-contain"
|
className="max-h-64 mx-auto rounded object-contain"
|
||||||
/>
|
/>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleClearReceipt}
|
||||||
|
aria-label="Remove receipt"
|
||||||
|
className="absolute top-2 right-2 rounded-full bg-background/80 hover:bg-background border p-1.5 shadow-sm"
|
||||||
|
>
|
||||||
|
<X className="h-4 w-4" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-wrap items-center gap-2 text-xs text-muted-foreground">
|
||||||
|
{uploadMutation.isPending && (
|
||||||
|
<span className="inline-flex items-center gap-1">
|
||||||
|
<Loader2 className="h-3 w-3 animate-spin" /> Uploading receipt…
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{uploadedFile && (
|
||||||
|
<span className="text-emerald-600">
|
||||||
|
Receipt uploaded ({uploadedFile.filename})
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{uploadMutation.isError && (
|
||||||
|
<span className="text-destructive">
|
||||||
|
Receipt upload failed — save will still create the expense without an image.
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<div className="grid gap-2 sm:grid-cols-2">
|
<div className="grid gap-2 sm:grid-cols-2">
|
||||||
|
{/* Camera button — available on mobile devices that surface the
|
||||||
|
built-in capture flow when an `image/*` input has the
|
||||||
|
`capture` attribute. Hidden on desktop where it's a no-op. */}
|
||||||
<Button
|
<Button
|
||||||
type="button"
|
type="button"
|
||||||
size="lg"
|
size="lg"
|
||||||
@@ -140,6 +225,8 @@ export default function ScanReceiptPage() {
|
|||||||
<Camera className="mr-2 h-5 w-5" />
|
<Camera className="mr-2 h-5 w-5" />
|
||||||
Take photo
|
Take photo
|
||||||
</Button>
|
</Button>
|
||||||
|
{/* File picker — works on every platform. Phrased so the copy
|
||||||
|
fits both mobile (library/files) and desktop (drag and drop). */}
|
||||||
<Button
|
<Button
|
||||||
type="button"
|
type="button"
|
||||||
variant="outline"
|
variant="outline"
|
||||||
@@ -148,18 +235,21 @@ export default function ScanReceiptPage() {
|
|||||||
onClick={() => fileInputRef.current?.click()}
|
onClick={() => fileInputRef.current?.click()}
|
||||||
>
|
>
|
||||||
<Upload className="mr-2 h-5 w-5" />
|
<Upload className="mr-2 h-5 w-5" />
|
||||||
<span className="sm:hidden">Choose from library</span>
|
<span className="sm:hidden">Choose from device</span>
|
||||||
<span className="hidden sm:inline">Click to upload or drag and drop</span>
|
<span className="hidden sm:inline">Choose from device or drag and drop</span>
|
||||||
</Button>
|
</Button>
|
||||||
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
||||||
JPEG, PNG, WebP up to 10MB
|
JPEG, PNG, HEIC, WebP up to 10 MB
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
{/* `image/*` is the broadest accept — includes HEIC on iOS,
|
||||||
|
JPEG/PNG/WebP everywhere. The capture attribute on the second
|
||||||
|
input invokes the native camera flow on mobile. */}
|
||||||
<input
|
<input
|
||||||
ref={fileInputRef}
|
ref={fileInputRef}
|
||||||
type="file"
|
type="file"
|
||||||
accept="image/*"
|
accept="image/*,application/pdf"
|
||||||
className="hidden"
|
className="hidden"
|
||||||
onChange={handleFileChange}
|
onChange={handleFileChange}
|
||||||
/>
|
/>
|
||||||
@@ -178,10 +268,18 @@ export default function ScanReceiptPage() {
|
|||||||
<span className="text-sm">Scanning receipt...</span>
|
<span className="text-sm">Scanning receipt...</span>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{scanMutation.isError && (
|
||||||
|
<div className="mt-4 rounded-md border border-amber-300 bg-amber-50 p-3 text-xs text-amber-900 dark:border-amber-900 dark:bg-amber-950/40 dark:text-amber-200">
|
||||||
|
<span className="font-medium">Couldn't read this receipt automatically.</span>{' '}
|
||||||
|
You can still fill in the details manually below — the receipt image will save with
|
||||||
|
the expense.
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
||||||
{(scanResult || scanMutation.isSuccess) && (
|
{(scanResult || scanMutation.isSuccess || scanMutation.isError || uploadedFile) && (
|
||||||
<Card>
|
<Card>
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle className="text-base">
|
<CardTitle className="text-base">
|
||||||
@@ -264,10 +362,20 @@ export default function ScanReceiptPage() {
|
|||||||
</Button>
|
</Button>
|
||||||
<Button
|
<Button
|
||||||
onClick={() => saveMutation.mutate()}
|
onClick={() => saveMutation.mutate()}
|
||||||
disabled={saveMutation.isPending || !amount}
|
disabled={
|
||||||
|
saveMutation.isPending ||
|
||||||
|
!amount ||
|
||||||
|
// Block save while the receipt upload is still in flight —
|
||||||
|
// otherwise the rep can hit Save before the storage round
|
||||||
|
// trip finishes and the expense lands without `receiptFileIds`,
|
||||||
|
// silently re-creating the legacy receipt-loss bug.
|
||||||
|
uploadMutation.isPending
|
||||||
|
}
|
||||||
>
|
>
|
||||||
{saveMutation.isPending && <Loader2 className="mr-2 h-4 w-4 animate-spin" />}
|
{(saveMutation.isPending || uploadMutation.isPending) && (
|
||||||
Save as Expense
|
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||||
|
)}
|
||||||
|
{uploadMutation.isPending ? 'Uploading…' : 'Save as Expense'}
|
||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { InterestDetail } from '@/components/interests/interest-detail';
|
import { InterestDetail } from '@/components/interests/interest-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
import { auth } from '@/lib/auth';
|
import { auth } from '@/lib/auth';
|
||||||
import { headers } from 'next/headers';
|
import { headers } from 'next/headers';
|
||||||
|
|
||||||
@@ -12,5 +13,10 @@ export default async function InterestDetailPage({ params }: InterestDetailPageP
|
|||||||
const session = await auth.api.getSession({ headers: await headers() });
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
const currentUserId = session?.user?.id;
|
const currentUserId = session?.user?.id;
|
||||||
|
|
||||||
return <InterestDetail interestId={interestId} currentUserId={currentUserId} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="interest" id={interestId} />
|
||||||
|
<InterestDetail interestId={interestId} currentUserId={currentUserId} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import { use } from 'react';
|
import { use } from 'react';
|
||||||
import { InvoiceDetail } from '@/components/invoices/invoice-detail';
|
import { InvoiceDetail } from '@/components/invoices/invoice-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
|
|
||||||
interface InvoiceDetailPageProps {
|
interface InvoiceDetailPageProps {
|
||||||
params: Promise<{ portSlug: string; id: string }>;
|
params: Promise<{ portSlug: string; id: string }>;
|
||||||
@@ -9,6 +10,7 @@ export default function InvoiceDetailPage({ params }: InvoiceDetailPageProps) {
|
|||||||
const { id } = use(params);
|
const { id } = use(params);
|
||||||
return (
|
return (
|
||||||
<div className="max-w-4xl mx-auto space-y-6">
|
<div className="max-w-4xl mx-auto space-y-6">
|
||||||
|
<TrackEntityView type="invoice" id={id} />
|
||||||
<InvoiceDetail invoiceId={id} />
|
<InvoiceDetail invoiceId={id} />
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ export default function NewInvoicePage() {
|
|||||||
}, [setChrome]);
|
}, [setChrome]);
|
||||||
|
|
||||||
// When the form is launched from an interest detail with `?interestId=…&kind=deposit`,
|
// When the form is launched from an interest detail with `?interestId=…&kind=deposit`,
|
||||||
// fetch enough of the interest to display "Deposit for {client} — Berth {n}" in
|
// fetch enough of the interest to display "Deposit for {client} - Berth {n}" in
|
||||||
// the review step. Doubles as the source of truth for the billing entity prefill.
|
// the review step. Doubles as the source of truth for the billing entity prefill.
|
||||||
const { data: prefilledInterest } = useQuery<{
|
const { data: prefilledInterest } = useQuery<{
|
||||||
data: {
|
data: {
|
||||||
@@ -97,6 +97,28 @@ export default function NewInvoicePage() {
|
|||||||
const watchedValues = watch();
|
const watchedValues = watch();
|
||||||
const isDepositInvoice = watchedValues.kind === 'deposit';
|
const isDepositInvoice = watchedValues.kind === 'deposit';
|
||||||
|
|
||||||
|
// Resolve the selected billing entity to a human name so the review step
|
||||||
|
// shows "Acme Yacht Charters" instead of "company 4f2a1b…".
|
||||||
|
const billingEntityRef = watchedValues.billingEntity ?? null;
|
||||||
|
const { data: billingEntityName } = useQuery<{ name: string }>({
|
||||||
|
queryKey: ['billing-entity-name', billingEntityRef?.type, billingEntityRef?.id],
|
||||||
|
queryFn: async () => {
|
||||||
|
if (!billingEntityRef) return { name: '' };
|
||||||
|
const path =
|
||||||
|
billingEntityRef.type === 'company'
|
||||||
|
? `/api/v1/companies/${billingEntityRef.id}`
|
||||||
|
: `/api/v1/clients/${billingEntityRef.id}`;
|
||||||
|
const res = await apiFetch<{
|
||||||
|
data: { fullName?: string; name?: string };
|
||||||
|
}>(path);
|
||||||
|
return {
|
||||||
|
name: res?.data?.fullName ?? res?.data?.name ?? '',
|
||||||
|
};
|
||||||
|
},
|
||||||
|
enabled: !!billingEntityRef?.id,
|
||||||
|
staleTime: 60_000,
|
||||||
|
});
|
||||||
|
|
||||||
// Pre-fill the billing entity from the linked interest's client on launch.
|
// Pre-fill the billing entity from the linked interest's client on launch.
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (prefilledInterest?.data && !watchedValues.billingEntity) {
|
if (prefilledInterest?.data && !watchedValues.billingEntity) {
|
||||||
@@ -162,7 +184,7 @@ export default function NewInvoicePage() {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="max-w-2xl mx-auto space-y-6">
|
<div className="max-w-2xl mx-auto space-y-6">
|
||||||
{/* Header — desktop only; mobile gets the title from the topbar */}
|
{/* Header - desktop only; mobile gets the title from the topbar */}
|
||||||
<div className="hidden sm:flex items-center gap-3">
|
<div className="hidden sm:flex items-center gap-3">
|
||||||
<Button variant="ghost" size="sm" onClick={() => router.push(`/${portSlug}/invoices`)}>
|
<Button variant="ghost" size="sm" onClick={() => router.push(`/${portSlug}/invoices`)}>
|
||||||
<ChevronLeft className="h-4 w-4" />
|
<ChevronLeft className="h-4 w-4" />
|
||||||
@@ -211,7 +233,7 @@ export default function NewInvoicePage() {
|
|||||||
{prefilledInterest?.data
|
{prefilledInterest?.data
|
||||||
? `Linked to ${prefilledInterest.data.clientName ?? 'interest'}${
|
? `Linked to ${prefilledInterest.data.clientName ?? 'interest'}${
|
||||||
prefilledInterest.data.berthMooringNumber
|
prefilledInterest.data.berthMooringNumber
|
||||||
? ` — Berth ${prefilledInterest.data.berthMooringNumber}`
|
? ` - Berth ${prefilledInterest.data.berthMooringNumber}`
|
||||||
: ''
|
: ''
|
||||||
}. Marking this invoice as paid will advance the interest to "Deposit 10%".`
|
}. Marking this invoice as paid will advance the interest to "Deposit 10%".`
|
||||||
: 'Marking this invoice as paid will advance the linked interest to "Deposit 10%".'}
|
: 'Marking this invoice as paid will advance the linked interest to "Deposit 10%".'}
|
||||||
@@ -356,9 +378,13 @@ export default function NewInvoicePage() {
|
|||||||
<p className="font-medium mt-0.5">
|
<p className="font-medium mt-0.5">
|
||||||
{watchedValues.billingEntity ? (
|
{watchedValues.billingEntity ? (
|
||||||
<>
|
<>
|
||||||
<span className="capitalize">{watchedValues.billingEntity.type}</span>{' '}
|
{billingEntityName?.name ? (
|
||||||
<span className="text-xs opacity-60">
|
<span>{billingEntityName.name}</span>
|
||||||
{watchedValues.billingEntity.id.slice(0, 12)}
|
) : (
|
||||||
|
<span className="text-muted-foreground">Loading…</span>
|
||||||
|
)}{' '}
|
||||||
|
<span className="text-xs text-muted-foreground capitalize">
|
||||||
|
({watchedValues.billingEntity.type})
|
||||||
</span>
|
</span>
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
|
|||||||
@@ -0,0 +1,16 @@
|
|||||||
|
import type { Metadata } from 'next';
|
||||||
|
|
||||||
|
import { UploadReceiptsGuide } from '@/components/invoices/upload-receipts-guide';
|
||||||
|
|
||||||
|
export const metadata: Metadata = {
|
||||||
|
title: 'How to upload receipts',
|
||||||
|
};
|
||||||
|
|
||||||
|
export default async function UploadReceiptsPage({
|
||||||
|
params,
|
||||||
|
}: {
|
||||||
|
params: Promise<{ portSlug: string }>;
|
||||||
|
}) {
|
||||||
|
const { portSlug } = await params;
|
||||||
|
return <UploadReceiptsGuide portSlug={portSlug} />;
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { ResidentialClientDetail } from '@/components/residential/residential-client-detail';
|
import { ResidentialClientDetail } from '@/components/residential/residential-client-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
|
|
||||||
interface Props {
|
interface Props {
|
||||||
params: Promise<{ id: string }>;
|
params: Promise<{ id: string }>;
|
||||||
@@ -6,5 +7,10 @@ interface Props {
|
|||||||
|
|
||||||
export default async function ResidentialClientDetailPage({ params }: Props) {
|
export default async function ResidentialClientDetailPage({ params }: Props) {
|
||||||
const { id } = await params;
|
const { id } = await params;
|
||||||
return <ResidentialClientDetail clientId={id} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="residential-client" id={id} />
|
||||||
|
<ResidentialClientDetail clientId={id} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { ResidentialInterestDetail } from '@/components/residential/residential-interest-detail';
|
import { ResidentialInterestDetail } from '@/components/residential/residential-interest-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
|
|
||||||
interface Props {
|
interface Props {
|
||||||
params: Promise<{ id: string }>;
|
params: Promise<{ id: string }>;
|
||||||
@@ -6,5 +7,10 @@ interface Props {
|
|||||||
|
|
||||||
export default async function ResidentialInterestDetailPage({ params }: Props) {
|
export default async function ResidentialInterestDetailPage({ params }: Props) {
|
||||||
const { id } = await params;
|
const { id } = await params;
|
||||||
return <ResidentialInterestDetail interestId={id} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="residential-interest" id={id} />
|
||||||
|
<ResidentialInterestDetail interestId={id} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
5
src/app/(dashboard)/[portSlug]/settings/profile/page.tsx
Normal file
5
src/app/(dashboard)/[portSlug]/settings/profile/page.tsx
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import { UserProfile } from '@/components/settings/user-profile';
|
||||||
|
|
||||||
|
export default function ProfilePage() {
|
||||||
|
return <UserProfile />;
|
||||||
|
}
|
||||||
11
src/app/(dashboard)/[portSlug]/website-analytics/page.tsx
Normal file
11
src/app/(dashboard)/[portSlug]/website-analytics/page.tsx
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import type { Metadata } from 'next';
|
||||||
|
|
||||||
|
import { WebsiteAnalyticsShell } from '@/components/website-analytics/website-analytics-shell';
|
||||||
|
|
||||||
|
export const metadata: Metadata = {
|
||||||
|
title: 'Website analytics',
|
||||||
|
};
|
||||||
|
|
||||||
|
export default function WebsiteAnalyticsPage() {
|
||||||
|
return <WebsiteAnalyticsShell />;
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { YachtDetail } from '@/components/yachts/yacht-detail';
|
import { YachtDetail } from '@/components/yachts/yacht-detail';
|
||||||
|
import { TrackEntityView } from '@/components/search/track-entity-view';
|
||||||
import { auth } from '@/lib/auth';
|
import { auth } from '@/lib/auth';
|
||||||
import { headers } from 'next/headers';
|
import { headers } from 'next/headers';
|
||||||
|
|
||||||
@@ -12,5 +13,10 @@ export default async function YachtDetailPage({ params }: YachtDetailPageProps)
|
|||||||
const session = await auth.api.getSession({ headers: await headers() });
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
const currentUserId = session?.user?.id;
|
const currentUserId = session?.user?.id;
|
||||||
|
|
||||||
return <YachtDetail yachtId={yachtId} currentUserId={currentUserId} />;
|
return (
|
||||||
|
<>
|
||||||
|
<TrackEntityView type="yacht" id={yachtId} />
|
||||||
|
<YachtDetail yachtId={yachtId} currentUserId={currentUserId} />
|
||||||
|
</>
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
55
src/app/(dashboard)/error.tsx
Normal file
55
src/app/(dashboard)/error.tsx
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
'use client';
|
||||||
|
|
||||||
|
import { useEffect } from 'react';
|
||||||
|
import Link from 'next/link';
|
||||||
|
import { AlertCircle, RotateCcw } from 'lucide-react';
|
||||||
|
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
|
||||||
|
interface ErrorProps {
|
||||||
|
error: Error & { digest?: string };
|
||||||
|
reset: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function DashboardError({ error, reset }: ErrorProps) {
|
||||||
|
useEffect(() => {
|
||||||
|
// Forward to the browser console so the dev sees the stack while the
|
||||||
|
// user sees the friendly UI. The server already wrote an error_events
|
||||||
|
// row through the page-level error pipeline.
|
||||||
|
|
||||||
|
console.error('Dashboard render error:', error);
|
||||||
|
}, [error]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-start justify-center min-h-[60vh] p-6">
|
||||||
|
<div className="max-w-md w-full bg-background border rounded-lg p-6 shadow-sm">
|
||||||
|
<div className="flex items-center gap-3 mb-4">
|
||||||
|
<div className="rounded-full bg-red-100 p-2">
|
||||||
|
<AlertCircle className="h-5 w-5 text-red-600" />
|
||||||
|
</div>
|
||||||
|
<h1 className="text-lg font-semibold">Something went wrong</h1>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p className="text-sm text-muted-foreground mb-4">
|
||||||
|
The page hit an unexpected error. The team has been notified
|
||||||
|
{error.digest ? ' (ref: ' : '.'}
|
||||||
|
{error.digest ? <code className="font-mono text-xs">{error.digest}</code> : null}
|
||||||
|
{error.digest ? ').' : ''}
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Button onClick={reset} size="sm">
|
||||||
|
<RotateCcw className="h-3.5 w-3.5 mr-1.5" />
|
||||||
|
Try again
|
||||||
|
</Button>
|
||||||
|
<Link
|
||||||
|
href={'/' as never}
|
||||||
|
className="text-sm text-muted-foreground hover:text-foreground underline"
|
||||||
|
>
|
||||||
|
Back to dashboard
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -13,6 +13,7 @@ import { PermissionsProvider } from '@/providers/permissions-provider';
|
|||||||
import { Sidebar } from '@/components/layout/sidebar';
|
import { Sidebar } from '@/components/layout/sidebar';
|
||||||
import { Topbar } from '@/components/layout/topbar';
|
import { Topbar } from '@/components/layout/topbar';
|
||||||
import { MobileLayout } from '@/components/layout/mobile/mobile-layout';
|
import { MobileLayout } from '@/components/layout/mobile/mobile-layout';
|
||||||
|
import { RealtimeToasts } from '@/components/shared/realtime-toasts';
|
||||||
|
|
||||||
export default async function DashboardLayout({ children }: { children: React.ReactNode }) {
|
export default async function DashboardLayout({ children }: { children: React.ReactNode }) {
|
||||||
const session = await auth.api.getSession({ headers: await headers() });
|
const session = await auth.api.getSession({ headers: await headers() });
|
||||||
@@ -38,7 +39,8 @@ export default async function DashboardLayout({ children }: { children: React.Re
|
|||||||
<PortProvider ports={ports} defaultPortId={ports[0]?.id ?? null}>
|
<PortProvider ports={ports} defaultPortId={ports[0]?.id ?? null}>
|
||||||
<PermissionsProvider>
|
<PermissionsProvider>
|
||||||
<SocketProvider>
|
<SocketProvider>
|
||||||
{/* Desktop shell — hidden by CSS on mobile */}
|
<RealtimeToasts />
|
||||||
|
{/* Desktop shell - hidden by CSS on mobile */}
|
||||||
<div data-shell="desktop" className="flex h-screen overflow-hidden bg-background">
|
<div data-shell="desktop" className="flex h-screen overflow-hidden bg-background">
|
||||||
<Sidebar
|
<Sidebar
|
||||||
portRoles={portRoles}
|
portRoles={portRoles}
|
||||||
@@ -47,6 +49,7 @@ export default async function DashboardLayout({ children }: { children: React.Re
|
|||||||
name: profile?.displayName ?? session.user.name ?? session.user.email,
|
name: profile?.displayName ?? session.user.name ?? session.user.email,
|
||||||
email: session.user.email,
|
email: session.user.email,
|
||||||
}}
|
}}
|
||||||
|
ports={ports}
|
||||||
/>
|
/>
|
||||||
<div className="flex-1 flex flex-col overflow-hidden min-w-0">
|
<div className="flex-1 flex flex-col overflow-hidden min-w-0">
|
||||||
<Topbar
|
<Topbar
|
||||||
@@ -56,11 +59,13 @@ export default async function DashboardLayout({ children }: { children: React.Re
|
|||||||
email: session.user.email,
|
email: session.user.email,
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
<main className="flex-1 overflow-y-auto bg-background p-6">{children}</main>
|
<main className="flex-1 overflow-y-auto bg-background pt-3 px-6 pb-6">
|
||||||
|
{children}
|
||||||
|
</main>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Mobile shell — hidden by CSS on desktop */}
|
{/* Mobile shell - hidden by CSS on desktop */}
|
||||||
<MobileLayout>{children}</MobileLayout>
|
<MobileLayout>{children}</MobileLayout>
|
||||||
</SocketProvider>
|
</SocketProvider>
|
||||||
</PermissionsProvider>
|
</PermissionsProvider>
|
||||||
|
|||||||
@@ -12,14 +12,10 @@ export const metadata: Metadata = {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
export default async function PortalLayout({
|
export default async function PortalLayout({ children }: { children: React.ReactNode }) {
|
||||||
children,
|
|
||||||
}: {
|
|
||||||
children: React.ReactNode;
|
|
||||||
}) {
|
|
||||||
// This layout wraps all portal routes including login/verify
|
// This layout wraps all portal routes including login/verify
|
||||||
// We can't easily check pathname in a server layout, so we attempt
|
// We can't easily check pathname in a server layout, so we attempt
|
||||||
// to get the session and pass it down — login/verify pages handle their own
|
// to get the session and pass it down - login/verify pages handle their own
|
||||||
// redirect logic independently.
|
// redirect logic independently.
|
||||||
const session = await getPortalSession().catch(() => null);
|
const session = await getPortalSession().catch(() => null);
|
||||||
|
|
||||||
@@ -42,17 +38,11 @@ export default async function PortalLayout({
|
|||||||
<div className="min-h-screen bg-gray-50">
|
<div className="min-h-screen bg-gray-50">
|
||||||
{session && (
|
{session && (
|
||||||
<>
|
<>
|
||||||
<PortalHeader
|
<PortalHeader portName={portName} portLogoUrl={portLogoUrl} clientName={clientName} />
|
||||||
portName={portName}
|
|
||||||
portLogoUrl={portLogoUrl}
|
|
||||||
clientName={clientName}
|
|
||||||
/>
|
|
||||||
<PortalNav />
|
<PortalNav />
|
||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
<main className={session ? 'max-w-5xl mx-auto px-4 sm:px-6 py-8' : ''}>
|
<main className={session ? 'max-w-5xl mx-auto px-4 sm:px-6 py-8' : ''}>{children}</main>
|
||||||
{children}
|
|
||||||
</main>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ export default function PortalActivatePage() {
|
|||||||
<PasswordSetForm
|
<PasswordSetForm
|
||||||
endpoint="/api/portal/auth/activate"
|
endpoint="/api/portal/auth/activate"
|
||||||
title="Activate your account"
|
title="Activate your account"
|
||||||
description="Welcome — choose a password to finish setting up your client portal account."
|
description="Welcome - choose a password to finish setting up your client portal account."
|
||||||
successTitle="Account activated"
|
successTitle="Account activated"
|
||||||
successDescription="You can now sign in with your new password."
|
successDescription="You can now sign in with your new password."
|
||||||
submitLabel="Activate account"
|
submitLabel="Activate account"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { redirect } from 'next/navigation';
|
import { redirect } from 'next/navigation';
|
||||||
import { Anchor, FileText, Receipt, Sailboat, Building2, CalendarCheck } from 'lucide-react';
|
import { Anchor, FileText, Receipt, Sailboat, CalendarCheck } from 'lucide-react';
|
||||||
import type { Metadata } from 'next';
|
import type { Metadata } from 'next';
|
||||||
|
|
||||||
import { getPortalSession } from '@/lib/portal/auth';
|
import { getPortalSession } from '@/lib/portal/auth';
|
||||||
@@ -55,12 +55,9 @@ export default async function PortalDashboardPage() {
|
|||||||
icon={Sailboat}
|
icon={Sailboat}
|
||||||
href="/portal/my-yachts"
|
href="/portal/my-yachts"
|
||||||
/>
|
/>
|
||||||
<PortalCard
|
{/* My Memberships tile was a dead-end (no href, no /portal/memberships
|
||||||
title="My Memberships"
|
route). Hidden until a memberships page ships. The count is still
|
||||||
value={dashboard.counts.memberships}
|
available in the underlying dashboard data when needed. */}
|
||||||
description="Companies where you hold an active role"
|
|
||||||
icon={Building2}
|
|
||||||
/>
|
|
||||||
<PortalCard
|
<PortalCard
|
||||||
title="My Active Reservations"
|
title="My Active Reservations"
|
||||||
value={dashboard.counts.activeReservations}
|
value={dashboard.counts.activeReservations}
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { Download, Loader2 } from 'lucide-react';
|
import { Download, Loader2 } from 'lucide-react';
|
||||||
|
import { toast } from 'sonner';
|
||||||
|
|
||||||
import { Button } from '@/components/ui/button';
|
import { Button } from '@/components/ui/button';
|
||||||
|
|
||||||
interface DocumentDownloadButtonProps {
|
interface DocumentDownloadButtonProps {
|
||||||
@@ -16,25 +18,20 @@ export function DocumentDownloadButton({ documentId }: DocumentDownloadButtonPro
|
|||||||
try {
|
try {
|
||||||
const res = await fetch(`/api/portal/documents/${documentId}/download`);
|
const res = await fetch(`/api/portal/documents/${documentId}/download`);
|
||||||
if (!res.ok) {
|
if (!res.ok) {
|
||||||
alert('Unable to download document. Please try again.');
|
toast.error('Unable to download document. Please try again.');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const data = await res.json() as { url: string };
|
const data = (await res.json()) as { url: string };
|
||||||
window.open(data.url, '_blank', 'noopener,noreferrer');
|
window.open(data.url, '_blank', 'noopener,noreferrer');
|
||||||
} catch {
|
} catch {
|
||||||
alert('Unable to download document. Please check your connection.');
|
toast.error('Unable to download document. Please check your connection.');
|
||||||
} finally {
|
} finally {
|
||||||
setLoading(false);
|
setLoading(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Button
|
<Button variant="outline" size="sm" onClick={handleDownload} disabled={loading}>
|
||||||
variant="outline"
|
|
||||||
size="sm"
|
|
||||||
onClick={handleDownload}
|
|
||||||
disabled={loading}
|
|
||||||
>
|
|
||||||
{loading ? (
|
{loading ? (
|
||||||
<Loader2 className="h-3.5 w-3.5 animate-spin" />
|
<Loader2 className="h-3.5 w-3.5 animate-spin" />
|
||||||
) : (
|
) : (
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ export default function PortalForgotPasswordPage() {
|
|||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
try {
|
try {
|
||||||
// Always returns 200 — caller never sees whether email exists.
|
// Always returns 200 - caller never sees whether email exists.
|
||||||
await fetch('/api/portal/auth/forgot-password', {
|
await fetch('/api/portal/auth/forgot-password', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ export default async function PortalInterestsPage() {
|
|||||||
<span className="font-medium text-gray-900">General Interest</span>
|
<span className="font-medium text-gray-900">General Interest</span>
|
||||||
)}
|
)}
|
||||||
{interest.berthArea && (
|
{interest.berthArea && (
|
||||||
<span className="text-sm text-gray-400">— {interest.berthArea}</span>
|
<span className="text-sm text-gray-400">- {interest.berthArea}</span>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
{interest.leadCategory && (
|
{interest.leadCategory && (
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ export default async function PortalMyReservationsPage() {
|
|||||||
<div className="flex items-center gap-2 mb-1">
|
<div className="flex items-center gap-2 mb-1">
|
||||||
<span className="font-medium text-gray-900">{r.yachtName ?? 'Yacht'}</span>
|
<span className="font-medium text-gray-900">{r.yachtName ?? 'Yacht'}</span>
|
||||||
{r.berthMooringNumber && (
|
{r.berthMooringNumber && (
|
||||||
<span className="text-sm text-gray-400">— Berth {r.berthMooringNumber}</span>
|
<span className="text-sm text-gray-400">- Berth {r.berthMooringNumber}</span>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
<p className="text-sm text-gray-500">
|
<p className="text-sm text-gray-500">
|
||||||
|
|||||||
42
src/app/(portal)/portal/profile/page.tsx
Normal file
42
src/app/(portal)/portal/profile/page.tsx
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import { redirect } from 'next/navigation';
|
||||||
|
import type { Metadata } from 'next';
|
||||||
|
|
||||||
|
import { getPortalSession } from '@/lib/portal/auth';
|
||||||
|
import { ChangePasswordForm } from '@/components/portal/change-password-form';
|
||||||
|
|
||||||
|
export const metadata: Metadata = { title: 'Profile' };
|
||||||
|
|
||||||
|
export default async function PortalProfilePage() {
|
||||||
|
const session = await getPortalSession();
|
||||||
|
if (!session) redirect('/portal/login');
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-semibold text-gray-900">Profile</h1>
|
||||||
|
<p className="text-sm text-gray-500 mt-1">
|
||||||
|
Read-only contact details and self-service password change.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white rounded-lg border p-6 space-y-2 text-sm">
|
||||||
|
<div className="flex items-baseline justify-between">
|
||||||
|
<span className="text-gray-500">Email</span>
|
||||||
|
<span className="font-medium">{session.email}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-gray-400 pt-1">
|
||||||
|
To update name, phone, or address, please contact your port team — they keep the records
|
||||||
|
authoritative.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white rounded-lg border p-6">
|
||||||
|
<h2 className="text-base font-semibold text-gray-900 mb-1">Change password</h2>
|
||||||
|
<p className="text-sm text-gray-500 mb-4">
|
||||||
|
You’ll need your current password to confirm.
|
||||||
|
</p>
|
||||||
|
<ChangePasswordForm />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -1,20 +1,51 @@
|
|||||||
|
import type { Metadata, Viewport } from 'next';
|
||||||
import { redirect } from 'next/navigation';
|
import { redirect } from 'next/navigation';
|
||||||
import { headers } from 'next/headers';
|
import { headers } from 'next/headers';
|
||||||
|
import { eq } from 'drizzle-orm';
|
||||||
|
|
||||||
import { auth } from '@/lib/auth';
|
import { auth } from '@/lib/auth';
|
||||||
import { db } from '@/lib/db';
|
import { db } from '@/lib/db';
|
||||||
import { ports as portsTable } from '@/lib/db/schema/ports';
|
import { ports as portsTable } from '@/lib/db/schema/ports';
|
||||||
import { QueryProvider } from '@/providers/query-provider';
|
import { QueryProvider } from '@/providers/query-provider';
|
||||||
import { PortProvider } from '@/providers/port-provider';
|
import { PortProvider } from '@/providers/port-provider';
|
||||||
import { eq } from 'drizzle-orm';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Minimal layout for the mobile receipt-scanner PWA. No sidebar, no
|
* Minimal layout for the mobile receipt-scanner PWA. No sidebar, no
|
||||||
* topbar — the scanner is its own contained surface. Adds the PWA
|
* topbar - the scanner is its own contained surface. PWA manifest +
|
||||||
* manifest link + theme color so iOS/Android pick up "Add to Home
|
* iOS web-app meta tags are emitted via Next.js's metadata/viewport
|
||||||
* Screen". Auth check matches the dashboard layout so unauthorized
|
* exports so React doesn't try to render a second `<head>` mid-tree
|
||||||
* users still bounce to /login.
|
* (which throws hydration errors in the App Router). Auth check
|
||||||
|
* matches the dashboard layout so unauthorized users still bounce.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
export async function generateMetadata({
|
||||||
|
params,
|
||||||
|
}: {
|
||||||
|
params: Promise<{ portSlug: string }>;
|
||||||
|
}): Promise<Metadata> {
|
||||||
|
const { portSlug } = await params;
|
||||||
|
return {
|
||||||
|
manifest: `/${portSlug}/scan/manifest.webmanifest`,
|
||||||
|
appleWebApp: {
|
||||||
|
capable: true,
|
||||||
|
title: 'PN Scanner',
|
||||||
|
statusBarStyle: 'default',
|
||||||
|
},
|
||||||
|
other: {
|
||||||
|
// Android/Chrome equivalent of the apple-* meta. metadata.appleWebApp
|
||||||
|
// covers iOS only; this preserves the existing PWA hint for Chrome.
|
||||||
|
'mobile-web-app-capable': 'yes',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const viewport: Viewport = {
|
||||||
|
themeColor: '#3a7bc8',
|
||||||
|
width: 'device-width',
|
||||||
|
initialScale: 1,
|
||||||
|
viewportFit: 'cover',
|
||||||
|
};
|
||||||
|
|
||||||
export default async function ScannerLayout({
|
export default async function ScannerLayout({
|
||||||
children,
|
children,
|
||||||
params,
|
params,
|
||||||
@@ -33,16 +64,7 @@ export default async function ScannerLayout({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<QueryProvider>
|
<QueryProvider>
|
||||||
<PortProvider ports={port ? [port] : []} defaultPortId={port?.id ?? null}>
|
<PortProvider ports={[port]} defaultPortId={port.id}>
|
||||||
<head>
|
|
||||||
<link rel="manifest" href={`/${portSlug}/scan/manifest.webmanifest`} />
|
|
||||||
<meta name="theme-color" content="#3a7bc8" />
|
|
||||||
<meta name="mobile-web-app-capable" content="yes" />
|
|
||||||
<meta name="apple-mobile-web-app-capable" content="yes" />
|
|
||||||
<meta name="apple-mobile-web-app-status-bar-style" content="default" />
|
|
||||||
<meta name="apple-mobile-web-app-title" content="PN Scanner" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
|
|
||||||
</head>
|
|
||||||
<div className="min-h-[100dvh] bg-background">{children}</div>
|
<div className="min-h-[100dvh] bg-background">{children}</div>
|
||||||
</PortProvider>
|
</PortProvider>
|
||||||
</QueryProvider>
|
</QueryProvider>
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ export async function GET(_req: Request, { params }: { params: Promise<{ portSlu
|
|||||||
const portName = port?.name ?? 'Port Nimara';
|
const portName = port?.name ?? 'Port Nimara';
|
||||||
|
|
||||||
const manifest = {
|
const manifest = {
|
||||||
name: `${portName} — Scanner`,
|
name: `${portName} - Scanner`,
|
||||||
short_name: 'Scanner',
|
short_name: 'Scanner',
|
||||||
description: `Capture and submit expense receipts for ${portName}.`,
|
description: `Capture and submit expense receipts for ${portName}.`,
|
||||||
start_url: `/${portSlug}/scan`,
|
start_url: `/${portSlug}/scan`,
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import type { Metadata } from 'next';
|
|||||||
import { ScanShell } from '@/components/scan/scan-shell';
|
import { ScanShell } from '@/components/scan/scan-shell';
|
||||||
|
|
||||||
export const metadata: Metadata = {
|
export const metadata: Metadata = {
|
||||||
title: 'Scan receipt — Port Nimara',
|
title: 'Scan receipt - Port Nimara',
|
||||||
};
|
};
|
||||||
|
|
||||||
export default function ScanPage() {
|
export default function ScanPage() {
|
||||||
|
|||||||
@@ -1,4 +1,151 @@
|
|||||||
import { auth } from '@/lib/auth';
|
import type { NextRequest } from 'next/server';
|
||||||
import { toNextJsHandler } from 'better-auth/next-js';
|
import { toNextJsHandler } from 'better-auth/next-js';
|
||||||
|
|
||||||
export const { GET, POST } = toNextJsHandler(auth);
|
import { auth } from '@/lib/auth';
|
||||||
|
import { createAuditLog } from '@/lib/audit';
|
||||||
|
import { logger } from '@/lib/logger';
|
||||||
|
|
||||||
|
const upstream = toNextJsHandler(auth);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wrap better-auth's `[...all]` handler so we can stamp the audit log on
|
||||||
|
* authentication events. Better-auth itself doesn't fire any callback we
|
||||||
|
* can hook on sign-in / sign-out / failed-login — we inspect the route
|
||||||
|
* + response status after the upstream handler finishes.
|
||||||
|
*
|
||||||
|
* Successful sign-in → action 'login' (severity info)
|
||||||
|
* Failed sign-in → action 'login' (severity warning, ok=false)
|
||||||
|
* Sign-out → action 'logout' (userId resolved before cookie
|
||||||
|
* is cleared)
|
||||||
|
*
|
||||||
|
* Audit writes are fire-and-forget (createAuditLog never throws).
|
||||||
|
*/
|
||||||
|
|
||||||
|
interface AuthBody {
|
||||||
|
user?: { id?: string; email?: string };
|
||||||
|
id?: string;
|
||||||
|
email?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function clientMeta(req: NextRequest): { ipAddress: string; userAgent: string } {
|
||||||
|
const ip =
|
||||||
|
req.headers.get('x-forwarded-for')?.split(',')[0]?.trim() ?? req.headers.get('x-real-ip') ?? '';
|
||||||
|
return { ipAddress: ip, userAgent: req.headers.get('user-agent') ?? '' };
|
||||||
|
}
|
||||||
|
|
||||||
|
function logSignIn(args: {
|
||||||
|
req: NextRequest;
|
||||||
|
responseBody: string;
|
||||||
|
status: number;
|
||||||
|
attemptedEmail: string | null;
|
||||||
|
}) {
|
||||||
|
const meta = clientMeta(args.req);
|
||||||
|
let parsed: AuthBody | null = null;
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(args.responseBody) as AuthBody;
|
||||||
|
} catch {
|
||||||
|
/* upstream returned non-JSON */
|
||||||
|
}
|
||||||
|
const userId = parsed?.user?.id ?? parsed?.id ?? null;
|
||||||
|
const email = parsed?.user?.email ?? parsed?.email ?? args.attemptedEmail ?? null;
|
||||||
|
const ok = args.status >= 200 && args.status < 300;
|
||||||
|
|
||||||
|
// entityId is text/unbounded but indexed; truncate the attempted-
|
||||||
|
// email fallback to keep the row predictably sized when the form
|
||||||
|
// sends a giant value. The audit metadata still carries the full
|
||||||
|
// original attempted email for forensic context.
|
||||||
|
const safeAttempted = (args.attemptedEmail ?? '').slice(0, 256);
|
||||||
|
void createAuditLog({
|
||||||
|
userId,
|
||||||
|
portId: null,
|
||||||
|
action: 'login',
|
||||||
|
entityType: 'session',
|
||||||
|
entityId: userId ?? safeAttempted ?? 'unknown',
|
||||||
|
metadata: {
|
||||||
|
ok,
|
||||||
|
status: args.status,
|
||||||
|
attemptedEmail: args.attemptedEmail ?? email ?? null,
|
||||||
|
},
|
||||||
|
ipAddress: meta.ipAddress,
|
||||||
|
userAgent: meta.userAgent,
|
||||||
|
severity: ok ? 'info' : 'warning',
|
||||||
|
source: 'auth',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function logSignOut(req: NextRequest) {
|
||||||
|
const meta = clientMeta(req);
|
||||||
|
let userId: string | null = null;
|
||||||
|
try {
|
||||||
|
const session = await auth.api.getSession({ headers: req.headers });
|
||||||
|
userId = session?.user?.id ?? null;
|
||||||
|
} catch {
|
||||||
|
/* unauthenticated or expired */
|
||||||
|
}
|
||||||
|
|
||||||
|
void createAuditLog({
|
||||||
|
userId,
|
||||||
|
portId: null,
|
||||||
|
action: 'logout',
|
||||||
|
entityType: 'session',
|
||||||
|
entityId: userId ?? 'unknown',
|
||||||
|
metadata: {},
|
||||||
|
ipAddress: meta.ipAddress,
|
||||||
|
userAgent: meta.userAgent,
|
||||||
|
severity: 'info',
|
||||||
|
source: 'auth',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function withAuthAudit(req: NextRequest): Promise<Response> {
|
||||||
|
const url = new URL(req.url);
|
||||||
|
const path = url.pathname;
|
||||||
|
const isSignIn = path.endsWith('/sign-in/email') || path.endsWith('/sign-in');
|
||||||
|
const isSignOut = path.endsWith('/sign-out');
|
||||||
|
|
||||||
|
// Read the request body BEFORE forwarding so we can extract the
|
||||||
|
// attempted email even when the credentials are wrong (the upstream
|
||||||
|
// handler will consume the body stream and we can't read it twice).
|
||||||
|
let attemptedEmail: string | null = null;
|
||||||
|
let forwardReq: NextRequest = req;
|
||||||
|
if (isSignIn && req.method === 'POST') {
|
||||||
|
try {
|
||||||
|
const raw = await req.text();
|
||||||
|
try {
|
||||||
|
attemptedEmail = (JSON.parse(raw) as { email?: string }).email ?? null;
|
||||||
|
} catch {
|
||||||
|
/* form-encoded or non-JSON */
|
||||||
|
}
|
||||||
|
// Reconstruct a fresh Request so the upstream handler can read it.
|
||||||
|
forwardReq = new Request(req.url, {
|
||||||
|
method: req.method,
|
||||||
|
headers: req.headers,
|
||||||
|
body: raw,
|
||||||
|
}) as unknown as NextRequest;
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn({ err }, 'Failed to read sign-in body for audit');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Capture sign-out userId BEFORE the upstream handler clears the cookie.
|
||||||
|
const signOutPromise = isSignOut ? logSignOut(req) : null;
|
||||||
|
|
||||||
|
const res = await upstream.POST(forwardReq);
|
||||||
|
|
||||||
|
if (isSignIn) {
|
||||||
|
try {
|
||||||
|
const body = await res.clone().text();
|
||||||
|
logSignIn({ req, responseBody: body, status: res.status, attemptedEmail });
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn({ err }, 'Failed to capture sign-in response for audit');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (signOutPromise) void signOutPromise;
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const GET = upstream.GET;
|
||||||
|
export async function POST(req: NextRequest): Promise<Response> {
|
||||||
|
return withAuthAudit(req);
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
import { NextRequest, NextResponse } from 'next/server';
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
import { errorResponse } from '@/lib/errors';
|
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||||
import { consumeCrmInvite } from '@/lib/services/crm-invite.service';
|
import { consumeCrmInvite } from '@/lib/services/crm-invite.service';
|
||||||
|
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||||
|
|
||||||
const bodySchema = z.object({
|
const bodySchema = z.object({
|
||||||
token: z.string().min(1),
|
token: z.string().min(1),
|
||||||
@@ -10,27 +11,30 @@ const bodySchema = z.object({
|
|||||||
});
|
});
|
||||||
|
|
||||||
export async function POST(req: NextRequest): Promise<NextResponse> {
|
export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||||
|
// 10/hour/IP — bounds brute-force against the CRM invite token.
|
||||||
|
const limited = await enforcePublicRateLimit(req, 'portalToken');
|
||||||
|
if (limited) return limited;
|
||||||
|
|
||||||
|
try {
|
||||||
let body: unknown;
|
let body: unknown;
|
||||||
try {
|
try {
|
||||||
body = await req.json();
|
body = await req.json();
|
||||||
} catch {
|
} catch {
|
||||||
return NextResponse.json({ message: 'Invalid request body' }, { status: 400 });
|
// Use {error} via errorResponse so the envelope matches every other
|
||||||
|
// route (auditor-F §32 — was emitting {message} as a third variant).
|
||||||
|
throw new ValidationError('Invalid request body');
|
||||||
}
|
}
|
||||||
|
|
||||||
const parsed = bodySchema.safeParse(body);
|
const parsed = bodySchema.safeParse(body);
|
||||||
if (!parsed.success) {
|
if (!parsed.success) {
|
||||||
return NextResponse.json(
|
throw new ValidationError(parsed.error.errors[0]?.message ?? 'Invalid input');
|
||||||
{ message: parsed.error.errors[0]?.message ?? 'Invalid input' },
|
|
||||||
{ status: 400 },
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
|
||||||
const result = await consumeCrmInvite({
|
const result = await consumeCrmInvite({
|
||||||
token: parsed.data.token,
|
token: parsed.data.token,
|
||||||
password: parsed.data.password,
|
password: parsed.data.password,
|
||||||
});
|
});
|
||||||
return NextResponse.json({ success: true, email: result.email });
|
return NextResponse.json({ data: { email: result.email } });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
return errorResponse(err);
|
return errorResponse(err);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
import { NextResponse } from 'next/server';
|
import { NextResponse } from 'next/server';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Liveness probe — confirms the Next.js process is responding.
|
* Liveness probe - confirms the Next.js process is responding.
|
||||||
*
|
*
|
||||||
* Returns 200 unconditionally; if the process is wedged or has crashed
|
* Returns 200 unconditionally; if the process is wedged or has crashed
|
||||||
* the request never lands here at all. Do NOT include database/Redis/MinIO
|
* the request never lands here at all. Do NOT include database/Redis/MinIO
|
||||||
* checks in this endpoint — a transient downstream blip should drop the
|
* checks in this endpoint - a transient downstream blip should drop the
|
||||||
* pod from the load balancer (readiness), not restart the pod (liveness).
|
* pod from the load balancer (readiness), not restart the pod (liveness).
|
||||||
*
|
*
|
||||||
* For deep dependency checks, hit `/api/ready` instead.
|
* For deep dependency checks, hit `/api/ready` instead.
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
import { NextRequest, NextResponse } from 'next/server';
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
import { errorResponse } from '@/lib/errors';
|
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||||
|
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||||
import { activateAccount } from '@/lib/services/portal-auth.service';
|
import { activateAccount } from '@/lib/services/portal-auth.service';
|
||||||
|
|
||||||
const bodySchema = z.object({
|
const bodySchema = z.object({
|
||||||
@@ -10,22 +11,23 @@ const bodySchema = z.object({
|
|||||||
});
|
});
|
||||||
|
|
||||||
export async function POST(req: NextRequest): Promise<NextResponse> {
|
export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||||
|
// 10/hour/IP — bounds brute-force against the 32-byte activation token.
|
||||||
|
const limited = await enforcePublicRateLimit(req, 'portalToken');
|
||||||
|
if (limited) return limited;
|
||||||
|
|
||||||
|
try {
|
||||||
let body: unknown;
|
let body: unknown;
|
||||||
try {
|
try {
|
||||||
body = await req.json();
|
body = await req.json();
|
||||||
} catch {
|
} catch {
|
||||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 });
|
throw new ValidationError('Invalid request body');
|
||||||
}
|
}
|
||||||
|
|
||||||
const parsed = bodySchema.safeParse(body);
|
const parsed = bodySchema.safeParse(body);
|
||||||
if (!parsed.success) {
|
if (!parsed.success) {
|
||||||
return NextResponse.json(
|
throw new ValidationError(parsed.error.errors[0]?.message ?? 'Invalid input');
|
||||||
{ error: parsed.error.errors[0]?.message ?? 'Invalid input' },
|
|
||||||
{ status: 400 },
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
|
||||||
await activateAccount(parsed.data.token, parsed.data.password);
|
await activateAccount(parsed.data.token, parsed.data.password);
|
||||||
return NextResponse.json({ success: true });
|
return NextResponse.json({ success: true });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|||||||
44
src/app/api/portal/auth/change-password/route.ts
Normal file
44
src/app/api/portal/auth/change-password/route.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
|
import { z } from 'zod';
|
||||||
|
import { eq } from 'drizzle-orm';
|
||||||
|
|
||||||
|
import { db } from '@/lib/db';
|
||||||
|
import { portalUsers } from '@/lib/db/schema/portal';
|
||||||
|
import { errorResponse, UnauthorizedError, ValidationError } from '@/lib/errors';
|
||||||
|
import { getPortalSession } from '@/lib/portal/auth';
|
||||||
|
import { changePortalPassword } from '@/lib/services/portal-auth.service';
|
||||||
|
|
||||||
|
const bodySchema = z.object({
|
||||||
|
currentPassword: z.string().min(1),
|
||||||
|
newPassword: z.string().min(9),
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||||
|
try {
|
||||||
|
const session = await getPortalSession();
|
||||||
|
if (!session) throw new UnauthorizedError('Portal session required');
|
||||||
|
|
||||||
|
let body: unknown;
|
||||||
|
try {
|
||||||
|
body = await req.json();
|
||||||
|
} catch {
|
||||||
|
throw new ValidationError('Invalid request body');
|
||||||
|
}
|
||||||
|
const { currentPassword, newPassword } = bodySchema.parse(body);
|
||||||
|
|
||||||
|
const user = await db.query.portalUsers.findFirst({
|
||||||
|
where: eq(portalUsers.email, session.email),
|
||||||
|
});
|
||||||
|
if (!user) throw new UnauthorizedError('Portal account not found');
|
||||||
|
|
||||||
|
await changePortalPassword({
|
||||||
|
portalUserId: user.id,
|
||||||
|
currentPassword,
|
||||||
|
newPassword,
|
||||||
|
});
|
||||||
|
|
||||||
|
return NextResponse.json({ data: { ok: true } });
|
||||||
|
} catch (error) {
|
||||||
|
return errorResponse(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,23 +1,30 @@
|
|||||||
import { NextRequest, NextResponse } from 'next/server';
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
|
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||||
|
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||||
import { logger } from '@/lib/logger';
|
import { logger } from '@/lib/logger';
|
||||||
import { requestPasswordReset } from '@/lib/services/portal-auth.service';
|
import { requestPasswordReset } from '@/lib/services/portal-auth.service';
|
||||||
|
|
||||||
const bodySchema = z.object({ email: z.string().email() });
|
const bodySchema = z.object({ email: z.string().email() });
|
||||||
|
|
||||||
export async function POST(req: NextRequest): Promise<NextResponse> {
|
export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||||
|
// 3/hour/IP — tightest of the portal limiters because each successful
|
||||||
|
// call sends an outbound email and timing differences here are the
|
||||||
|
// primary email-enumeration vector.
|
||||||
|
const limited = await enforcePublicRateLimit(req, 'portalForgot');
|
||||||
|
if (limited) return limited;
|
||||||
|
|
||||||
|
try {
|
||||||
let body: unknown;
|
let body: unknown;
|
||||||
try {
|
try {
|
||||||
body = await req.json();
|
body = await req.json();
|
||||||
} catch {
|
} catch {
|
||||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 });
|
throw new ValidationError('Invalid request body');
|
||||||
}
|
}
|
||||||
|
|
||||||
const parsed = bodySchema.safeParse(body);
|
const parsed = bodySchema.safeParse(body);
|
||||||
if (!parsed.success) {
|
if (!parsed.success) throw new ValidationError('Invalid email address');
|
||||||
return NextResponse.json({ error: 'Invalid email address' }, { status: 400 });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Always return 200 to prevent account-enumeration. Errors are logged
|
// Always return 200 to prevent account-enumeration. Errors are logged
|
||||||
// server-side, never surfaced to the client.
|
// server-side, never surfaced to the client.
|
||||||
@@ -27,4 +34,7 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
|||||||
logger.error({ err }, 'Portal forgot-password failed (swallowed)');
|
logger.error({ err }, 'Portal forgot-password failed (swallowed)');
|
||||||
}
|
}
|
||||||
return NextResponse.json({ success: true });
|
return NextResponse.json({ success: true });
|
||||||
|
} catch (error) {
|
||||||
|
return errorResponse(error);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
import { NextRequest, NextResponse } from 'next/server';
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
import { errorResponse } from '@/lib/errors';
|
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||||
|
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||||
import { resetPassword } from '@/lib/services/portal-auth.service';
|
import { resetPassword } from '@/lib/services/portal-auth.service';
|
||||||
|
|
||||||
const bodySchema = z.object({
|
const bodySchema = z.object({
|
||||||
@@ -10,22 +11,23 @@ const bodySchema = z.object({
|
|||||||
});
|
});
|
||||||
|
|
||||||
export async function POST(req: NextRequest): Promise<NextResponse> {
|
export async function POST(req: NextRequest): Promise<NextResponse> {
|
||||||
|
// 10/hour/IP — bounds brute-force against the 32-byte reset token.
|
||||||
|
const limited = await enforcePublicRateLimit(req, 'portalToken');
|
||||||
|
if (limited) return limited;
|
||||||
|
|
||||||
|
try {
|
||||||
let body: unknown;
|
let body: unknown;
|
||||||
try {
|
try {
|
||||||
body = await req.json();
|
body = await req.json();
|
||||||
} catch {
|
} catch {
|
||||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 });
|
throw new ValidationError('Invalid request body');
|
||||||
}
|
}
|
||||||
|
|
||||||
const parsed = bodySchema.safeParse(body);
|
const parsed = bodySchema.safeParse(body);
|
||||||
if (!parsed.success) {
|
if (!parsed.success) {
|
||||||
return NextResponse.json(
|
throw new ValidationError(parsed.error.errors[0]?.message ?? 'Invalid input');
|
||||||
{ error: parsed.error.errors[0]?.message ?? 'Invalid input' },
|
|
||||||
{ status: 400 },
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
|
||||||
await resetPassword(parsed.data.token, parsed.data.password);
|
await resetPassword(parsed.data.token, parsed.data.password);
|
||||||
return NextResponse.json({ success: true });
|
return NextResponse.json({ success: true });
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
import { NextRequest, NextResponse } from 'next/server';
|
import { NextRequest, NextResponse } from 'next/server';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
import { errorResponse } from '@/lib/errors';
|
import { enforcePublicRateLimit } from '@/lib/api/route-helpers';
|
||||||
|
import { errorResponse, ValidationError } from '@/lib/errors';
|
||||||
import { PORTAL_COOKIE } from '@/lib/portal/auth';
|
import { PORTAL_COOKIE } from '@/lib/portal/auth';
|
||||||
import { signIn } from '@/lib/services/portal-auth.service';
|
import { signIn } from '@/lib/services/portal-auth.service';
|
||||||
|
|
||||||
@@ -17,14 +18,24 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
|
|||||||
try {
|
try {
|
||||||
body = await req.json();
|
body = await req.json();
|
||||||
} catch {
|
} catch {
|
||||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 });
|
return errorResponse(new ValidationError('Email format is invalid'));
|
||||||
}
|
}
|
||||||
|
|
||||||
const parsed = bodySchema.safeParse(body);
|
const parsed = bodySchema.safeParse(body);
|
||||||
if (!parsed.success) {
|
if (!parsed.success) {
|
||||||
return NextResponse.json({ error: 'Invalid email or password' }, { status: 400 });
|
return errorResponse(new ValidationError('Email format is invalid'));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Per-(ip,email) bucket: 5 attempts / 15min. Keyed on email-lowercase so
|
||||||
|
// the limiter is per-account-per-IP, not just per-IP — a NATed network
|
||||||
|
// shouldn't be able to lock a single victim by burning their bucket.
|
||||||
|
const limited = await enforcePublicRateLimit(
|
||||||
|
req,
|
||||||
|
'portalSignIn',
|
||||||
|
parsed.data.email.toLowerCase(),
|
||||||
|
);
|
||||||
|
if (limited) return limited;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const result = await signIn(parsed.data);
|
const result = await signIn(parsed.data);
|
||||||
const res = NextResponse.json({ success: true });
|
const res = NextResponse.json({ success: true });
|
||||||
|
|||||||
@@ -1,20 +1,18 @@
|
|||||||
import { NextResponse } from 'next/server';
|
import { NextResponse } from 'next/server';
|
||||||
|
|
||||||
|
import { errorResponse, NotFoundError } from '@/lib/errors';
|
||||||
|
import { logger } from '@/lib/logger';
|
||||||
import { withPortalAuth } from '@/lib/portal/helpers';
|
import { withPortalAuth } from '@/lib/portal/helpers';
|
||||||
import { getPortalDashboard } from '@/lib/services/portal.service';
|
import { getPortalDashboard } from '@/lib/services/portal.service';
|
||||||
import { logger } from '@/lib/logger';
|
|
||||||
|
|
||||||
export const GET = withPortalAuth(async (_req, session) => {
|
export const GET = withPortalAuth(async (_req, session) => {
|
||||||
try {
|
try {
|
||||||
const dashboard = await getPortalDashboard(session.clientId, session.portId);
|
const dashboard = await getPortalDashboard(session.clientId, session.portId);
|
||||||
|
if (!dashboard) throw new NotFoundError('client');
|
||||||
if (!dashboard) {
|
|
||||||
return NextResponse.json({ error: 'Client not found' }, { status: 404 });
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ data: dashboard });
|
return NextResponse.json({ data: dashboard });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error({ error }, 'Portal dashboard fetch failed');
|
logger.error({ err: error }, 'Portal dashboard fetch failed');
|
||||||
return NextResponse.json({ error: 'Failed to load dashboard' }, { status: 500 });
|
return errorResponse(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user