fix(audit): non-Documenso backlog sweep — port-binding, NULLS NOT DISTINCT, custom merge tokens, company docs
Some checks failed
Build & Push Docker Images / lint (push) Successful in 1m36s
Build & Push Docker Images / build-and-push (push) Failing after 4m27s

Wave through the remaining audit-final-deferred items that aren't blocked
on the back-burnered Documenso work.

Multi-tenant isolation:
- Storage proxy ProxyTokenPayload gains optional `p` (port slug) claim;
  verifier asserts the storage key starts with `${p}/`. Defense-in-depth against a
  buggy issuer in some future code path that mixes port scopes — every
  storage key generated by generateStorageKey() already prefixes the
  slug. document-sends opts in for 24h emailed download links; other
  callers continue working unchanged via the optional field.

DB schema reconciliation:
- Migration 0047 rebuilds system_settings unique index with NULLS NOT
  DISTINCT (Postgres 15+) so global settings (port_id IS NULL) are
  uniquely keyed by `key` alone. Surfaced and deduplicated 65 duplicate
  (storage_backend, NULL) rows that had accumulated from race-prone
  delete-then-insert patterns in ocr-config / settings / residential-
  stages / ai-budget services. All four services converted to true
  onConflictDoUpdate upserts so the race window is closed.

API uniformity:
- Response shape standardization: 16 routes converted from
  `{ success: true }` to 204 No Content. CLAUDE.md documents the
  convention (`{ data: <T> }` for content, 204 for empty mutations,
  portal-auth retains `{ success: true }` for the frontend's auth chain).
- req.json() → parseBody() migration across 9 admin/CRM routes
  (custom-fields, expenses/export ×3, currency convert,
  search/recently-viewed, admin/duplicates, berths/pdf-{upload-url,
  versions, parse-results}). Uniform 400 error shapes for
  ZodError-flagged bodies.

Custom-fields merge tokens (shipped end-to-end):
- merge-fields.ts gains CUSTOM_MERGE_TOKEN_RE + helpers for the
  `{{custom.<fieldName>}}` shape.
- document-templates validator accepts the dynamic shape alongside
  the static catalog tokens.
- document-sends.service mergeCustomFieldValues resolver fetches
  per-port custom_field_definitions for client/interest/berth contexts
  and substitutes stored values keyed by `{{custom.fieldName}}`.
- custom-fields-manager amber banner updated to reflect that merge
  tokens now expand (search index + entity-diff remain documented
  design limitations).

/api/v1/files cross-entity filtering:
- Validator + listFiles + uploadFile accept companyId AND yachtId
  alongside clientId. file-upload-zone propagates both.
- New CompanyFilesTab component mirrors ClientFilesTab; restored as a
  visible Documents tab in company-tabs.tsx (was a hidden stub).

Inline TODOs:
- Reviewed remaining two TODOs (per-user reminder schedule, import
  worker handlers). Both are placeholders for future feature surfaces,
  not bugs — per-port digest works for every customer; nothing
  currently enqueues import jobs (verified). Annotated in BACKLOG.

BACKLOG.md updated to reflect what landed and what's still pending
(Documenso-related items still bundled with the back-burnered phases).

Tests: 1185/1185 vitest, tsc clean.
This commit is contained in:
2026-05-08 02:20:27 +02:00
parent 60365dc3de
commit 8dc16dcd2e
49 changed files with 578 additions and 254 deletions

View File

@@ -8,8 +8,10 @@
*/
import { NextResponse } from 'next/server';
import { z } from 'zod';
import { type RouteHandler } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers';
import { db } from '@/lib/db';
import { berths } from '@/lib/db/schema/berths';
import { and, eq } from 'drizzle-orm';
@@ -17,17 +19,17 @@ import { errorResponse, NotFoundError, ValidationError } from '@/lib/errors';
import { getMaxUploadMb } from '@/lib/services/berth-pdf.service';
import { getStorageBackend } from '@/lib/storage';
interface PostBody {
fileName: string;
const postBodySchema = z.object({
fileName: z.string().min(1).max(255),
/** Size hint in bytes — used to early-reject oversized uploads before we
* burn a presigned URL. */
sizeBytes?: number;
}
sizeBytes: z.number().int().nonnegative().optional(),
});
export const postHandler: RouteHandler = async (req, ctx, params) => {
try {
const body = (await req.json()) as Partial<PostBody>;
const fileName = (body.fileName ?? '').trim();
const body = await parseBody(req, postBodySchema);
const fileName = body.fileName.trim();
if (!fileName) throw new ValidationError('fileName is required');
// Tenant-scoped berth lookup. Without `eq(berths.portId, ctx.portId)` a

View File

@@ -7,23 +7,27 @@
*/
import { NextResponse } from 'next/server';
import { z } from 'zod';
import { type RouteHandler } from '@/lib/api/helpers';
import { parseBody } from '@/lib/api/route-helpers';
import { errorResponse, ValidationError } from '@/lib/errors';
import { listBerthPdfVersions, uploadBerthPdf } from '@/lib/services/berth-pdf.service';
interface PostBody {
storageKey: string;
fileName: string;
fileSizeBytes: number;
sha256: string;
parseResults?: {
engine: 'acroform' | 'ocr' | 'ai';
extracted?: Record<string, unknown>;
meanConfidence?: number;
warnings?: string[];
};
}
/** Request body for registering an uploaded berth PDF version.
 * Validated via parseBody(); a ZodError yields a uniform 400 response. */
const postBodySchema = z.object({
// Storage key returned by the presign step; prefix/shape is re-checked
// against STORAGE_KEY_RE in the handler before use.
storageKey: z.string().min(1),
fileName: z.string().min(1).max(255),
// Must be a positive integer — zero-byte uploads are rejected here.
fileSizeBytes: z.number().int().positive(),
// Client-computed digest of the uploaded bytes (integrity check downstream).
sha256: z.string().min(1),
// Optional parse output attached at upload time; engine is a closed set.
parseResults: z
.object({
engine: z.enum(['acroform', 'ocr', 'ai']),
extracted: z.record(z.string(), z.unknown()).optional(),
// NOTE(review): presumably a 0..1 confidence score — range not enforced here; confirm.
meanConfidence: z.number().optional(),
warnings: z.array(z.string()).optional(),
})
.optional(),
});
export const getHandler: RouteHandler = async (_req, ctx, params) => {
try {
@@ -47,16 +51,7 @@ const STORAGE_KEY_RE =
export const postHandler: RouteHandler = async (req, ctx, params) => {
try {
const body = (await req.json()) as Partial<PostBody>;
if (!body.storageKey || !body.fileName) {
throw new ValidationError('storageKey and fileName are required');
}
if (typeof body.fileSizeBytes !== 'number' || body.fileSizeBytes <= 0) {
throw new ValidationError('fileSizeBytes must be a positive integer');
}
if (!body.sha256 || typeof body.sha256 !== 'string') {
throw new ValidationError('sha256 is required');
}
const body = await parseBody(req, postBodySchema);
const expectedPrefix = `berths/${params.id!}/uploads/`;
if (!body.storageKey.startsWith(expectedPrefix) || !STORAGE_KEY_RE.test(body.storageKey)) {
throw new ValidationError(

View File

@@ -1,25 +1,23 @@
import { NextResponse } from 'next/server';
import { z } from 'zod';
import { type RouteHandler } from '@/lib/api/helpers';
import { errorResponse, ValidationError } from '@/lib/errors';
import { parseBody } from '@/lib/api/route-helpers';
import { errorResponse } from '@/lib/errors';
import { applyParseResults, type ExtractedBerthFields } from '@/lib/services/berth-pdf.service';
interface PostBody {
versionId: string;
fieldsToApply: Partial<ExtractedBerthFields>;
}
/** Request body for applying parsed berth fields to a PDF version.
 * fieldsToApply stays loosely typed here (arbitrary string keys); the
 * handler narrows it to Partial<ExtractedBerthFields> before the service call. */
const postBodySchema = z.object({
versionId: z.string().min(1),
fieldsToApply: z.record(z.string(), z.unknown()),
});
export const postHandler: RouteHandler = async (req, ctx, params) => {
try {
const body = (await req.json()) as Partial<PostBody>;
if (!body.versionId) throw new ValidationError('versionId is required');
if (!body.fieldsToApply || typeof body.fieldsToApply !== 'object') {
throw new ValidationError('fieldsToApply must be an object');
}
const body = await parseBody(req, postBodySchema);
const result = await applyParseResults(
params.id!,
body.versionId,
body.fieldsToApply,
body.fieldsToApply as Partial<ExtractedBerthFields>,
ctx.portId,
);
return NextResponse.json({ data: result });

View File

@@ -46,7 +46,7 @@ export const DELETE = withAuth(
ipAddress: ctx.ipAddress,
userAgent: ctx.userAgent,
});
return NextResponse.json({ success: true });
return new NextResponse(null, { status: 204 });
} catch (error) {
return errorResponse(error);
}