diff --git a/docs/audit-final-deferred.md b/docs/audit-final-deferred.md
index d89a691..7b71d27 100644
--- a/docs/audit-final-deferred.md
+++ b/docs/audit-final-deferred.md
@@ -1,5 +1,13 @@
# Final audit deferred findings
+> **Status update (audit-v3 round)**: most of the v2 deferred items have
+> now landed. Items struck through below are completed. The remaining
+> open items are bigger refactors (custom-fields per-entity routes,
+> systemSettings PK reconciliation, Documenso v2 voidDocument verification,
+> partial-vs-composite archived index conversion, storage-proxy port_id
+> claim, Documenso webhook port_id enforcement, response-shape
+> standardization, berths.current_pdf_version_id Drizzle FK).
+
The pre-merge audit on `feat/berth-recommender` produced ~30 findings. The
critical + high-severity items were fixed in-branch. The items below are
medium / low severity and deferred to follow-up issues so the merge isn't
diff --git a/src/app/(dashboard)/[portSlug]/expenses/scan/page.tsx b/src/app/(dashboard)/[portSlug]/expenses/scan/page.tsx
index a9902d4..984c5ad 100644
--- a/src/app/(dashboard)/[portSlug]/expenses/scan/page.tsx
+++ b/src/app/(dashboard)/[portSlug]/expenses/scan/page.tsx
@@ -268,10 +268,18 @@ export default function ScanReceiptPage() {
Scanning receipt...
)}
+
+ {scanMutation.isError && (
+
+ Couldn't read this receipt automatically. {' '}
+ You can still fill in the details manually below — the receipt image will save with
+ the expense.
+
+ )}
- {(scanResult || scanMutation.isSuccess) && (
+ {(scanResult || scanMutation.isSuccess || scanMutation.isError || uploadedFile) && (
diff --git a/src/app/api/public/interests/route.ts b/src/app/api/public/interests/route.ts
index 479babd..a054a5c 100644
--- a/src/app/api/public/interests/route.ts
+++ b/src/app/api/public/interests/route.ts
@@ -250,8 +250,12 @@ export async function POST(req: NextRequest) {
});
// ─── Post-commit side-effects (fire-and-forget) ─────────────────────────
+ // `AuditLogParams.userId` is `string | null`; null is the documented
+ // "system-generated" sentinel and matches `audit_logs.user_id` being
+ // nullable in the schema. The earlier `null as unknown as string`
+ // cast was a relic from before the type was widened.
void createAuditLog({
- userId: null as unknown as string,
+ userId: null,
portId,
action: 'create',
entityType: 'interest',
diff --git a/src/app/api/v1/saved-views/route.ts b/src/app/api/v1/saved-views/route.ts
index ce9bbc0..2b9b3d4 100644
--- a/src/app/api/v1/saved-views/route.ts
+++ b/src/app/api/v1/saved-views/route.ts
@@ -11,6 +11,11 @@ const listQuerySchema = z.object({
entityType: z.string().optional(),
});
+// Saved views are owner-only by design: every service call filters by
+// (portId, userId), so any authenticated user can manage exactly their
+// own views. We deliberately skip `withPermission(...)` here — there is
+// no resource-level permission to add. See `savedViewsService` for the
+// ownership filter that backs this route.
export const GET = withAuth(async (req, ctx) => {
try {
const { entityType } = parseQuery(req, listQuerySchema);
diff --git a/src/components/admin/storage-admin-panel.tsx b/src/components/admin/storage-admin-panel.tsx
index eaab0d5..49b6a61 100644
--- a/src/components/admin/storage-admin-panel.tsx
+++ b/src/components/admin/storage-admin-panel.tsx
@@ -3,6 +3,7 @@
import { useState } from 'react';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { CheckCircle2, HardDrive, Loader2, RefreshCw, ServerCog, XCircle } from 'lucide-react';
+import { toast } from 'sonner';
import { PageHeader } from '@/components/shared/page-header';
import { Button } from '@/components/ui/button';
@@ -56,6 +57,8 @@ export function StorageAdminPanel() {
setDryRun(result.data);
setConfirmOpen(true);
},
+ onError: (e) =>
+ toast.error(e instanceof Error ? e.message : 'Storage migration dry-run failed'),
});
const migrateMutation = useMutation({
@@ -64,11 +67,14 @@ export function StorageAdminPanel() {
method: 'POST',
body: JSON.stringify({ ...opts, dryRun: false }),
}),
- onSuccess: () => {
+ onSuccess: (result) => {
setConfirmOpen(false);
setDryRun(null);
+ const copied = result.data.rowsMigrated ?? 0;
+ toast.success(`Storage migration complete (${copied} file${copied === 1 ? '' : 's'} copied)`);
queryClient.invalidateQueries({ queryKey: ['admin', 'storage', 'status'] });
},
+ onError: (e) => toast.error(e instanceof Error ? e.message : 'Storage migration failed'),
});
const testMutation = useMutation({
diff --git a/src/components/admin/users/user-list.tsx b/src/components/admin/users/user-list.tsx
index 2853b0d..8b40a75 100644
--- a/src/components/admin/users/user-list.tsx
+++ b/src/components/admin/users/user-list.tsx
@@ -7,6 +7,7 @@ import { Pencil, Trash2, Plus, ShieldCheck, ShieldOff } from 'lucide-react';
import { DataTable } from '@/components/shared/data-table';
import { PageHeader } from '@/components/shared/page-header';
import { ConfirmationDialog } from '@/components/shared/confirmation-dialog';
+import { PermissionGate } from '@/components/shared/permission-gate';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { apiFetch } from '@/lib/api/client';
@@ -111,10 +112,12 @@ export function UserList() {
header: '',
cell: ({ row }) => (
-
handleEditUser(row.original)}>
-
- Edit
-
+
+ handleEditUser(row.original)}>
+
+ Edit
+
+
diff --git a/src/components/email/email-threads-list.tsx b/src/components/email/email-threads-list.tsx
index bf1aaf5..b2c1e56 100644
--- a/src/components/email/email-threads-list.tsx
+++ b/src/components/email/email-threads-list.tsx
@@ -5,6 +5,8 @@ import { formatDistanceToNow } from 'date-fns';
import { Mail } from 'lucide-react';
import { apiFetch } from '@/lib/api/client';
+import { EmptyState } from '@/components/shared/empty-state';
+import { Skeleton } from '@/components/ui/skeleton';
interface Thread {
id: string;
@@ -27,20 +29,32 @@ export function EmailThreadsList() {
});
if (isLoading) {
- return Loading threads…
;
+ // Skeleton rows shaped like the real list so the layout doesn't pop.
+ return (
+
+ {Array.from({ length: 4 }).map((_, i) => (
+
+ ))}
+
+ );
}
const threads = data?.data ?? [];
if (threads.length === 0) {
return (
-
-
-
No email threads yet.
-
- Connect an account and trigger a sync to see incoming threads here.
-
-
+
);
}
diff --git a/src/components/invoices/invoice-detail.tsx b/src/components/invoices/invoice-detail.tsx
index 8e93e81..cb8ee46 100644
--- a/src/components/invoices/invoice-detail.tsx
+++ b/src/components/invoices/invoice-detail.tsx
@@ -11,6 +11,8 @@ import { format } from 'date-fns';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
+import { PermissionGate } from '@/components/shared/permission-gate';
+import { toast } from 'sonner';
import { Label } from '@/components/ui/label';
import { Input } from '@/components/ui/input';
import {
@@ -93,9 +95,11 @@ export function InvoiceDetail({ invoiceId }: InvoiceDetailProps) {
const sendMutation = useMutation({
mutationFn: () => apiFetch(`/api/v1/invoices/${invoiceId}/send`, { method: 'POST' }),
onSuccess: () => {
+ toast.success('Invoice sent');
queryClient.invalidateQueries({ queryKey: ['invoices', invoiceId] });
queryClient.invalidateQueries({ queryKey: ['invoices'] });
},
+ onError: (e) => toast.error(e instanceof Error ? e.message : 'Could not send invoice'),
});
const paymentForm = useForm({
@@ -110,9 +114,11 @@ export function InvoiceDetail({ invoiceId }: InvoiceDetailProps) {
body: values,
}),
onSuccess: () => {
+ toast.success('Payment recorded');
queryClient.invalidateQueries({ queryKey: ['invoices', invoiceId] });
queryClient.invalidateQueries({ queryKey: ['invoices'] });
},
+ onError: (e) => toast.error(e instanceof Error ? e.message : 'Could not record payment'),
});
if (isLoading) {
@@ -150,19 +156,21 @@ export function InvoiceDetail({ invoiceId }: InvoiceDetailProps) {
{invoice.status === 'draft' && (
-
sendMutation.mutate()}
- disabled={sendMutation.isPending}
- >
- {sendMutation.isPending ? (
-
- ) : (
-
- )}
- Send Invoice
-
+
+ sendMutation.mutate()}
+ disabled={sendMutation.isPending}
+ >
+ {sendMutation.isPending ? (
+
+ ) : (
+
+ )}
+ Send Invoice
+
+
)}
@@ -347,63 +355,69 @@ export function InvoiceDetail({ invoiceId }: InvoiceDetailProps) {
) : (
-
-
- Record Payment
-
-
-
-
-
+
+
+
+ Record Payment
+
+
+
+
+
+
)}
diff --git a/src/lib/db/migrations/0037_missing_fk_indexes.sql b/src/lib/db/migrations/0037_missing_fk_indexes.sql
new file mode 100644
index 0000000..ead9926
--- /dev/null
+++ b/src/lib/db/migrations/0037_missing_fk_indexes.sql
@@ -0,0 +1,38 @@
+-- Audit-final v2 follow-up: cover the FK columns Postgres doesn't auto-index.
+-- Without these, deleting a parent row (or RESTRICT-checking on update) walks
+-- the child table fully. CREATE INDEX IF NOT EXISTS keeps the migration safe
+-- to re-run.
+
+-- berth_reservations
+CREATE INDEX IF NOT EXISTS idx_br_interest
+ ON berth_reservations(interest_id);
+CREATE INDEX IF NOT EXISTS idx_br_contract_file
+ ON berth_reservations(contract_file_id);
+
+-- documents (file FKs)
+CREATE INDEX IF NOT EXISTS idx_docs_file_id
+ ON documents(file_id);
+CREATE INDEX IF NOT EXISTS idx_docs_signed_file_id
+ ON documents(signed_file_id);
+
+-- document_events
+CREATE INDEX IF NOT EXISTS idx_de_signer
+ ON document_events(signer_id);
+
+-- document_templates
+CREATE INDEX IF NOT EXISTS idx_dt_source_file
+ ON document_templates(source_file_id);
+
+-- form_submissions
+CREATE INDEX IF NOT EXISTS idx_fs_template
+ ON form_submissions(form_template_id);
+CREATE INDEX IF NOT EXISTS idx_fs_client
+ ON form_submissions(client_id);
+
+-- document_sends
+CREATE INDEX IF NOT EXISTS idx_ds_brochure
+ ON document_sends(brochure_id);
+CREATE INDEX IF NOT EXISTS idx_ds_brochure_version
+ ON document_sends(brochure_version_id);
+CREATE INDEX IF NOT EXISTS idx_ds_sent_by
+ ON document_sends(sent_by_user_id);
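+
+-- Note: these index names mirror the Drizzle `index(...)` entries added to the
+-- schema files (reservations.ts, documents.ts, brochures.ts) in this same
+-- change, so the raw SQL and the schema definitions stay in step.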
diff --git a/src/lib/db/migrations/0038_document_sends_sent_by_user_fk.sql b/src/lib/db/migrations/0038_document_sends_sent_by_user_fk.sql
new file mode 100644
index 0000000..0b348d1
--- /dev/null
+++ b/src/lib/db/migrations/0038_document_sends_sent_by_user_fk.sql
@@ -0,0 +1,23 @@
+-- Audit-final v2 follow-up: document_sends.sent_by_user_id was a free-text
+-- column with no FK. If a user is hard-deleted (rare; we soft-delete), an
+-- orphan id would remain with no ON DELETE handling. Add the FK with
+-- SET NULL semantics so the audit row keeps recipient + timestamp + body
+-- even when the originating user is removed.
+
+-- Drop the NOT NULL so SET NULL is legal.
+ALTER TABLE document_sends
+ ALTER COLUMN sent_by_user_id DROP NOT NULL;
+
+-- Defensive: if any historical rows have sent_by_user_id values that don't
+-- match an existing user (dev-only), null them out so the FK can attach.
+UPDATE document_sends
+SET sent_by_user_id = NULL
+WHERE sent_by_user_id IS NOT NULL
+ AND sent_by_user_id NOT IN (SELECT id FROM "user");
+
+ALTER TABLE document_sends
+ ADD CONSTRAINT document_sends_sent_by_user_id_user_id_fk
+ FOREIGN KEY (sent_by_user_id) REFERENCES "user"(id) ON DELETE SET NULL;
+
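+-- idx_ds_sent_by is also created by 0037_missing_fk_indexes.sql; the
+-- IF NOT EXISTS guard below makes the duplicate a harmless no-op.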
+CREATE INDEX IF NOT EXISTS idx_ds_sent_by
+ ON document_sends(sent_by_user_id);
diff --git a/src/lib/db/migrations/meta/_journal.json b/src/lib/db/migrations/meta/_journal.json
index 0f5e39a..ddcf45d 100644
--- a/src/lib/db/migrations/meta/_journal.json
+++ b/src/lib/db/migrations/meta/_journal.json
@@ -260,6 +260,20 @@
"when": 1778100000000,
"tag": "0036_polymorphic_check_constraints",
"breakpoints": true
+ },
+ {
+ "idx": 37,
+ "version": "7",
+ "when": 1778150000000,
+ "tag": "0037_missing_fk_indexes",
+ "breakpoints": true
+ },
+ {
+ "idx": 38,
+ "version": "7",
+ "when": 1778200000000,
+ "tag": "0038_document_sends_sent_by_user_fk",
+ "breakpoints": true
}
]
}
diff --git a/src/lib/db/schema/brochures.ts b/src/lib/db/schema/brochures.ts
index abb3f26..0a95b42 100644
--- a/src/lib/db/schema/brochures.ts
+++ b/src/lib/db/schema/brochures.ts
@@ -13,6 +13,7 @@ import { ports } from './ports';
import { clients } from './clients';
import { interests } from './interests';
import { berths } from './berths';
+import { user } from './users';
/**
* Port-wide brochures (Phase 7 — see plan §3.3 / §4.8).
@@ -123,7 +124,12 @@ export const documentSends = pgTable(
}),
/** Exact body used (after merge-field expansion + sanitization). */
bodyMarkdown: text('body_markdown'),
- sentByUserId: text('sent_by_user_id').notNull(),
+ /**
+ * better-auth user id of the sender. SET NULL on user delete so the
+ * audit row keeps `recipientEmail` + timestamp + body for compliance
+ * even when the originating user is removed from the system.
+ */
+ sentByUserId: text('sent_by_user_id').references(() => user.id, { onDelete: 'set null' }),
fromAddress: text('from_address').notNull(),
sentAt: timestamp('sent_at', { withTimezone: true }).notNull().defaultNow(),
/** SMTP provider message-id for deliverability tracking. */
@@ -143,6 +149,11 @@ export const documentSends = pgTable(
index('idx_ds_interest').on(t.interestId, t.sentAt),
index('idx_ds_berth').on(t.berthId, t.sentAt),
index('idx_ds_port').on(t.portId, t.sentAt),
+ // Reverse-lookups: "what sends used this brochure / version" and
+ // FK-RESTRICT scans on brochure delete.
+ index('idx_ds_brochure').on(t.brochureId),
+ index('idx_ds_brochure_version').on(t.brochureVersionId),
+ index('idx_ds_sent_by').on(t.sentByUserId),
],
);
diff --git a/src/lib/db/schema/documents.ts b/src/lib/db/schema/documents.ts
index b8b18fc..833f962 100644
--- a/src/lib/db/schema/documents.ts
+++ b/src/lib/db/schema/documents.ts
@@ -80,6 +80,11 @@ export const documents = pgTable(
index('idx_docs_reservation').on(table.reservationId),
index('idx_docs_type').on(table.portId, table.documentType),
index('idx_docs_status_port').on(table.portId, table.status),
+ // Cover the file FKs Postgres doesn't auto-index. Without these,
+ // deleting (or RESTRICT-checking) a referenced files row scans
+ // the documents table fully.
+ index('idx_docs_file_id').on(table.fileId),
+ index('idx_docs_signed_file_id').on(table.signedFileId),
],
);
@@ -122,6 +127,8 @@ export const documentEvents = pgTable(
},
(table) => [
index('idx_de_doc').on(table.documentId),
+ // Reverse-lookup signer→events without scanning the events table.
+ index('idx_de_signer').on(table.signerId),
uniqueIndex('idx_de_dedup')
.on(table.documentId, table.signatureHash)
.where(sql`${table.signatureHash} IS NOT NULL`),
@@ -161,6 +168,7 @@ export const documentTemplates = pgTable(
(table) => [
index('idx_dt_port').on(table.portId),
index('idx_dt_type').on(table.portId, table.templateType),
+ index('idx_dt_source_file').on(table.sourceFileId),
],
);
@@ -221,7 +229,11 @@ export const formSubmissions = pgTable(
submittedAt: timestamp('submitted_at', { withTimezone: true }),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
},
- (table) => [uniqueIndex('idx_fs_token').on(table.token)],
+ (table) => [
+ uniqueIndex('idx_fs_token').on(table.token),
+ index('idx_fs_template').on(table.formTemplateId),
+ index('idx_fs_client').on(table.clientId),
+ ],
);
export type File = typeof files.$inferSelect;
diff --git a/src/lib/db/schema/reservations.ts b/src/lib/db/schema/reservations.ts
index 4be8296..7dce185 100644
--- a/src/lib/db/schema/reservations.ts
+++ b/src/lib/db/schema/reservations.ts
@@ -41,6 +41,11 @@ export const berthReservations = pgTable(
index('idx_br_client').on(table.clientId),
index('idx_br_yacht').on(table.yachtId),
index('idx_br_port').on(table.portId),
+ // Cover the FKs Postgres doesn't auto-index. Without these, deleting
+ // (or restrict-checking) the parent interest / contract file row
+ // requires a full scan of berth_reservations.
+ index('idx_br_interest').on(table.interestId),
+ index('idx_br_contract_file').on(table.contractFileId),
uniqueIndex('idx_br_active')
.on(table.berthId)
.where(sql`${table.status} = 'active'`),
diff --git a/src/lib/logger.ts b/src/lib/logger.ts
index 24f232b..8acc5fa 100644
--- a/src/lib/logger.ts
+++ b/src/lib/logger.ts
@@ -15,6 +15,31 @@ export const logger = pino({
'*.secret',
'*.accessKey',
'*.secretKey',
+ // Encrypted credential blobs can surface unintentionally in storage /
+ // SMTP config logs; redact them defensively even though they're
+ // already AES-encrypted at rest.
+ '*.secretKeyEncrypted',
+ '*.smtpPassEncrypted',
+ '*.imapPassEncrypted',
+ '*.proxyHmacSecretEncrypted',
+ // HTTP authorization headers (Bearer tokens, Basic creds) leak via
+ // err.config.headers on http-client error logs.
+ '*.headers.authorization',
+ '*.headers.Authorization',
+ '*.headers["x-documenso-secret"]',
+ '*.config.headers.Authorization',
+ '*.config.headers.authorization',
+ // Cookie headers can carry session tokens.
+ '*.headers.cookie',
+ '*.headers.Cookie',
+ // Two-level nesting for things like `req.headers.authorization` or
+ // `cfg.s3.secretKeyEncrypted`.
+ '*.*.password',
+ '*.*.token',
+ '*.*.secret',
+ '*.*.secretKeyEncrypted',
+ '*.*.headers.authorization',
+ '*.*.headers.Authorization',
],
censor: '[REDACTED]',
},
diff --git a/src/lib/pdf/fill-eoi-form.ts b/src/lib/pdf/fill-eoi-form.ts
index 213c21c..cc153a6 100644
--- a/src/lib/pdf/fill-eoi-form.ts
+++ b/src/lib/pdf/fill-eoi-form.ts
@@ -4,6 +4,7 @@ import path from 'node:path';
import { PDFDocument } from 'pdf-lib';
import type { EoiContext } from '@/lib/services/eoi-context';
+import { logger } from '@/lib/logger';
/**
* Source PDF for the in-app EOI pathway. Must contain AcroForm fields whose
@@ -48,6 +49,28 @@ function setText(form: ReturnType, name: string, value:
}
}
+/**
+ * Special-cased setter for the multi-berth `Berth Range` field. When the
+ * caller has a non-empty range and the AcroForm field is missing, we log
+ * a warning so the deployment gap is observable (the in-app pathway is
+ * intentionally tolerant of older PDF templates, but ops needs to know
+ * when ranges are silently dropped — otherwise a customer's multi-berth
+ * EOI ships with only the primary mooring visible).
+ */
+function setBerthRange(form: ReturnType<PDFDocument['getForm']>, value: string): void {
+ try {
+ form.getTextField('Berth Range').setText(value);
+ } catch {
+ if (value && value.trim().length > 0) {
+ logger.warn(
+ { berthRange: value },
+ 'EOI in-app PDF template is missing the "Berth Range" AcroForm field — ' +
+ 'multi-berth bundle range string was dropped. Update the source template.',
+ );
+ }
+ }
+}
+
function setCheckbox(
form: ReturnType,
name: string,
@@ -88,9 +111,12 @@ export async function fillEoiFormFields(
setText(form, 'Draft', context.yacht?.draftFt ?? '');
setText(form, 'Berth Number', context.berth?.mooringNumber ?? '');
// Multi-berth EOI: compact range string from the interest's EOI bundle.
- // Falls back silently when the AcroForm field doesn't exist (older
- // template revisions without the field still fill cleanly).
- setText(form, 'Berth Range', context.eoiBerthRange);
+ // The AcroForm field may be absent on older template revisions; when
+ // the context has a non-empty range string but the field is missing,
+ // we surface a structured warning so the deployment gap is observable
+ // (the CRM dataset has multi-berth bundles, but the live PDF template
+ // needs the field added before they render correctly).
+ setBerthRange(form, context.eoiBerthRange);
setCheckbox(form, 'Purchase', true);
setCheckbox(form, 'Lease_10', false);
diff --git a/src/lib/queue/workers/ai.ts b/src/lib/queue/workers/ai.ts
index 4cf0184..b4b8973 100644
--- a/src/lib/queue/workers/ai.ts
+++ b/src/lib/queue/workers/ai.ts
@@ -9,6 +9,40 @@ import { QUEUE_CONFIGS } from '@/lib/queue';
const MAX_OUTPUT_BYTES = 10 * 1024; // 10 KB
const OPENAI_TIMEOUT_MS = 30_000; // 30 s
+interface RecordAiUsageArgs {
+ portId: string;
+ userId: string;
+ feature: string;
+ provider: 'openai' | 'claude' | 'tesseract';
+ model: string;
+ inputTokens: number;
+ outputTokens: number;
+ totalTokens: number;
+ requestId: string | null;
+}
+
+/**
+ * Insert one ai_usage_ledger row per provider call. Best-effort — the
+ * draft generation is the user-facing artefact, the ledger is
+ * observability. Imports are lazy so this module loads cleanly inside
+ * the worker bundle without dragging the DB layer in at import time.
+ */
+async function recordAiUsage(args: RecordAiUsageArgs): Promise<void> {
+ const { db } = await import('@/lib/db');
+ const { aiUsageLedger } = await import('@/lib/db/schema/ai-usage');
+ await db.insert(aiUsageLedger).values({
+ portId: args.portId,
+ userId: args.userId,
+ feature: args.feature,
+ provider: args.provider,
+ model: args.model,
+ inputTokens: args.inputTokens,
+ outputTokens: args.outputTokens,
+ totalTokens: args.totalTokens,
+ requestId: args.requestId,
+ });
+}
+
interface GenerateEmailDraftPayload {
interestId: string;
clientId: string;
@@ -150,7 +184,13 @@ async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise;
+ usage?: {
+ prompt_tokens?: number;
+ completion_tokens?: number;
+ total_tokens?: number;
+ };
};
const content = data.choices[0]?.message?.content ?? '{}';
@@ -160,6 +200,24 @@ async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise {
+ logger.warn({ err, interestId }, 'Failed to record AI usage ledger row');
+ });
+
const parsed = JSON.parse(content) as { subject?: string; body?: string };
subject = parsed.subject ?? `Follow-up: ${client.fullName}`;
body = parsed.body ?? '';
diff --git a/src/lib/services/documenso-client.ts b/src/lib/services/documenso-client.ts
index 5cc2a85..0078741 100644
--- a/src/lib/services/documenso-client.ts
+++ b/src/lib/services/documenso-client.ts
@@ -382,18 +382,38 @@ export async function placeFields(
pageWidth: Math.round((f.pageWidth / 100) * dims.width),
pageHeight: Math.round((f.pageHeight / 100) * dims.height),
};
- const res = await fetch(`${baseUrl}/api/v1/documents/${docId}/fields`, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${apiKey}`,
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(body),
- });
- if (!res.ok) {
- const err = await res.text();
- logger.error({ docId, status: res.status, err, portId }, 'Documenso v1 placeField error');
- throw new Error(`Documenso v1 placeField error: ${res.status}`);
+ // Retry transient failures so one flaky 5xx mid-loop doesn't leave
+ // the document with a partial field set. Up to 3 attempts, backing off
+ // 250 ms then 500 ms between them; 4xx responses (validation errors)
+ // fail fast.
+ let lastError: { status: number; body: string } | null = null;
+ for (let attempt = 0; attempt < 3; attempt += 1) {
+ const res = await fetch(`${baseUrl}/api/v1/documents/${docId}/fields`, {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${apiKey}`,
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(body),
+ });
+ if (res.ok) {
+ lastError = null;
+ break;
+ }
+ const errBody = await res.text().catch(() => '');
+ lastError = { status: res.status, body: errBody };
+ // Don't retry on 4xx — that's a validation error, won't change.
+ if (res.status >= 400 && res.status < 500) break;
+ // Backoff: 250ms, 500ms (skipped on the 3rd iteration because we exit).
+ if (attempt < 2) {
+ await new Promise((r) => setTimeout(r, 250 * Math.pow(2, attempt)));
+ }
+ }
+ if (lastError) {
+ logger.error(
+ { docId, status: lastError.status, err: lastError.body, portId },
+ 'Documenso v1 placeField error',
+ );
+ throw new Error(`Documenso v1 placeField error: ${lastError.status}`);
}
}
}
diff --git a/src/lib/storage/filesystem.ts b/src/lib/storage/filesystem.ts
index e349fb6..6fa7e0b 100644
--- a/src/lib/storage/filesystem.ts
+++ b/src/lib/storage/filesystem.ts
@@ -362,7 +362,21 @@ function resolveHmacSecret(encryptedSecret: string | null): string {
// Dev fallback: derive a stable per-process secret so the filesystem
// backend works without explicit configuration during local development.
const seed = process.env.BETTER_AUTH_SECRET ?? env.BETTER_AUTH_SECRET ?? 'storage-default';
- return createHash('sha256').update(`storage-proxy:${seed}`).digest('hex');
+ const derived = createHash('sha256').update(`storage-proxy:${seed}`).digest('hex');
+ // Warn once at boot so a mismatch is observable: two processes started
+ // with different `BETTER_AUTH_SECRET` values derive different secrets,
+ // so tokens minted by one won't validate on the other, which surfaces
+ // as random 401s on file downloads in dev.
+ logger.warn(
+ {
+ hint:
+ 'Storage proxy HMAC derived from BETTER_AUTH_SECRET. ' +
+ 'Multi-process dev setups must share the same secret value.',
+ secretFingerprint: derived.slice(0, 8),
+ },
+ 'FilesystemBackend: using DEV HMAC fallback (no storage_proxy_hmac_secret_encrypted set)',
+ );
+ return derived;
}
async function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
diff --git a/src/lib/storage/s3.ts b/src/lib/storage/s3.ts
index 64449bf..9ad1e3c 100644
--- a/src/lib/storage/s3.ts
+++ b/src/lib/storage/s3.ts
@@ -107,6 +107,37 @@ export class S3Backend implements StorageBackend {
secretKey: resolved.secretKey,
region: resolved.region,
});
+ // Verify the bucket exists at boot so a typo / missing-bucket admin
+ // error surfaces with a clear message instead of as a vague Minio
+ // error inside the first user-facing request that touches storage.
+ // If the bucket is missing and MINIO_AUTO_CREATE_BUCKET=true we create
+ // it and log; otherwise we throw so boot fails fast and the
+ // deployment-time misconfig is loud.
+ try {
+ const exists = await client.bucketExists(resolved.bucket);
+ if (!exists) {
+ if (process.env.MINIO_AUTO_CREATE_BUCKET === 'true') {
+ await client.makeBucket(resolved.bucket, resolved.region);
+ logger.info(
+ { bucket: resolved.bucket, endpoint: resolved.endpoint },
+ 'S3 bucket auto-created (MINIO_AUTO_CREATE_BUCKET=true)',
+ );
+ } else {
+ throw new Error(
+ `S3 bucket "${resolved.bucket}" does not exist on ${resolved.endpoint}. ` +
+ `Create it manually or set MINIO_AUTO_CREATE_BUCKET=true.`,
+ );
+ }
+ }
+ } catch (err) {
+ if (err instanceof Error && err.message.includes('does not exist')) throw err;
+ // Connection / auth errors get re-thrown with extra context.
+ logger.error(
+ { err, bucket: resolved.bucket, endpoint: resolved.endpoint },
+ 'S3 bucket existence check failed at backend boot',
+ );
+ throw err;
+ }
return new S3Backend(client, resolved.bucket);
}