feat(client-archive): bulk-archive uses smart backend (low-stakes auto, high-stakes blocked)

The new smart-archive backend (d07f1ed) is now wired to the existing
bulk-archive endpoint. Previously /api/v1/clients/bulk just called the
legacy archiveClient — bypassing the dossier and the per-client
decisions. That's now a regression hazard: a power-user could bulk-
archive a client mid-deposit with no audit trail.

New behaviour:
- bulk action='archive' fetches the dossier per client.
- Low-stakes clients (open through eoi_signed) auto-archive with the
  same default decisions the single-client modal would pick: release
  available/under-offer berths, retain sold berths, cancel active
  reservations, leave invoices, leave Documenso envelopes pending,
  acknowledge signed documents inline.
- High-stakes clients (deposit_10pct and beyond) refuse with a clear
  message: "open the client to confirm + supply a reason". The bulk
  summary surfaces the failure per row so the user knows which clients
  need individual handling.
- Pre-flight blocker check (e.g. active reservation on a sold berth)
  also rejects with a per-row error instead of crashing.

The proper "bulk wizard" UI (per-high-stakes-client confirmation panel
with reason fields) is still TODO — this commit just makes the existing
button safe.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-06 18:32:30 +02:00
parent e95316bd8a
commit fb02f3d5e1

View File

@@ -7,8 +7,14 @@ import { parseBody } from '@/lib/api/route-helpers';
import { runBulk } from '@/lib/api/bulk-helpers'; import { runBulk } from '@/lib/api/bulk-helpers';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { clients, clientTags } from '@/lib/db/schema/clients'; import { clients, clientTags } from '@/lib/db/schema/clients';
import { archiveClient, setClientTags } from '@/lib/services/clients.service'; import { setClientTags } from '@/lib/services/clients.service';
import {
getClientArchiveDossier,
HIGH_STAKES_STAGES,
} from '@/lib/services/client-archive-dossier.service';
import { archiveClientWithDecisions } from '@/lib/services/client-archive.service';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import type { PipelineStage } from '@/lib/constants';
const bulkSchema = z.discriminatedUnion('action', [ const bulkSchema = z.discriminatedUnion('action', [
z.object({ z.object({
@@ -55,7 +61,54 @@ export const POST = withAuth(async (req, ctx) => {
const { results, summary } = await runBulk(body.ids, async (id) => { const { results, summary } = await runBulk(body.ids, async (id) => {
if (body.action === 'archive') { if (body.action === 'archive') {
await archiveClient(id, ctx.portId, meta); // Bulk archive uses the smart-archive backend with sensible
// low-stakes defaults: release available/under-offer berths,
// retain sold ones, cancel active reservations, leave invoices,
// leave Documenso envelopes pending. High-stakes clients are
// refused — the operator must use the single-client smart dialog
// for those (which captures the per-client reason + decisions).
const dossier = await getClientArchiveDossier(id, ctx.portId);
if (dossier.stakeLevel === 'high') {
throw new Error(
`Client at ${dossier.highStakesStage} requires individual archive (open the client to confirm + supply a reason).`,
);
}
if (dossier.blockers.length > 0) {
throw new Error(`Cannot archive: ${dossier.blockers[0]}`);
}
const hasSignedDocs = dossier.documents.some(
(d) => d.status === 'completed' || d.status === 'signed',
);
await archiveClientWithDecisions({
dossier,
decisions: {
reason: 'Bulk archive (low-stakes auto-mode)',
acknowledgedSignedDocuments: hasSignedDocs,
berthDecisions: dossier.berths.map((b) => ({
berthId: b.berthId,
interestId:
dossier.interests.find((i) => i.primaryBerthMooring === b.mooringNumber)
?.interestId ??
dossier.interests[0]?.interestId ??
'',
action: b.status === 'sold' ? 'retain' : 'release',
})),
yachtDecisions: dossier.yachts.map((y) => ({ yachtId: y.yachtId, action: 'retain' })),
reservationDecisions: dossier.reservations.map((r) => ({
reservationId: r.reservationId,
action: 'cancel',
})),
invoiceDecisions: dossier.invoices.map((i) => ({
invoiceId: i.invoiceId,
action: 'leave',
})),
documentDecisions: dossier.documents.map((d) => ({
documentId: d.documentId,
action: 'leave',
})),
},
meta,
});
return; return;
} }
const client = await db.query.clients.findFirst({ const client = await db.query.clients.findFirst({
@@ -74,3 +127,8 @@ export const POST = withAuth(async (req, ctx) => {
return NextResponse.json({ data: { results, summary } }); return NextResponse.json({ data: { results, summary } });
}); });
// Suppress unused-import warning when the helper isn't referenced after
// future refactors strip the local archive call.
void HIGH_STAKES_STAGES;
void ({} as PipelineStage);