diff --git a/src/app/api/v1/clients/bulk/route.ts b/src/app/api/v1/clients/bulk/route.ts
index 1c635c9..b32d3a1 100644
--- a/src/app/api/v1/clients/bulk/route.ts
+++ b/src/app/api/v1/clients/bulk/route.ts
@@ -11,8 +11,15 @@ import { setClientTags } from '@/lib/services/clients.service';
import {
getClientArchiveDossier,
HIGH_STAKES_STAGES,
+ type ClientArchiveDossier,
} from '@/lib/services/client-archive-dossier.service';
-import { archiveClientWithDecisions } from '@/lib/services/client-archive.service';
+import {
+ archiveClientWithDecisions,
+ type ArchiveResult,
+} from '@/lib/services/client-archive.service';
+import { notifyNextInLine } from '@/lib/services/next-in-line-notify.service';
+import { getQueue } from '@/lib/queue';
+import { logger } from '@/lib/logger';
import { errorResponse } from '@/lib/errors';
import type { PipelineStage } from '@/lib/constants';
@@ -65,6 +72,15 @@ export const POST = withAuth(async (req, ctx) => {
const reasonsByClientId = body.action === 'archive' ? (body.reasonsByClientId ?? {}) : {};
+ // Collect per-archive side-effects so we can fan out Documenso voids
+ // + next-in-line notifications AFTER the bulk loop completes (mirrors
+ // the single-client route's post-commit behaviour). Without this the
+ // bulk path silently dropped both side-effect streams (audit R2-C1).
+ const archiveSideEffects: Array<{
+ dossier: ClientArchiveDossier;
+ result: ArchiveResult;
+ }> = [];
+
const { results, summary } = await runBulk(body.ids, async (id) => {
if (body.action === 'archive') {
// Bulk archive uses the smart-archive backend with sensible
@@ -87,7 +103,7 @@ export const POST = withAuth(async (req, ctx) => {
(d) => d.status === 'completed' || d.status === 'signed',
);
const reason = perClientReason ?? 'Bulk archive (low-stakes auto-mode)';
- await archiveClientWithDecisions({
+ const result = await archiveClientWithDecisions({
dossier,
decisions: {
reason,
@@ -117,6 +133,7 @@ export const POST = withAuth(async (req, ctx) => {
},
meta,
});
+ archiveSideEffects.push({ dossier, result });
return;
}
const client = await db.query.clients.findFirst({
@@ -133,6 +150,56 @@ export const POST = withAuth(async (req, ctx) => {
await setClientTags(id, ctx.portId, Array.from(current), meta);
});
+ // Post-commit side-effects, identical pattern to the single-client
+ // route at /api/v1/clients/[id]/archive. Documenso voids → BullMQ
+ // documents queue; next-in-line notifications fire-and-forget per
+ // released berth.
+ if (archiveSideEffects.length > 0) {
+ const queue = getQueue('documents');
+ for (const { dossier, result } of archiveSideEffects) {
+ for (const c of result.externalCleanups) {
+ if (c.kind === 'documenso_void') {
+ await queue
+ .add('documenso-void', {
+ documentId: c.documentId,
+ documensoId: c.documensoId,
+ portId: ctx.portId,
+ })
+ .catch((err) =>
+ logger.error(
+ { err, documentId: c.documentId, clientId: result.clientId },
+ 'Bulk archive: failed to enqueue Documenso void',
+ ),
+ );
+ }
+ }
+
+ for (const released of result.releasedBerths) {
+ if (released.nextInLineInterestIds.length === 0) continue;
+ const otherInterests =
+ dossier.berths
+ .find((b) => b.berthId === released.berthId)
+ ?.otherInterests.map((o) => ({
+ interestId: o.interestId,
+ clientName: o.clientName,
+ pipelineStage: o.pipelineStage,
+ })) ?? [];
+ void notifyNextInLine({
+ portId: ctx.portId,
+ berthId: released.berthId,
+ mooringNumber: released.mooringNumber,
+ archivedClientName: dossier.client.fullName,
+ nextInLineInterests: otherInterests,
+ }).catch((err) =>
+ logger.error(
+ { err, berthId: released.berthId, clientId: result.clientId },
+ 'Bulk archive: failed to fire next-in-line notification',
+ ),
+ );
+ }
+ }
+ }
+
return NextResponse.json({ data: { results, summary } });
});
diff --git a/src/components/clients/client-detail-header.tsx b/src/components/clients/client-detail-header.tsx
index 7b44e98..76bb36c 100644
--- a/src/components/clients/client-detail-header.tsx
+++ b/src/components/clients/client-detail-header.tsx
@@ -178,8 +178,8 @@ export function ClientDetailHeader({ client }: ClientDetailHeaderProps) {
title={isArchived ? 'Restore client' : 'Archive client'}
className={cn(
'shrink-0 rounded-md p-1.5 text-muted-foreground/70 transition-colors',
- 'hover:bg-foreground/5 hover:text-foreground',
- isArchived ? 'hover:text-foreground' : 'hover:text-destructive',
+ 'hover:bg-foreground/5',
+ isArchived ? 'hover:text-emerald-600' : 'hover:text-destructive',
)}
>
{isArchived ? : }
diff --git a/src/server.ts b/src/server.ts
index 31298cf..48955b2 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -74,12 +74,28 @@ async function main(): Promise<void> {
const { notificationsWorker } = await import('@/lib/queue/workers/notifications');
const { importWorker } = await import('@/lib/queue/workers/import');
const { exportWorker } = await import('@/lib/queue/workers/export');
+ const { aiWorker } = await import('@/lib/queue/workers/ai');
+ const { bulkWorker } = await import('@/lib/queue/workers/bulk');
+ const { maintenanceWorker } = await import('@/lib/queue/workers/maintenance');
+ const { reportsWorker } = await import('@/lib/queue/workers/reports');
+ const { webhooksWorker } = await import('@/lib/queue/workers/webhooks');
await registerRecurringJobs();
logger.info('BullMQ recurring jobs registered (dev mode)');
// Keep a reference so workers aren't GC'd
- void [emailWorker, documentsWorker, notificationsWorker, importWorker, exportWorker];
+ void [
+ emailWorker,
+ documentsWorker,
+ notificationsWorker,
+ importWorker,
+ exportWorker,
+ aiWorker,
+ bulkWorker,
+ maintenanceWorker,
+ reportsWorker,
+ webhooksWorker,
+ ];
}
httpServer.listen(env.PORT, () => {
diff --git a/src/worker.ts b/src/worker.ts
index 60e330c..48cab19 100644
--- a/src/worker.ts
+++ b/src/worker.ts
@@ -15,9 +15,25 @@ import { documentsWorker } from '@/lib/queue/workers/documents';
import { notificationsWorker } from '@/lib/queue/workers/notifications';
import { importWorker } from '@/lib/queue/workers/import';
import { exportWorker } from '@/lib/queue/workers/export';
+import { aiWorker } from '@/lib/queue/workers/ai';
+import { bulkWorker } from '@/lib/queue/workers/bulk';
+import { maintenanceWorker } from '@/lib/queue/workers/maintenance';
+import { reportsWorker } from '@/lib/queue/workers/reports';
+import { webhooksWorker } from '@/lib/queue/workers/webhooks';
// Keep references so workers aren't GC'd
-const workers = [emailWorker, documentsWorker, notificationsWorker, importWorker, exportWorker];
+const workers = [
+ emailWorker,
+ documentsWorker,
+ notificationsWorker,
+ importWorker,
+ exportWorker,
+ aiWorker,
+ bulkWorker,
+ maintenanceWorker,
+ reportsWorker,
+ webhooksWorker,
+];
 async function main(): Promise<void> {
logger.info({ workerCount: workers.length }, 'BullMQ workers started');