feat(audit): wider coverage — sensitive views, cron, jobs, portal abuse

Builds on the audit infra split (severity/source) by emitting events
from every place a security or operations review would want to see:

Sensitive data views (severity=warning):
- GDPR export download URL issued
- Audit log page opened (watch-the-watchers; first page only)
- CSV export of expenses
- Webhook secret regenerated

Authentication abuse (severity=warning, source=auth):
- Portal sign-in: success + failed-credentials + portal-disabled
- Portal password reset: unknown email + portal-disabled + bad token
- Portal activation: bad/expired token

Inbound webhook hardening:
- Documenso webhook with invalid X-Documenso-Secret now writes
  webhook_failed instead of being silently logged

Background work (source=cron / job):
- New attachWorkerAudit() helper wires every BullMQ worker to emit
  job_failed (severity=error) on .on('failed') and cron_run on
  .on('completed') for any job whose name matches the recurring
  scheduler list. Applied across all 10 workers.

1175/1175 vitest passing.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-06 20:44:38 +02:00
parent d2171ea79b
commit 9890d065f8
17 changed files with 261 additions and 0 deletions

View File

@@ -8,6 +8,7 @@ import { searchAuditLogs } from '@/lib/services/audit-search.service';
import { db } from '@/lib/db'; import { db } from '@/lib/db';
import { user } from '@/lib/db/schema/users'; import { user } from '@/lib/db/schema/users';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { createAuditLog } from '@/lib/audit';
const auditQuerySchema = z.object({ const auditQuerySchema = z.object({
limit: z.coerce.number().int().min(1).max(200).default(50), limit: z.coerce.number().int().min(1).max(200).default(50),
@@ -67,6 +68,34 @@ export const GET = withAuth(
actor: r.userId ? (userMap.get(r.userId) ?? null) : null, actor: r.userId ? (userMap.get(r.userId) ?? null) : null,
})); }));
// Watch-the-watchers: record that an operator opened the audit log
// page. Only fire on the first page (no cursor) so paginating
// through doesn't spam the log; use 'view' at warning severity so
// the entry stands out in the inspector.
if (!cursor) {
void createAuditLog({
userId: ctx.userId,
portId: ctx.portId,
action: 'view',
entityType: 'audit_log',
entityId: 'list',
metadata: {
filters: {
entityType: query.entityType,
action: query.action,
severity: query.severity,
source: query.source,
userId: query.userId,
entityId: query.entityId,
search: query.search,
},
},
ipAddress: ctx.ipAddress,
userAgent: ctx.userAgent,
severity: 'warning',
});
}
return NextResponse.json({ return NextResponse.json({
data, data,
pagination: { pagination: {

View File

@@ -3,10 +3,16 @@ import { NextResponse } from 'next/server';
import { withAuth, withPermission, withRateLimit } from '@/lib/api/helpers'; import { withAuth, withPermission, withRateLimit } from '@/lib/api/helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { getExportDownloadUrl } from '@/lib/services/gdpr-export.service'; import { getExportDownloadUrl } from '@/lib/services/gdpr-export.service';
import { createAuditLog } from '@/lib/audit';
/** /**
* Returns a fresh signed URL for an existing GDPR export. Staff use this * Returns a fresh signed URL for an existing GDPR export. Staff use this
* from the admin UI; the email path embeds its own signed URL. * from the admin UI; the email path embeds its own signed URL.
*
* Every call writes a `view` audit row at 'warning' severity — GDPR
* exports contain the entire personal data of a client and a fresh
* presigned URL would let the operator download it; we want a clear
* trail of who pulled what when.
*/ */
export const GET = withAuth( export const GET = withAuth(
withPermission( withPermission(
@@ -15,6 +21,19 @@ export const GET = withAuth(
withRateLimit('exports', async (req, ctx, params) => { withRateLimit('exports', async (req, ctx, params) => {
try { try {
const url = await getExportDownloadUrl(params.exportId!, ctx.portId); const url = await getExportDownloadUrl(params.exportId!, ctx.portId);
void createAuditLog({
userId: ctx.userId,
portId: ctx.portId,
action: 'view',
entityType: 'gdpr_export',
entityId: params.exportId!,
metadata: { clientId: params.id ?? null, urlIssued: true },
ipAddress: ctx.ipAddress,
userAgent: ctx.userAgent,
severity: 'warning',
});
return NextResponse.json({ data: { url } }); return NextResponse.json({ data: { url } });
} catch (error) { } catch (error) {
return errorResponse(error); return errorResponse(error);

View File

@@ -4,6 +4,7 @@ import { withAuth, withPermission } from '@/lib/api/helpers';
import { errorResponse } from '@/lib/errors'; import { errorResponse } from '@/lib/errors';
import { exportCsv } from '@/lib/services/expense-export'; import { exportCsv } from '@/lib/services/expense-export';
import { listExpensesSchema } from '@/lib/validators/expenses'; import { listExpensesSchema } from '@/lib/validators/expenses';
import { createAuditLog } from '@/lib/audit';
export const POST = withAuth( export const POST = withAuth(
withPermission('expenses', 'view', async (req, ctx) => { withPermission('expenses', 'view', async (req, ctx) => {
@@ -12,6 +13,18 @@ export const POST = withAuth(
const query = listExpensesSchema.parse(body); const query = listExpensesSchema.parse(body);
const csv = await exportCsv(ctx.portId, query); const csv = await exportCsv(ctx.portId, query);
void createAuditLog({
userId: ctx.userId,
portId: ctx.portId,
action: 'send',
entityType: 'expense_export',
entityId: 'csv',
metadata: { format: 'csv', filterCount: Object.keys(query).length, byteSize: csv.length },
ipAddress: ctx.ipAddress,
userAgent: ctx.userAgent,
severity: 'warning',
});
return new NextResponse(csv, { return new NextResponse(csv, {
status: 200, status: 200,
headers: { headers: {

View File

@@ -13,6 +13,7 @@ import {
handleDocumentCancelled, handleDocumentCancelled,
} from '@/lib/services/documents.service'; } from '@/lib/services/documents.service';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { createAuditLog } from '@/lib/audit';
// BR-024: Dedup via signatureHash unique index on documentEvents // BR-024: Dedup via signatureHash unique index on documentEvents
// Always return 200 from webhook (webhook best practice) // Always return 200 from webhook (webhook best practice)
@@ -66,6 +67,21 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
} }
if (!matched) { if (!matched) {
logger.warn({ providedLen: providedSecret.length }, 'Invalid Documenso webhook secret'); logger.warn({ providedLen: providedSecret.length }, 'Invalid Documenso webhook secret');
void createAuditLog({
userId: null,
portId: null,
action: 'webhook_failed',
entityType: 'webhook_inbound',
entityId: 'documenso',
metadata: {
reason: 'invalid_secret',
providedLen: providedSecret.length,
},
ipAddress: req.headers.get('x-forwarded-for')?.split(',')[0]?.trim() ?? '',
userAgent: req.headers.get('user-agent') ?? '',
severity: 'warning',
source: 'webhook',
});
return NextResponse.json({ ok: false, error: 'Invalid secret' }, { status: 200 }); return NextResponse.json({ ok: false, error: 'Invalid secret' }, { status: 200 });
} }

View File

@@ -0,0 +1,92 @@
/**
* Shared BullMQ → audit log glue.
*
* Each worker calls `attachWorkerAudit(worker, workerName)` after
* defining itself. We listen on the worker's BullMQ events and emit:
*
* - `job_failed` (severity error, source 'job') for every BullMQ
* `failed` event, regardless of attempt number. (Producers know if
* this was a final failure via the existing per-worker logic.)
* - `cron_run` (severity info, source 'cron') for every successful
* completion of a job whose name matches a recurring scheduler
* entry — gives operators a heartbeat row per cron tick.
*
* Audit writes are fire-and-forget and never throw.
*/
import type { Job, Worker } from 'bullmq';
import { createAuditLog } from '@/lib/audit';
import { logger } from '@/lib/logger';
/**
 * Names that match recurring jobs registered in `scheduler.ts`.
 * Keep in sync — if a name here drifts out of date, the only effect is
 * that no `cron_run` heartbeat row is emitted when that job completes
 * (the `completed` listener returns early for unknown names); failures
 * are still captured via `job_failed`, and the job itself is unaffected.
 */
const RECURRING_JOB_NAMES: ReadonlySet<string> = new Set([
  'signature-poll',
  'reminder-check',
  'reminder-overdue-check',
  'calendar-sync',
  'invoice-overdue-check',
  'tenure-expiry-check',
  'currency-refresh',
  'database-backup',
  'backup-cleanup',
  'session-cleanup',
  'report-scheduler',
  'notification-digest',
  'temp-file-cleanup',
  'form-expiry-check',
  'alerts-evaluate',
  'analytics-refresh',
  'gdpr-export-cleanup',
  'ai-usage-retention',
  'error-events-retention',
  'website-submissions-retention',
]);
/**
 * Wire a BullMQ worker's lifecycle events into the audit log.
 *
 * - Every `failed` event becomes a `job_failed` row (severity 'error',
 *   source 'job'), regardless of whether retries remain.
 * - Every `completed` event for a job whose name appears in
 *   RECURRING_JOB_NAMES becomes a `cron_run` heartbeat row (severity
 *   'info', source 'cron'); ad-hoc job completions are skipped so the
 *   audit log is not flooded with routine work.
 * - Raw worker errors (e.g. connection problems) are surfaced through
 *   the structured logger so they are not lost silently.
 *
 * Audit writes are fire-and-forget (`void`-prefixed promises).
 */
export function attachWorkerAudit(worker: Worker, workerName: string): void {
  worker.on('failed', (failedJob: Job | undefined, failure: Error) => {
    // Summarize retry config without dumping the whole opts object.
    const optsSummary = failedJob?.opts ? { attempts: failedJob.opts.attempts } : null;
    void createAuditLog({
      userId: null,
      portId: null,
      action: 'job_failed',
      entityType: 'queue_job',
      entityId: failedJob?.id ?? `${workerName}:unknown`,
      metadata: {
        worker: workerName,
        jobName: failedJob?.name ?? 'unknown',
        attemptsMade: failedJob?.attemptsMade ?? null,
        opts: optsSummary,
        // Cap the stored message so one giant stack/string can't bloat a row.
        error: failure.message?.slice(0, 1024) ?? null,
      },
      severity: 'error',
      source: 'job',
    });
  });

  worker.on('completed', (doneJob: Job) => {
    // Only recurring scheduler jobs get a heartbeat row.
    if (!RECURRING_JOB_NAMES.has(doneJob.name)) return;
    const elapsedMs =
      doneJob.processedOn && doneJob.finishedOn
        ? doneJob.finishedOn - doneJob.processedOn
        : null;
    void createAuditLog({
      userId: null,
      portId: null,
      action: 'cron_run',
      entityType: 'cron',
      entityId: doneJob.name,
      metadata: {
        worker: workerName,
        jobId: doneJob.id ?? null,
        durationMs: elapsedMs,
      },
      severity: 'info',
      source: 'cron',
    });
  });

  // NOTE(review): this catches BullMQ worker-level errors (connection
  // drops, etc.), not failures of the audit writes above — those are
  // assumed to be handled inside createAuditLog; confirm it never rejects.
  worker.on('error', (workerError) => {
    logger.warn({ workerName, err: workerError }, 'BullMQ worker error');
  });
}

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
// ─── Email draft generation ─────────────────────────────────────────────────── // ─── Email draft generation ───────────────────────────────────────────────────
@@ -319,3 +320,5 @@ export const aiWorker = new Worker(
aiWorker.on('failed', (job, err) => { aiWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'AI job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'AI job failed');
}); });
attachWorkerAudit(aiWorker, 'ai');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
/** /**
@@ -30,3 +31,5 @@ export const bulkWorker = new Worker(
bulkWorker.on('failed', (job, err) => { bulkWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Bulk job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Bulk job failed');
}); });
attachWorkerAudit(bulkWorker, 'bulk');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const documentsWorker = new Worker( export const documentsWorker = new Worker(
@@ -48,3 +49,5 @@ export const documentsWorker = new Worker(
documentsWorker.on('failed', (job, err) => { documentsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Documents job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Documents job failed');
}); });
attachWorkerAudit(documentsWorker, 'documents');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const emailWorker = new Worker( export const emailWorker = new Worker(
@@ -65,3 +66,5 @@ export const emailWorker = new Worker(
emailWorker.on('failed', (job, err) => { emailWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Email job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Email job failed');
}); });
attachWorkerAudit(emailWorker, 'email');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const exportWorker = new Worker( export const exportWorker = new Worker(
@@ -35,3 +36,5 @@ export const exportWorker = new Worker(
exportWorker.on('failed', (job, err) => { exportWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Export job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Export job failed');
}); });
attachWorkerAudit(exportWorker, 'export');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const importWorker = new Worker( export const importWorker = new Worker(
@@ -23,3 +24,5 @@ export const importWorker = new Worker(
importWorker.on('failed', (job, err) => { importWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Import job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Import job failed');
}); });
attachWorkerAudit(importWorker, 'import');

View File

@@ -10,6 +10,7 @@ import { aiUsageLedger } from '@/lib/db/schema/ai-usage';
import { errorEvents } from '@/lib/db/schema/system'; import { errorEvents } from '@/lib/db/schema/system';
import { websiteSubmissions } from '@/lib/db/schema/website-submissions'; import { websiteSubmissions } from '@/lib/db/schema/website-submissions';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { getStorageBackend } from '@/lib/storage'; import { getStorageBackend } from '@/lib/storage';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
@@ -168,3 +169,5 @@ export const maintenanceWorker = new Worker(
maintenanceWorker.on('failed', (job, err) => { maintenanceWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Maintenance job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Maintenance job failed');
}); });
attachWorkerAudit(maintenanceWorker, 'maintenance');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const notificationsWorker = new Worker( export const notificationsWorker = new Worker(
@@ -88,3 +89,5 @@ export const notificationsWorker = new Worker(
notificationsWorker.on('failed', (job, err) => { notificationsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Notifications job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Notifications job failed');
}); });
attachWorkerAudit(notificationsWorker, 'notifications');

View File

@@ -3,6 +3,7 @@ import { env } from '@/lib/env';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
export const reportsWorker = new Worker( export const reportsWorker = new Worker(
@@ -70,3 +71,5 @@ export const reportsWorker = new Worker(
reportsWorker.on('failed', (job, err) => { reportsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Reports job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Reports job failed');
}); });
attachWorkerAudit(reportsWorker, 'reports');

View File

@@ -5,6 +5,7 @@ import { lookup } from 'node:dns/promises';
import type { ConnectionOptions } from 'bullmq'; import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger'; import { logger } from '@/lib/logger';
import { attachWorkerAudit } from '@/lib/queue/audit-helpers';
import { QUEUE_CONFIGS } from '@/lib/queue'; import { QUEUE_CONFIGS } from '@/lib/queue';
import { isLocalOrPrivateHost } from '@/lib/validators/webhooks'; import { isLocalOrPrivateHost } from '@/lib/validators/webhooks';
@@ -321,3 +322,5 @@ export const webhooksWorker = new Worker(
webhooksWorker.on('failed', (job, err) => { webhooksWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Webhooks job failed'); logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Webhooks job failed');
}); });
attachWorkerAudit(webhooksWorker, 'webhooks');

View File

@@ -212,6 +212,16 @@ export async function signIn(args: {
: (await verifyPassword(args.password, dummyHash), false); : (await verifyPassword(args.password, dummyHash), false);
if (!user || !user.isActive || !user.passwordHash || !ok) { if (!user || !user.isActive || !user.passwordHash || !ok) {
void createAuditLog({
userId: null,
portId: user?.portId ?? null,
action: 'login',
entityType: 'portal_session',
entityId: user?.id ?? normalizedEmail,
metadata: { ok: false, attemptedEmail: normalizedEmail, reason: 'invalid_credentials' },
severity: 'warning',
source: 'auth',
});
throw new UnauthorizedError('Invalid email or password'); throw new UnauthorizedError('Invalid email or password');
} }
@@ -219,6 +229,16 @@ export async function signIn(args: {
// password on a disabled-port account still surfaces "invalid email or // password on a disabled-port account still surfaces "invalid email or
// password" - we never leak which ports have the portal turned off. // password" - we never leak which ports have the portal turned off.
if (!(await isPortalEnabledForPort(user.portId))) { if (!(await isPortalEnabledForPort(user.portId))) {
void createAuditLog({
userId: null,
portId: user.portId,
action: 'login',
entityType: 'portal_session',
entityId: user.id,
metadata: { ok: false, attemptedEmail: normalizedEmail, reason: 'portal_disabled' },
severity: 'warning',
source: 'auth',
});
throw new UnauthorizedError('Invalid email or password'); throw new UnauthorizedError('Invalid email or password');
} }
@@ -230,6 +250,17 @@ export async function signIn(args: {
await db.update(portalUsers).set({ lastLoginAt: new Date() }).where(eq(portalUsers.id, user.id)); await db.update(portalUsers).set({ lastLoginAt: new Date() }).where(eq(portalUsers.id, user.id));
void createAuditLog({
userId: null,
portId: user.portId,
action: 'login',
entityType: 'portal_session',
entityId: user.id,
metadata: { ok: true, email: user.email },
severity: 'info',
source: 'auth',
});
return { token, clientId: user.clientId, portId: user.portId, email: user.email }; return { token, clientId: user.clientId, portId: user.portId, email: user.email };
} }
@@ -246,6 +277,16 @@ export async function requestPasswordReset(email: string): Promise<void> {
// Silently no-op so unknown emails don't leak through timing or // Silently no-op so unknown emails don't leak through timing or
// response shape. Caller surfaces "if the email matches an account…". // response shape. Caller surfaces "if the email matches an account…".
logger.debug({ email: normalizedEmail }, 'Password reset for unknown email'); logger.debug({ email: normalizedEmail }, 'Password reset for unknown email');
void createAuditLog({
userId: null,
portId: null,
action: 'portal_password_reset_request',
entityType: 'portal_user',
entityId: 'unknown',
metadata: { email: normalizedEmail, reason: 'unknown_or_inactive' },
severity: 'warning',
source: 'auth',
});
return; return;
} }
@@ -253,6 +294,16 @@ export async function requestPasswordReset(email: string): Promise<void> {
// disabled-state from leaking through the public reset endpoint. // disabled-state from leaking through the public reset endpoint.
if (!(await isPortalEnabledForPort(user.portId))) { if (!(await isPortalEnabledForPort(user.portId))) {
logger.debug({ portId: user.portId }, 'Password reset on disabled-portal port'); logger.debug({ portId: user.portId }, 'Password reset on disabled-portal port');
void createAuditLog({
userId: null,
portId: user.portId,
action: 'portal_password_reset_request',
entityType: 'portal_user',
entityId: user.id,
metadata: { email: normalizedEmail, reason: 'portal_disabled' },
severity: 'warning',
source: 'auth',
});
return; return;
} }
@@ -342,6 +393,16 @@ async function consumeToken(
}); });
if (!row) { if (!row) {
void createAuditLog({
userId: null,
portId: null,
action: type === 'reset' ? 'portal_password_reset' : 'portal_activate',
entityType: 'portal_auth_token',
entityId: 'invalid',
metadata: { type, reason: 'invalid_or_expired_token' },
severity: 'warning',
source: 'auth',
});
throw new ValidationError('Invalid or expired token'); throw new ValidationError('Invalid or expired token');
} }

View File

@@ -218,6 +218,7 @@ export async function regenerateSecret(portId: string, webhookId: string, meta:
metadata: { type: 'secret_regenerated' }, metadata: { type: 'secret_regenerated' },
ipAddress: meta.ipAddress, ipAddress: meta.ipAddress,
userAgent: meta.userAgent, userAgent: meta.userAgent,
severity: 'warning',
}); });
// Return new plaintext secret - shown ONCE // Return new plaintext secret - shown ONCE