Initial commit: Port Nimara CRM (Layers 0-4)

Full CRM rebuild with Next.js 15, TypeScript, Tailwind, Drizzle ORM,
PostgreSQL, Redis, BullMQ, MinIO, and Socket.io. Includes 461 source
files covering clients, berths, interests/pipeline, documents/EOI,
expenses/invoices, email, notifications, dashboard, admin, and
client portal. CI/CD via Gitea Actions with Docker builds.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 11:52:51 +01:00
commit 67d7e6e3d5
572 changed files with 86496 additions and 0 deletions

src/lib/queue/workers/ai.ts Normal file

@@ -0,0 +1,234 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// ─── Email draft generation ───────────────────────────────────────────────────
const MAX_OUTPUT_BYTES = 10 * 1024; // 10 KB
const OPENAI_TIMEOUT_MS = 30_000; // 30 s
interface GenerateEmailDraftPayload {
interestId: string;
clientId: string;
portId: string;
context: 'follow_up' | 'introduction' | 'stage_update' | 'general';
additionalInstructions?: string;
requestedBy: string;
}
interface DraftResult {
subject: string;
body: string;
generatedAt: string;
}
async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise<DraftResult> {
const { interestId, clientId, portId, context, additionalInstructions } = payload;
// Fetch data by IDs in the worker — never trust PII from the queue payload
const { db } = await import('@/lib/db');
const { interests, interestNotes } = await import('@/lib/db/schema/interests');
const { clients } = await import('@/lib/db/schema/clients');
const { berths } = await import('@/lib/db/schema/berths');
const { emailThreads } = await import('@/lib/db/schema/email');
const { and, eq, desc } = await import('drizzle-orm');
// Fetch interest, client, berth
const [interest, client] = await Promise.all([
db.query.interests.findFirst({
where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
}),
db.query.clients.findFirst({ where: eq(clients.id, clientId) }),
]);
if (!interest || !client) {
throw new Error('Interest or client not found');
}
let berthMooring: string | null = null;
if (interest.berthId) {
const berth = await db.query.berths.findFirst({
where: eq(berths.id, interest.berthId),
});
berthMooring = berth?.mooringNumber ?? null;
}
// Fetch last 5 notes
const recentNotes = await db
.select({ content: interestNotes.content, createdAt: interestNotes.createdAt })
.from(interestNotes)
.where(eq(interestNotes.interestId, interestId))
.orderBy(desc(interestNotes.createdAt))
.limit(5);
// Fetch last 5 email subjects (via threads linked to client)
const recentThreads = await db
.select({ subject: emailThreads.subject, lastMessageAt: emailThreads.lastMessageAt })
.from(emailThreads)
.where(and(eq(emailThreads.clientId, clientId), eq(emailThreads.portId, portId)))
.orderBy(desc(emailThreads.lastMessageAt))
.limit(5);
const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) {
// Fallback: template-based draft
return buildTemplateDraft({ clientName: client.fullName, context, berthMooring, pipelineStage: interest.pipelineStage });
}
// Build prompt
const contextDescriptions: Record<string, string> = {
follow_up: 'a friendly follow-up email',
introduction: 'an initial introduction email',
stage_update: `an email informing the client about their pipeline progression to stage "${interest.pipelineStage}"`,
general: 'a general communication email',
};
const prompt = [
`Write ${contextDescriptions[context] ?? 'an email'} to a marina berth client.`,
'',
`Client name: ${client.fullName}`,
client.companyName ? `Company: ${client.companyName}` : null,
client.yachtName ? `Yacht: ${client.yachtName}` : null,
berthMooring ? `Berth: ${berthMooring}` : 'Berth: not yet assigned',
`Pipeline stage: ${interest.pipelineStage}`,
'',
recentNotes.length > 0
? `Recent notes:\n${recentNotes.map((n) => `- ${n.content.slice(0, 200)}`).join('\n')}`
: null,
recentThreads.length > 0
? `Recent email subjects:\n${recentThreads.map((t) => `- ${t.subject ?? '(no subject)'}`).join('\n')}`
: null,
additionalInstructions ? `Additional instructions: ${additionalInstructions}` : null,
'',
'Return JSON with keys: subject (string) and body (string, plain text).',
]
.filter(Boolean)
.join('\n');
// Call OpenAI with timeout
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), OPENAI_TIMEOUT_MS);
let subject: string;
let body: string;
try {
const response = await fetch('https://api.openai.com/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
model: 'gpt-4o-mini',
messages: [
{
role: 'system',
content:
'You are an expert marina sales and relationship manager. Generate professional, concise emails. Always return valid JSON with "subject" and "body" keys only.',
},
{ role: 'user', content: prompt },
],
max_tokens: 800,
temperature: 0.7,
response_format: { type: 'json_object' },
}),
signal: controller.signal,
});
clearTimeout(timeoutId);
if (!response.ok) {
const errorText = await response.text().catch(() => '');
throw new Error(`OpenAI API error ${response.status}: ${errorText}`);
}
const data = (await response.json()) as {
choices: Array<{ message: { content: string } }>;
};
const content = data.choices[0]?.message?.content ?? '{}';
// Enforce output size cap (measured in bytes, to match MAX_OUTPUT_BYTES)
if (Buffer.byteLength(content, 'utf8') > MAX_OUTPUT_BYTES) {
throw new Error('AI output exceeded 10 KB cap');
}
const parsed = JSON.parse(content) as { subject?: string; body?: string };
subject = parsed.subject ?? `Follow-up: ${client.fullName}`;
body = parsed.body ?? '';
} catch (err) {
clearTimeout(timeoutId);
logger.warn({ err, interestId }, 'OpenAI call failed, falling back to template draft');
return buildTemplateDraft({ clientName: client.fullName, context, berthMooring, pipelineStage: interest.pipelineStage });
}
return { subject, body, generatedAt: new Date().toISOString() };
}
// ─── Template fallback ────────────────────────────────────────────────────────
function buildTemplateDraft(opts: {
clientName: string;
context: string;
berthMooring: string | null;
pipelineStage: string;
}): DraftResult {
const { clientName, context, berthMooring, pipelineStage } = opts;
const berthText = berthMooring ? `berth ${berthMooring}` : 'your requested berth';
const templates: Record<string, { subject: string; body: string }> = {
introduction: {
subject: `Welcome to Port Nimara, ${clientName}`,
body: `Dear ${clientName},\n\nThank you for your interest in Port Nimara. We are delighted to introduce our marina facilities and look forward to discussing how we can accommodate your needs for ${berthText}.\n\nPlease feel free to reach out at any time.\n\nKind regards,\nPort Nimara Team`,
},
follow_up: {
subject: `Following up, ${clientName}`,
body: `Dear ${clientName},\n\nI wanted to follow up regarding your interest in ${berthText}. Please let us know if you have any questions or if there is anything we can assist you with.\n\nWe look forward to hearing from you.\n\nKind regards,\nPort Nimara Team`,
},
stage_update: {
subject: `Update on your application, ${clientName}`,
body: `Dear ${clientName},\n\nWe are pleased to inform you that your application for ${berthText} has progressed to the "${pipelineStage.replace(/_/g, ' ')}" stage.\n\nWe will be in touch shortly with the next steps.\n\nKind regards,\nPort Nimara Team`,
},
general: {
subject: `Message from Port Nimara, ${clientName}`,
body: `Dear ${clientName},\n\nThank you for your continued interest in Port Nimara. We appreciate your patience and look forward to assisting you with ${berthText}.\n\nKind regards,\nPort Nimara Team`,
},
};
const template = templates[context] ?? templates['general']!;
return { ...template, generatedAt: new Date().toISOString() };
}
// ─── Worker ───────────────────────────────────────────────────────────────────
export const aiWorker = new Worker(
'ai',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing AI job');
switch (job.name) {
case 'generate-email-draft': {
const payload = job.data as GenerateEmailDraftPayload;
const result = await generateEmailDraft(payload);
return result;
}
default:
logger.warn({ jobName: job.name }, 'Unknown AI job');
return undefined;
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.ai.concurrency,
},
);
aiWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'AI job failed');
});
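
The worker re-fetches interest, client, and berth data by ID instead of trusting whatever sits in the job payload, so no PII has to pass through Redis. A minimal producer-side sketch of that contract, assuming the getQueue helper exported by '@/lib/queue' (used the same way in the reports worker below); the requestEmailDraft name is hypothetical:

import { getQueue } from '@/lib/queue';

// Enqueue a draft request with identifiers only; the AI worker loads names,
// notes, and email history itself, so no PII sits in the queue payload.
export async function requestEmailDraft(input: {
  interestId: string;
  clientId: string;
  portId: string;
  requestedBy: string;
  context?: 'follow_up' | 'introduction' | 'stage_update' | 'general';
  additionalInstructions?: string;
}): Promise<void> {
  await getQueue('ai').add('generate-email-draft', {
    context: 'follow_up',
    ...input,
  });
}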

src/lib/queue/workers/bulk.ts Normal file

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const bulkWorker = new Worker(
'bulk',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing bulk job');
// TODO(L2): implement bulk operation job handlers
// - bulk status change across multiple records
// - bulk tag assignment / removal
// - bulk delete with soft-delete support
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.bulk.concurrency,
},
);
bulkWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Bulk job failed');
});

src/lib/queue/workers/documents.ts Normal file

@@ -0,0 +1,29 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const documentsWorker = new Worker(
'documents',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing documents job');
switch (job.name) {
case 'signature-poll': {
const { processDocumensoPoll } = await import('@/jobs/processors/documenso-poll');
await processDocumensoPoll();
break;
}
default:
logger.warn({ jobName: job.name }, 'Unknown documents job');
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.documents.concurrency,
},
);
documentsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Documents job failed');
});

src/lib/queue/workers/email.ts Normal file

@@ -0,0 +1,30 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const emailWorker = new Worker(
'email',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing email job');
switch (job.name) {
case 'inbox-sync': {
const { accountId } = job.data as { accountId: string };
const { syncInbox } = await import('@/lib/services/email-threads.service');
await syncInbox(accountId);
break;
}
default:
logger.warn({ jobName: job.name }, 'Unknown email job');
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.email.concurrency,
},
);
emailWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Email job failed');
});

src/lib/queue/workers/export.ts Normal file

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const exportWorker = new Worker(
'export',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing export job');
// TODO(L2): implement export job handlers
// - CSV data export
// - PDF export
// - Parent company report export
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.export.concurrency,
},
);
exportWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Export job failed');
});

src/lib/queue/workers/import.ts Normal file

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const importWorker = new Worker(
'import',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing import job');
// TODO(L2): implement import job handlers
// - CSV client import
// - Excel berth spec import
// - Note: maxAttempts=1; failed imports are not retried automatically, the user re-runs them manually
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.import.concurrency,
},
);
importWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Import job failed');
});

src/lib/queue/workers/maintenance.ts Normal file

@@ -0,0 +1,34 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const maintenanceWorker = new Worker(
'maintenance',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing maintenance job');
switch (job.name) {
case 'currency-refresh': {
const { refreshRates } = await import('@/lib/services/currency');
await refreshRates();
break;
}
case 'form-expiry-check': {
// TODO(L3): mark expired form submissions
logger.info('Form expiry check — not yet implemented');
break;
}
default:
logger.warn({ jobName: job.name }, 'Unknown maintenance job');
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.maintenance.concurrency,
},
);
maintenanceWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Maintenance job failed');
});

src/lib/queue/workers/notifications.ts Normal file

@@ -0,0 +1,84 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const notificationsWorker = new Worker(
'notifications',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing notifications job');
switch (job.name) {
case 'invoice-overdue-check': {
const { detectOverdue } = await import('@/lib/services/invoices');
const { db } = await import('@/lib/db');
const { ports } = await import('@/lib/db/schema/ports');
const allPorts = await db.select({ id: ports.id }).from(ports);
for (const port of allPorts) {
try {
await detectOverdue(port.id);
} catch (err) {
logger.error({ err, portId: port.id }, 'Overdue detection failed');
}
}
break;
}
case 'reminder-check': {
const { processDocumentReminders } = await import(
'@/jobs/processors/document-reminder'
);
await processDocumentReminders();
break;
}
case 'send-notification-email': {
const { notificationId } = job.data as { notificationId: string };
const { db } = await import('@/lib/db');
const { notifications } = await import('@/lib/db/schema/operations');
const { user } = await import('@/lib/db/schema/users');
const { eq } = await import('drizzle-orm');
const { sendEmail } = await import('@/lib/email/index');
const [notif] = await db
.select()
.from(notifications)
.where(eq(notifications.id, notificationId))
.limit(1);
if (!notif) break;
// Get user email from the Better Auth user table
const [authUser] = await db
.select({ email: user.email, name: user.name })
.from(user)
.where(eq(user.id, notif.userId))
.limit(1);
if (!authUser?.email) break;
await sendEmail(
authUser.email,
`[Port Nimara] ${notif.title}`,
`<p>${notif.description ?? notif.title}</p>${
notif.link
? `<p><a href="${process.env.APP_URL}${notif.link}">View in CRM</a></p>`
: ''
}`,
);
await db
.update(notifications)
.set({ emailSent: true })
.where(eq(notifications.id, notificationId));
break;
}
default:
logger.warn({ jobName: job.name }, 'Unknown notifications job');
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.notifications.concurrency,
},
);
notificationsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Notifications job failed');
});

src/lib/queue/workers/reports.ts Normal file

@@ -0,0 +1,74 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
export const reportsWorker = new Worker(
'reports',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing reports job');
switch (job.name) {
case 'report-scheduler': {
// Check scheduled_reports for reports due to run
const { db } = await import('@/lib/db');
const { scheduledReports, generatedReports } = await import('@/lib/db/schema/operations');
const { eq, and, lte } = await import('drizzle-orm');
const dueReports = await db
.select()
.from(scheduledReports)
.where(
and(
eq(scheduledReports.isActive, true),
lte(scheduledReports.nextRunAt, new Date()),
),
);
const { getQueue } = await import('@/lib/queue');
for (const report of dueReports) {
const [genReport] = await db
.insert(generatedReports)
.values({
portId: report.portId,
scheduledReportId: report.id,
reportType: report.reportType,
name: `${report.name} - ${new Date().toISOString().split('T')[0]}`,
status: 'queued',
parameters: (report.config as Record<string, unknown>) ?? {},
requestedBy: report.createdBy,
})
.returning();
if (genReport) {
await getQueue('reports').add('generate-report', {
reportJobId: genReport.id,
});
}
}
break;
}
case 'generate-report': {
const { reportJobId } = job.data as { reportJobId: string };
const { generateReport } = await import('@/lib/services/reports.service');
await generateReport(reportJobId);
break;
}
default:
logger.warn({ jobName: job.name }, 'Unknown reports job');
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.reports.concurrency,
},
);
reportsWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Reports job failed');
});
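
The 'report-scheduler' job itself still has to be registered as a repeatable job somewhere at startup, which this file does not show. A hedged sketch of that registration with BullMQ repeatable jobs, assuming the getQueue helper and an illustrative 15-minute cadence:

import { getQueue } from '@/lib/queue';

// Register the scheduler as a repeatable job. The fixed jobId keeps repeated
// registration idempotent across restarts; the cron pattern is illustrative only.
export async function registerReportScheduler(): Promise<void> {
  await getQueue('reports').add(
    'report-scheduler',
    {},
    { repeat: { pattern: '*/15 * * * *' }, jobId: 'report-scheduler' },
  );
}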

src/lib/queue/workers/webhooks.ts Normal file

@@ -0,0 +1,205 @@
import { Worker, type Job } from 'bullmq';
import { createHmac } from 'node:crypto';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// ─── Job Payload ─────────────────────────────────────────────────────────────
interface WebhookDeliverPayload {
webhookId: string;
portId: string;
event: string;
deliveryId: string;
payload: Record<string, unknown>;
}
// ─── Worker ──────────────────────────────────────────────────────────────────
export const webhooksWorker = new Worker(
'webhooks',
async (job: Job) => {
logger.info({ jobId: job.id, jobName: job.name }, 'Processing webhooks job');
if (job.name !== 'deliver') {
logger.warn({ jobName: job.name }, 'Unknown webhooks job');
return;
}
const { webhookId, portId, event, deliveryId, payload } =
job.data as WebhookDeliverPayload;
const { db } = await import('@/lib/db');
const { webhooks, webhookDeliveries } = await import('@/lib/db/schema/system');
const { userProfiles } = await import('@/lib/db/schema/users');
const { decrypt } = await import('@/lib/utils/encryption');
const { createNotification } = await import('@/lib/services/notifications.service');
const { eq, and } = await import('drizzle-orm');
// 1. Fetch webhook — skip if deleted
const webhook = await db.query.webhooks.findFirst({
where: eq(webhooks.id, webhookId),
});
if (!webhook) {
logger.info({ webhookId }, 'Webhook deleted — skipping delivery');
await db
.delete(webhookDeliveries)
.where(eq(webhookDeliveries.id, deliveryId));
return;
}
// 2. Decrypt secret
let secret: string;
try {
secret = webhook.secret ? decrypt(webhook.secret) : '';
} catch (err) {
logger.error({ webhookId, err }, 'Failed to decrypt webhook secret');
throw err; // Let BullMQ retry
}
// 3. Build final payload
const finalPayload = {
id: deliveryId,
event,
timestamp: new Date().toISOString(),
port_id: portId,
data: payload,
};
const bodyString = JSON.stringify(finalPayload);
// 4. Sign with HMAC-SHA256
const signature = secret
? `sha256=${createHmac('sha256', secret).update(bodyString).digest('hex')}`
: '';
const attempt = (job.attemptsMade ?? 0) + 1;
// 5. POST to webhook URL with 10s timeout
let responseStatus: number | null = null;
let responseBody: string | null = null;
let success = false;
try {
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 10_000);
const response = await fetch(webhook.url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'User-Agent': 'PortNimara-Webhook/1.0',
'X-Webhook-Id': webhookId,
'X-Webhook-Event': event,
'X-Webhook-Signature': signature,
'X-Webhook-Delivery': deliveryId,
},
body: bodyString,
signal: controller.signal,
});
clearTimeout(timeoutId);
responseStatus = response.status;
// Read the full response body, then keep at most 1 KB
const rawBody = await response.text();
responseBody = rawBody.slice(0, 1024);
success = response.status >= 200 && response.status < 300;
} catch (err) {
// Network error or timeout
logger.warn({ webhookId, deliveryId, err }, 'Webhook delivery network error');
responseStatus = null;
responseBody = err instanceof Error ? err.message.slice(0, 1024) : String(err).slice(0, 1024);
success = false;
}
const maxAttempts = QUEUE_CONFIGS.webhooks.maxAttempts;
const isFinalAttempt = attempt >= maxAttempts;
if (success) {
// 6a. Record success
await db
.update(webhookDeliveries)
.set({
status: 'success',
responseStatus,
responseBody,
attempt,
deliveredAt: new Date(),
})
.where(eq(webhookDeliveries.id, deliveryId));
logger.info({ webhookId, deliveryId, event }, 'Webhook delivered successfully');
} else if (isFinalAttempt) {
// 6b. Final failure → dead_letter + system alert
await db
.update(webhookDeliveries)
.set({
status: 'dead_letter',
responseStatus,
responseBody,
attempt,
})
.where(eq(webhookDeliveries.id, deliveryId));
logger.error(
{ webhookId, deliveryId, event, attempt },
'Webhook delivery permanently failed — dead_letter',
);
// Notify all super admins
try {
const superAdmins = await db
.select({ userId: userProfiles.userId })
.from(userProfiles)
.where(and(eq(userProfiles.isSuperAdmin, true), eq(userProfiles.isActive, true)));
for (const admin of superAdmins) {
void createNotification({
portId,
userId: admin.userId,
type: 'system_alert',
title: 'Webhook delivery failed permanently',
description: `Webhook "${webhook.name}" failed to deliver event "${event}" after ${maxAttempts} attempts.`,
link: `/admin/webhooks/${webhookId}`,
entityType: 'webhook',
entityId: webhookId,
dedupeKey: `webhook:dead_letter:${deliveryId}`,
cooldownMs: 0,
});
}
} catch (notifyErr) {
logger.error({ notifyErr }, 'Failed to send dead_letter notification');
}
// Throw to let BullMQ mark job as failed (it won't retry since it's the final attempt)
throw new Error(
`Webhook delivery failed after ${attempt} attempts. Status: ${responseStatus ?? 'network error'}`,
);
} else {
// 6c. Non-final failure → update record then throw to trigger retry
await db
.update(webhookDeliveries)
.set({
status: 'failed',
responseStatus,
responseBody,
attempt,
})
.where(eq(webhookDeliveries.id, deliveryId));
throw new Error(
`Webhook delivery attempt ${attempt} failed. Status: ${responseStatus ?? 'network error'}. Retrying...`,
);
}
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
concurrency: QUEUE_CONFIGS.webhooks.concurrency,
},
);
webhooksWorker.on('failed', (job, err) => {
logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Webhooks job failed');
});
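
On the receiving end, a consumer verifies X-Webhook-Signature by recomputing the HMAC over the raw request body. A minimal sketch with Node's crypto; the verifyWebhookSignature name and string body parameter are assumptions, while the header names and the sha256=<hex> format come from the worker above:

import { createHmac, timingSafeEqual } from 'node:crypto';

// Recompute the signature over the exact raw body and compare in constant time.
export function verifyWebhookSignature(rawBody: string, signatureHeader: string, secret: string): boolean {
  const expected = `sha256=${createHmac('sha256', secret).update(rawBody).digest('hex')}`;
  const received = Buffer.from(signatureHeader);
  const computed = Buffer.from(expected);
  // timingSafeEqual throws when lengths differ, so guard on length first.
  return received.length === computed.length && timingSafeEqual(received, computed);
}

Verification must run against the raw body bytes as received; re-serializing parsed JSON can reorder keys or change whitespace and break the comparison.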