Initial commit: Port Nimara CRM (Layers 0-4)
Some checks failed
Build & Push Docker Images / build-and-push (push) Has been cancelled
Build & Push Docker Images / deploy (push) Has been cancelled
Build & Push Docker Images / lint (push) Has been cancelled

Full CRM rebuild with Next.js 15, TypeScript, Tailwind, Drizzle ORM,
PostgreSQL, Redis, BullMQ, MinIO, and Socket.io. Includes 461 source
files covering clients, berths, interests/pipeline, documents/EOI,
expenses/invoices, email, notifications, dashboard, admin, and
client portal. CI/CD via Gitea Actions with Docker builds.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-26 11:52:51 +01:00
commit 67d7e6e3d5
572 changed files with 86496 additions and 0 deletions

View File

@@ -0,0 +1,144 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { systemSettings } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { emitToRoom } from '@/lib/socket/server';
// ─── Types ────────────────────────────────────────────────────────────────────
/**
 * Lifecycle events that can drive an automatic berth status change.
 * Raised by the interests / EOI / contract workflows elsewhere in the app.
 */
export type BerthRuleTrigger =
  | 'eoi_sent'
  | 'eoi_signed'
  | 'deposit_received'
  | 'contract_signed'
  | 'interest_archived'
  | 'interest_completed'
  | 'berth_unlinked';

/** How a rule fires: apply immediately, suggest to the user, or disabled. */
export type BerthRuleMode = 'auto' | 'suggest' | 'off';

/** Outcome of evaluating a rule for an interest/berth pair. */
export interface BerthRuleResult {
  action: 'applied' | 'suggested' | 'none';
  // Target status; set when action is 'applied' or 'suggested'.
  newStatus?: string;
  // Human-readable hint; set for 'suggested' results.
  message?: string;
}

/** Per-trigger configuration: firing mode plus the status to move the berth to. */
interface RuleConfig {
  mode: BerthRuleMode;
  targetStatus: string;
}

/** Request context forwarded into audit-log entries. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Defaults ────────────────────────────────────────────────────────────────
// Built-in rule set used when a port has no stored 'berth_rules' setting.
// Per-trigger overrides from system settings are merged on top of these
// (see getRulesConfig below).
const DEFAULT_RULES: Record<BerthRuleTrigger, RuleConfig> = {
  eoi_sent: { mode: 'suggest', targetStatus: 'under_offer' },
  eoi_signed: { mode: 'auto', targetStatus: 'under_offer' },
  deposit_received: { mode: 'auto', targetStatus: 'sold' },
  contract_signed: { mode: 'auto', targetStatus: 'sold' },
  interest_archived: { mode: 'suggest', targetStatus: 'available' },
  interest_completed: { mode: 'auto', targetStatus: 'sold' },
  berth_unlinked: { mode: 'off', targetStatus: 'available' },
};
// ─── Config ───────────────────────────────────────────────────────────────────
/**
 * Resolve the effective berth-rule configuration for a port: DEFAULT_RULES
 * overlaid with any per-trigger overrides stored in the 'berth_rules'
 * system setting. Unknown triggers in the stored value are ignored.
 */
async function getRulesConfig(
  portId: string,
): Promise<Record<BerthRuleTrigger, RuleConfig>> {
  const setting = await db.query.systemSettings.findFirst({
    where: and(
      eq(systemSettings.key, 'berth_rules'),
      eq(systemSettings.portId, portId),
    ),
  });
  const effective = { ...DEFAULT_RULES };
  if (!setting?.value) {
    return effective;
  }
  const overrides = setting.value as Partial<Record<BerthRuleTrigger, RuleConfig>>;
  // Only triggers known to DEFAULT_RULES can be overridden.
  for (const trigger of Object.keys(DEFAULT_RULES) as BerthRuleTrigger[]) {
    const override = overrides[trigger];
    if (override) {
      effective[trigger] = override;
    }
  }
  return effective;
}
// ─── Evaluate Rule ────────────────────────────────────────────────────────────
/**
 * Evaluate the configured berth rule for a lifecycle trigger on an interest.
 *
 * Depending on the port's configured mode for this trigger:
 *  - 'off'     → no action
 *  - 'auto'    → the linked berth's status is updated immediately, with an
 *                audit-log entry and a socket broadcast to the port room
 *  - 'suggest' → the caller receives a suggested status to present to the user
 *
 * @param trigger    lifecycle event that just occurred
 * @param interestId interest whose linked berth should be considered
 * @param portId     tenancy scope for all lookups and writes
 * @param meta       request context recorded in the audit log
 * @returns what happened: 'applied' (with newStatus), 'suggested', or 'none'
 */
export async function evaluateRule(
  trigger: BerthRuleTrigger,
  interestId: string,
  portId: string,
  meta: AuditMeta,
): Promise<BerthRuleResult> {
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  // Nothing to do if the interest is missing or has no linked berth.
  if (!interest?.berthId) {
    return { action: 'none' };
  }
  const rulesConfig = await getRulesConfig(portId);
  const rule = rulesConfig[trigger];
  if (rule.mode === 'off') {
    return { action: 'none' };
  }
  if (rule.mode === 'auto') {
    const updatedRows = await db
      .update(berths)
      .set({
        status: rule.targetStatus,
        statusLastChangedBy: meta.userId,
        statusLastChangedReason: `Auto-applied by rule: ${trigger}`,
        statusLastModified: new Date(),
        updatedAt: new Date(),
      })
      .where(and(eq(berths.id, interest.berthId), eq(berths.portId, portId)))
      .returning({ id: berths.id });
    // Fix: previously the audit log and socket event fired even when the
    // UPDATE matched no row (stale berthId or cross-port mismatch). Only
    // report 'applied' when a berth was actually changed.
    if (updatedRows.length === 0) {
      return { action: 'none' };
    }
    void createAuditLog({
      userId: meta.userId,
      portId,
      action: 'update',
      entityType: 'berth',
      entityId: interest.berthId,
      newValue: { status: rule.targetStatus },
      metadata: { type: 'berth_rule_auto', trigger, interestId },
      ipAddress: meta.ipAddress,
      userAgent: meta.userAgent,
    });
    emitToRoom(`port:${portId}`, 'berth:statusChanged', {
      berthId: interest.berthId,
      newStatus: rule.targetStatus,
      triggeredBy: meta.userId,
      trigger,
    });
    return { action: 'applied', newStatus: rule.targetStatus };
  }
  // 'suggest' mode: leave the berth untouched; the UI offers the change.
  return {
    action: 'suggested',
    newStatus: rule.targetStatus,
    message: `Suggested status change to "${rule.targetStatus}" based on trigger "${trigger}"`,
  };
}

View File

@@ -0,0 +1,471 @@
import { and, eq, gte, lte, inArray, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
berths,
berthTags,
berthWaitingList,
berthMaintenanceLog,
berthMapData,
} from '@/lib/db/schema/berths';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError } from '@/lib/errors';
import { buildListQuery } from '@/lib/db/query-builder';
import { emitToRoom } from '@/lib/socket/server';
import type {
UpdateBerthInput,
UpdateBerthStatusInput,
ListBerthsQuery,
AddMaintenanceLogInput,
UpdateWaitingListInput,
} from '@/lib/validators/berths';
/** Request context (actor + client info) attached to audit-log entries. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated berth listing for a port with status/area/dimension/price/tenure
 * and tag filters, free-text search over mooring number + area, and sorting.
 *
 * @returns { data, total } where each row also carries its attached tags.
 */
export async function listBerths(portId: string, query: ListBerthsQuery) {
  const filters = [];
  if (query.status) {
    filters.push(eq(berths.status, query.status));
  }
  if (query.area) {
    filters.push(eq(berths.area, query.area));
  }
  // Dimension/price columns are SQL numerics exposed as strings by Drizzle,
  // hence the String(...) coercions on the comparison bounds.
  if (query.minLength !== undefined) {
    filters.push(gte(berths.lengthM, String(query.minLength)));
  }
  if (query.maxLength !== undefined) {
    filters.push(lte(berths.lengthM, String(query.maxLength)));
  }
  if (query.minPrice !== undefined) {
    filters.push(gte(berths.price, String(query.minPrice)));
  }
  if (query.maxPrice !== undefined) {
    filters.push(lte(berths.price, String(query.maxPrice)));
  }
  if (query.tenureType) {
    filters.push(eq(berths.tenureType, query.tenureType));
  }
  // Tag filter: pre-resolve matching berth ids via the berthTags junction
  // table; a berth matches if it carries ANY of the requested tags.
  if (query.tagIds && query.tagIds.length > 0) {
    const tagIds = query.tagIds;
    const berthsWithTags = await db
      .selectDistinct({ berthId: berthTags.berthId })
      .from(berthTags)
      .where(inArray(berthTags.tagId, tagIds));
    const matchingIds = berthsWithTags.map((r) => r.berthId);
    // No berth carries any of these tags — short-circuit with an empty page.
    if (matchingIds.length === 0) {
      return { data: [], total: 0 };
    }
    filters.push(inArray(berths.id, matchingIds));
  }
  // Whitelist of sortable columns; anything else falls back to updatedAt.
  const sortColumn = (() => {
    switch (query.sort) {
      case 'mooringNumber': return berths.mooringNumber;
      case 'area': return berths.area;
      case 'price': return berths.price;
      case 'status': return berths.status;
      case 'lengthM': return berths.lengthM;
      default: return berths.updatedAt;
    }
  })();
  const result = await buildListQuery({
    table: berths,
    portIdColumn: berths.portId,
    portId,
    idColumn: berths.id,
    updatedAtColumn: berths.updatedAt,
    filters,
    sort: { column: sortColumn, direction: query.order },
    page: query.page,
    pageSize: query.limit,
    searchColumns: [berths.mooringNumber, berths.area],
    searchTerm: query.search,
    // No archivedAt column on berths
    includeArchived: true,
  });
  // Attach tags for list items in one batched query (avoids N+1).
  const berthIds = (result.data as Array<{ id: string }>).map((b) => b.id);
  const tagsByBerthId: Record<string, Array<{ id: string; name: string; color: string }>> = {};
  if (berthIds.length > 0) {
    const tagRows = await db
      .select({
        berthId: berthTags.berthId,
        id: tags.id,
        name: tags.name,
        color: tags.color,
      })
      .from(berthTags)
      .innerJoin(tags, eq(berthTags.tagId, tags.id))
      .where(inArray(berthTags.berthId, berthIds));
    for (const row of tagRows) {
      if (!tagsByBerthId[row.berthId]) tagsByBerthId[row.berthId] = [];
      tagsByBerthId[row.berthId]!.push({ id: row.id, name: row.name, color: row.color });
    }
  }
  const data = (result.data as Array<Record<string, unknown>>).map((b) => ({
    ...b,
    tags: tagsByBerthId[b.id as string] ?? [],
  }));
  return { data, total: result.total };
}
// ─── Get By ID ────────────────────────────────────────────────────────────────
/**
 * Fetch a single berth (scoped to the port) with its map data and tags.
 * @throws NotFoundError when no berth matches id + portId.
 */
export async function getBerthById(id: string, portId: string) {
  const berth = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
    with: { mapData: true },
  });
  if (!berth) {
    throw new NotFoundError('Berth');
  }
  // Resolve attached tags via the berthTags junction table.
  const berthTagRows = await db
    .select({ id: tags.id, name: tags.name, color: tags.color })
    .from(berthTags)
    .innerJoin(tags, eq(berthTags.tagId, tags.id))
    .where(eq(berthTags.berthId, id));
  return { ...berth, tags: berthTagRows };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Update a berth's editable attributes (dimensions, power, mooring hardware,
 * pricing, tenure). No-op updates skip the write and all side effects.
 * On change: audit-logs the diff, broadcasts 'berth:updated' to the port
 * room, and dispatches a 'berth:updated' webhook.
 *
 * @throws NotFoundError when no berth matches id + portId.
 */
export async function updateBerth(
  id: string,
  portId: string,
  data: UpdateBerthInput,
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  const { changed, diff } = diffEntity(existing as Record<string, unknown>, data as Record<string, unknown>);
  // Nothing actually changed — return the current row untouched.
  if (!changed) return existing;
  // Drizzle numeric columns expect string | null — coerce numbers to strings
  // (undefined stays undefined so unset fields are omitted from the UPDATE).
  const n = (v: number | undefined) => (v !== undefined ? String(v) : undefined);
  const [updated] = await db
    .update(berths)
    .set({
      area: data.area,
      lengthFt: n(data.lengthFt),
      lengthM: n(data.lengthM),
      widthFt: n(data.widthFt),
      widthM: n(data.widthM),
      draftFt: n(data.draftFt),
      draftM: n(data.draftM),
      widthIsMinimum: data.widthIsMinimum,
      nominalBoatSize: data.nominalBoatSize,
      nominalBoatSizeM: data.nominalBoatSizeM,
      waterDepth: n(data.waterDepth),
      waterDepthM: n(data.waterDepthM),
      waterDepthIsMinimum: data.waterDepthIsMinimum,
      sidePontoon: data.sidePontoon,
      powerCapacity: data.powerCapacity,
      voltage: data.voltage,
      mooringType: data.mooringType,
      cleatType: data.cleatType,
      cleatCapacity: data.cleatCapacity,
      bollardType: data.bollardType,
      bollardCapacity: data.bollardCapacity,
      access: data.access,
      price: n(data.price),
      priceCurrency: data.priceCurrency,
      bowFacing: data.bowFacing,
      berthApproved: data.berthApproved,
      tenureType: data.tenureType,
      tenureYears: data.tenureYears,
      tenureStartDate: data.tenureStartDate,
      tenureEndDate: data.tenureEndDate,
      updatedAt: new Date(),
    })
    .where(and(eq(berths.id, id), eq(berths.portId, portId)))
    .returning();
  // Fire-and-forget audit write; diff presumably maps field → previous
  // value (shape comes from diffEntity — TODO confirm against its contract).
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    oldValue: diff as unknown as Record<string, unknown>,
    newValue: data as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:updated', {
    berthId: id,
    changedFields: Object.keys(diff),
  });
  // Lazy import keeps the webhook module out of the request's hot path.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'berth:updated', { berthId: id }),
  );
  return updated!;
}
// ─── Update Status ────────────────────────────────────────────────────────────
/**
 * Change a berth's status, recording who changed it and why, then audit,
 * broadcast 'berth:statusChanged', and dispatch the matching webhook.
 * @throws NotFoundError when no berth matches id + portId.
 */
export async function updateBerthStatus(
  id: string,
  portId: string,
  data: UpdateBerthStatusInput,
  meta: AuditMeta,
) {
  const current = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!current) {
    throw new NotFoundError('Berth');
  }
  const now = new Date();
  const rows = await db
    .update(berths)
    .set({
      status: data.status,
      statusLastChangedBy: meta.userId,
      statusLastChangedReason: data.reason,
      statusLastModified: now,
      updatedAt: now,
    })
    .where(and(eq(berths.id, id), eq(berths.portId, portId)))
    .returning();
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    oldValue: { status: current.status },
    newValue: { status: data.status, reason: data.reason },
    metadata: { type: 'status_change', reason: data.reason },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:statusChanged', {
    berthId: id,
    oldStatus: current.status,
    newStatus: data.status,
    triggeredBy: meta.userId,
  });
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'berth:statusChanged', {
      berthId: id,
      oldStatus: current.status,
      newStatus: data.status,
    }),
  );
  return rows[0]!;
}
// ─── Set Tags ─────────────────────────────────────────────────────────────────
/**
 * Replace a berth's tag set wholesale, then audit + broadcast the change.
 * @throws NotFoundError when no berth matches id + portId.
 */
export async function setBerthTags(
  id: string,
  portId: string,
  tagIds: string[],
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  // Fix: delete + insert was two independent writes, so a failed insert left
  // the berth with no tags at all. Run both inside one transaction.
  await db.transaction(async (tx) => {
    await tx.delete(berthTags).where(eq(berthTags.berthId, id));
    if (tagIds.length > 0) {
      await tx.insert(berthTags).values(tagIds.map((tagId) => ({ berthId: id, tagId })));
    }
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    metadata: { type: 'tags_updated', tagIds },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:updated', {
    berthId: id,
    changedFields: ['tags'],
  });
  return { berthId: id, tagIds };
}
// ─── Add Maintenance Log ──────────────────────────────────────────────────────
export async function addMaintenanceLog(
id: string,
portId: string,
data: AddMaintenanceLogInput,
meta: AuditMeta,
) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
const rows = await db
.insert(berthMaintenanceLog)
.values({
berthId: id,
portId,
category: data.category,
description: data.description,
cost: data.cost !== undefined ? String(data.cost) : undefined,
costCurrency: data.costCurrency,
responsibleParty: data.responsibleParty,
performedDate: data.performedDate,
photoFileIds: data.photoFileIds,
createdBy: meta.userId,
})
.returning();
const log = rows[0]!;
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'berth_maintenance_log',
entityId: log.id,
metadata: { berthId: id, category: data.category },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'berth:maintenanceAdded', {
berthId: id,
logEntry: log,
});
return log;
}
// ─── Get Maintenance Logs ─────────────────────────────────────────────────────
export async function getMaintenanceLogs(id: string, portId: string) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
return db
.select()
.from(berthMaintenanceLog)
.where(and(eq(berthMaintenanceLog.berthId, id), eq(berthMaintenanceLog.portId, portId)))
.orderBy(berthMaintenanceLog.performedDate);
}
// ─── Get Waiting List ─────────────────────────────────────────────────────────
export async function getWaitingList(id: string, portId: string) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
return db
.select()
.from(berthWaitingList)
.where(eq(berthWaitingList.berthId, id))
.orderBy(berthWaitingList.position);
}
// ─── Update Waiting List ──────────────────────────────────────────────────────
/**
 * Replace a berth's waiting list wholesale with the supplied entries, then
 * audit + broadcast the change.
 * @throws NotFoundError when no berth matches id + portId.
 */
export async function updateWaitingList(
  id: string,
  portId: string,
  data: UpdateWaitingListInput,
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  // Fix: delete + re-insert was two independent writes, so a failed insert
  // wiped the waiting list. Run the replacement atomically.
  await db.transaction(async (tx) => {
    await tx.delete(berthWaitingList).where(eq(berthWaitingList.berthId, id));
    if (data.entries.length > 0) {
      await tx.insert(berthWaitingList).values(
        data.entries.map((entry) => ({
          berthId: id,
          clientId: entry.clientId,
          position: entry.position,
          priority: entry.priority ?? 'normal',
          notifyPref: entry.notifyPref ?? 'email',
          notes: entry.notes,
        })),
      );
    }
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    metadata: { type: 'waiting_list_updated', count: data.entries.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:waitingListChanged', {
    berthId: id,
    action: 'replaced',
    entry: data.entries,
  });
  return data.entries;
}
// ─── Options ──────────────────────────────────────────────────────────────────
/**
 * Lightweight berth rows (id, mooring number, area, status) for dropdowns
 * and comboboxes, ordered by mooring number.
 */
export async function getBerthOptions(portId: string) {
  const columns = {
    id: berths.id,
    mooringNumber: berths.mooringNumber,
    area: berths.area,
    status: berths.status,
  };
  return db
    .select(columns)
    .from(berths)
    .where(eq(berths.portId, portId))
    .orderBy(berths.mooringNumber);
}

View File

@@ -0,0 +1,489 @@
import { and, eq, ilike, inArray, or } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
clients,
clientContacts,
clientRelationships,
clientTags,
} from '@/lib/db/schema/clients';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { buildListQuery } from '@/lib/db/query-builder';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore, withTransaction } from '@/lib/db/utils';
import type {
CreateClientInput,
UpdateClientInput,
ListClientsInput,
} from '@/lib/validators/clients';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request context (actor + client info) attached to audit-log entries. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated client listing for a port with source / nationality / proxy /
 * tag filters plus free-text search over name and company.
 */
export async function listClients(portId: string, query: ListClientsInput) {
  const { page, limit, sort, order, search, includeArchived, source, nationality, isProxy, tagIds } = query;
  const conditions = [];
  if (source) conditions.push(eq(clients.source, source));
  if (nationality) conditions.push(ilike(clients.nationality, `%${nationality}%`));
  if (isProxy !== undefined) conditions.push(eq(clients.isProxy, isProxy));
  if (tagIds && tagIds.length > 0) {
    // Pre-resolve client ids carrying any of the requested tags.
    const tagged = await db
      .selectDistinct({ clientId: clientTags.clientId })
      .from(clientTags)
      .where(inArray(clientTags.tagId, tagIds));
    if (tagged.length === 0) {
      // No clients match these tags — short-circuit with an empty page.
      return { data: [], total: 0 };
    }
    conditions.push(inArray(clients.id, tagged.map((row) => row.clientId)));
  }
  const orderColumn =
    sort === 'fullName'
      ? clients.fullName
      : sort === 'createdAt'
        ? clients.createdAt
        : clients.updatedAt;
  return buildListQuery({
    table: clients,
    portIdColumn: clients.portId,
    portId,
    idColumn: clients.id,
    updatedAtColumn: clients.updatedAt,
    searchColumns: [clients.fullName, clients.companyName],
    searchTerm: search,
    filters: conditions,
    sort: sort ? { column: orderColumn, direction: order } : undefined,
    page,
    pageSize: limit,
    includeArchived,
    archivedAtColumn: clients.archivedAt,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Fetch a client with contacts (primary first, newest first) and tags.
 * @throws NotFoundError when the client does not belong to this port.
 */
export async function getClientById(id: string, portId: string) {
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, id),
  });
  if (!client || client.portId !== portId) {
    throw new NotFoundError('Client');
  }
  // Contacts and tags are independent lookups — fetch them concurrently.
  const [contacts, tagRows] = await Promise.all([
    db.query.clientContacts.findMany({
      where: eq(clientContacts.clientId, id),
      orderBy: (contact, { desc }) => [desc(contact.isPrimary), desc(contact.createdAt)],
    }),
    db
      .select({ tag: tags })
      .from(clientTags)
      .innerJoin(tags, eq(clientTags.tagId, tags.id))
      .where(eq(clientTags.clientId, id)),
  ]);
  return { ...client, contacts, tags: tagRows.map((row) => row.tag) };
}
// ─── Create ───────────────────────────────────────────────────────────────────
export async function createClient(
portId: string,
data: CreateClientInput,
meta: AuditMeta,
) {
const result = await withTransaction(async (tx) => {
const { contacts: contactsInput, tagIds, ...clientData } = data;
const [client] = await tx
.insert(clients)
.values({ portId, ...clientData })
.returning();
if (contactsInput.length > 0) {
await tx.insert(clientContacts).values(
contactsInput.map((c) => ({ clientId: client!.id, ...c })),
);
}
if (tagIds && tagIds.length > 0) {
await tx.insert(clientTags).values(
tagIds.map((tagId) => ({ clientId: client!.id, tagId })),
);
}
return client!;
});
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'client',
entityId: result.id,
newValue: { fullName: result.fullName, companyName: result.companyName },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:created', { clientId: result.id, clientName: result.fullName ?? '', source: result.source ?? '' });
void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
dispatchWebhookEvent(portId, 'client:created', { clientId: result.id }),
);
return result;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Update a client's fields. No-op updates skip the write and all side
 * effects. On change: audit-logs the diff, broadcasts 'client:updated',
 * and dispatches the matching webhook.
 * @throws NotFoundError when the client does not belong to this port.
 */
export async function updateClient(
  id: string,
  portId: string,
  data: UpdateClientInput,
  meta: AuditMeta,
) {
  const existing = await db.query.clients.findFirst({
    where: eq(clients.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Client');
  }
  const { changed, diff } = diffEntity(existing as Record<string, unknown>, data as Record<string, unknown>);
  // Fix: the `changed` flag was previously ignored, so no-op PATCHes still
  // wrote to the DB and emitted audit/socket/webhook noise. Mirror the
  // short-circuit used by updateBerth in the berths service.
  if (!changed) return existing;
  const [updated] = await db
    .update(clients)
    .set({ ...data, updatedAt: new Date() })
    .where(and(eq(clients.id, id), eq(clients.portId, portId)))
    .returning();
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'client',
    entityId: id,
    // Typed casts instead of the previous `as any` escape hatches.
    oldValue: diff as unknown as Record<string, unknown>,
    newValue: data as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'client:updated', { clientId: id, changedFields: Object.keys(diff) });
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'client:updated', { clientId: id }),
  );
  return updated;
}
// ─── Archive / Restore ────────────────────────────────────────────────────────
export async function archiveClient(id: string, portId: string, meta: AuditMeta) {
const existing = await db.query.clients.findFirst({
where: eq(clients.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Client');
}
await softDelete(clients, clients.id, id);
void createAuditLog({
userId: meta.userId,
portId,
action: 'archive',
entityType: 'client',
entityId: id,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:archived', { clientId: id });
void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
dispatchWebhookEvent(portId, 'client:archived', { clientId: id }),
);
}
export async function restoreClient(id: string, portId: string, meta: AuditMeta) {
const existing = await db.query.clients.findFirst({
where: eq(clients.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Client');
}
await restore(clients, clients.id, id);
void createAuditLog({
userId: meta.userId,
portId,
action: 'restore',
entityType: 'client',
entityId: id,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:restored', { clientId: id });
}
// ─── Contacts ─────────────────────────────────────────────────────────────────
export async function listContacts(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
return db.query.clientContacts.findMany({
where: eq(clientContacts.clientId, clientId),
orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
});
}
export async function addContact(
clientId: string,
portId: string,
data: { channel: string; value: string; label?: string; isPrimary?: boolean; notes?: string },
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const [contact] = await db
.insert(clientContacts)
.values({ clientId, ...data })
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'clientContact',
entityId: contact!.id,
newValue: { clientId, channel: contact!.channel },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
return contact!;
}
export async function updateContact(
contactId: string,
clientId: string,
portId: string,
data: Partial<{ channel: string; value: string; label: string; isPrimary: boolean; notes: string }>,
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const contact = await db.query.clientContacts.findFirst({
where: and(eq(clientContacts.id, contactId), eq(clientContacts.clientId, clientId)),
});
if (!contact) throw new NotFoundError('Contact');
const [updated] = await db
.update(clientContacts)
.set({ ...data, updatedAt: new Date() })
.where(eq(clientContacts.id, contactId))
.returning();
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
return updated;
}
export async function removeContact(
contactId: string,
clientId: string,
portId: string,
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const contact = await db.query.clientContacts.findFirst({
where: and(eq(clientContacts.id, contactId), eq(clientContacts.clientId, clientId)),
});
if (!contact) throw new NotFoundError('Contact');
await db.delete(clientContacts).where(eq(clientContacts.id, contactId));
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
}
// ─── Tags ─────────────────────────────────────────────────────────────────────
export async function setClientTags(
clientId: string,
portId: string,
tagIds: string[],
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
await db.delete(clientTags).where(eq(clientTags.clientId, clientId));
if (tagIds.length > 0) {
await db.insert(clientTags).values(tagIds.map((tagId) => ({ clientId, tagId })));
}
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'client',
entityId: clientId,
newValue: { tagIds },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['tags'] });
}
// ─── Relationships ────────────────────────────────────────────────────────────
export async function listRelationships(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
return db.query.clientRelationships.findMany({
where: (r, { or, eq }) =>
or(eq(r.clientAId, clientId), eq(r.clientBId, clientId)),
});
}
export async function createRelationship(
clientId: string,
portId: string,
data: { clientBId: string; relationshipType: string; description?: string },
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const [rel] = await db
.insert(clientRelationships)
.values({ portId, clientAId: clientId, ...data })
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'clientRelationship',
entityId: rel!.id,
newValue: { clientAId: clientId, clientBId: data.clientBId, type: data.relationshipType },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
return rel!;
}
/**
 * Delete a relationship, verifying it belongs to this port AND actually
 * involves the client named in the route, then audit the deletion.
 * @throws NotFoundError when the relationship is out of scope.
 */
export async function deleteRelationship(
  relId: string,
  clientId: string,
  portId: string,
  meta: AuditMeta,
) {
  const rel = await db.query.clientRelationships.findFirst({
    where: eq(clientRelationships.id, relId),
  });
  // Fix: `clientId` was previously ignored, so any relationship in the port
  // could be deleted through any client's URL. Require the client to be one
  // of the two parties.
  if (
    !rel ||
    rel.portId !== portId ||
    (rel.clientAId !== clientId && rel.clientBId !== clientId)
  ) {
    throw new NotFoundError('Relationship');
  }
  await db.delete(clientRelationships).where(eq(clientRelationships.id, relId));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'clientRelationship',
    entityId: relId,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Find Duplicates ──────────────────────────────────────────────────────────
export async function findDuplicates(portId: string, fullName: string) {
return db.query.clients.findMany({
where: (c, { and, eq }) =>
and(eq(c.portId, portId), ilike(c.fullName, `%${fullName}%`)),
limit: 5,
});
}
// ─── Options (for comboboxes) ─────────────────────────────────────────────────
/**
 * Lightweight id/name pairs for client comboboxes, optionally filtered by a
 * case-insensitive name/company search. Capped at 50 rows, sorted by name.
 */
export async function listClientOptions(portId: string, search?: string) {
  const portScope = eq(clients.portId, portId);
  const where = search
    ? and(
        portScope,
        or(
          ilike(clients.fullName, `%${search}%`),
          ilike(clients.companyName, `%${search}%`),
        ),
      )
    : portScope;
  return db
    .select({ id: clients.id, fullName: clients.fullName })
    .from(clients)
    .where(where)
    .orderBy(clients.fullName)
    .limit(50);
}

View File

@@ -0,0 +1,69 @@
import { db } from '@/lib/db';
import { currencyRates } from '@/lib/db/schema/system';
import { eq, and } from 'drizzle-orm';
import { logger } from '@/lib/logger';
/**
 * Look up the stored conversion rate between two ISO currency codes.
 * Identical currencies short-circuit to 1; unknown pairs yield null.
 */
export async function getRate(from: string, to: string): Promise<number | null> {
  if (from === to) return 1;
  const row = await db.query.currencyRates.findFirst({
    where: and(
      eq(currencyRates.baseCurrency, from),
      eq(currencyRates.targetCurrency, to),
    ),
  });
  if (!row) return null;
  return Number(row.rate);
}
/**
 * Convert an amount between currencies using the stored rate.
 * @returns the converted amount (rounded to 2 decimals) plus the rate used,
 *          or null when no rate is available for the pair.
 */
export async function convert(
  amount: number,
  from: string,
  to: string,
): Promise<{ result: number; rate: number } | null> {
  const rate = await getRate(from, to);
  if (!rate) return null;
  const rounded = Number((amount * rate).toFixed(2));
  return { result: rounded, rate };
}
/**
 * Pull the latest USD-based FX rates from the Frankfurter API and upsert
 * both directions (USD→X and X→USD) into currency_rates.
 * Failures are logged, never thrown — this is a background refresh job.
 */
export async function refreshRates(): Promise<void> {
  try {
    const res = await fetch('https://api.frankfurter.dev/v1/latest?base=USD');
    if (!res.ok) throw new Error(`Frankfurter API error: ${res.status}`);
    const data = (await res.json()) as { rates: Record<string, number> };
    const rates = data.rates;
    const fetchedAt = new Date();
    // Build both directions in one pass and run the upserts concurrently;
    // the original issued 2×N sequential round-trips in two separate loops.
    const upserts: Array<Promise<unknown>> = [];
    for (const [currency, rate] of Object.entries(rates)) {
      const forward = String(rate);
      // toFixed already returns a string — no extra String() wrapper needed.
      const inverse = (1 / rate).toFixed(6);
      upserts.push(
        db
          .insert(currencyRates)
          .values({
            baseCurrency: 'USD',
            targetCurrency: currency,
            rate: forward,
            source: 'frankfurter',
            fetchedAt,
          })
          .onConflictDoUpdate({
            target: [currencyRates.baseCurrency, currencyRates.targetCurrency],
            set: { rate: forward, fetchedAt, source: 'frankfurter' },
          }),
        db
          .insert(currencyRates)
          .values({
            baseCurrency: currency,
            targetCurrency: 'USD',
            rate: inverse,
            source: 'frankfurter',
            fetchedAt,
          })
          .onConflictDoUpdate({
            target: [currencyRates.baseCurrency, currencyRates.targetCurrency],
            set: { rate: inverse, fetchedAt, source: 'frankfurter' },
          }),
      );
    }
    await Promise.all(upserts);
    logger.info({ rateCount: Object.keys(rates).length }, 'Currency rates refreshed');
  } catch (err) {
    logger.error({ err }, 'Failed to refresh currency rates');
  }
}

View File

@@ -0,0 +1,323 @@
import { and, eq, count } from 'drizzle-orm';
import { db } from '@/lib/db';
import { customFieldDefinitions, customFieldValues } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ValidationError, ConflictError } from '@/lib/errors';
import type { CreateFieldInput, UpdateFieldInput } from '@/lib/validators/custom-fields';
import type { CustomFieldDefinition } from '@/lib/db/schema/system';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request context (actor + client info) attached to audit-log entries. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Value Validation ─────────────────────────────────────────────────────────
/**
 * Validates one custom-field value against its definition.
 *
 * @returns a human-readable error message, or null when the value is valid.
 */
function validateCustomFieldValue(
  definition: CustomFieldDefinition,
  value: unknown,
): string | null {
  // Absent values are only an error for required fields.
  if (value === null || value === undefined) {
    return definition.isRequired ? 'This field is required' : null;
  }
  switch (definition.fieldType) {
    case 'text': {
      if (typeof value !== 'string') return 'Must be text';
      if (value.length > 1000) return 'Max 1000 chars';
      return null;
    }
    case 'number': {
      if (typeof value !== 'number' || isNaN(value)) return 'Must be a number';
      return null;
    }
    case 'date': {
      // Accept any string Date.parse understands.
      if (typeof value !== 'string' || isNaN(Date.parse(value))) {
        return 'Must be a valid date';
      }
      return null;
    }
    case 'boolean':
      return typeof value === 'boolean' ? null : 'Must be true or false';
    case 'select': {
      const allowed = (definition.selectOptions as string[] | null) ?? [];
      if (allowed.includes(value as string)) return null;
      return `Must be one of: ${allowed.join(', ')}`;
    }
    default:
      return 'Unknown field type';
  }
}
// ─── Definitions ──────────────────────────────────────────────────────────────
/**
 * Lists custom field definitions for a port, optionally limited to one
 * entity type, ordered by sortOrder then creation time.
 */
export async function listDefinitions(portId: string, entityType?: string) {
  const where = entityType
    ? and(
        eq(customFieldDefinitions.portId, portId),
        eq(customFieldDefinitions.entityType, entityType),
      )
    : eq(customFieldDefinitions.portId, portId);
  return db.query.customFieldDefinitions.findMany({
    where,
    orderBy: (fields, { asc }) => [asc(fields.sortOrder), asc(fields.createdAt)],
  });
}
/**
 * Creates a custom field definition for a port, enforcing uniqueness of
 * fieldName within (portId, entityType). Writes a fire-and-forget audit
 * entry on success.
 *
 * @throws ConflictError when a field with the same name already exists.
 */
export async function createDefinition(
  portId: string,
  userId: string,
  data: CreateFieldInput,
  meta: AuditMeta,
) {
  // Reject duplicates: fieldName must be unique per portId + entityType.
  const duplicate = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.portId, portId),
      eq(customFieldDefinitions.entityType, data.entityType),
      eq(customFieldDefinitions.fieldName, data.fieldName),
    ),
  });
  if (duplicate) {
    throw new ConflictError(
      `A custom field named "${data.fieldName}" already exists for ${data.entityType}`,
    );
  }
  const { entityType, fieldName, fieldLabel, fieldType, selectOptions, isRequired, sortOrder } = data;
  const inserted = await db
    .insert(customFieldDefinitions)
    .values({
      portId,
      entityType,
      fieldName,
      fieldLabel,
      fieldType,
      selectOptions: selectOptions ?? null,
      isRequired,
      sortOrder,
    })
    .returning();
  const created = inserted[0];
  if (!created) throw new Error('Insert failed — no row returned');
  // Audit trail is best-effort; the response is not blocked on it.
  void createAuditLog({
    userId,
    portId,
    action: 'create',
    entityType: 'custom_field_definition',
    entityId: created.id,
    newValue: {
      fieldName: created.fieldName,
      fieldLabel: created.fieldLabel,
      fieldType: created.fieldType,
      entityType: created.entityType,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return created;
}
/**
 * Updates mutable attributes of a custom field definition.
 *
 * The field's type is immutable after creation: stored values were validated
 * against it, so changing it would silently invalidate them.
 *
 * Fix: the patch is assembled up-front and the function short-circuits when
 * it is empty — drizzle's `.set({})` throws at runtime, so a request with no
 * updatable fields previously failed with an opaque error.
 *
 * @throws ValidationError when a fieldType change is attempted.
 * @throws NotFoundError when the definition does not exist for this port.
 */
export async function updateDefinition(
  portId: string,
  fieldId: string,
  userId: string,
  data: UpdateFieldInput & { fieldType?: unknown },
  meta: AuditMeta,
) {
  // Immutability guard — fieldType must never change
  if ('fieldType' in data && data.fieldType !== undefined) {
    throw new ValidationError('Field type cannot be changed after creation');
  }
  const existing = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.id, fieldId),
      eq(customFieldDefinitions.portId, portId),
    ),
  });
  if (!existing) {
    throw new NotFoundError('Custom field definition');
  }
  // Only include columns the caller actually provided.
  const patch = {
    ...(data.fieldLabel !== undefined && { fieldLabel: data.fieldLabel }),
    ...(data.selectOptions !== undefined && { selectOptions: data.selectOptions }),
    ...(data.isRequired !== undefined && { isRequired: data.isRequired }),
    ...(data.sortOrder !== undefined && { sortOrder: data.sortOrder }),
  };
  // No-op request: nothing to write, nothing to audit.
  if (Object.keys(patch).length === 0) {
    return existing;
  }
  const updateRows = await db
    .update(customFieldDefinitions)
    .set(patch)
    .where(eq(customFieldDefinitions.id, fieldId))
    .returning();
  const updated = updateRows[0];
  if (!updated) throw new Error('Update failed — no row returned');
  void createAuditLog({
    userId,
    portId,
    action: 'update',
    entityType: 'custom_field_definition',
    entityId: fieldId,
    oldValue: {
      fieldLabel: existing.fieldLabel,
      selectOptions: existing.selectOptions,
      isRequired: existing.isRequired,
      sortOrder: existing.sortOrder,
    },
    newValue: {
      fieldLabel: updated.fieldLabel,
      selectOptions: updated.selectOptions,
      isRequired: updated.isRequired,
      sortOrder: updated.sortOrder,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return updated;
}
/**
 * Deletes a custom field definition scoped to a port. Associated values are
 * removed by the database (ON DELETE CASCADE); their count is captured first
 * so the audit entry and response can report how many were dropped.
 *
 * @throws NotFoundError when the definition does not exist for this port.
 */
export async function deleteDefinition(
  portId: string,
  fieldId: string,
  userId: string,
  meta: AuditMeta,
) {
  const definition = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.id, fieldId),
      eq(customFieldDefinitions.portId, portId),
    ),
  });
  if (!definition) {
    throw new NotFoundError('Custom field definition');
  }
  // Snapshot how many values will cascade-delete with the definition.
  const [countRow] = await db
    .select({ count: count() })
    .from(customFieldValues)
    .where(eq(customFieldValues.fieldId, fieldId));
  const deletedValueCount = Number(countRow?.count ?? 0);
  await db
    .delete(customFieldDefinitions)
    .where(eq(customFieldDefinitions.id, fieldId));
  // Best-effort audit; not awaited.
  void createAuditLog({
    userId,
    portId,
    action: 'delete',
    entityType: 'custom_field_definition',
    entityId: fieldId,
    oldValue: {
      fieldName: definition.fieldName,
      fieldLabel: definition.fieldLabel,
      fieldType: definition.fieldType,
      entityType: definition.entityType,
    },
    metadata: { deletedValueCount },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return { deletedValueCount };
}
// ─── Values ───────────────────────────────────────────────────────────────────
export async function getValues(entityId: string, portId: string) {
const definitions = await db.query.customFieldDefinitions.findMany({
where: eq(customFieldDefinitions.portId, portId),
orderBy: (fields, { asc }) => [asc(fields.sortOrder), asc(fields.createdAt)],
});
const values = await db.query.customFieldValues.findMany({
where: eq(customFieldValues.entityId, entityId),
});
const valueMap = new Map(values.map((v) => [v.fieldId, v]));
return definitions.map((definition) => ({
definition,
value: valueMap.get(definition.id) ?? null,
}));
}
/**
 * Validates and upserts a batch of custom field values for one entity.
 *
 * All values are validated against their definitions first; if any fail, a
 * single ValidationError carrying every field error is thrown and nothing is
 * written. Valid batches are upserted (insert-or-update keyed on
 * fieldId + entityId) and one audit entry records the touched field ids.
 *
 * @returns the upserted value rows (empty array for an empty input batch).
 * @throws ValidationError when any value fails validation or references a
 *         field not defined for this port.
 */
export async function setValues(
  entityId: string,
  portId: string,
  userId: string,
  values: Array<{ fieldId: string; value: unknown }>,
  meta: AuditMeta,
) {
  if (values.length === 0) return [];
  // Fetch relevant definitions to validate values
  const fieldIds = values.map((v) => v.fieldId);
  const definitions = await db.query.customFieldDefinitions.findMany({
    where: eq(customFieldDefinitions.portId, portId),
  });
  const definitionMap = new Map(definitions.map((d) => [d.id, d]));
  // Validate each value
  const errors: Array<{ field: string; message: string }> = [];
  for (const { fieldId, value } of values) {
    const definition = definitionMap.get(fieldId);
    if (!definition) {
      // Unknown field id for this port — reported under the raw id since
      // there is no fieldName available.
      errors.push({ field: fieldId, message: 'Custom field not found for this port' });
      continue;
    }
    const error = validateCustomFieldValue(definition, value);
    if (error) {
      errors.push({ field: definition.fieldName, message: error });
    }
  }
  if (errors.length > 0) {
    throw new ValidationError('Custom field validation failed', errors);
  }
  // Upsert all values
  // NOTE(review): upserts run in parallel outside a transaction, so a late
  // failure can leave a partial batch written — confirm this is acceptable.
  const results = await Promise.all(
    values.map(async ({ fieldId, value }) => {
      const [upserted] = await db
        .insert(customFieldValues)
        .values({
          fieldId,
          entityId,
          value: value as Record<string, unknown>,
          updatedAt: new Date(),
        })
        .onConflictDoUpdate({
          target: [customFieldValues.fieldId, customFieldValues.entityId],
          set: {
            value: value as Record<string, unknown>,
            updatedAt: new Date(),
          },
        })
        .returning();
      return upserted;
    }),
  );
  // Best-effort audit; not awaited.
  void createAuditLog({
    userId,
    portId,
    action: 'update',
    entityType: 'custom_field_values',
    entityId,
    metadata: { fieldIds, updatedCount: results.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return results;
}

View File

@@ -0,0 +1,189 @@
import { and, count, desc, eq, inArray, isNull, sql, sum } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { systemSettings, auditLogs } from '@/lib/db/schema/system';
import { PIPELINE_STAGES } from '@/lib/constants';
// ─── Default pipeline weights ────────────────────────────────────────────────
// Fallback stage → close-probability weights used by getRevenueForecast when
// no per-port 'pipeline_weights' system setting is stored. Keys must match
// interests.pipelineStage values; unknown stages fall back to weight 0.
const DEFAULT_PIPELINE_WEIGHTS: Record<string, number> = {
  open: 0.05,
  details_sent: 0.10,
  in_communication: 0.20,
  visited: 0.35,
  signed_eoi_nda: 0.50,
  deposit_10pct: 0.70,
  contract: 0.90,
  completed: 1.00,
};
// ─── KPIs ─────────────────────────────────────────────────────────────────────
/**
 * Dashboard KPIs for a port.
 *
 * @returns totalClients     — non-archived clients in the port
 *          activeInterests  — non-archived interests in the port
 *          pipelineValueUsd — sum of berth prices over non-archived interests
 *                             with a linked berth
 *          occupancyRate    — (sold + under_offer) berths / all berths, in %
 */
export async function getKpis(portId: string) {
  // Count of non-archived clients.
  const [totalClientsRow] = await db
    .select({ value: count() })
    .from(clients)
    .where(and(eq(clients.portId, portId), isNull(clients.archivedAt)));
  // Count of non-archived interests.
  const [activeInterestsRow] = await db
    .select({ value: count() })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)));
  // Pipeline value: SUM berths.price via JOIN from non-archived interests with berthId
  // NOTE(review): the inner join on berths already excludes interests with a
  // NULL berthId, so the explicit IS NOT NULL filter is redundant (harmless).
  const pipelineRows = await db
    .select({ price: berths.price })
    .from(interests)
    .innerJoin(berths, eq(interests.berthId, berths.id))
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
        sql`${interests.berthId} IS NOT NULL`,
      ),
    );
  // Summed client-side; parseFloat(String(...)) suggests price arrives as a
  // string (presumably a SQL numeric column) — confirm against the schema.
  const pipelineValueUsd = pipelineRows.reduce((acc, row) => {
    return acc + (row.price ? parseFloat(String(row.price)) : 0);
  }, 0);
  // Occupancy rate: (sold + under_offer) / total * 100
  const allBerthsRows = await db
    .select({ status: berths.status })
    .from(berths)
    .where(eq(berths.portId, portId));
  const totalBerths = allBerthsRows.length;
  const occupiedBerths = allBerthsRows.filter(
    (b) => b.status === 'sold' || b.status === 'under_offer',
  ).length;
  // Guard against division by zero when the port has no berths yet.
  const occupancyRate = totalBerths > 0 ? (occupiedBerths / totalBerths) * 100 : 0;
  return {
    totalClients: totalClientsRow?.value ?? 0,
    activeInterests: activeInterestsRow?.value ?? 0,
    pipelineValueUsd,
    occupancyRate,
  };
}
// ─── Pipeline Counts ──────────────────────────────────────────────────────────
/**
 * Counts non-archived interests per pipeline stage for a port. Every stage
 * in PIPELINE_STAGES appears in the result, defaulting to 0.
 */
export async function getPipelineCounts(portId: string) {
  const grouped = await db
    .select({
      stage: interests.pipelineStage,
      count: sql<number>`count(*)::int`,
    })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  // Map DB rows by stage, then project onto the canonical stage order.
  const byStage = new Map(grouped.map((row) => [row.stage, row.count]));
  return PIPELINE_STAGES.map((stage) => ({
    stage,
    count: byStage.get(stage) ?? 0,
  }));
}
// ─── Revenue Forecast ─────────────────────────────────────────────────────────
/**
 * Weighted revenue forecast for a port.
 *
 * Each non-archived interest with a linked berth contributes
 * berth.price * weight(pipelineStage). Weights come from the per-port
 * 'pipeline_weights' system setting when it holds a valid stage → number
 * map; otherwise DEFAULT_PIPELINE_WEIGHTS apply.
 *
 * Fix: the stored setting is now validated to be a non-array object whose
 * values are all finite numbers before it replaces the defaults — previously
 * any JSON value (including strings per stage) was accepted via a bare cast,
 * and the surrounding try/catch was dead code since the cast cannot throw.
 *
 * @returns totalWeightedValue, a per-stage breakdown in PIPELINE_STAGES
 *          order, and whether weights came from 'db' or 'default'.
 */
export async function getRevenueForecast(portId: string) {
  // Load weights from systemSettings, falling back to defaults when the
  // stored value is missing or malformed.
  let weights: Record<string, number> = DEFAULT_PIPELINE_WEIGHTS;
  let weightsSource: 'db' | 'default' = 'default';
  const settingRow = await db.query.systemSettings.findFirst({
    where: and(
      eq(systemSettings.key, 'pipeline_weights'),
      eq(systemSettings.portId, portId),
    ),
  });
  const candidate = settingRow?.value;
  if (
    candidate !== null &&
    candidate !== undefined &&
    typeof candidate === 'object' &&
    !Array.isArray(candidate) &&
    Object.values(candidate as Record<string, unknown>).every(
      (v) => typeof v === 'number' && Number.isFinite(v),
    )
  ) {
    weights = candidate as Record<string, number>;
    weightsSource = 'db';
  }
  // Fetch all non-archived interests with a linked berth and its price.
  // The inner join already excludes interests without a berth.
  const interestRows = await db
    .select({
      id: interests.id,
      pipelineStage: interests.pipelineStage,
      berthPrice: berths.price,
    })
    .from(interests)
    .innerJoin(berths, eq(interests.berthId, berths.id))
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
        sql`${interests.berthId} IS NOT NULL`,
      ),
    );
  // Aggregate weighted value per stage; unknown stages get weight 0.
  const stageMap: Record<string, { count: number; weightedValue: number }> = {};
  for (const row of interestRows) {
    const stage = row.pipelineStage ?? 'open';
    const price = row.berthPrice ? parseFloat(String(row.berthPrice)) : 0;
    const weight = weights[stage] ?? 0;
    const bucket = (stageMap[stage] ??= { count: 0, weightedValue: 0 });
    bucket.count += 1;
    bucket.weightedValue += price * weight;
  }
  const stageBreakdown = PIPELINE_STAGES.map((stage) => ({
    stage,
    count: stageMap[stage]?.count ?? 0,
    weightedValue: stageMap[stage]?.weightedValue ?? 0,
  }));
  const totalWeightedValue = stageBreakdown.reduce(
    (acc, s) => acc + s.weightedValue,
    0,
  );
  return {
    totalWeightedValue,
    stageBreakdown,
    weightsSource,
  };
}
// ─── Recent Activity ──────────────────────────────────────────────────────────
/**
 * Returns the most recent audit log entries for a port, newest first.
 *
 * @param limit Maximum number of rows to return (default 20).
 */
export async function getRecentActivity(portId: string, limit = 20) {
  return db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      userId: auditLogs.userId,
      metadata: auditLogs.metadata,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(eq(auditLogs.portId, portId))
    .orderBy(desc(auditLogs.createdAt))
    .limit(limit);
}

View File

@@ -0,0 +1,88 @@
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
// Documenso connection settings, resolved once from validated env at module load.
const BASE_URL = env.DOCUMENSO_API_URL;
const API_KEY = env.DOCUMENSO_API_KEY;
/**
 * Thin wrapper around fetch for the Documenso REST API: prefixes BASE_URL,
 * attaches the bearer token and JSON content type (caller-supplied headers
 * win on conflict), and throws after logging on any non-2xx status.
 */
async function documensoFetch(path: string, options?: RequestInit): Promise<unknown> {
  const headers = {
    Authorization: `Bearer ${API_KEY}`,
    'Content-Type': 'application/json',
    ...options?.headers,
  };
  const res = await fetch(`${BASE_URL}${path}`, { ...options, headers });
  if (res.ok) {
    return res.json();
  }
  const err = await res.text();
  logger.error({ path, status: res.status, err }, 'Documenso API error');
  throw new Error(`Documenso API error: ${res.status}`);
}
/** Recipient payload sent when creating a Documenso document. */
export interface DocumensoRecipient {
  name: string;
  email: string;
  role: string; // role label as understood by Documenso
  signingOrder: number; // sequential signing position
}
/** Shape of a document as returned by the Documenso API. */
export interface DocumensoDocument {
  id: string;
  status: string;
  recipients: Array<{
    id: string;
    name: string;
    email: string;
    role: string;
    signingOrder: number;
    status: string;
    signingUrl?: string; // optional — presumably set once the recipient may sign; confirm against Documenso docs
    embeddedUrl?: string; // optional — presumably for embedded signing; confirm against Documenso docs
  }>;
}
/**
 * Creates a draft document in Documenso from a base64-encoded PDF and the
 * ordered recipient list.
 */
export async function createDocument(
  title: string,
  pdfBase64: string,
  recipients: DocumensoRecipient[],
): Promise<DocumensoDocument> {
  const payload = { title, document: pdfBase64, recipients };
  const result = await documensoFetch('/api/v1/documents', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  return result as DocumensoDocument;
}
/** Moves a draft document into the signing flow. */
export async function sendDocument(docId: string): Promise<DocumensoDocument> {
  const result = await documensoFetch(`/api/v1/documents/${docId}/send`, { method: 'POST' });
  return result as DocumensoDocument;
}
/** Fetches the current state of a document, including recipient statuses. */
export async function getDocument(docId: string): Promise<DocumensoDocument> {
  const result = await documensoFetch(`/api/v1/documents/${docId}`);
  return result as DocumensoDocument;
}
/** Triggers a signing reminder for one recipient of a document. */
export async function sendReminder(docId: string, signerId: string): Promise<void> {
  const path = `/api/v1/documents/${docId}/recipients/${signerId}/remind`;
  await documensoFetch(path, { method: 'POST' });
}
/**
 * Downloads the signed PDF for a document as a Buffer. Bypasses
 * documensoFetch because the response body is binary, not JSON.
 */
export async function downloadSignedPdf(docId: string): Promise<Buffer> {
  const res = await fetch(`${BASE_URL}/api/v1/documents/${docId}/download`, {
    headers: { Authorization: `Bearer ${API_KEY}` },
  });
  if (!res.ok) {
    const err = await res.text();
    logger.error({ docId, status: res.status, err }, 'Documenso download error');
    throw new Error(`Documenso download error: ${res.status}`);
  }
  return Buffer.from(await res.arrayBuffer());
}

View File

@@ -0,0 +1,15 @@
import { createHmac } from 'crypto';
import { timingSafeEqual } from 'crypto';
/**
 * Verifies a Documenso webhook signature: a hex-encoded HMAC-SHA256 of the
 * raw payload. Comparison is constant-time; a length mismatch (which makes
 * timingSafeEqual throw) is treated as an invalid signature.
 */
export function verifyDocumensoSignature(
  payload: string,
  signature: string,
  secret: string,
): boolean {
  const expected = createHmac('sha256', secret).update(payload).digest('hex');
  const expectedBuf = Buffer.from(expected);
  const providedBuf = Buffer.from(signature);
  try {
    return timingSafeEqual(expectedBuf, providedBuf);
  } catch {
    return false;
  }
}

View File

@@ -0,0 +1,123 @@
import { and, eq, inArray } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documents, documentSigners, documentEvents } from '@/lib/db/schema/documents';
import { interests } from '@/lib/db/schema/interests';
import { ports } from '@/lib/db/schema/ports';
import { sendReminder as documensoRemind } from '@/lib/services/documenso-client';
import { logger } from '@/lib/logger';
// BR-023: Reminders only during 9-16 in port timezone, with 24h cooldown
/**
 * Returns the current wall-clock hour (0-23) in the given IANA timezone.
 *
 * Fix: uses hourCycle 'h23' instead of hour12: false. With hour12: false,
 * some ICU versions resolve to the 'h24' cycle where midnight formats as
 * "24", so parseInt would return 24 instead of 0.
 */
function getCurrentHourInTimezone(timezone: string): number {
  const formatter = new Intl.DateTimeFormat('en-US', {
    timeZone: timezone,
    hour: 'numeric',
    hourCycle: 'h23',
  });
  return parseInt(formatter.format(new Date()), 10);
}
/**
 * BR-023: attempts to send one signing reminder for a document, returning
 * true only when a reminder was actually sent.
 *
 * Gates, in order: document exists in this port with an interest + Documenso
 * id and is 'sent'/'partially_signed'; the interest has reminders enabled;
 * local time in the port's timezone is within 09:00-15:59; no reminder was
 * sent for this document in the last 24h. The reminder targets the pending
 * signer with the lowest signingOrder.
 */
export async function sendReminderIfAllowed(
  documentId: string,
  portId: string,
): Promise<boolean> {
  const doc = await db.query.documents.findFirst({
    where: and(eq(documents.id, documentId), eq(documents.portId, portId)),
  });
  if (!doc || !doc.interestId || !doc.documensoId) return false;
  if (!['sent', 'partially_signed'].includes(doc.status)) return false;
  // Check interest.reminderEnabled
  const interest = await db.query.interests.findFirst({
    where: eq(interests.id, doc.interestId),
  });
  if (!interest?.reminderEnabled) return false;
  // Check port timezone
  const port = await db.query.ports.findFirst({
    where: eq(ports.id, portId),
  });
  const timezone = port?.timezone ?? 'UTC';
  const currentHour = getCurrentHourInTimezone(timezone);
  // Only remind during 9:00-15:59 local time.
  if (currentHour < 9 || currentHour >= 16) return false;
  // Check 24h cooldown — last reminder_sent event for this document
  const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
  const lastReminder = await db.query.documentEvents.findFirst({
    where: and(
      eq(documentEvents.documentId, documentId),
      eq(documentEvents.eventType, 'reminder_sent'),
    ),
    orderBy: (de, { desc }) => [desc(de.createdAt)],
  });
  if (lastReminder && lastReminder.createdAt > twentyFourHoursAgo) {
    return false;
  }
  // Find current pending signer (lowest signingOrder with status='pending')
  const pendingSigner = await db.query.documentSigners.findFirst({
    where: and(
      eq(documentSigners.documentId, documentId),
      eq(documentSigners.status, 'pending'),
    ),
    orderBy: (ds, { asc }) => [asc(ds.signingOrder)],
  });
  if (!pendingSigner) return false;
  // Send reminder via Documenso
  // NOTE(review): pendingSigner.id is the local documentSigners row id —
  // confirm the Documenso remind endpoint expects this id rather than a
  // Documenso-side recipient id.
  try {
    await documensoRemind(doc.documensoId, pendingSigner.id);
  } catch (err) {
    logger.error({ err, documentId, signerId: pendingSigner.id }, 'Failed to send Documenso reminder');
    return false;
  }
  // Record event (also resets the 24h cooldown window).
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'reminder_sent',
    signerId: pendingSigner.id,
    eventData: { signerEmail: pendingSigner.signerEmail, signerRole: pendingSigner.signerRole },
  });
  return true;
}
/**
 * Scans a port for documents awaiting signature on reminder-enabled
 * interests and attempts a reminder for each (all BR-023 gating happens in
 * sendReminderIfAllowed). A failure for one document is logged and never
 * aborts the rest of the queue.
 */
export async function processReminderQueue(portId: string): Promise<void> {
  // Only interests that opted in to reminders are considered.
  const optedIn = await db.query.interests.findMany({
    where: and(
      eq(interests.portId, portId),
      eq(interests.reminderEnabled, true),
    ),
  });
  if (optedIn.length === 0) return;
  const interestIds = optedIn.map((interest) => interest.id);
  const candidates = await db.query.documents.findMany({
    where: and(
      eq(documents.portId, portId),
      inArray(documents.status, ['sent', 'partially_signed']),
      inArray(documents.interestId, interestIds),
    ),
  });
  for (const candidate of candidates) {
    try {
      await sendReminderIfAllowed(candidate.id, portId);
    } catch (err) {
      logger.error({ err, documentId: candidate.id, portId }, 'Reminder processing failed for document');
    }
  }
}

View File

@@ -0,0 +1,421 @@
/**
* Admin Document Template Service — TipTap JSON-based templates
*
* This service manages templates whose content is stored as TipTap JSON
* (serialised to the `bodyHtml` text column). Version history is maintained
* via audit_log entries (action='update', entityType='document_template',
* metadata.version + metadata.content).
*
* Template type values: eoi | contract | nda | reservation_agreement | letter | other
* These are stored in the `templateType` column.
*/
import { and, eq, desc } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documentTemplates } from '@/lib/db/schema/documents';
import { auditLogs } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ValidationError } from '@/lib/errors';
import { validateTipTapDocument } from '@/lib/pdf/tiptap-to-pdfme';
import type {
CreateAdminTemplateInput,
UpdateAdminTemplateInput,
ListAdminTemplatesInput,
} from '@/lib/validators/document-templates';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request-scoped context recorded alongside audit log entries. */
export interface AuditMeta {
  userId: string; // acting user id
  portId: string; // tenant/port scope of the request
  ipAddress: string; // client IP of the originating request
  userAgent: string; // client user-agent of the originating request
}
/**
 * A version entry reconstructed from audit_log records.
 */
export interface TemplateVersion {
  version: number; // snapshot version number (metadata.versionSnapshot)
  content: Record<string, unknown>; // TipTap JSON at that version (metadata.content)
  changedBy: string | null; // audit log userId, when recorded
  changedAt: Date; // audit log createdAt
  auditLogId: string; // id of the source audit_log row
}
/**
* Helper: extract the numeric version stored in a templateType-encoded field.
* We use a convention: version is stored in the `mergeFields` jsonb array
* as `["__version__:N"]` to avoid adding a new column.
*/
function getVersionFromRecord(
  record: typeof documentTemplates.$inferSelect,
): number {
  // Version is piggybacked inside the mergeFields array as "__version__:N";
  // anything missing or unparsable defaults to version 1.
  const fields = record.mergeFields as unknown;
  if (!Array.isArray(fields)) return 1;
  const tagged = (fields as string[]).find((entry) =>
    entry.startsWith('__version__:'),
  );
  if (tagged === undefined) return 1;
  const parsed = parseInt(tagged.split(':')[1] ?? '1', 10);
  return Number.isNaN(parsed) ? 1 : parsed;
}
// Inverse of getVersionFromRecord: encode a version into the mergeFields array.
function buildMergeFieldsWithVersion(version: number): string[] {
  return ['__version__:' + String(version)];
}
/**
* Parse TipTap JSON from bodyHtml field. Returns the parsed object, or null
* if bodyHtml is plain HTML (legacy records).
*/
function parseTipTapContent(
  bodyHtml: string,
): Record<string, unknown> | null {
  // Legacy rows hold raw HTML (not JSON) — those parse attempts return null.
  let parsed: unknown;
  try {
    parsed = JSON.parse(bodyHtml);
  } catch {
    return null;
  }
  // A TipTap document is an object carrying a 'type' key.
  if (parsed === null || typeof parsed !== 'object') return null;
  if (!('type' in (parsed as Record<string, unknown>))) return null;
  return parsed as Record<string, unknown>;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists document templates for a port, optionally filtered by template type
 * and/or active flag, ordered by name. Each row is augmented with its
 * derived version number and parsed TipTap content (null for legacy HTML).
 */
export async function listAdminTemplates(
  portId: string,
  query: ListAdminTemplatesInput,
) {
  const conditions = [eq(documentTemplates.portId, portId)];
  if (query.type) {
    conditions.push(eq(documentTemplates.templateType, query.type));
  }
  if (query.isActive !== undefined) {
    conditions.push(eq(documentTemplates.isActive, query.isActive));
  }
  const rows = await db
    .select()
    .from(documentTemplates)
    .where(and(...conditions))
    .orderBy(documentTemplates.name);
  return rows.map((row) => ({
    ...row,
    version: getVersionFromRecord(row),
    content: parseTipTapContent(row.bodyHtml),
  }));
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Fetches one template scoped to a port, augmented with its version number
 * and parsed TipTap content.
 *
 * @throws NotFoundError when no matching template exists.
 */
export async function getAdminTemplate(
  portId: string,
  templateId: string,
) {
  const record = await db.query.documentTemplates.findFirst({
    where: and(
      eq(documentTemplates.id, templateId),
      eq(documentTemplates.portId, portId),
    ),
  });
  if (!record) {
    throw new NotFoundError('Document template');
  }
  return {
    ...record,
    version: getVersionFromRecord(record),
    content: parseTipTapContent(record.bodyHtml),
  };
}
// ─── Validate TipTap Content ─────────────────────────────────────────────────
/**
 * Throws ValidationError when the TipTap document contains node types the
 * PDF converter cannot render.
 */
function assertValidContent(
  content: Record<string, unknown>,
): void {
  const unsupported = validateTipTapDocument(
    content as unknown as Parameters<typeof validateTipTapDocument>[0],
  );
  if (unsupported.length === 0) return;
  const message =
    `Template content contains unsupported node types: ${unsupported.join(', ')}. ` +
    'Supported: paragraph, heading (h1-h3), bulletList, orderedList, listItem, ' +
    'table, tableRow, tableCell, tableHeader, image, hardBreak, text.';
  throw new ValidationError(message);
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a document template whose content is TipTap JSON (serialised into
 * the bodyHtml column) at version 1.
 *
 * Fix: the insert result is checked explicitly (matching the other create
 * paths in this codebase) instead of relying on `template!` non-null
 * assertions, which would surface a failed insert as a confusing property
 * access error.
 *
 * @throws ValidationError when content has unsupported TipTap node types.
 */
export async function createAdminTemplate(
  portId: string,
  userId: string,
  data: CreateAdminTemplateInput,
  meta: AuditMeta,
) {
  assertValidContent(data.content);
  const rows = await db
    .insert(documentTemplates)
    .values({
      portId,
      name: data.name,
      templateType: data.type,
      bodyHtml: JSON.stringify(data.content),
      mergeFields: buildMergeFieldsWithVersion(1),
      isActive: true,
      createdBy: userId,
    })
    .returning();
  const template = rows[0];
  if (!template) throw new Error('Insert failed — no row returned');
  // Best-effort audit; not awaited.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document_template',
    entityId: template.id,
    newValue: { name: template.name, type: data.type, version: 1 },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return {
    ...template,
    version: 1,
    content: data.content,
  };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a template's name, content, and/or active flag.
 *
 * When content changes, the version number is bumped and the PREVIOUS
 * content is snapshotted into the audit log (metadata.versionSnapshot +
 * metadata.content) — that audit trail is the template's version history.
 * Name/isActive-only updates keep the current version.
 *
 * NOTE(review): updates that do not touch content produce no audit entry at
 * all — confirm that is intended.
 *
 * @throws NotFoundError (via getAdminTemplate) when the template is missing.
 * @throws ValidationError when new content has unsupported node types.
 */
export async function updateAdminTemplate(
  portId: string,
  templateId: string,
  userId: string,
  data: UpdateAdminTemplateInput,
  meta: AuditMeta,
) {
  const existing = await getAdminTemplate(portId, templateId);
  if (data.content !== undefined) {
    assertValidContent(data.content);
  }
  const currentVersion = existing.version;
  const newVersion = data.content !== undefined ? currentVersion + 1 : currentVersion;
  // Before updating content, save old content to audit log for versioning
  if (data.content !== undefined) {
    void createAuditLog({
      userId: meta.userId,
      portId,
      action: 'update',
      entityType: 'document_template',
      entityId: templateId,
      oldValue: { version: currentVersion, name: existing.name },
      newValue: { version: newVersion, name: data.name ?? existing.name },
      metadata: {
        versionSnapshot: currentVersion,
        content: existing.content ?? {},
      },
      ipAddress: meta.ipAddress,
      userAgent: meta.userAgent,
    });
  }
  // Assemble only the columns being changed; updatedAt always moves, so the
  // update set is never empty.
  const updateValues: Partial<typeof documentTemplates.$inferInsert> = {
    updatedAt: new Date(),
  };
  if (data.name !== undefined) {
    updateValues.name = data.name;
  }
  if (data.content !== undefined) {
    // Content is TipTap JSON serialised into the bodyHtml text column; the
    // version number rides along inside mergeFields.
    updateValues.bodyHtml = JSON.stringify(data.content);
    updateValues.mergeFields = buildMergeFieldsWithVersion(newVersion);
  }
  if (data.isActive !== undefined) {
    updateValues.isActive = data.isActive;
  }
  const [updated] = await db
    .update(documentTemplates)
    .set(updateValues)
    .where(
      and(
        eq(documentTemplates.id, templateId),
        eq(documentTemplates.portId, portId),
      ),
    )
    .returning();
  // NOTE(review): `updated!` assumes the row still exists (fetched above); a
  // concurrent delete between the two queries would surface as a property
  // access error rather than a clean failure.
  return {
    ...updated!,
    version: newVersion,
    content:
      data.content !== undefined
        ? data.content
        : existing.content,
  };
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Deletes a template after verifying it belongs to the port, recording its
 * identifying details in the audit log.
 *
 * @throws NotFoundError when the template does not exist for this port.
 */
export async function deleteAdminTemplate(
  portId: string,
  templateId: string,
  userId: string,
  meta: AuditMeta,
) {
  const template = await getAdminTemplate(portId, templateId);
  await db
    .delete(documentTemplates)
    .where(
      and(
        eq(documentTemplates.id, templateId),
        eq(documentTemplates.portId, portId),
      ),
    );
  // Best-effort audit; not awaited.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'document_template',
    entityId: templateId,
    oldValue: { name: template.name, type: template.templateType, version: template.version },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Version History ──────────────────────────────────────────────────────────
/**
* Retrieves version history for a template by querying audit_logs.
* Each 'update' audit log entry with entityType='document_template' and
* metadata.versionSnapshot contains a saved version.
*/
export async function getAdminTemplateVersions(
portId: string,
templateId: string,
): Promise<TemplateVersion[]> {
// Verify template exists and belongs to port
await getAdminTemplate(portId, templateId);
const logs = await db
.select()
.from(auditLogs)
.where(
and(
eq(auditLogs.entityType, 'document_template'),
eq(auditLogs.entityId, templateId),
eq(auditLogs.action, 'update'),
eq(auditLogs.portId, portId),
),
)
.orderBy(desc(auditLogs.createdAt));
return logs
.filter((log) => {
const meta = log.metadata as Record<string, unknown> | null;
return (
meta !== null &&
typeof meta === 'object' &&
'versionSnapshot' in meta &&
'content' in meta
);
})
.map((log) => {
const meta = log.metadata as Record<string, unknown>;
return {
version: meta.versionSnapshot as number,
content: meta.content as Record<string, unknown>,
changedBy: log.userId,
changedAt: log.createdAt,
auditLogId: log.id,
};
});
}
// ─── Rollback ─────────────────────────────────────────────────────────────────
/**
* Restores a template to a previous version found in audit_logs.
* Creates a new version number (current + 1) with the restored content.
*/
export async function rollbackAdminTemplate(
  portId: string,
  templateId: string,
  version: number,
  userId: string,
  meta: AuditMeta,
) {
  const existing = await getAdminTemplate(portId, templateId);
  // Version history lives in audit_logs; locate the requested snapshot.
  const versions = await getAdminTemplateVersions(portId, templateId);
  const targetVersion = versions.find((v) => v.version === version);
  if (!targetVersion) {
    throw new NotFoundError(`Template version ${version}`);
  }
  // A rollback is itself a new version — history is append-only, never rewound.
  const newVersion = existing.version + 1;
  // Save current state to audit log before rollback
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document_template',
    entityId: templateId,
    oldValue: { version: existing.version, name: existing.name },
    newValue: { version: newVersion, name: existing.name, rolledBackTo: version },
    metadata: {
      versionSnapshot: existing.version,
      content: existing.content ?? {},
      rolledBackTo: version,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  // Restore the snapshot's content and stamp the new version number.
  const [updated] = await db
    .update(documentTemplates)
    .set({
      bodyHtml: JSON.stringify(targetVersion.content),
      mergeFields: buildMergeFieldsWithVersion(newVersion),
      updatedAt: new Date(),
    })
    .where(
      and(
        eq(documentTemplates.id, templateId),
        eq(documentTemplates.portId, portId),
      ),
    )
    .returning();
  // NOTE(review): `updated!` assumes the row survived between fetch and
  // update; a concurrent delete would surface as a property access error.
  return {
    ...updated!,
    version: newVersion,
    content: targetVersion.content,
    rolledBackFrom: existing.version,
    rolledBackTo: version,
  };
}

View File

@@ -0,0 +1,617 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documentTemplates, documents, files } from '@/lib/db/schema/documents';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError, ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, buildStoragePath } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { createDocument as documensoCreate, sendDocument as documensoSend } from '@/lib/services/documenso-client';
import { sendEmail } from '@/lib/email';
import type {
CreateTemplateInput,
UpdateTemplateInput,
ListTemplatesInput,
GenerateInput,
GenerateAndSendInput,
GenerateAndSignInput,
} from '@/lib/validators/document-templates';
// ─── Types ────────────────────────────────────────────────────────────────────
interface AuditMeta {
userId: string;
portId: string;
ipAddress: string;
userAgent: string;
}
// ─── Merge Field Definitions ──────────────────────────────────────────────────
// Catalog of every merge token the template editor may embed, grouped by the
// source entity that supplies its value. Tokens with `required: true` must
// resolve to a non-blank value at generation time (enforced in
// resolveTemplate, BR-140).
const MERGE_FIELDS: Record<string, Array<{ token: string; label: string; required: boolean }>> = {
  client: [
    { token: '{{client.fullName}}', label: 'Client Full Name', required: true },
    { token: '{{client.companyName}}', label: 'Company Name', required: false },
    { token: '{{client.email}}', label: 'Primary Email', required: false },
    { token: '{{client.phone}}', label: 'Primary Phone', required: false },
    { token: '{{client.nationality}}', label: 'Nationality', required: false },
    { token: '{{client.yachtName}}', label: 'Yacht Name', required: false },
    { token: '{{client.yachtLengthFt}}', label: 'Yacht Length (ft)', required: false },
    { token: '{{client.yachtLengthM}}', label: 'Yacht Length (m)', required: false },
    { token: '{{client.yachtWidthFt}}', label: 'Yacht Beam (ft)', required: false },
    { token: '{{client.yachtDraftFt}}', label: 'Yacht Draft (ft)', required: false },
    { token: '{{client.source}}', label: 'Lead Source', required: false },
  ],
  interest: [
    { token: '{{interest.stage}}', label: 'Pipeline Stage', required: false },
    { token: '{{interest.leadCategory}}', label: 'Lead Category', required: false },
    { token: '{{interest.berthNumber}}', label: 'Berth Number', required: false },
    { token: '{{interest.eoiStatus}}', label: 'EOI Status', required: false },
    { token: '{{interest.dateFirstContact}}', label: 'Date First Contact', required: false },
    { token: '{{interest.dateEoiSigned}}', label: 'Date EOI Signed', required: false },
    { token: '{{interest.dateContractSigned}}', label: 'Date Contract Signed', required: false },
    { token: '{{interest.notes}}', label: 'Interest Notes', required: false },
  ],
  berth: [
    { token: '{{berth.mooringNumber}}', label: 'Mooring Number', required: true },
    { token: '{{berth.area}}', label: 'Area', required: false },
    { token: '{{berth.status}}', label: 'Berth Status', required: false },
    { token: '{{berth.price}}', label: 'Price', required: false },
    { token: '{{berth.priceCurrency}}', label: 'Price Currency', required: false },
    { token: '{{berth.lengthFt}}', label: 'Length (ft)', required: false },
    { token: '{{berth.widthFt}}', label: 'Beam (ft)', required: false },
    { token: '{{berth.tenureType}}', label: 'Tenure Type', required: false },
    { token: '{{berth.tenureYears}}', label: 'Tenure Years', required: false },
  ],
  port: [
    { token: '{{port.name}}', label: 'Port Name', required: false },
    { token: '{{port.defaultCurrency}}', label: 'Default Currency', required: false },
  ],
  date: [
    { token: '{{date.today}}', label: "Today's Date", required: false },
    { token: '{{date.year}}', label: 'Current Year', required: false },
  ],
};
/**
 * Returns the catalog of supported merge-field tokens, grouped by entity.
 * NOTE(review): returns the module-level constant by reference — callers
 * must treat it as read-only.
 */
export function getMergeFields(): typeof MERGE_FIELDS {
  return MERGE_FIELDS;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated, port-scoped listing of document templates with optional
 * name search, type/active filters and sorting.
 */
export async function listTemplates(portId: string, query: ListTemplatesInput) {
  const { page, limit, sort, order, search, templateType, isActive } = query;

  const conditions = [];
  if (templateType) conditions.push(eq(documentTemplates.templateType, templateType));
  if (isActive !== undefined) conditions.push(eq(documentTemplates.isActive, isActive));

  // Map the requested sort key onto a column; anything unrecognised
  // falls back to updatedAt.
  let sortColumn;
  switch (sort) {
    case 'name':
      sortColumn = documentTemplates.name;
      break;
    case 'templateType':
      sortColumn = documentTemplates.templateType;
      break;
    case 'createdAt':
      sortColumn = documentTemplates.createdAt;
      break;
    default:
      sortColumn = documentTemplates.updatedAt;
  }

  return buildListQuery({
    table: documentTemplates,
    portIdColumn: documentTemplates.portId,
    portId,
    idColumn: documentTemplates.id,
    updatedAtColumn: documentTemplates.updatedAt,
    searchColumns: [documentTemplates.name],
    searchTerm: search,
    filters: conditions,
    sort: { column: sortColumn, direction: order },
    page,
    pageSize: limit,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Loads a single document template, enforcing port (tenant) scoping.
 * @throws NotFoundError when the id does not exist or belongs to another port.
 */
export async function getTemplateById(id: string, portId: string) {
  const found = await db.query.documentTemplates.findFirst({
    where: eq(documentTemplates.id, id),
  });
  // Cross-port access is reported identically to a missing record.
  if (!found || found.portId !== portId) {
    throw new NotFoundError('Document template');
  }
  return found;
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a document template for a port, writes an audit entry
 * (fire-and-forget) and notifies the port's socket room.
 */
export async function createTemplate(portId: string, data: CreateTemplateInput, meta: AuditMeta) {
  const [created] = await db
    .insert(documentTemplates)
    .values({
      portId,
      name: data.name,
      description: data.description ?? null,
      templateType: data.templateType,
      bodyHtml: data.bodyHtml,
      mergeFields: data.mergeFields ?? [],
      isActive: data.isActive ?? true,
      createdBy: meta.userId,
    })
    .returning();

  // Audit logging is deliberately not awaited — it must not block the request.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'documentTemplate',
    entityId: created!.id,
    newValue: { name: created!.name, templateType: created!.templateType },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'documentTemplate:created', { templateId: created!.id });
  return created!;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Applies a partial update to a template (port-scoped), audit-logging the
 * field-level diff and broadcasting the change to the port room.
 * @throws NotFoundError when the template is missing or cross-port.
 */
export async function updateTemplate(
  id: string,
  portId: string,
  data: UpdateTemplateInput,
  meta: AuditMeta,
) {
  // Also serves as the existence / tenancy check.
  const current = await getTemplateById(id, portId);

  const { diff } = diffEntity(
    current as Record<string, unknown>,
    data as Record<string, unknown>,
  );

  const [result] = await db
    .update(documentTemplates)
    .set({ ...data, updatedAt: new Date() })
    .where(and(eq(documentTemplates.id, id), eq(documentTemplates.portId, portId)))
    .returning();

  // Fire-and-forget audit entry: changed fields as oldValue, patch as newValue.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'documentTemplate',
    entityId: id,
    oldValue: diff as Record<string, unknown>,
    newValue: data as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'documentTemplate:updated', { templateId: id });
  return result!;
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Deletes a template after verifying port ownership; audit-logs the removal
 * and notifies the port room.
 * @throws NotFoundError when the template is missing or cross-port.
 */
export async function deleteTemplate(id: string, portId: string, meta: AuditMeta) {
  // Fetched both as the tenancy check and to capture name/type for the audit entry.
  const doomed = await getTemplateById(id, portId);

  await db
    .delete(documentTemplates)
    .where(and(eq(documentTemplates.id, id), eq(documentTemplates.portId, portId)));

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'documentTemplate',
    entityId: id,
    oldValue: { name: doomed.name, templateType: doomed.templateType },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'documentTemplate:deleted', { templateId: id });
}
// ─── Resolve Template ─────────────────────────────────────────────────────────
/**
 * Interpolates all {{entity.field}} tokens in the template body HTML.
 *
 * Token values are assembled from date/port context plus the optional
 * client/interest/berth ids in `context`. Entities belonging to a different
 * port than `context.portId` are silently ignored.
 *
 * BR-140: any *required* merge field that the template actually uses and
 * that resolves to no value (entity absent from context, or blank field)
 * throws ValidationError.
 *
 * @throws NotFoundError   when the template is missing or cross-port
 * @throws ValidationError when required merge fields are unresolved
 */
export async function resolveTemplate(
  templateId: string,
  context: {
    clientId?: string;
    interestId?: string;
    berthId?: string;
    portId: string;
  },
): Promise<string> {
  const template = await getTemplateById(templateId, context.portId);
  // Build token→value map from context
  const tokenMap: Record<string, string> = {};
  // Date tokens
  const now = new Date();
  tokenMap['{{date.today}}'] = now.toLocaleDateString('en-GB');
  tokenMap['{{date.year}}'] = String(now.getFullYear());
  // Port tokens
  const port = await db.query.ports.findFirst({ where: eq(ports.id, context.portId) });
  if (port) {
    tokenMap['{{port.name}}'] = port.name;
    tokenMap['{{port.defaultCurrency}}'] = port.defaultCurrency;
  }
  // Client tokens
  if (context.clientId) {
    const client = await db.query.clients.findFirst({
      where: eq(clients.id, context.clientId),
    });
    if (client && client.portId === context.portId) {
      // Primary contacts sort first; phone falls back to whatsapp.
      const contactList = await db.query.clientContacts.findMany({
        where: eq(clientContacts.clientId, context.clientId),
        orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
      });
      const emailContact = contactList.find((c) => c.channel === 'email');
      const phoneContact = contactList.find((c) => c.channel === 'phone' || c.channel === 'whatsapp');
      tokenMap['{{client.fullName}}'] = client.fullName ?? '';
      tokenMap['{{client.companyName}}'] = client.companyName ?? '';
      tokenMap['{{client.email}}'] = emailContact?.value ?? '';
      tokenMap['{{client.phone}}'] = phoneContact?.value ?? '';
      tokenMap['{{client.nationality}}'] = client.nationality ?? '';
      tokenMap['{{client.yachtName}}'] = client.yachtName ?? '';
      tokenMap['{{client.yachtLengthFt}}'] = client.yachtLengthFt ? String(client.yachtLengthFt) : '';
      tokenMap['{{client.yachtLengthM}}'] = client.yachtLengthM ? String(client.yachtLengthM) : '';
      tokenMap['{{client.yachtWidthFt}}'] = client.yachtWidthFt ? String(client.yachtWidthFt) : '';
      tokenMap['{{client.yachtDraftFt}}'] = client.yachtDraftFt ? String(client.yachtDraftFt) : '';
      tokenMap['{{client.source}}'] = client.source ?? '';
    }
  }
  // Interest tokens
  if (context.interestId) {
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, context.interestId),
    });
    if (interest && interest.portId === context.portId) {
      tokenMap['{{interest.stage}}'] = interest.pipelineStage ?? '';
      tokenMap['{{interest.leadCategory}}'] = interest.leadCategory ?? '';
      tokenMap['{{interest.eoiStatus}}'] = interest.eoiStatus ?? '';
      tokenMap['{{interest.dateFirstContact}}'] = interest.dateFirstContact
        ? new Date(interest.dateFirstContact).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.dateEoiSigned}}'] = interest.dateEoiSigned
        ? new Date(interest.dateEoiSigned).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.dateContractSigned}}'] = interest.dateContractSigned
        ? new Date(interest.dateContractSigned).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.notes}}'] = interest.notes ?? '';
      // Berth number from interest if berthId not separately provided.
      if (interest.berthId && !context.berthId) {
        const interestBerth = await db.query.berths.findFirst({
          where: eq(berths.id, interest.berthId),
        });
        tokenMap['{{interest.berthNumber}}'] = interestBerth?.mooringNumber ?? '';
        tokenMap['{{berth.mooringNumber}}'] = interestBerth?.mooringNumber ?? '';
      } else {
        // When context.berthId is set, the berth section below overwrites
        // this with the berth's real mooring number.
        tokenMap['{{interest.berthNumber}}'] = context.berthId
          ? tokenMap['{{berth.mooringNumber}}'] ?? ''
          : '';
      }
    }
  }
  // Berth tokens
  if (context.berthId) {
    const berth = await db.query.berths.findFirst({
      where: eq(berths.id, context.berthId),
    });
    if (berth && berth.portId === context.portId) {
      tokenMap['{{berth.mooringNumber}}'] = berth.mooringNumber;
      tokenMap['{{berth.area}}'] = berth.area ?? '';
      tokenMap['{{berth.status}}'] = berth.status;
      tokenMap['{{berth.price}}'] = berth.price ? String(berth.price) : '';
      tokenMap['{{berth.priceCurrency}}'] = berth.priceCurrency;
      tokenMap['{{berth.lengthFt}}'] = berth.lengthFt ? String(berth.lengthFt) : '';
      tokenMap['{{berth.widthFt}}'] = berth.widthFt ? String(berth.widthFt) : '';
      tokenMap['{{berth.tenureType}}'] = berth.tenureType;
      tokenMap['{{berth.tenureYears}}'] = berth.tenureYears ? String(berth.tenureYears) : '';
      tokenMap['{{interest.berthNumber}}'] = berth.mooringNumber;
    }
  }
  // BR-140: validate required merge fields — but only those this template
  // actually references. A token that is undefined (entity absent from the
  // context) or blank counts as missing. (Previously undefined values were
  // skipped, so a template could ship with unresolved required tokens, while
  // required-but-unused fields could spuriously block generation.)
  const missing: string[] = [];
  for (const fields of Object.values(MERGE_FIELDS)) {
    for (const field of fields) {
      if (!field.required) continue;
      if (!template.bodyHtml.includes(field.token)) continue; // template doesn't use it
      const value = tokenMap[field.token];
      if (value === undefined || value.trim() === '') {
        missing.push(field.label);
      }
    }
  }
  if (missing.length > 0) {
    throw new ValidationError(
      `Missing required merge field values: ${missing.join(', ')}`,
    );
  }
  // Interpolate all tokens. split/join is used instead of String.replace with
  // a RegExp: the old escape only handled braces (leaving '.' to act as a
  // regex wildcard) and '$'-sequences in user-supplied values would have been
  // expanded as special replacement patterns.
  let resolved = template.bodyHtml;
  for (const [token, value] of Object.entries(tokenMap)) {
    resolved = resolved.split(token).join(value);
  }
  return resolved;
}
// ─── Generate From Template ───────────────────────────────────────────────────
/**
 * BR-142: Resolve template → HTML → PDF. Store in MinIO + create file/document records.
 *
 * The resolved HTML is flattened to plain text (tags stripped) and rendered
 * through a simple three-field pdfme layout: header, body, and a generation
 * timestamp footer. The resulting PDF is stored in MinIO and mirrored as a
 * `files` row plus a draft `documents` row.
 *
 * @returns the newly created document and file records
 * @throws NotFoundError / ValidationError propagated from template resolution
 */
export async function generateFromTemplate(
  templateId: string,
  portId: string,
  context: GenerateInput,
  meta: AuditMeta,
): Promise<{ document: any; file: any }> {
  const template = await getTemplateById(templateId, portId);
  const resolvedHtml = await resolveTemplate(templateId, { ...context, portId });
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Wrap HTML in a minimal full-page document for pdfme text block
  const wrappedContent = resolvedHtml
    .replace(/<[^>]+>/g, ' ') // strip HTML tags for plain-text PDF rendering
    .replace(/\s+/g, ' ')
    .trim();
  // Use a simple single-field pdfme template for the HTML body.
  // NOTE(review): basePdf is the literal string 'BLANK_PDF' cast to any —
  // pdfme normally expects the BLANK_PDF constant import; confirm generatePdf
  // recognises this sentinel.
  const pdfTemplate = {
    basePdf: 'BLANK_PDF' as any,
    schemas: [
      [
        {
          name: 'portName',
          type: 'text' as const,
          position: { x: 20, y: 15 },
          width: 170,
          height: 10,
          fontSize: 14,
        },
        {
          name: 'body',
          type: 'text' as const,
          position: { x: 20, y: 30 },
          width: 170,
          height: 230,
          fontSize: 9,
        },
        {
          name: 'generatedAt',
          type: 'text' as const,
          position: { x: 20, y: 275 },
          width: 170,
          height: 6,
          fontSize: 7,
        },
      ],
    ],
  };
  const pdfBytes = await generatePdf(pdfTemplate, [
    {
      // Fix: port and template names were concatenated with no separator
      // (rendering e.g. "Port NimaraEOI Agreement"); join with an em dash.
      portName: `${port?.name ?? 'Port Nimara'} — ${template.name}`,
      body: wrappedContent,
      generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
    },
  ]);
  // Store in MinIO under the port's document-templates prefix.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(
    port?.slug ?? portId,
    'document-templates',
    templateId,
    fileId,
    'pdf',
  );
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    Buffer.from(pdfBytes),
    pdfBytes.byteLength,
    { 'Content-Type': 'application/pdf' },
  );
  // Create file record
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: context.clientId ?? null,
      filename: `${template.name.toLowerCase().replace(/\s+/g, '-')}.pdf`,
      originalName: `${template.name}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBytes.byteLength),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'correspondence',
      uploadedBy: meta.userId,
    })
    .returning();
  // Create document record (draft; signing/sending is a separate step).
  const [documentRecord] = await db
    .insert(documents)
    .values({
      portId,
      clientId: context.clientId ?? null,
      interestId: context.interestId ?? null,
      documentType: template.templateType,
      title: template.name,
      status: 'draft',
      fileId: fileRecord!.id,
      isManualUpload: false,
      createdBy: meta.userId,
    })
    .returning();
  // Fire-and-forget audit entry recording the generation context.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: documentRecord!.id,
    newValue: {
      templateId,
      templateName: template.name,
      clientId: context.clientId,
      interestId: context.interestId,
      berthId: context.berthId,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:created', { documentId: documentRecord!.id });
  return { document: documentRecord!, file: fileRecord! };
}
// ─── Generate and Send ────────────────────────────────────────────────────────
/**
 * Generates a PDF document from a template, then emails the resolved
 * template content to a recipient. Email failure is logged but non-fatal:
 * the generated document/file records are returned either way.
 */
export async function generateAndSend(
  templateId: string,
  portId: string,
  context: GenerateInput,
  recipientEmail: string,
  meta: AuditMeta,
) {
  const { document, file } = await generateFromTemplate(templateId, portId, context, meta);
  const template = await getTemplateById(templateId, portId);
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Email the resolved template HTML inline. NOTE(review): despite the email
  // body's wording, no PDF attachment is sent — the generated PDF only lives
  // in MinIO. resolveTemplate is also re-run here (second resolution after
  // generateFromTemplate). Confirm whether an attachment was intended.
  try {
    const resolvedHtml = await resolveTemplate(templateId, { ...context, portId });
    await sendEmail(
      recipientEmail,
      template.name,
      `<p>Please find the attached document: <strong>${template.name}</strong></p><hr/>${resolvedHtml}`,
      `${port?.name ?? 'Port Nimara'} <noreply@${env.SMTP_HOST}>`,
    );
  } catch (err) {
    logger.error({ err, templateId, recipientEmail }, 'Failed to send template email');
    // Don't throw — document was created successfully; email failure is non-fatal
  }
  // Fire-and-forget audit entry for the send attempt (logged even if the
  // email above failed).
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'documentTemplate',
    entityId: templateId,
    metadata: { action: 'generate_and_send', recipientEmail },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return { document, file };
}
// ─── Generate and Sign ────────────────────────────────────────────────────────
export async function generateAndSign(
templateId: string,
portId: string,
context: GenerateInput,
signers: GenerateAndSignInput['signers'],
meta: AuditMeta,
) {
const { document: documentRecord, file } = await generateFromTemplate(
templateId,
portId,
context,
meta,
);
const template = await getTemplateById(templateId, portId);
// Fetch PDF bytes from MinIO to send to Documenso
const pdfStream = await minioClient.getObject(env.MINIO_BUCKET, file.storagePath);
const chunks: Buffer[] = [];
for await (const chunk of pdfStream) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as ArrayBuffer));
}
const pdfBase64 = Buffer.concat(chunks).toString('base64');
// Create Documenso document
const documensoDoc = await documensoCreate(
template.name,
pdfBase64,
signers.map((s) => ({
name: s.name,
email: s.email,
role: s.role,
signingOrder: s.signingOrder,
})),
);
// Send document for signing
await documensoSend(documensoDoc.id);
// Update our document record with Documenso ID and status
await db
.update(documents)
.set({
documensoId: documensoDoc.id,
status: 'sent',
updatedAt: new Date(),
})
.where(eq(documents.id, documentRecord.id));
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'document',
entityId: documentRecord.id,
newValue: { status: 'sent', documensoId: documensoDoc.id },
metadata: { action: 'generate_and_sign', signerCount: signers.length },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'document:updated', { documentId: documentRecord.id, changedFields: ['status', 'documensoId'] });
return { document: { ...documentRecord, documensoId: documensoDoc.id, status: 'sent' }, file };
}

View File

@@ -0,0 +1,754 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documents, documentSigners, documentEvents, files } from '@/lib/db/schema/documents';
import { interests } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError, ValidationError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, getPresignedUrl, buildStoragePath } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { eoiTemplate, buildEoiInputs } from '@/lib/pdf/templates/eoi-template';
import { evaluateRule } from '@/lib/services/berth-rules-engine';
import {
createDocument as documensoCreate,
sendDocument as documensoSend,
getDocument as documensoGet,
sendReminder as documensoRemind,
downloadSignedPdf,
} from '@/lib/services/documenso-client';
import type {
CreateDocumentInput,
UpdateDocumentInput,
ListDocumentsInput,
GenerateEoiInput,
} from '@/lib/validators/documents';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request-scoped actor/tenant context attached to every audit-log entry. */
interface AuditMeta {
  userId: string;    // acting user's id
  portId: string;    // tenant (port) scope of the action
  ipAddress: string; // request origin IP, recorded in the audit trail
  userAgent: string; // request user-agent string, recorded in the audit trail
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated, port-scoped document listing with optional title search and
 * interest/client/type/status filters.
 */
export async function listDocuments(portId: string, query: ListDocumentsInput) {
  const { page, limit, sort, order, search, interestId, clientId, documentType, status } = query;

  const conditions = [];
  if (interestId) conditions.push(eq(documents.interestId, interestId));
  if (clientId) conditions.push(eq(documents.clientId, clientId));
  if (documentType) conditions.push(eq(documents.documentType, documentType));
  if (status) conditions.push(eq(documents.status, status));

  // Resolve the sort column; unknown keys fall back to createdAt.
  let sortColumn;
  switch (sort) {
    case 'title':
      sortColumn = documents.title;
      break;
    case 'status':
      sortColumn = documents.status;
      break;
    case 'documentType':
      sortColumn = documents.documentType;
      break;
    default:
      sortColumn = documents.createdAt;
  }

  return buildListQuery({
    table: documents,
    portIdColumn: documents.portId,
    portId,
    idColumn: documents.id,
    updatedAtColumn: documents.updatedAt,
    searchColumns: [documents.title],
    searchTerm: search,
    filters: conditions,
    // Sorting is only applied when a sort key was supplied (matches original behavior).
    sort: sort ? { column: sortColumn, direction: order } : undefined,
    page,
    pageSize: limit,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Loads a document (with its signers) scoped to a port.
 * @throws NotFoundError when absent or owned by a different port.
 */
export async function getDocumentById(id: string, portId: string) {
  const found = await db.query.documents.findFirst({
    where: and(eq(documents.id, id), eq(documents.portId, portId)),
    with: { signers: true },
  });
  if (!found) {
    throw new NotFoundError('Document');
  }
  return found;
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a draft document record, optionally linked to an interest and/or
 * client, audit-logs it (fire-and-forget) and broadcasts the creation.
 */
export async function createDocument(
  portId: string,
  data: CreateDocumentInput,
  meta: AuditMeta,
) {
  const [created] = await db
    .insert(documents)
    .values({
      portId,
      interestId: data.interestId ?? null,
      clientId: data.clientId ?? null,
      documentType: data.documentType,
      title: data.title,
      notes: data.notes ?? null,
      status: 'draft',
      createdBy: meta.userId,
    })
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: created!.id,
    newValue: { documentType: created!.documentType, title: created!.title },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'document:created', { documentId: created!.id });
  return created!;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Applies a partial update (title/notes/status) to a document, audit-logging
 * the field-level diff and broadcasting the change to the port room.
 * @throws NotFoundError when the document is missing or cross-port.
 */
export async function updateDocument(
  id: string,
  portId: string,
  data: UpdateDocumentInput,
  meta: AuditMeta,
) {
  const existing = await getDocumentById(id, portId);

  // Only whitelisted fields may be patched; updatedAt is always bumped.
  const updates: Partial<typeof documents.$inferInsert> = {};
  if (data.title !== undefined) updates.title = data.title;
  if (data.notes !== undefined) updates.notes = data.notes;
  if (data.status !== undefined) updates.status = data.status;
  updates.updatedAt = new Date();

  const [updated] = await db
    .update(documents)
    .set(updates)
    .where(and(eq(documents.id, id), eq(documents.portId, portId)))
    .returning();

  // Fix: the computed diff was previously discarded and the audit entry
  // logged the full before/after entities. Log the field-level diff as
  // oldValue instead, matching updateTemplate's convention in the templates
  // service.
  const { diff } = diffEntity(
    existing as unknown as Record<string, unknown>,
    updated as unknown as Record<string, unknown>,
  );

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: id,
    oldValue: diff as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:updated', { documentId: id });
  return updated!;
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Deletes a document unless it is mid-signing; audit-logs the removal and
 * broadcasts the deletion to the port room.
 * @throws NotFoundError when missing or cross-port
 * @throws ConflictError when status is 'sent' or 'partially_signed'
 */
export async function deleteDocument(id: string, portId: string, meta: AuditMeta) {
  const existing = await getDocumentById(id, portId);

  // Documents out for signature must be cancelled remotely first.
  const inSigningFlow = existing.status === 'sent' || existing.status === 'partially_signed';
  if (inSigningFlow) {
    throw new ConflictError('Cannot delete a document that is currently in signing process');
  }

  await db
    .delete(documents)
    .where(and(eq(documents.id, id), eq(documents.portId, portId)));

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'document',
    entityId: id,
    oldValue: { title: existing.title, status: existing.status },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'document:deleted', { documentId: id });
}
// ─── Generate EOI (BR-020) ────────────────────────────────────────────────────
/**
 * Generates an Expression of Interest PDF for an interest (BR-020).
 *
 * Prerequisites validated before generation: client full name, a client
 * email contact, at least one yacht length dimension, and a linked berth.
 * All failures are collected and reported together. The PDF is rendered via
 * pdfme, stored in MinIO, and recorded as a `files` row plus a draft
 * `documents` row.
 *
 * @throws NotFoundError   when interest/client/berth/port cannot be resolved
 * @throws ValidationError listing every missing prerequisite field
 */
export async function generateEoi(interestId: string, portId: string, meta: AuditMeta) {
  // Fetch interest + related data
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) throw new NotFoundError('Interest');
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, interest.clientId),
    with: { contacts: true },
  });
  if (!client) throw new NotFoundError('Client');
  // BR-020: Check prerequisites — accumulate every failure so the caller can
  // surface them all at once rather than one at a time.
  const missing: Array<{ field: string; message: string }> = [];
  if (!client.fullName) missing.push({ field: 'client.fullName', message: 'Client must have a full name' });
  const emailContact = (client.contacts as Array<{ channel: string; value: string }> | undefined)?.find(
    (c) => c.channel === 'email',
  );
  if (!emailContact?.value) missing.push({ field: 'client.email', message: 'Client must have an email contact' });
  if (!client.yachtLengthFt && !client.yachtLengthM) {
    missing.push({ field: 'client.yachtDimensions', message: 'Client must have yacht dimensions' });
  }
  if (!interest.berthId) missing.push({ field: 'interest.berthId', message: 'Interest must have a berth linked' });
  if (missing.length > 0) {
    throw new ValidationError('Missing prerequisites for EOI generation', missing);
  }
  // Berth and port are independent lookups — fetch in parallel.
  const [berth, port] = await Promise.all([
    db.query.berths.findFirst({ where: eq(berths.id, interest.berthId!) }),
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
  ]);
  if (!berth) throw new NotFoundError('Berth');
  if (!port) throw new NotFoundError('Port');
  // Generate PDF from the fixed EOI pdfme template.
  const inputs = buildEoiInputs(
    interest as unknown as Record<string, unknown>,
    { ...client, contacts: client.contacts } as unknown as Record<string, unknown>,
    berth as unknown as Record<string, unknown>,
    port as unknown as Record<string, unknown>,
  );
  const pdfBytes = await generatePdf(eoiTemplate, [inputs]);
  const pdfBuffer = Buffer.from(pdfBytes);
  // Store in MinIO under the port's 'eoi' prefix, keyed by interest + file id.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port.slug, 'eoi', interestId, fileId, 'pdf');
  await minioClient.putObject(env.MINIO_BUCKET, storagePath, pdfBuffer, pdfBuffer.length, {
    'Content-Type': 'application/pdf',
  });
  // Create files record
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: client.id,
      filename: `eoi-${interestId}.pdf`,
      originalName: `eoi-${interestId}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBuffer.length),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'eoi',
      uploadedBy: meta.userId,
    })
    .returning();
  // Create document record (draft; sendForSigning moves it to 'sent').
  const [doc] = await db
    .insert(documents)
    .values({
      portId,
      interestId,
      clientId: client.id,
      documentType: 'eoi',
      title: `EOI ${client.fullName} / ${berth.mooringNumber}`,
      status: 'draft',
      fileId: fileRecord!.id,
      createdBy: meta.userId,
    })
    .returning();
  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: doc!.id,
    newValue: { documentType: 'eoi', interestId },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:created', { documentId: doc!.id, type: 'eoi' });
  return doc!;
}
// ─── Send for Signing (BR-021) ────────────────────────────────────────────────
/**
 * Sends a draft document to Documenso for e-signature (BR-021).
 *
 * Creates the fixed three-party signing chain — client (1), developer (2),
 * sales/approver (3) — pushes the stored PDF to Documenso, mirrors the
 * remote signing URLs and document id back onto local records, stamps the
 * linked interest's EOI tracking fields, and evaluates the 'eoi_sent' berth
 * rule (fire-and-forget).
 *
 * @throws ValidationError when the document lacks a file, client, or client email
 * @throws ConflictError   when the document is not in 'draft' status
 * @throws NotFoundError   for missing port/file records
 */
export async function sendForSigning(documentId: string, portId: string, meta: AuditMeta) {
  const doc = await getDocumentById(documentId, portId);
  if (!doc.fileId) throw new ValidationError('Document has no associated file');
  if (doc.status !== 'draft') throw new ConflictError('Document is not in draft status');
  // Fetch interest + client to build signers
  const interest = doc.interestId
    ? await db.query.interests.findFirst({ where: eq(interests.id, doc.interestId) })
    : null;
  const client = doc.clientId
    ? await db.query.clients.findFirst({
        where: eq(clients.id, doc.clientId),
        with: { contacts: true },
      })
    : null;
  if (!client) throw new ValidationError('Document has no associated client');
  const emailContact = (client.contacts as Array<{ channel: string; value: string }> | undefined)?.find(
    (c) => c.channel === 'email',
  );
  if (!emailContact?.value) throw new ValidationError('Client has no email contact');
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  if (!port) throw new NotFoundError('Port');
  // BR-021: Create 3 signers — client (1), developer (2), sales/approver (3)
  // NOTE(review): developer/sales addresses are synthesised from the port
  // slug ("developer@<slug>.com" / "sales@<slug>.com") — confirm these
  // mailboxes actually exist and receive Documenso invitations.
  const signerRecords = await db
    .insert(documentSigners)
    .values([
      {
        documentId,
        signerName: client.fullName,
        signerEmail: emailContact.value,
        signerRole: 'client',
        signingOrder: 1,
        status: 'pending',
      },
      {
        documentId,
        signerName: port.name,
        signerEmail: `developer@${port.slug}.com`,
        signerRole: 'developer',
        signingOrder: 2,
        status: 'pending',
      },
      {
        documentId,
        signerName: `${port.name} Sales`,
        signerEmail: `sales@${port.slug}.com`,
        signerRole: 'approver',
        signingOrder: 3,
        status: 'pending',
      },
    ])
    .returning();
  // Get file from MinIO and base64 encode
  const fileRecord = await db.query.files.findFirst({ where: eq(files.id, doc.fileId) });
  if (!fileRecord) throw new NotFoundError('File');
  const fileStream = await minioClient.getObject(env.MINIO_BUCKET, fileRecord.storagePath);
  const chunks: Buffer[] = [];
  for await (const chunk of fileStream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  const pdfBuffer = Buffer.concat(chunks);
  const pdfBase64 = pdfBuffer.toString('base64');
  // Create document in Documenso + send (same three signers, same order).
  const documensoDoc = await documensoCreate(doc.title, pdfBase64, [
    { name: client.fullName, email: emailContact.value, role: 'SIGNER', signingOrder: 1 },
    { name: port.name, email: `developer@${port.slug}.com`, role: 'SIGNER', signingOrder: 2 },
    { name: `${port.name} Sales`, email: `sales@${port.slug}.com`, role: 'SIGNER', signingOrder: 3 },
  ]);
  await documensoSend(documensoDoc.id);
  // Update signer records with signing URLs from Documenso response.
  // Local and remote signers are correlated by email address.
  for (const docSigner of documensoDoc.recipients) {
    const localSigner = signerRecords.find((s) => s.signerEmail === docSigner.email);
    if (localSigner) {
      await db
        .update(documentSigners)
        .set({
          signingUrl: docSigner.signingUrl ?? null,
          embeddedUrl: docSigner.embeddedUrl ?? null,
        })
        .where(eq(documentSigners.id, localSigner.id));
    }
  }
  // Update document status
  await db
    .update(documents)
    .set({ status: 'sent', documensoId: documensoDoc.id, updatedAt: new Date() })
    .where(eq(documents.id, documentId));
  // Update interest if linked
  if (interest) {
    await db
      .update(interests)
      .set({
        documensoId: documensoDoc.id,
        dateEoiSent: new Date(),
        eoiStatus: 'waiting_for_signatures',
        updatedAt: new Date(),
      })
      .where(eq(interests.id, interest.id));
    // Trigger berth rules (fire-and-forget; outcome does not block the send).
    void evaluateRule('eoi_sent', interest.id, portId, meta);
  }
  // Create document event
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'sent',
    eventData: { documensoId: documensoDoc.id },
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: documentId,
    newValue: { status: 'sent', documensoId: documensoDoc.id },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:sent', { documentId, type: doc.documentType, signerCount: 3, documensoId: documensoDoc.id });
  // Re-fetch so the response includes the freshly updated signer rows.
  return await getDocumentById(documentId, portId);
}
// ─── Upload Signed Manually (BR-013) ─────────────────────────────────────────
/**
 * Records a manually uploaded, already-signed copy of a document (BR-013),
 * bypassing the Documenso flow. Stores the file in MinIO, marks the document
 * 'completed' with `isManualUpload`, and — for EOI documents linked to an
 * interest — stamps the interest as signed and evaluates the 'eoi_signed'
 * berth rule. Finally notifies the uploading user.
 *
 * @param fileData raw uploaded file bytes plus original name/mime/size
 * @throws NotFoundError when the document or port cannot be resolved
 */
export async function uploadSignedManually(
  documentId: string,
  portId: string,
  fileData: { buffer: Buffer; originalName: string; mimeType: string; size: number },
  meta: AuditMeta,
) {
  const doc = await getDocumentById(documentId, portId);
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  if (!port) throw new NotFoundError('Port');
  // Store the signed file under the port's 'eoi-signed' prefix.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port.slug, 'eoi-signed', documentId, fileId, 'pdf');
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    fileData.buffer,
    fileData.size,
    { 'Content-Type': fileData.mimeType },
  );
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: doc.clientId ?? null,
      filename: fileData.originalName,
      originalName: fileData.originalName,
      mimeType: fileData.mimeType,
      sizeBytes: String(fileData.size),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'eoi',
      uploadedBy: meta.userId,
    })
    .returning();
  // Update document: attach the signed file and mark completed.
  await db
    .update(documents)
    .set({
      signedFileId: fileRecord!.id,
      status: 'completed',
      isManualUpload: true,
      updatedAt: new Date(),
    })
    .where(eq(documents.id, documentId));
  // Update interest if linked and type is eoi.
  if (doc.interestId && doc.documentType === 'eoi') {
    // NOTE(review): the interest row is fetched only to guard the rule
    // evaluation below; the status update itself runs unconditionally even
    // if the row is missing — confirm this is intended.
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, doc.interestId),
    });
    await db
      .update(interests)
      .set({ eoiStatus: 'signed', dateEoiSigned: new Date(), updatedAt: new Date() })
      .where(eq(interests.id, doc.interestId));
    if (interest) {
      void evaluateRule('eoi_signed', doc.interestId, portId, meta);
    }
  }
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'completed',
    eventData: { isManualUpload: true, fileId: fileRecord!.id },
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: documentId,
    newValue: { status: 'completed', isManualUpload: true },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:completed', { documentId });
  // Notify creator about manual completion. Dynamic import avoids a static
  // circular dependency on the notifications service; fire-and-forget.
  void import('@/lib/services/notifications.service').then(({ createNotification }) =>
    createNotification({
      portId,
      userId: meta.userId,
      type: 'document_signed',
      title: 'Document marked as signed',
      description: `"${doc.title}" has been manually uploaded as signed`,
      link: `/documents/${documentId}`,
      entityType: 'document',
      entityId: documentId,
      dedupeKey: `document:${documentId}:completed`,
    }),
  );
  // Re-fetch so the response reflects the completed state.
  return await getDocumentById(documentId, portId);
}
// ─── List Signers ─────────────────────────────────────────────────────────────
/** Return all signers of a document, ordered by their signing position. */
export async function listDocumentSigners(documentId: string, portId: string) {
  // Throws if the document is outside this port — acts as the access check.
  await getDocumentById(documentId, portId);
  const signers = await db.query.documentSigners.findMany({
    where: eq(documentSigners.documentId, documentId),
    orderBy: (ds, { asc }) => [asc(ds.signingOrder)],
  });
  return signers;
}
// ─── List Events ──────────────────────────────────────────────────────────────
/** Return a document's event timeline, newest first. */
export async function listDocumentEvents(documentId: string, portId: string) {
  // Throws if the document is outside this port — acts as the access check.
  await getDocumentById(documentId, portId);
  const events = await db.query.documentEvents.findMany({
    where: eq(documentEvents.documentId, documentId),
    orderBy: (de, { desc }) => [desc(de.createdAt)],
  });
  return events;
}
// ─── Webhook Handlers ─────────────────────────────────────────────────────────
/**
 * Documenso webhook: a single recipient has signed.
 * Marks the matching signer row as signed, promotes an EOI document from
 * 'sent' to 'partially_signed', appends a 'signed' event, and emits a
 * realtime update to the port room.
 */
export async function handleRecipientSigned(eventData: {
  documentId: string;
  recipientEmail: string;
  signatureHash?: string;
}) {
  // eventData.documentId is the Documenso-side id, not our primary key.
  const doc = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!doc) {
    // Unknown documents are logged and ignored (webhook returns success).
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }
  // Update signer status, matched by document + recipient email. May match
  // no row, in which case `signer` is undefined but processing continues.
  const [signer] = await db
    .update(documentSigners)
    .set({ status: 'signed', signedAt: new Date() })
    .where(
      and(
        eq(documentSigners.documentId, doc.id),
        eq(documentSigners.signerEmail, eventData.recipientEmail),
      ),
    )
    .returning();
  // Update document to partially_signed if eoi type. Only transitions from
  // 'sent', so a late webhook cannot regress an already-completed document.
  if (doc.documentType === 'eoi' && doc.status === 'sent') {
    await db
      .update(documents)
      .set({ status: 'partially_signed', updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  }
  // Record the event, with the signer link and signature hash when available.
  await db.insert(documentEvents).values({
    documentId: doc.id,
    eventType: 'signed',
    signerId: signer?.id ?? null,
    signatureHash: eventData.signatureHash ?? null,
    eventData: { recipientEmail: eventData.recipientEmail },
  });
  emitToRoom(`port:${doc.portId}`, 'document:signer:signed', {
    documentId: doc.id,
    signerEmail: eventData.recipientEmail,
  });
}
/**
 * Documenso webhook: all recipients have signed.
 * BR-022: downloads the signed PDF and archives it in MinIO, marks the
 * document completed, syncs the linked EOI interest (firing the berth rule
 * engine as 'system'), appends a 'completed' event, emits a realtime update,
 * and notifies the document creator.
 */
export async function handleDocumentCompleted(eventData: {
  documentId: string;
}) {
  // eventData.documentId is the Documenso-side id, not our primary key.
  const doc = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!doc) {
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }
  // BR-022: Download signed PDF and store in MinIO
  const port = await db.query.ports.findFirst({ where: eq(ports.id, doc.portId) });
  if (!port) {
    logger.error({ portId: doc.portId }, 'Port not found during document completion');
    return;
  }
  try {
    const signedPdfBuffer = await downloadSignedPdf(eventData.documentId);
    const fileId = crypto.randomUUID();
    const storagePath = buildStoragePath(port.slug, 'eoi-signed', doc.id, fileId, 'pdf');
    await minioClient.putObject(
      env.MINIO_BUCKET,
      storagePath,
      signedPdfBuffer,
      signedPdfBuffer.length,
      { 'Content-Type': 'application/pdf' },
    );
    // Register the archived PDF; uploadedBy is 'system' (webhook actor).
    const [fileRecord] = await db
      .insert(files)
      .values({
        portId: doc.portId,
        clientId: doc.clientId ?? null,
        filename: `signed-${doc.id}.pdf`,
        originalName: `signed-${doc.id}.pdf`,
        mimeType: 'application/pdf',
        sizeBytes: String(signedPdfBuffer.length),
        storagePath,
        storageBucket: env.MINIO_BUCKET,
        category: 'eoi',
        uploadedBy: 'system',
      })
      .returning();
    await db
      .update(documents)
      .set({ status: 'completed', signedFileId: fileRecord!.id, updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  } catch (err) {
    // Degrade gracefully: the document is still marked completed even when
    // the PDF download/store fails — it just has no signedFileId.
    logger.error({ err, documentId: doc.id }, 'Failed to download/store signed PDF');
    await db
      .update(documents)
      .set({ status: 'completed', updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  }
  // Update interest if eoi type: mark EOI signed and (when the interest
  // exists) evaluate the berth rule fire-and-forget under a 'system' actor.
  if (doc.interestId && doc.documentType === 'eoi') {
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, doc.interestId),
    });
    await db
      .update(interests)
      .set({ eoiStatus: 'signed', dateEoiSigned: new Date(), updatedAt: new Date() })
      .where(eq(interests.id, doc.interestId));
    if (interest) {
      void evaluateRule('eoi_signed', doc.interestId, doc.portId, {
        userId: 'system',
        portId: doc.portId,
        ipAddress: '0.0.0.0',
        userAgent: 'webhook',
      });
    }
  }
  await db.insert(documentEvents).values({
    documentId: doc.id,
    eventType: 'completed',
    eventData: { documensoId: eventData.documentId },
  });
  emitToRoom(`port:${doc.portId}`, 'document:completed', { documentId: doc.id });
  // Notify the document creator about completion (skip system-created docs).
  if (doc.createdBy && doc.createdBy !== 'system') {
    void import('@/lib/services/notifications.service').then(({ createNotification }) =>
      createNotification({
        portId: doc.portId,
        userId: doc.createdBy!,
        type: 'document_signed',
        title: 'Document fully signed',
        description: `"${doc.title}" has been signed by all parties`,
        link: `/documents/${doc.id}`,
        entityType: 'document',
        entityId: doc.id,
        dedupeKey: `document:${doc.id}:completed`,
      }),
    );
  }
}
/**
 * Documenso webhook: the signing window has expired.
 * Marks the document (and, for an EOI, its linked interest) as expired,
 * records an 'expired' event, and emits a realtime update.
 */
export async function handleDocumentExpired(eventData: {
  documentId: string;
}) {
  // Look the document up by its Documenso-side id.
  const document = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!document) {
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }
  await db
    .update(documents)
    .set({ status: 'expired', updatedAt: new Date() })
    .where(eq(documents.id, document.id));
  // Only EOIs carry expiry state onto their linked interest.
  if (document.interestId && document.documentType === 'eoi') {
    await db
      .update(interests)
      .set({ eoiStatus: 'expired', updatedAt: new Date() })
      .where(eq(interests.id, document.interestId));
  }
  await db.insert(documentEvents).values({
    documentId: document.id,
    eventType: 'expired',
    eventData: { documensoId: eventData.documentId },
  });
  emitToRoom(`port:${document.portId}`, 'document:expired', { documentId: document.id });
}

View File

@@ -0,0 +1,173 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts } from '@/lib/db/schema/email';
import { encrypt, decrypt } from '@/lib/utils/encryption';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ForbiddenError } from '@/lib/errors';
import type { ConnectAccountInput, ToggleAccountInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
// Request context threaded through mutating calls for the audit trail.
interface AuditMeta {
  userId: string;     // acting user
  portId: string;     // tenant scope
  ipAddress: string;  // request origin IP
  userAgent: string;  // request user agent
}
// Public account shape: everything except the encrypted credentials blob.
type AccountWithoutCredentials = Omit<typeof emailAccounts.$inferSelect, 'credentialsEnc'>;
// ─── Helpers ──────────────────────────────────────────────────────────────────
/** Drop the encrypted credentials blob so it never leaves the service layer. */
function stripCredentials(
  account: typeof emailAccounts.$inferSelect,
): AccountWithoutCredentials {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const { credentialsEnc: _omitted, ...withoutSecret } = account;
  return withoutSecret;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/** List the user's email accounts for a port, with credentials stripped. */
export async function listAccounts(
  userId: string,
  portId: string,
): Promise<AccountWithoutCredentials[]> {
  const ownAccounts = await db
    .select()
    .from(emailAccounts)
    .where(and(eq(emailAccounts.portId, portId), eq(emailAccounts.userId, userId)));
  return ownAccounts.map((account) => stripCredentials(account));
}
// ─── Connect ──────────────────────────────────────────────────────────────────
/**
 * Connect a new SMTP/IMAP email account for the user.
 * Credentials are sealed into one encrypted blob before persistence; the
 * returned record never contains them.
 */
export async function connectAccount(
  userId: string,
  portId: string,
  data: ConnectAccountInput,
  audit: AuditMeta,
): Promise<AccountWithoutCredentials> {
  const credentialsEnc = encrypt(
    JSON.stringify({ username: data.username, password: data.password }),
  );
  const insertedRows = await db
    .insert(emailAccounts)
    .values({
      userId,
      portId,
      provider: data.provider,
      emailAddress: data.emailAddress,
      smtpHost: data.smtpHost,
      smtpPort: data.smtpPort,
      imapHost: data.imapHost,
      imapPort: data.imapPort,
      credentialsEnc,
      isActive: true,
    })
    .returning();
  const created = insertedRows[0];
  if (!created) throw new Error('Failed to insert email account');
  // Fire-and-forget audit entry; never blocks the response.
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'create',
    entityType: 'email_account',
    entityId: created.id,
    metadata: { emailAddress: data.emailAddress, provider: data.provider },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
  return stripCredentials(created);
}
// ─── Toggle ───────────────────────────────────────────────────────────────────
/** Enable or disable an email account. Only its owner may toggle it. */
export async function toggleAccount(
  accountId: string,
  userId: string,
  data: ToggleAccountInput,
): Promise<AccountWithoutCredentials> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  // Ownership check — the account is bound to the user who connected it.
  if (account.userId !== userId) {
    throw new ForbiddenError('You do not own this email account');
  }
  const updatedRows = await db
    .update(emailAccounts)
    .set({ isActive: data.isActive, updatedAt: new Date() })
    .where(eq(emailAccounts.id, accountId))
    .returning();
  const updated = updatedRows[0];
  if (!updated) throw new Error('Failed to update email account');
  return stripCredentials(updated);
}
// ─── Disconnect ───────────────────────────────────────────────────────────────
/** Permanently remove an email account. Only its owner may disconnect it. */
export async function disconnectAccount(
  accountId: string,
  userId: string,
  audit: AuditMeta,
): Promise<void> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  if (account.userId !== userId) {
    throw new ForbiddenError('You do not own this email account');
  }
  await db.delete(emailAccounts).where(eq(emailAccounts.id, accountId));
  // Fire-and-forget audit entry recording which address was removed.
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'delete',
    entityType: 'email_account',
    entityId: accountId,
    metadata: { emailAddress: account.emailAddress },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
}
// ─── Get Decrypted Credentials (INTERNAL ONLY) ────────────────────────────────
/**
 * Decrypt an account's SMTP/IMAP credentials.
 * INTERNAL ONLY — callers must never log or return the result.
 */
export async function getDecryptedCredentials(
  accountId: string,
): Promise<{ username: string; password: string }> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  // The blob was written by connectAccount as JSON {username, password}.
  const parsed = JSON.parse(decrypt(account.credentialsEnc)) as {
    username: string;
    password: string;
  };
  return { username: parsed.username, password: parsed.password };
}

View File

@@ -0,0 +1,176 @@
import nodemailer from 'nodemailer';
import { and, eq, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts, emailMessages, emailThreads } from '@/lib/db/schema/email';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ForbiddenError } from '@/lib/errors';
import { getDecryptedCredentials } from '@/lib/services/email-accounts.service';
import type { ComposeEmailInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
// Request context threaded through mutating calls for the audit trail.
interface AuditMeta {
  userId: string;     // acting user
  portId: string;     // tenant scope
  ipAddress: string;  // request origin IP
  userAgent: string;  // request user agent
}
// ─── Send Email ───────────────────────────────────────────────────────────────
/**
 * Send an email through the user's own SMTP account and persist it.
 *
 * Flow: verify account ownership → validate the target thread → decrypt
 * credentials → build a per-user transporter → resolve RFC 5322 threading
 * headers → send → persist the outbound message → bump thread metadata →
 * audit.
 *
 * FIX: the target thread is now validated BEFORE the SMTP dispatch.
 * Previously an invalid `data.threadId` was only detected after
 * `transporter.sendMail` had already delivered the message, leaving a sent
 * email with no persisted record. Callers still receive the same
 * NotFoundError, just earlier.
 *
 * @returns The persisted outbound message row and its thread id.
 * @throws NotFoundError  when the account or thread does not exist.
 * @throws ForbiddenError when the account belongs to another port.
 */
export async function sendEmail(
  userId: string,
  portId: string,
  data: ComposeEmailInput,
  audit: AuditMeta,
) {
  // Verify the account exists AND belongs to the calling user.
  const account = await db.query.emailAccounts.findFirst({
    where: and(
      eq(emailAccounts.id, data.accountId),
      eq(emailAccounts.userId, userId),
    ),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  if (account.portId !== portId) {
    throw new ForbiddenError('Email account does not belong to this port');
  }
  // Validate the target thread up front so we never send mail we cannot record.
  let existingThreadId: string | null = null;
  if (data.threadId) {
    const existingThread = await db.query.emailThreads.findFirst({
      where: and(
        eq(emailThreads.id, data.threadId),
        eq(emailThreads.portId, portId),
      ),
    });
    if (!existingThread) {
      throw new NotFoundError('Email thread');
    }
    existingThreadId = existingThread.id;
  }
  // Decrypt credentials (INTERNAL — never logged or returned)
  const creds = await getDecryptedCredentials(data.accountId);
  // Build user-specific SMTP transporter (port 465 implies implicit TLS).
  const transporter = nodemailer.createTransport({
    host: account.smtpHost,
    port: account.smtpPort,
    secure: account.smtpPort === 465,
    auth: { user: creds.username, pass: creds.password },
  });
  // Resolve threading headers if replying.
  let inReplyTo: string | undefined;
  let references: string | undefined;
  if (data.inReplyToMessageId) {
    inReplyTo = data.inReplyToMessageId;
    // Gather the full References chain from the thread, oldest message first.
    if (data.threadId) {
      const existingMessages = await db
        .select({ messageIdHeader: emailMessages.messageIdHeader })
        .from(emailMessages)
        .where(eq(emailMessages.threadId, data.threadId))
        .orderBy(emailMessages.sentAt);
      const refIds = existingMessages
        .map((m) => m.messageIdHeader)
        .filter((id): id is string => Boolean(id));
      if (refIds.length > 0) {
        references = refIds.join(' ');
      }
    }
  }
  // Send via the user's SMTP transporter.
  const info = await transporter.sendMail({
    from: account.emailAddress,
    to: data.to.join(', '),
    cc: data.cc?.join(', '),
    subject: data.subject,
    html: data.bodyHtml,
    inReplyTo,
    references,
  });
  const sentMessageId: string =
    typeof info.messageId === 'string' ? info.messageId : String(info.messageId ?? '');
  // Use the pre-validated thread, or create a fresh one for this message.
  let threadId: string;
  if (existingThreadId) {
    threadId = existingThreadId;
  } else {
    const newThreadRows = await db
      .insert(emailThreads)
      .values({
        portId,
        subject: data.subject,
        lastMessageAt: new Date(),
        messageCount: 0,
      })
      .returning();
    const newThread = newThreadRows[0];
    if (!newThread) throw new Error('Failed to create email thread');
    threadId = newThread.id;
  }
  const now = new Date();
  // Persist the outbound message.
  const messageRows = await db
    .insert(emailMessages)
    .values({
      threadId,
      messageIdHeader: sentMessageId || null,
      fromAddress: account.emailAddress,
      toAddresses: data.to,
      ccAddresses: data.cc ?? null,
      subject: data.subject,
      bodyHtml: data.bodyHtml,
      direction: 'outbound',
      sentAt: now,
    })
    .returning();
  const message = messageRows[0];
  if (!message) throw new Error('Failed to persist outbound email message');
  // Bump thread metadata; the counter increment runs SQL-side.
  await db
    .update(emailThreads)
    .set({
      lastMessageAt: now,
      messageCount: sql`${emailThreads.messageCount} + 1`,
      updatedAt: now,
    })
    .where(eq(emailThreads.id, threadId));
  // Fire-and-forget audit entry; never blocks the response.
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'create',
    entityType: 'email_message',
    entityId: message.id,
    metadata: {
      threadId,
      to: data.to,
      subject: data.subject,
      accountId: data.accountId,
    },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
  return { message, threadId };
}

View File

@@ -0,0 +1,73 @@
import { getQueue } from '@/lib/queue';
// ─── Types ────────────────────────────────────────────────────────────────────
// Parameters for an AI email-draft job. Contains only entity IDs plus the
// requested tone/context — no PII is placed on the queue.
export interface DraftRequest {
  interestId: string;
  clientId: string;
  portId: string;
  context: 'follow_up' | 'introduction' | 'stage_update' | 'general';
  additionalInstructions?: string;
}
// Completed draft as returned by the worker (generatedAt parsed to Date).
export interface DraftResult {
  subject: string;
  body: string;
  generatedAt: Date;
}
// ─── Request draft (enqueues job) ─────────────────────────────────────────────
/**
* Request an AI-generated email draft.
* Enqueues a job on the 'ai' queue. Returns jobId for polling.
* Job payload contains ONLY entity IDs (no PII).
*/
export async function requestEmailDraft(
  userId: string,
  request: DraftRequest,
): Promise<{ jobId: string }> {
  const aiQueue = getQueue('ai');
  const job = await aiQueue.add('generate-email-draft', {
    // No PII — only IDs and context parameters
    interestId: request.interestId,
    clientId: request.clientId,
    portId: request.portId,
    context: request.context,
    additionalInstructions: request.additionalInstructions,
    requestedBy: userId,
  });
  // BullMQ types `job.id` as possibly undefined; fail loudly here instead of
  // using a non-null assertion, so a missing id surfaces at enqueue time
  // rather than as a confusing poll failure later.
  if (job.id == null) {
    throw new Error('AI queue did not return a job id for the draft request');
  }
  return { jobId: job.id };
}
// ─── Poll for result ──────────────────────────────────────────────────────────
/**
* Get the result of an email draft generation job.
* Returns null if still processing.
*/
export async function getEmailDraftResult(jobId: string): Promise<DraftResult | null> {
  const aiQueue = getQueue('ai');
  const job = await aiQueue.getJob(jobId);
  if (!job) return null;
  // Anything other than 'completed' (waiting, active, failed…) reads as "not ready".
  if ((await job.getState()) !== 'completed') return null;
  const payload = job.returnvalue as
    | { subject: string; body: string; generatedAt: string }
    | undefined
    | null;
  if (!payload) return null;
  // Re-hydrate the serialized timestamp into a Date for callers.
  return {
    subject: payload.subject,
    body: payload.body,
    generatedAt: new Date(payload.generatedAt),
  };
}

View File

@@ -0,0 +1,354 @@
import { and, desc, eq, ilike, or, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts, emailMessages, emailThreads } from '@/lib/db/schema/email';
import { clientContacts, clients } from '@/lib/db/schema/clients';
import { NotFoundError } from '@/lib/errors';
import { getDecryptedCredentials } from '@/lib/services/email-accounts.service';
import { logger } from '@/lib/logger';
import type { ListThreadsInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
// Normalised representation of one email after MIME parsing, as produced by
// syncInbox and consumed by ingestMessage.
interface ParsedEmail {
  messageId: string;      // Message-ID header; '' when the header was absent
  from: string;           // raw From header text (may include display name)
  to: string[];
  cc?: string[];
  subject: string;
  text?: string;          // plain-text body, when present
  html?: string;          // HTML body, when present
  date: Date;
  inReplyTo?: string;     // In-Reply-To header, for thread matching
  references?: string[];  // References chain, for thread matching
}
// ─── List Threads ─────────────────────────────────────────────────────────────
/** Paginated thread listing for a port, optionally filtered by client. */
export async function listThreads(portId: string, query: ListThreadsInput) {
  const { page, limit, clientId } = query;
  const offset = (page - 1) * limit;
  // Port scoping always applies; client filter only when requested.
  const whereClause = and(
    eq(emailThreads.portId, portId),
    ...(clientId ? [eq(emailThreads.clientId, clientId)] : []),
  );
  const pageQuery = db
    .select({
      thread: emailThreads,
      clientName: clients.fullName,
    })
    .from(emailThreads)
    .leftJoin(clients, eq(emailThreads.clientId, clients.id))
    .where(whereClause)
    .orderBy(desc(emailThreads.lastMessageAt))
    .limit(limit)
    .offset(offset);
  const totalQuery = db
    .select({ count: sql<string>`count(*)` })
    .from(emailThreads)
    .where(whereClause);
  // Run the page fetch and the count in parallel.
  const [pageRows, totalRows] = await Promise.all([pageQuery, totalQuery]);
  const total = parseInt(totalRows[0]?.count ?? '0', 10);
  return {
    data: pageRows.map(({ thread, clientName }) => ({ ...thread, clientName: clientName ?? null })),
    total,
  };
}
// ─── Get Thread ───────────────────────────────────────────────────────────────
/** Fetch one thread with its messages in chronological order. */
export async function getThread(threadId: string, portId: string) {
  const thread = await db.query.emailThreads.findFirst({
    where: eq(emailThreads.id, threadId),
  });
  // Cross-port access is reported identically to a missing thread.
  if (!thread || thread.portId !== portId) {
    throw new NotFoundError('Email thread');
  }
  const messages = await db
    .select()
    .from(emailMessages)
    .where(eq(emailMessages.threadId, threadId))
    .orderBy(emailMessages.sentAt);
  return { ...thread, messages };
}
// ─── Ingest Message ───────────────────────────────────────────────────────────
/**
 * File an inbound email into a thread and persist it.
 *
 * Thread resolution runs three strategies in priority order:
 *   1. Message-ID chain: any In-Reply-To/References id matching a stored
 *      message (thread must belong to this port).
 *   2. Sender address match against client email contacts; reuses the
 *      client's most recent thread or creates one linked to the client.
 *   3. Fuzzy subject match (Re:/Fwd: prefixes stripped) on existing threads.
 * If all fail, a new unlinked thread is created.
 *
 * @returns The inserted message row and the resolved thread id.
 */
export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
  let threadId: string | null = null;
  // Step 1: Message-ID chain — check inReplyTo and references headers
  const referencedIds = [
    ...(parsedEmail.inReplyTo ? [parsedEmail.inReplyTo] : []),
    ...(parsedEmail.references ?? []),
  ];
  if (referencedIds.length > 0) {
    const existingMessage = await db.query.emailMessages.findFirst({
      where: or(...referencedIds.map((id) => eq(emailMessages.messageIdHeader, id))),
    });
    if (existingMessage) {
      // Verify thread belongs to this port
      const thread = await db.query.emailThreads.findFirst({
        where: and(
          eq(emailThreads.id, existingMessage.threadId),
          eq(emailThreads.portId, portId),
        ),
      });
      if (thread) {
        threadId = thread.id;
      }
    }
  }
  // Step 2: Email address match against client contacts
  if (!threadId) {
    // Extract the bare address from "Display Name <addr>" and lowercase it.
    const fromAddress = parsedEmail.from.replace(/.*<(.+)>/, '$1').trim().toLowerCase();
    const contactRows = await db
      .select({
        clientId: clientContacts.clientId,
        clientPortId: clients.portId,
      })
      .from(clientContacts)
      .innerJoin(clients, eq(clientContacts.clientId, clients.id))
      .where(
        and(
          eq(clientContacts.channel, 'email'),
          // Case-insensitive match against the stored contact value.
          eq(sql`lower(${clientContacts.value})`, fromAddress),
          eq(clients.portId, portId),
        ),
      )
      .limit(1);
    const contactRow = contactRows[0];
    if (contactRow) {
      const clientId = contactRow.clientId;
      // Find most recent thread for this client or create one
      const existingThread = await db.query.emailThreads.findFirst({
        where: and(eq(emailThreads.portId, portId), eq(emailThreads.clientId, clientId)),
        orderBy: [desc(emailThreads.lastMessageAt)],
      });
      if (existingThread) {
        threadId = existingThread.id;
      } else {
        const newThreadRows = await db
          .insert(emailThreads)
          .values({
            portId,
            clientId,
            subject: parsedEmail.subject,
            lastMessageAt: parsedEmail.date,
            messageCount: 0,
          })
          .returning();
        const newThread = newThreadRows[0];
        if (!newThread) throw new Error('Failed to create email thread');
        threadId = newThread.id;
      }
    }
  }
  // Step 3: Subject + sender fuzzy match (most recent thread whose subject
  // contains the normalised subject, case-insensitively).
  if (!threadId) {
    const normalizedSubject = parsedEmail.subject
      .replace(/^(re|fwd|fw):\s*/i, '')
      .trim();
    if (normalizedSubject) {
      const matchingThread = await db.query.emailThreads.findFirst({
        where: and(
          eq(emailThreads.portId, portId),
          ilike(emailThreads.subject, `%${normalizedSubject}%`),
        ),
        orderBy: [desc(emailThreads.lastMessageAt)],
      });
      if (matchingThread) {
        threadId = matchingThread.id;
      }
    }
  }
  // No thread found — create a new one (not linked to any client)
  if (!threadId) {
    const newThreadRows = await db
      .insert(emailThreads)
      .values({
        portId,
        subject: parsedEmail.subject,
        lastMessageAt: parsedEmail.date,
        messageCount: 0,
      })
      .returning();
    const newThread = newThreadRows[0];
    if (!newThread) throw new Error('Failed to create email thread');
    threadId = newThread.id;
  }
  // Insert the message
  const messageRows = await db
    .insert(emailMessages)
    .values({
      threadId,
      messageIdHeader: parsedEmail.messageId || null,
      fromAddress: parsedEmail.from,
      toAddresses: parsedEmail.to,
      ccAddresses: parsedEmail.cc ?? null,
      subject: parsedEmail.subject,
      bodyText: parsedEmail.text ?? null,
      bodyHtml: parsedEmail.html ?? null,
      direction: 'inbound',
      sentAt: parsedEmail.date,
    })
    .returning();
  const message = messageRows[0];
  if (!message) throw new Error('Failed to insert email message');
  // Update thread's lastMessageAt and messageCount (SQL-side increment).
  await db
    .update(emailThreads)
    .set({
      lastMessageAt: parsedEmail.date,
      messageCount: sql`${emailThreads.messageCount} + 1`,
      updatedAt: new Date(),
    })
    .where(eq(emailThreads.id, threadId));
  return { message, threadId };
}
// ─── Sync Inbox ───────────────────────────────────────────────────────────────
/**
 * Pull new INBOX messages for one account over IMAP and ingest them.
 *
 * Searches messages since the last sync (or the past 30 days on first run),
 * parses each with mailparser, normalises header shapes, and hands them to
 * ingestMessage. Per-message failures are logged and skipped so one bad
 * message cannot abort the whole sync.
 *
 * @throws NotFoundError when the account does not exist.
 */
export async function syncInbox(accountId: string): Promise<void> {
  // Dynamic imports to avoid loading heavy IMAP/mail modules at module initialisation
  const imapflowModule = await import('imapflow');
  const ImapFlow = imapflowModule.ImapFlow;
  const mailparserModule = await import('mailparser');
  const simpleParser = mailparserModule.simpleParser;
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  const creds = await getDecryptedCredentials(accountId);
  // Determine the since date: last sync or 30 days ago
  const since = account.lastSyncAt ?? new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  // Port 993 implies implicit TLS.
  const client = new ImapFlow({
    host: account.imapHost,
    port: account.imapPort,
    secure: account.imapPort === 993,
    auth: {
      user: creds.username,
      pass: creds.password,
    },
    logger: false,
  });
  try {
    await client.connect();
    const mailbox = await client.mailboxOpen('INBOX');
    logger.info({ accountId, exists: mailbox.exists }, 'IMAP INBOX opened');
    // Search for messages since the last sync date
    // client.search() returns false | number[] — false means nothing found
    const searchResult = await client.search({ since });
    const uids: number[] = searchResult === false ? [] : searchResult;
    if (uids.length === 0) {
      // NOTE(review): this early return skips the lastSyncAt update below, so
      // an idle inbox re-searches the same window every run — confirm intended.
      logger.info({ accountId }, 'No new messages to sync');
      return;
    }
    for await (const message of client.fetch(uids, { source: true })) {
      try {
        if (!message.source) continue;
        const parsed = await simpleParser(message.source);
        // Normalise messageId — mailparser returns string | string[] | undefined
        const rawMsgId = parsed.messageId;
        const messageId =
          rawMsgId == null
            ? ''
            : Array.isArray(rawMsgId)
              ? (rawMsgId[0] ?? '')
              : rawMsgId;
        const from = parsed.from?.text ?? '';
        // Normalise to/cc — mailparser AddressObject can be an array
        const resolveAddresses = (
          field: typeof parsed.to,
        ): string[] => {
          if (!field) return [];
          const arr = Array.isArray(field) ? field : [field];
          return arr.flatMap((a) =>
            (a.value ?? []).map((x: { address?: string }) => x.address ?? ''),
          );
        };
        const to = resolveAddresses(parsed.to);
        const cc = parsed.cc ? resolveAddresses(parsed.cc) : undefined;
        // References may arrive as a whitespace-separated string or an array.
        const rawRefs = parsed.references;
        const references: string[] =
          rawRefs == null
            ? []
            : typeof rawRefs === 'string'
              ? rawRefs.split(/\s+/).filter(Boolean)
              : rawRefs;
        await ingestMessage(account.portId, {
          messageId,
          from,
          to,
          cc,
          subject: parsed.subject ?? '(no subject)',
          text: parsed.text ?? undefined,
          html: typeof parsed.html === 'string' ? parsed.html : undefined,
          date: parsed.date ?? new Date(),
          inReplyTo: parsed.inReplyTo ?? undefined,
          references,
        });
      } catch (err) {
        // Skip this message but keep syncing the rest.
        logger.error({ err, accountId, uid: message.uid }, 'Failed to ingest email message');
      }
    }
    // Update lastSyncAt on the account
    await db
      .update(emailAccounts)
      .set({ lastSyncAt: new Date(), updatedAt: new Date() })
      .where(eq(emailAccounts.id, accountId));
    logger.info({ accountId, messageCount: uids.length }, 'IMAP sync complete');
  } finally {
    // NOTE(review): logout is attempted even when connect() itself failed —
    // verify imapflow tolerates logout on an unconnected client.
    await client.logout();
  }
}

View File

@@ -0,0 +1,211 @@
import { and, eq, gte, ilike, isNull, lte, or } from 'drizzle-orm';
import type { SQL } from 'drizzle-orm';
import { db } from '@/lib/db';
import { expenses } from '@/lib/db/schema/financial';
import { generatePdf } from '@/lib/pdf/generate';
import { getRate } from '@/lib/services/currency';
import { logger } from '@/lib/logger';
import type { ListExpensesInput } from '@/lib/validators/expenses';
async function fetchAllExpenses(portId: string, query: ListExpensesInput) {
const conditions: ReturnType<typeof eq>[] = [eq(expenses.portId, portId) as any];
if (!query.includeArchived) {
conditions.push(isNull(expenses.archivedAt) as any);
}
if (query.category) {
conditions.push(eq(expenses.category, query.category) as any);
}
if (query.paymentStatus) {
conditions.push(eq(expenses.paymentStatus, query.paymentStatus) as any);
}
if (query.currency) {
conditions.push(eq(expenses.currency, query.currency) as any);
}
if (query.payer) {
conditions.push(eq(expenses.payer, query.payer) as any);
}
if (query.dateFrom) {
conditions.push(gte(expenses.expenseDate, new Date(query.dateFrom)) as any);
}
if (query.dateTo) {
conditions.push(lte(expenses.expenseDate, new Date(query.dateTo)) as any);
}
if (query.search) {
conditions.push(
or(
ilike(expenses.establishmentName, `%${query.search}%`),
ilike(expenses.description, `%${query.search}%`),
) as any,
);
}
return db
.select()
.from(expenses)
.where(and(...(conditions as any[])));
}
/**
 * Export matching expenses as a CSV string (header row + one row per expense).
 *
 * FIX: the original escaped embedded double quotes only in `description`;
 * a quote in `establishmentName` (or any other field) produced malformed
 * CSV. All cells are now quoted and escaped uniformly per RFC 4180.
 */
export async function exportCsv(portId: string, query: ListExpensesInput): Promise<string> {
  const rows = await fetchAllExpenses(portId, query);
  // RFC 4180 quoting: wrap every cell in quotes, doubling embedded quotes.
  const quote = (value: unknown): string => `"${String(value).replace(/"/g, '""')}"`;
  const headers = [
    'Date',
    'Establishment',
    'Category',
    'Amount',
    'Currency',
    'Amount USD',
    'Payment Status',
    'Payment Method',
    'Description',
  ];
  const csvRows = rows.map((r) => {
    // ISO date only (YYYY-MM-DD), empty when the expense has no date.
    const date = r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '';
    return [
      date,
      r.establishmentName ?? '',
      r.category ?? '',
      r.amount,
      r.currency,
      r.amountUsd ?? 'N/A',
      r.paymentStatus ?? '',
      r.paymentMethod ?? '',
      r.description ?? '',
    ]
      .map(quote)
      .join(',');
  });
  return [headers.join(','), ...csvRows].join('\n');
}
/** Render matching expenses as a simple single-page A4 PDF listing. */
export async function exportPdf(portId: string, query: ListExpensesInput): Promise<Uint8Array> {
  const matching = await fetchAllExpenses(portId, query);
  // Two text fields: a 14pt title and an 8pt monolithic content block.
  const titleField = {
    name: 'title',
    type: 'text',
    position: { x: 10, y: 10 },
    width: 190,
    height: 10,
    fontSize: 14,
    fontColor: '#000000',
  };
  const contentField = {
    name: 'content',
    type: 'text',
    position: { x: 10, y: 25 },
    width: 190,
    height: 260,
    fontSize: 8,
    fontColor: '#000000',
  };
  const template = {
    basePdf: { width: 210, height: 297, padding: [10, 10, 10, 10] },
    schemas: [[titleField, contentField]],
  };
  // One pipe-delimited line per expense.
  const toLine = (r: (typeof matching)[number]): string => {
    const day = r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '';
    return `${day} | ${r.establishmentName ?? '-'} | ${r.category ?? '-'} | ${r.amount} ${r.currency} | ${r.paymentStatus ?? '-'}`;
  };
  const inputs = [
    {
      title: 'Expense Report',
      content: matching.map(toLine).join('\n'),
    },
  ];
  return generatePdf(template as any, inputs);
}
/**
 * BR-043: Parent-company expense report — all amounts converted to EUR with
 * a 5% management fee added, rendered as a single-page A4 PDF.
 */
export async function exportParentCompany(
  portId: string,
  query: ListExpensesInput,
): Promise<Uint8Array> {
  // BR-043: Convert all amounts to EUR, add 5% management fee
  const rows = await fetchAllExpenses(portId, query);
  const eurRate = await getRate('USD', 'EUR');
  if (!eurRate) {
    logger.warn('EUR rate unavailable for parent company export, using 1:1 fallback');
  }
  const rate = eurRate ?? 1;
  const convertedRows = rows.map((r) => {
    // NOTE(review): rows without a stored USD conversion fall back to the
    // native-currency amount treated as USD — confirm this is intended for
    // BR-040 rows saved without a rate.
    const amountUsd = r.amountUsd ? Number(r.amountUsd) : Number(r.amount);
    const amountEur = Number((amountUsd * rate).toFixed(2));
    return {
      date: r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '',
      establishment: r.establishmentName ?? '-',
      category: r.category ?? '-',
      amountEur,
    };
  });
  // Fee and total are rounded to cents independently of the subtotal.
  const subtotal = convertedRows.reduce((sum, r) => sum + r.amountEur, 0);
  const fee = Number((subtotal * 0.05).toFixed(2));
  const total = Number((subtotal + fee).toFixed(2));
  // A4 template: title, 8pt listing block, and a 10pt summary block at the foot.
  const template = {
    basePdf: { width: 210, height: 297, padding: [10, 10, 10, 10] },
    schemas: [
      [
        {
          name: 'title',
          type: 'text',
          position: { x: 10, y: 10 },
          width: 190,
          height: 10,
          fontSize: 14,
          fontColor: '#000000',
        },
        {
          name: 'content',
          type: 'text',
          position: { x: 10, y: 25 },
          width: 190,
          height: 230,
          fontSize: 8,
          fontColor: '#000000',
        },
        {
          name: 'summary',
          type: 'text',
          position: { x: 10, y: 260 },
          width: 190,
          height: 30,
          fontSize: 10,
          fontColor: '#000000',
        },
      ],
    ],
  };
  const lines = convertedRows.map(
    (r) => `${r.date} | ${r.establishment} | ${r.category} | EUR ${r.amountEur.toFixed(2)}`,
  );
  const summary = [
    `Subtotal: EUR ${subtotal.toFixed(2)}`,
    `Management Fee (5%): EUR ${fee.toFixed(2)}`,
    `Total: EUR ${total.toFixed(2)}`,
  ].join('\n');
  const inputs = [
    {
      title: 'Parent Company Expense Report (EUR)',
      content: lines.join('\n'),
      summary,
    },
  ];
  return generatePdf(template as any, inputs);
}

View File

@@ -0,0 +1,307 @@
import { eq, and, gte, lte, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { expenses, invoices, invoiceExpenses } from '@/lib/db/schema/financial';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore } from '@/lib/db/utils';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { convert } from '@/lib/services/currency';
import { logger } from '@/lib/logger';
import type { CreateExpenseInput, UpdateExpenseInput, ListExpensesInput } from '@/lib/validators/expenses';
export type { ListExpensesInput };
// AuditMeta type expected by service functions
export interface ServiceAuditMeta {
  userId: string;     // acting user
  portId: string;     // tenant scope
  ipAddress: string;  // request origin IP
  userAgent: string;  // request user agent
}
/**
 * Paginated, filterable expense listing for a port, delegating pagination,
 * search, archival filtering and sorting to the shared buildListQuery helper.
 */
export async function listExpenses(portId: string, query: ListExpensesInput) {
  const filters = [];
  if (query.category) {
    filters.push(eq(expenses.category, query.category));
  }
  if (query.paymentStatus) {
    filters.push(eq(expenses.paymentStatus, query.paymentStatus));
  }
  if (query.currency) {
    filters.push(eq(expenses.currency, query.currency));
  }
  if (query.payer) {
    filters.push(eq(expenses.payer, query.payer));
  }
  if (query.dateFrom) {
    filters.push(gte(expenses.expenseDate, new Date(query.dateFrom)));
  }
  if (query.dateTo) {
    filters.push(lte(expenses.expenseDate, new Date(query.dateTo)));
  }
  return buildListQuery({
    table: expenses,
    portIdColumn: expenses.portId,
    portId,
    idColumn: expenses.id,
    updatedAtColumn: expenses.updatedAt,
    filters,
    page: query.page,
    pageSize: query.limit,
    searchColumns: [expenses.establishmentName, expenses.description],
    searchTerm: query.search,
    includeArchived: query.includeArchived,
    archivedAtColumn: expenses.archivedAt,
    // NOTE(review): query.sort is cast to a table key unchecked (`as any`);
    // a non-column value would yield an invalid sort column — presumably the
    // validator whitelists sort keys; verify against ListExpensesInput.
    sort: query.sort
      ? { column: expenses[query.sort as keyof typeof expenses] as any, direction: query.order }
      : undefined,
  });
}
export async function getExpenseById(id: string, portId: string) {
const expense = await db.query.expenses.findFirst({
where: and(eq(expenses.id, id), eq(expenses.portId, portId)),
});
if (!expense) throw new NotFoundError('Expense');
return expense;
}
/**
 * Create an expense, converting the amount to USD when possible (BR-040),
 * then write an audit entry and broadcast the creation to the port room.
 * Monetary values are persisted as strings (decimal columns).
 */
export async function createExpense(
  portId: string,
  data: CreateExpenseInput,
  meta: ServiceAuditMeta,
) {
  let amountUsd: string | null = null;
  let exchangeRate: string | null = null;
  if (data.currency !== 'USD') {
    const conversion = await convert(data.amount, data.currency, 'USD');
    if (conversion) {
      amountUsd = String(conversion.result);
      exchangeRate = String(conversion.rate);
    } else {
      // BR-040: if rate unavailable, save without conversion + log warning
      logger.warn({ currency: data.currency }, 'Currency rate unavailable, saving expense without USD conversion');
    }
  } else {
    // Already USD — conversion is the identity.
    amountUsd = String(data.amount);
    exchangeRate = '1';
  }
  const [expense] = await db
    .insert(expenses)
    .values({
      portId,
      establishmentName: data.establishmentName,
      amount: String(data.amount),
      currency: data.currency,
      amountUsd,
      exchangeRate,
      paymentMethod: data.paymentMethod,
      category: data.category,
      payer: data.payer,
      expenseDate: data.expenseDate,
      description: data.description,
      receiptFileIds: data.receiptFileIds ?? [],
      paymentStatus: data.paymentStatus,
      paymentDate: data.paymentDate ?? null,
      paymentReference: data.paymentReference ?? null,
      paymentNotes: data.paymentNotes ?? null,
      createdBy: meta.userId,
    })
    .returning();
  if (!expense) throw new Error('Insert failed');
  // Fire-and-forget audit write; failures must not block the response.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'expense',
    entityId: expense.id,
    newValue: expense as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:created', {
    expenseId: expense.id,
    amount: Number(expense.amount),
    currency: expense.currency,
    category: expense.category ?? '',
  });
  return expense;
}
/**
 * Update an expense, re-deriving the USD conversion when amount or currency
 * changed, then audit the change (full old/new snapshots + field diff) and
 * notify the port room of the changed fields.
 */
export async function updateExpense(
  id: string,
  portId: string,
  data: UpdateExpenseInput,
  meta: ServiceAuditMeta,
) {
  // Also serves as the existence/port-ownership check (throws NotFoundError).
  const existing = await getExpenseById(id, portId);
  const updateData: Record<string, unknown> = { ...data, updatedAt: new Date() };
  // Re-convert to USD if amount or currency changed
  const newAmount = data.amount ?? Number(existing.amount);
  const newCurrency = data.currency ?? existing.currency;
  if (data.amount !== undefined || data.currency !== undefined) {
    if (newCurrency !== 'USD') {
      const conversion = await convert(newAmount, newCurrency, 'USD');
      if (conversion) {
        updateData.amountUsd = String(conversion.result);
        updateData.exchangeRate = String(conversion.rate);
      } else {
        // Rate unavailable: clear stale conversion rather than keep wrong values.
        logger.warn({ currency: newCurrency }, 'Currency rate unavailable during update, clearing USD conversion');
        updateData.amountUsd = null;
        updateData.exchangeRate = null;
      }
    } else {
      updateData.amountUsd = String(newAmount);
      updateData.exchangeRate = '1';
    }
  }
  // Amount is stored as a string (decimal column); normalise before diffing.
  if (data.amount !== undefined) updateData.amount = String(data.amount);
  const { diff } = diffEntity(existing as unknown as Record<string, unknown>, updateData);
  const [updated] = await db
    .update(expenses)
    .set(updateData as any)
    .where(and(eq(expenses.id, id), eq(expenses.portId, portId)))
    .returning();
  if (!updated) throw new NotFoundError('Expense');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'expense',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    metadata: { diff },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:updated', {
    expenseId: id,
    changedFields: Object.keys(diff),
  });
  return updated;
}
/**
 * Soft-delete an expense.
 * BR-045: archiving is refused while the expense is attached to any invoice
 * that has progressed past 'draft' (ConflictError).
 */
export async function archiveExpense(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const existing = await getExpenseById(id, portId);
  // BR-045: Check if linked to non-draft invoice
  const linkedInvoice = await db
    .select({ invoiceId: invoiceExpenses.invoiceId })
    .from(invoiceExpenses)
    .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
    .where(
      and(
        eq(invoiceExpenses.expenseId, id),
        sql`${invoices.status} != 'draft'`,
      ),
    )
    .limit(1); // existence check — one row is enough
  if (linkedInvoice.length > 0) {
    throw new ConflictError('Cannot archive expense linked to a non-draft invoice');
  }
  await softDelete(expenses, expenses.id, id);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'archive',
    entityType: 'expense',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:archived', { expenseId: id });
}
/**
 * Restore a previously archived expense and return the restored record.
 * The first getExpenseById call doubles as the existence/ownership check.
 */
export async function restoreExpense(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  await getExpenseById(id, portId);
  await restore(expenses, expenses.id, id);
  // Re-fetch so the returned record reflects the cleared archivedAt.
  const restored = await getExpenseById(id, portId);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'restore',
    entityType: 'expense',
    entityId: id,
    newValue: restored as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:updated', {
    expenseId: id,
    changedFields: ['archivedAt'],
  });
  return restored;
}
/**
 * Append receipt file ids to an expense's receiptFileIds array.
 * Uses Postgres array_cat so the append happens in the database, avoiding a
 * read-modify-write race on the array column.
 */
export async function addReceiptFiles(
  id: string,
  portId: string,
  fileIds: string[],
  meta: ServiceAuditMeta,
) {
  await getExpenseById(id, portId); // existence/ownership check
  const [updated] = await db
    .update(expenses)
    .set({
      // Parameterised by drizzle; fileIds is bound, not interpolated.
      receiptFileIds: sql`array_cat(receipt_file_ids, ${fileIds}::text[])`,
      updatedAt: new Date(),
    } as any)
    .where(and(eq(expenses.id, id), eq(expenses.portId, portId)))
    .returning();
  if (!updated) throw new NotFoundError('Expense');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'expense',
    entityId: id,
    metadata: { addedFileIds: fileIds },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return updated;
}

270
src/lib/services/files.ts Normal file
View File

@@ -0,0 +1,270 @@
import { and, arrayContains, eq, or } from 'drizzle-orm';
import { db } from '@/lib/db';
import { files, documents } from '@/lib/db/schema/documents';
import { expenses } from '@/lib/db/schema/financial';
import { berthMaintenanceLog } from '@/lib/db/schema/berths';
import { createAuditLog } from '@/lib/audit';
import { ConflictError, NotFoundError, ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, getPresignedUrl } from '@/lib/minio';
import { buildListQuery } from '@/lib/db/query-builder';
import { env } from '@/lib/env';
import {
ALLOWED_MIME_TYPES,
MAX_FILE_SIZE,
PREVIEWABLE_MIMES,
} from '@/lib/constants/file-validation';
import { generateStorageKey, sanitizeFilename } from '@/lib/services/storage';
import type { UploadFileInput, UpdateFileInput, ListFilesInput } from '@/lib/validators/files';
// ─── Types ────────────────────────────────────────────────────────────────────
// Acting user + request origin, forwarded into every createAuditLog call below.
interface AuditMeta {
  userId: string; // id of the user performing the action
  portId: string; // tenant (port) scope
  ipAddress: string; // request origin, for auditing
  userAgent: string; // request client, for auditing
}
// Raw upload payload as received from the HTTP layer.
interface UploadFileParams {
  buffer: Buffer; // file contents
  originalName: string; // client-supplied name; sanitised before persisting
  mimeType: string; // validated against ALLOWED_MIME_TYPES in uploadFile
  size: number; // size in bytes; validated against MAX_FILE_SIZE
}
// ─── Upload ───────────────────────────────────────────────────────────────────
/**
 * Validate, store (MinIO) and register (DB) an uploaded file, then audit and
 * broadcast the upload. Throws ValidationError for disallowed MIME types or
 * oversized payloads.
 * NOTE(review): if the DB insert fails after putObject succeeds, the object is
 * orphaned in MinIO — confirm whether a cleanup process handles this.
 */
export async function uploadFile(
  portId: string,
  portSlug: string,
  file: UploadFileParams,
  data: UploadFileInput,
  meta: AuditMeta,
) {
  if (!ALLOWED_MIME_TYPES.has(file.mimeType)) {
    throw new ValidationError(`File type '${file.mimeType}' is not allowed`);
  }
  if (file.size > MAX_FILE_SIZE) {
    throw new ValidationError('File exceeds maximum size of 50MB');
  }
  // Storage key is namespaced by entity; falls back to a port-wide 'general'.
  const entity = data.entityType ?? 'general';
  const entityId = data.entityId ?? portId;
  const storagePath = generateStorageKey(portSlug, entity, entityId, file.mimeType);
  const sanitizedOriginal = sanitizeFilename(file.originalName);
  const sanitizedFilename = sanitizeFilename(data.filename);
  // Write the bytes first; only register in the DB once storage succeeded.
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    file.buffer,
    file.size,
    { 'Content-Type': file.mimeType },
  );
  const [record] = await db
    .insert(files)
    .values({
      portId,
      clientId: data.clientId ?? null,
      filename: sanitizedFilename,
      originalName: sanitizedOriginal,
      mimeType: file.mimeType,
      sizeBytes: String(file.size),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: data.category ?? null,
      uploadedBy: meta.userId,
    })
    .returning();
  // Fire-and-forget audit write + realtime notification.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'file',
    entityId: record!.id,
    newValue: { filename: record!.filename, mimeType: file.mimeType, size: file.size },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'file:uploaded', {
    fileId: record!.id,
    filename: record!.filename,
  });
  return record!;
}
// ─── Download / Preview URLs ──────────────────────────────────────────────────
/**
 * Resolve a presigned download URL for a file owned by the given port.
 * Throws NotFoundError (via getFileById) when the file does not exist.
 */
export async function getDownloadUrl(id: string, portId: string) {
  const record = await getFileById(id, portId);
  return {
    url: await getPresignedUrl(record.storagePath),
    filename: record.filename,
  };
}
/**
 * Resolve a presigned preview URL for an in-browser-previewable file.
 * Throws ValidationError when the MIME type is missing or not previewable.
 */
export async function getPreviewUrl(id: string, portId: string) {
  const record = await getFileById(id, portId);
  const previewable = record.mimeType != null && PREVIEWABLE_MIMES.has(record.mimeType);
  if (!previewable) {
    throw new ValidationError('This file type cannot be previewed');
  }
  return {
    url: await getPresignedUrl(record.storagePath),
    mimeType: record.mimeType,
  };
}
// ─── Update ───────────────────────────────────────────────────────────────────
export async function updateFile(
id: string,
portId: string,
data: UpdateFileInput,
meta: AuditMeta,
) {
const existing = await getFileById(id, portId);
const updates: { filename?: string; category?: string } = {};
if (data.filename !== undefined) updates.filename = sanitizeFilename(data.filename);
if (data.category !== undefined) updates.category = data.category;
const [updated] = await db
.update(files)
.set(updates)
.where(and(eq(files.id, id), eq(files.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'file',
entityId: id,
oldValue: { filename: existing.filename, category: existing.category },
newValue: updates,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'file:updated', { fileId: id });
return updated!;
}
// ─── Delete (BR-091) ──────────────────────────────────────────────────────────
/**
 * Hard-delete a file from both MinIO and the database.
 * BR-091: deletion is refused (ConflictError) while the file is referenced by
 * a document (primary or signed copy), an expense receipt, or a berth
 * maintenance-log photo.
 */
export async function deleteFile(id: string, portId: string, meta: AuditMeta) {
  const existing = await getFileById(id, portId);
  // BR-091: check references before deleting
  // All three reference checks run in parallel; limit(1) — existence only.
  const [docRefs, expenseRefs, maintenanceRefs] = await Promise.all([
    db
      .select({ id: documents.id })
      .from(documents)
      .where(
        and(
          eq(documents.portId, portId),
          or(eq(documents.fileId, id), eq(documents.signedFileId, id)),
        ),
      )
      .limit(1),
    db
      .select({ id: expenses.id })
      .from(expenses)
      .where(
        and(
          eq(expenses.portId, portId),
          arrayContains(expenses.receiptFileIds, [id]),
        ),
      )
      .limit(1),
    db
      .select({ id: berthMaintenanceLog.id })
      .from(berthMaintenanceLog)
      .where(
        and(
          eq(berthMaintenanceLog.portId, portId),
          arrayContains(berthMaintenanceLog.photoFileIds, [id]),
        ),
      )
      .limit(1),
  ]);
  if (docRefs.length > 0 || expenseRefs.length > 0 || maintenanceRefs.length > 0) {
    throw new ConflictError(
      'File cannot be deleted because it is referenced by other records',
    );
  }
  // Delete from MinIO first, then DB
  // NOTE(review): if the DB delete fails after removeObject, the DB row points
  // at a missing object — confirm this ordering is intentional.
  await minioClient.removeObject(env.MINIO_BUCKET, existing.storagePath);
  await db.delete(files).where(and(eq(files.id, id), eq(files.portId, portId)));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'file',
    entityId: id,
    oldValue: { filename: existing.filename },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'file:deleted', { fileId: id });
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * List files for a port with optional client/category filters, text search on
 * the stored and original filenames, sorting, and pagination.
 */
export async function listFiles(portId: string, query: ListFilesInput) {
  const { page, limit, sort, order, search, clientId, category } = query;
  const filters = [];
  if (clientId) {
    filters.push(eq(files.clientId, clientId));
  }
  if (category) {
    filters.push(eq(files.category, category));
  }
  // Whitelisted sort columns; anything else falls back to createdAt.
  const sortColumn =
    sort === 'filename' ? files.filename :
    sort === 'sizeBytes' ? files.sizeBytes :
    files.createdAt;
  return buildListQuery({
    table: files,
    portIdColumn: files.portId,
    portId,
    idColumn: files.id,
    updatedAtColumn: files.createdAt, // no updatedAt on files
    searchColumns: [files.filename, files.originalName],
    searchTerm: search,
    filters,
    sort: sort ? { column: sortColumn, direction: order } : undefined,
    page,
    pageSize: limit,
    // no archivedAtColumn — files are immutable records
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
export async function getFileById(id: string, portId: string) {
const file = await db.query.files.findFirst({
where: eq(files.id, id),
});
if (!file || file.portId !== portId) {
throw new NotFoundError('File');
}
return file;
}

View File

@@ -0,0 +1,234 @@
import { and, count, eq, gte, isNull } from 'drizzle-orm';
import { db } from '@/lib/db';
import { redis } from '@/lib/redis';
import { interests, interestNotes } from '@/lib/db/schema/interests';
import { reminders } from '@/lib/db/schema/operations';
import { emailThreads } from '@/lib/db/schema/email';
import { logger } from '@/lib/logger';
// ─── Types ────────────────────────────────────────────────────────────────────
// Result of scoring an interest: the normalised total plus the raw component
// scores that produced it (see calculateInterestScore for the weighting).
export interface InterestScore {
  totalScore: number; // 0-100 (normalised)
  breakdown: {
    pipelineAge: number; // 0-100
    stageSpeed: number; // 0-100
    documentCompleteness: number; // 0-100
    engagement: number; // 0-100
    berthLinked: number; // 0 or 25
  };
  calculatedAt: Date; // when this score was computed (cache may return older)
}
// ─── Redis cache ──────────────────────────────────────────────────────────────
// One cache entry per interest; scores are considered fresh for one hour.
const SCORE_KEY = (interestId: string) => `interest-score:${interestId}`;
const SCORE_TTL = 3600; // 1 hour
// ─── Scoring helpers ──────────────────────────────────────────────────────────
/**
 * Score an interest by its age in whole days — the fresher the interest,
 * the higher the score (100 down to a floor of 20 past 180 days).
 */
function scorePipelineAge(createdAt: Date): number {
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const ageDays = Math.floor((Date.now() - createdAt.getTime()) / MS_PER_DAY);
  // [upper bound in days, score] — first matching band wins.
  const bands: Array<[number, number]> = [
    [30, 100],
    [60, 80],
    [90, 60],
    [180, 40],
  ];
  for (const [limit, score] of bands) {
    if (ageDays <= limit) return score;
  }
  return 20;
}
/**
 * Score how quickly an interest moves through the pipeline: average days per
 * stage transition since creation. An interest still at 'open' (or with an
 * unrecognised stage) scores 0 — no progression to measure.
 */
function scoreStageSpeed(createdAt: Date, pipelineStage: string): number {
  // Approximate stage index based on known pipeline order
  const STAGE_ORDER: Record<string, number> = {
    open: 0,
    details_sent: 1,
    in_communication: 2,
    visited: 3,
    signed_eoi_nda: 4,
    deposit_10pct: 5,
    contract: 6,
    completed: 7,
  };
  const stagesReached = STAGE_ORDER[pipelineStage] ?? 0;
  if (stagesReached === 0) return 0; // still at 'open' — no progression yet
  // Clamp to >= 1 day so brand-new interests don't divide by ~0.
  const elapsedDays = Math.max(
    1,
    (Date.now() - createdAt.getTime()) / (1000 * 60 * 60 * 24),
  );
  const avgDaysPerStage = elapsedDays / stagesReached;
  // <7 days/stage = great, <14 = ok, <30 = slow, <60 = very slow, else cold.
  const bands: Array<{ below: number; score: number }> = [
    { below: 7, score: 100 },
    { below: 14, score: 75 },
    { below: 30, score: 50 },
    { below: 60, score: 25 },
  ];
  const hit = bands.find((b) => avgDaysPerStage < b.below);
  return hit ? hit.score : 10;
}
/**
 * Score document progress on an interest: EOI signed (30) + contract signed
 * (30) + deposit received (40), capped at 100. Each milestone counts if its
 * status field OR its date field indicates completion.
 */
function scoreDocumentCompleteness(interest: {
  eoiStatus: string | null;
  contractStatus: string | null;
  depositStatus: string | null;
  dateEoiSigned: Date | null;
  dateContractSigned: Date | null;
  dateDepositReceived: Date | null;
}): number {
  const eoiDone =
    interest.eoiStatus === 'signed' || interest.dateEoiSigned != null;
  const contractDone =
    interest.contractStatus === 'signed' || interest.dateContractSigned != null;
  const depositDone =
    interest.depositStatus === 'received' ||
    interest.depositStatus === 'paid' ||
    interest.dateDepositReceived != null;
  const total =
    (eoiDone ? 30 : 0) + (contractDone ? 30 : 0) + (depositDone ? 40 : 0);
  return Math.min(total, 100);
}
// ─── Main scoring function ────────────────────────────────────────────────────
/**
 * Compute (or return cached) the composite score for one interest.
 * Components: pipeline age, stage speed, document completeness, 30-day
 * engagement (notes + completed reminders + email threads), and a flat bonus
 * when a berth is linked. Raw components are summed and normalised to 0-100.
 * Redis failures are logged and tolerated — scoring never fails on cache.
 * @throws Error when the interest does not exist in the given port.
 */
export async function calculateInterestScore(
  interestId: string,
  portId: string,
): Promise<InterestScore> {
  // Try cache first
  try {
    const cached = await redis.get(SCORE_KEY(interestId));
    if (cached) {
      // calculatedAt was serialised to a string; revive it to a Date.
      const parsed = JSON.parse(cached) as InterestScore & { calculatedAt: string };
      return { ...parsed, calculatedAt: new Date(parsed.calculatedAt) };
    }
  } catch (err) {
    logger.warn({ err, interestId }, 'Redis cache read failed for interest score');
  }
  // Fetch interest
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) {
    throw new Error(`Interest not found: ${interestId}`);
  }
  // 1. Pipeline age
  const pipelineAge = scorePipelineAge(interest.createdAt);
  // 2. Stage speed
  const stageSpeed = scoreStageSpeed(interest.createdAt, interest.pipelineStage);
  // 3. Document completeness
  const documentCompleteness = scoreDocumentCompleteness(interest);
  // 4. Engagement — notes, emails, reminders in last 30 days
  const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  // The three activity counts are independent; run them in parallel.
  const [notesResult, remindersResult, emailResult] = await Promise.all([
    db
      .select({ value: count() })
      .from(interestNotes)
      .where(
        and(
          eq(interestNotes.interestId, interestId),
          gte(interestNotes.createdAt, thirtyDaysAgo),
        ),
      ),
    db
      .select({ value: count() })
      .from(reminders)
      .where(
        and(
          eq(reminders.interestId, interestId),
          eq(reminders.status, 'completed'),
          gte(reminders.completedAt, thirtyDaysAgo),
        ),
      ),
    db
      .select({ value: count() })
      .from(emailThreads)
      .where(
        and(
          // Email threads hang off the client, not the interest itself.
          eq(emailThreads.clientId, interest.clientId),
          eq(emailThreads.portId, portId),
          gte(emailThreads.lastMessageAt, thirtyDaysAgo),
        ),
      ),
  ]);
  const notesCount = notesResult[0]?.value ?? 0;
  const remindersCount = remindersResult[0]?.value ?? 0;
  const emailCount = emailResult[0]?.value ?? 0;
  // Per-activity caps: notes max 50, emails max 30, reminders max 20 → 100 total.
  const notesScore = Math.min(notesCount * 10, 50);
  const emailScore = Math.min(emailCount * 5, 30);
  const remindersScore = Math.min(remindersCount * 10, 20);
  const engagement = Math.min(notesScore + emailScore + remindersScore, 100);
  // 5. Berth linked
  const berthLinked = interest.berthId != null ? 25 : 0;
  // ── Normalise: max raw = 100+100+100+100+25 = 425 → /425 * 100 ──
  const RAW_MAX = 425;
  const rawTotal = pipelineAge + stageSpeed + documentCompleteness + engagement + berthLinked;
  const totalScore = Math.round((rawTotal / RAW_MAX) * 100);
  const result: InterestScore = {
    totalScore,
    breakdown: {
      pipelineAge,
      stageSpeed,
      documentCompleteness,
      engagement,
      berthLinked,
    },
    calculatedAt: new Date(),
  };
  // Write to cache (fire-and-forget)
  redis
    .setex(SCORE_KEY(interestId), SCORE_TTL, JSON.stringify(result))
    .catch((err) => logger.warn({ err, interestId }, 'Redis cache write failed for interest score'));
  return result;
}
// ─── Bulk scoring ─────────────────────────────────────────────────────────────
export async function calculateBulkScores(
portId: string,
): Promise<Array<{ interestId: string; score: InterestScore }>> {
const allInterests = await db
.select({ id: interests.id })
.from(interests)
.where(and(eq(interests.portId, portId), isNull(interests.archivedAt)));
const results = await Promise.allSettled(
allInterests.map(async (i) => {
const score = await calculateInterestScore(i.id, portId);
return { interestId: i.id, score };
}),
);
return results
.filter((r): r is PromiseFulfilledResult<{ interestId: string; score: InterestScore }> =>
r.status === 'fulfilled',
)
.map((r) => r.value);
}

View File

@@ -0,0 +1,591 @@
import { and, eq, inArray, isNull, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests, interestTags } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths } from '@/lib/db/schema/berths';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { buildListQuery } from '@/lib/db/query-builder';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore, withTransaction } from '@/lib/db/utils';
import type {
CreateInterestInput,
UpdateInterestInput,
ChangeStageInput,
ListInterestsInput,
} from '@/lib/validators/interests';
// ─── Types ────────────────────────────────────────────────────────────────────
// Acting user + request origin, forwarded into every createAuditLog call below.
interface AuditMeta {
  userId: string; // id of the user performing the action
  portId: string; // tenant (port) scope
  ipAddress: string; // request origin, for auditing
  userAgent: string; // request client, for auditing
}
// ─── BR-011: Auto-promote leadCategory ───────────────────────────────────────
async function resolveLeadCategory(
clientId: string,
leadCategory: string | undefined | null,
): Promise<string | undefined> {
if (leadCategory && leadCategory !== 'general_interest') {
return leadCategory;
}
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (client && (client.yachtLengthFt || client.yachtLengthM)) {
return 'specific_qualified';
}
return leadCategory ?? undefined;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * List interests for a port with filtering, tag filtering, sorting and
 * pagination, then enrich each row with the client's name, the linked berth's
 * mooring number, and its tags (fetched in three batched lookups rather than
 * per-row queries).
 */
export async function listInterests(portId: string, query: ListInterestsInput) {
  const {
    page,
    limit,
    sort,
    order,
    search,
    includeArchived,
    clientId,
    berthId,
    pipelineStage,
    leadCategory,
    eoiStatus,
    tagIds,
  } = query;
  const filters = [];
  if (clientId) {
    filters.push(eq(interests.clientId, clientId));
  }
  if (berthId) {
    filters.push(eq(interests.berthId, berthId));
  }
  if (pipelineStage && pipelineStage.length > 0) {
    filters.push(inArray(interests.pipelineStage, pipelineStage));
  }
  if (leadCategory) {
    filters.push(eq(interests.leadCategory, leadCategory));
  }
  if (eoiStatus) {
    filters.push(eq(interests.eoiStatus, eoiStatus));
  }
  if (tagIds && tagIds.length > 0) {
    // Tag filter: resolve matching interest ids first (ANY of the given tags).
    const interestsWithTags = await db
      .selectDistinct({ interestId: interestTags.interestId })
      .from(interestTags)
      .where(inArray(interestTags.tagId, tagIds));
    const matchingIds = interestsWithTags.map((r) => r.interestId);
    if (matchingIds.length > 0) {
      filters.push(inArray(interests.id, matchingIds));
    } else {
      // No interest carries any of the requested tags — short-circuit.
      return { data: [], total: 0 };
    }
  }
  // Whitelisted sort columns; default is updatedAt.
  const sortColumn = (() => {
    switch (sort) {
      case 'pipelineStage': return interests.pipelineStage;
      case 'leadCategory': return interests.leadCategory;
      case 'createdAt': return interests.createdAt;
      default: return interests.updatedAt;
    }
  })();
  const result = await buildListQuery({
    table: interests,
    portIdColumn: interests.portId,
    portId,
    idColumn: interests.id,
    updatedAtColumn: interests.updatedAt,
    filters,
    sort: { column: sortColumn, direction: order },
    page,
    pageSize: limit,
    searchColumns: [],
    searchTerm: search,
    includeArchived,
    archivedAtColumn: interests.archivedAt,
  });
  // Join client names and berth mooring numbers
  const interestIds = (result.data as Array<{ id: string; clientId: string; berthId: string | null }>).map((i) => i.id);
  const clientIds = [...new Set((result.data as Array<{ clientId: string }>).map((i) => i.clientId))];
  const berthIds = [...new Set(
    (result.data as Array<{ berthId: string | null }>)
      .map((i) => i.berthId)
      .filter(Boolean) as string[]
  )];
  let clientsMap: Record<string, string> = {};
  let berthsMap: Record<string, string> = {};
  let tagsByInterestId: Record<string, Array<{ id: string; name: string; color: string }>> = {};
  if (clientIds.length > 0) {
    const clientRows = await db
      .select({ id: clients.id, fullName: clients.fullName })
      .from(clients)
      .where(inArray(clients.id, clientIds));
    clientsMap = Object.fromEntries(clientRows.map((c) => [c.id, c.fullName]));
  }
  if (berthIds.length > 0) {
    const berthRows = await db
      .select({ id: berths.id, mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(inArray(berths.id, berthIds));
    berthsMap = Object.fromEntries(berthRows.map((b) => [b.id, b.mooringNumber]));
  }
  if (interestIds.length > 0) {
    const tagRows = await db
      .select({
        interestId: interestTags.interestId,
        id: tags.id,
        name: tags.name,
        color: tags.color,
      })
      .from(interestTags)
      .innerJoin(tags, eq(interestTags.tagId, tags.id))
      .where(inArray(interestTags.interestId, interestIds));
    // Group tags per interest for O(1) lookup during the final mapping.
    for (const row of tagRows) {
      if (!tagsByInterestId[row.interestId]) tagsByInterestId[row.interestId] = [];
      tagsByInterestId[row.interestId]!.push({ id: row.id, name: row.name, color: row.color });
    }
  }
  const data = (result.data as Array<Record<string, unknown>>).map((i) => ({
    ...i,
    clientName: clientsMap[i.clientId as string] ?? null,
    berthMooringNumber: i.berthId ? (berthsMap[i.berthId as string] ?? null) : null,
    tags: tagsByInterestId[i.id as string] ?? [],
  }));
  return { data, total: result.total };
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Fetch a single interest (port-scoped) enriched with the client's name, the
 * linked berth's mooring number (when linked), and its tags.
 * Throws NotFoundError when missing or belonging to another port.
 */
export async function getInterestById(id: string, portId: string) {
  const interest = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!interest || interest.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  const [clientRow] = await db
    .select({ fullName: clients.fullName })
    .from(clients)
    .where(eq(clients.id, interest.clientId));
  let berthMooringNumber: string | null = null;
  if (interest.berthId) {
    const [berthRow] = await db
      .select({ mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(eq(berths.id, interest.berthId));
    berthMooringNumber = berthRow?.mooringNumber ?? null;
  }
  const tagRows = await db
    .select({ id: tags.id, name: tags.name, color: tags.color })
    .from(interestTags)
    .innerJoin(tags, eq(interestTags.tagId, tags.id))
    .where(eq(interestTags.interestId, id));
  return {
    ...interest,
    clientName: clientRow?.fullName ?? null,
    berthMooringNumber,
    tags: tagRows,
  };
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Create an interest (and its tag links) in one transaction, applying the
 * BR-011 lead-category auto-promotion, then audit, broadcast, and dispatch a
 * webhook event (dynamic import keeps the webhook module off the hot path).
 */
export async function createInterest(
  portId: string,
  data: CreateInterestInput,
  meta: AuditMeta,
) {
  const { tagIds, ...interestData } = data;
  // BR-011: auto-promote leadCategory
  const resolvedLeadCategory = await resolveLeadCategory(
    data.clientId,
    data.leadCategory,
  );
  // Interest row + tag links succeed or fail together.
  const result = await withTransaction(async (tx) => {
    const [interest] = await tx
      .insert(interests)
      .values({
        portId,
        ...interestData,
        leadCategory: resolvedLeadCategory,
      })
      .returning();
    if (tagIds && tagIds.length > 0) {
      await tx.insert(interestTags).values(
        tagIds.map((tagId) => ({ interestId: interest!.id, tagId })),
      );
    }
    return interest!;
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'interest',
    entityId: result.id,
    newValue: { clientId: result.clientId, pipelineStage: result.pipelineStage },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:created', { interestId: result.id, clientId: result.clientId, berthId: result.berthId ?? null, source: result.source ?? '' });
  // Fire-and-forget webhook dispatch.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'interest:created', { interestId: result.id, clientId: result.clientId }),
  );
  return result;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Update an interest's fields, re-applying the BR-011 lead-category
 * auto-promotion when leadCategory is part of the patch, then audit and
 * broadcast the change.
 * Fix: the audit entry previously recorded the field diff as `oldValue` and
 * the raw patch as `newValue`; it now records the full previous record and
 * the persisted row, with the diff in metadata — matching updateExpense and
 * the other update services in this codebase.
 * Throws NotFoundError when the interest is missing or in another port.
 */
export async function updateInterest(
  id: string,
  portId: string,
  data: UpdateInterestInput,
  meta: AuditMeta,
) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  // BR-011: auto-promote leadCategory if provided
  let resolvedLeadCategory = data.leadCategory;
  if ('leadCategory' in data) {
    resolvedLeadCategory = await resolveLeadCategory(
      existing.clientId,
      data.leadCategory,
    ) as typeof data.leadCategory;
  }
  const updateData = { ...data, leadCategory: resolvedLeadCategory };
  const { diff } = diffEntity(
    existing as Record<string, unknown>,
    updateData as Record<string, unknown>,
  );
  const [updated] = await db
    .update(interests)
    .set({ ...updateData, updatedAt: new Date() })
    .where(and(eq(interests.id, id), eq(interests.portId, portId)))
    .returning();
  // Explicit check instead of a non-null assertion: the row could have been
  // removed between the read and the write.
  if (!updated) throw new NotFoundError('Interest');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'interest',
    entityId: id,
    oldValue: existing as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    metadata: { diff },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: Object.keys(diff) });
  return updated;
}
// ─── Change Stage ─────────────────────────────────────────────────────────────
/**
 * Move an interest to a new pipeline stage, auto-populating milestone dates
 * (BR-133), then audit, broadcast, dispatch a webhook, and notify the acting
 * user (all fire-and-forget).
 * Fix: stage and milestone dates are now written in a single UPDATE, so the
 * returned record includes the milestones (the previous two-step version
 * returned a stale row and issued an extra write).
 * Throws NotFoundError when the interest is missing or in another port.
 */
export async function changeInterestStage(
  id: string,
  portId: string,
  data: ChangeStageInput,
  meta: AuditMeta,
) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  const oldStage = existing.pipelineStage;
  // BR-133: Auto-populate milestones based on stage
  const milestoneUpdates: Record<string, unknown> = {};
  if (data.pipelineStage === 'signed_eoi_nda') milestoneUpdates.dateEoiSigned = new Date();
  if (data.pipelineStage === 'contract') milestoneUpdates.dateContractSigned = new Date();
  if (data.pipelineStage === 'deposit_10pct') milestoneUpdates.dateDepositReceived = new Date();
  const [updated] = await db
    .update(interests)
    .set({ pipelineStage: data.pipelineStage, ...milestoneUpdates, updatedAt: new Date() })
    .where(and(eq(interests.id, id), eq(interests.portId, portId)))
    .returning();
  if (!updated) throw new NotFoundError('Interest');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'interest',
    entityId: id,
    oldValue: { pipelineStage: oldStage },
    newValue: { pipelineStage: data.pipelineStage, reason: data.reason },
    metadata: { type: 'stage_change', reason: data.reason },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:stageChanged', {
    interestId: id,
    oldStage: oldStage ?? '',
    newStage: data.pipelineStage,
    clientName: '',
    berthNumber: '',
  });
  // Fire-and-forget webhook dispatch.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'interest:stageChanged', {
      interestId: id,
      oldStage: oldStage ?? null,
      newStage: data.pipelineStage,
    }),
  );
  // Fire-and-forget notification to the acting user
  void import('@/lib/services/notifications.service').then(({ createNotification }) =>
    createNotification({
      portId,
      userId: meta.userId,
      type: 'interest_stage_changed',
      title: `Interest moved to ${data.pipelineStage}`,
      description: `Interest ${id} stage changed from ${oldStage ?? 'unknown'} to ${data.pipelineStage}`,
      link: `/interests/${id}`,
      entityType: 'interest',
      entityId: id,
      dedupeKey: `interest:${id}:stage:${data.pipelineStage}`,
      cooldownMs: 300_000,
    }),
  );
  return updated;
}
// ─── Archive / Restore ────────────────────────────────────────────────────────
/**
 * Soft-delete an interest.
 * BR-014: archiving is refused (ConflictError) while an EOI is awaiting
 * signatures or a contract is pending — those documents must be cancelled
 * first.
 */
export async function archiveInterest(id: string, portId: string, meta: AuditMeta) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  // BR-014: Block archive if pending EOI/contract
  if (existing.eoiStatus === 'waiting_for_signatures' || existing.contractStatus === 'pending') {
    throw new ConflictError('Cannot archive interest with pending documents. Cancel documents first.');
  }
  await softDelete(interests, interests.id, id);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'archive',
    entityType: 'interest',
    entityId: id,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:archived', { interestId: id });
}
export async function restoreInterest(id: string, portId: string, meta: AuditMeta) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
await restore(interests, interests.id, id);
void createAuditLog({
userId: meta.userId,
portId,
action: 'restore',
entityType: 'interest',
entityId: id,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: [] });
}
// ─── Set Tags ─────────────────────────────────────────────────────────────────
export async function setInterestTags(
id: string,
portId: string,
tagIds: string[],
meta: AuditMeta,
) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
await db.delete(interestTags).where(eq(interestTags.interestId, id));
if (tagIds.length > 0) {
await db
.insert(interestTags)
.values(tagIds.map((tagId) => ({ interestId: id, tagId })));
}
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'interest',
entityId: id,
metadata: { type: 'tags_updated', tagIds },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: ['tags'] });
return { interestId: id, tagIds };
}
// ─── Link / Unlink Berth ──────────────────────────────────────────────────────
export async function linkBerth(
id: string,
portId: string,
berthId: string,
meta: AuditMeta,
) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
const [updated] = await db
.update(interests)
.set({ berthId, updatedAt: new Date() })
.where(and(eq(interests.id, id), eq(interests.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'interest',
entityId: id,
oldValue: { berthId: existing.berthId },
newValue: { berthId },
metadata: { type: 'berth_linked' },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:berthLinked', { interestId: id, berthId });
void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
dispatchWebhookEvent(portId, 'interest:berthLinked', { interestId: id, berthId }),
);
return updated!;
}
export async function unlinkBerth(id: string, portId: string, meta: AuditMeta) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
const oldBerthId = existing.berthId;
const [updated] = await db
.update(interests)
.set({ berthId: null, updatedAt: new Date() })
.where(and(eq(interests.id, id), eq(interests.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'interest',
entityId: id,
oldValue: { berthId: oldBerthId },
newValue: { berthId: null },
metadata: { type: 'berth_unlinked' },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:berthUnlinked', { interestId: id, berthId: oldBerthId ?? '' });
return updated!;
}
// ─── Stage Counts (for board) ────────────────────────────────────────────────
/**
 * Counts non-archived interests per pipeline stage for the board view.
 * Returns a plain { stage: count } record.
 */
export async function getInterestStageCounts(portId: string) {
  const grouped = await db
    .select({ stage: interests.pipelineStage, count: sql<number>`count(*)::int` })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  const counts: Record<string, number> = {};
  for (const row of grouped) {
    counts[row.stage] = row.count;
  }
  return counts;
}

View File

@@ -0,0 +1,657 @@
import { eq, and, desc, like, lt, sql, gte, lte, inArray, ne } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
invoices,
invoiceLineItems,
invoiceExpenses,
expenses,
} from '@/lib/db/schema/financial';
import { files } from '@/lib/db/schema/documents';
import { ports } from '@/lib/db/schema/ports';
import { systemSettings } from '@/lib/db/schema/system';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { withTransaction } from '@/lib/db/utils';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { invoiceTemplate, buildInvoiceInputs } from '@/lib/pdf/templates/invoice-template';
import { minioClient, buildStoragePath } from '@/lib/minio';
import { getQueue } from '@/lib/queue';
import { env } from '@/lib/env';
import type {
CreateInvoiceInput,
UpdateInvoiceInput,
RecordPaymentInput,
ListInvoicesInput,
} from '@/lib/validators/invoices';
// AuditMeta type expected by service functions
// Captures who performed an action and from where, for audit-log rows.
export interface ServiceAuditMeta {
  userId: string;    // acting user's id
  portId: string;    // tenant (port) scope of the action
  ipAddress: string; // request origin IP, recorded verbatim on audit rows
  userAgent: string; // request User-Agent header, recorded verbatim
}
// ─── Auto-numbering (BR-041) ───────────────────────────────────────────────
/**
 * Generates the next sequential invoice number for a port, formatted as
 * `INV-YYYYMM-NNN` (sequence zero-padded to at least 3 digits; restarts
 * each month because the prefix changes).
 *
 * Concurrency: serialized per port via a transaction-scoped Postgres
 * advisory lock, so two concurrent creates cannot mint the same number.
 *
 * Fix: the previous version took the lexicographic max of existing numbers,
 * which breaks once a month exceeds 999 invoices ("INV-202603-999" sorts
 * AFTER "INV-202603-1000", so 1000 would be minted repeatedly). Ordering by
 * suffix length first, then lexicographically, yields the numeric max for
 * zero-padded sequences of varying length. Also guards parseInt against
 * malformed rows instead of producing NaN.
 */
async function generateInvoiceNumber(portId: string, tx: typeof db): Promise<string> {
  const lockKey = `invoice_${portId}`;
  // Advisory xact lock: released automatically at transaction end.
  await tx.execute(sql`SELECT pg_advisory_xact_lock(hashtext(${lockKey}))`);
  const now = new Date();
  const prefix = `INV-${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, '0')}`;
  const [existing] = await tx
    .select({ invoiceNumber: invoices.invoiceNumber })
    .from(invoices)
    .where(
      and(eq(invoices.portId, portId), like(invoices.invoiceNumber, `${prefix}-%`)),
    )
    // Longer string first, then lexicographic: equivalent to numeric ordering
    // of the trailing sequence since all numbers share the same prefix.
    .orderBy(desc(sql`length(${invoices.invoiceNumber})`), desc(invoices.invoiceNumber))
    .limit(1);
  let seq = 1;
  if (existing) {
    const parts = existing.invoiceNumber.split('-');
    const parsed = parseInt(parts[parts.length - 1] ?? '0', 10);
    // Malformed row (non-numeric suffix): fall back to restarting at 1.
    seq = Number.isNaN(parsed) ? 1 : parsed + 1;
  }
  return `${prefix}-${String(seq).padStart(3, '0')}`;
}
// ─── List ─────────────────────────────────────────────────────────────────
/**
 * Paginated invoice listing with optional status / client-name / due-date
 * filters, free-text search over client name and invoice number, and
 * caller-selected sorting.
 */
export async function listInvoices(portId: string, query: ListInvoicesInput) {
  const filters = [];
  if (query.status) filters.push(eq(invoices.status, query.status));
  if (query.clientName) filters.push(like(invoices.clientName, `%${query.clientName}%`));
  if (query.dateFrom) filters.push(gte(invoices.dueDate, query.dateFrom));
  if (query.dateTo) filters.push(lte(invoices.dueDate, query.dateTo));
  // Dynamic sort column is resolved by key lookup on the table object.
  const sort = query.sort
    ? {
        column: invoices[query.sort as keyof typeof invoices] as any,
        direction: query.order,
      }
    : undefined;
  return buildListQuery({
    table: invoices,
    portIdColumn: invoices.portId,
    portId,
    idColumn: invoices.id,
    updatedAtColumn: invoices.updatedAt,
    filters,
    page: query.page,
    pageSize: query.limit,
    searchColumns: [invoices.clientName, invoices.invoiceNumber],
    searchTerm: query.search,
    includeArchived: query.includeArchived,
    archivedAtColumn: invoices.archivedAt,
    sort,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────
/**
 * Loads one invoice (scoped to the port) together with its ordered line
 * items and any expenses linked through the invoice_expenses join table.
 * Throws NotFoundError when the id does not exist within the port.
 *
 * Improvement: the line-item and linked-expense queries are independent, so
 * they now run concurrently via Promise.all instead of sequentially.
 */
export async function getInvoiceById(id: string, portId: string) {
  const invoice = await db.query.invoices.findFirst({
    where: and(eq(invoices.id, id), eq(invoices.portId, portId)),
  });
  if (!invoice) throw new NotFoundError('Invoice');
  const [lineItems, linkedExpenses] = await Promise.all([
    db
      .select()
      .from(invoiceLineItems)
      .where(eq(invoiceLineItems.invoiceId, id))
      .orderBy(invoiceLineItems.sortOrder),
    db
      .select({ expense: expenses })
      .from(invoiceExpenses)
      .innerJoin(expenses, eq(expenses.id, invoiceExpenses.expenseId))
      .where(eq(invoiceExpenses.invoiceId, id)),
  ]);
  return {
    ...invoice,
    lineItems,
    linkedExpenses: linkedExpenses.map((r) => r.expense),
  };
}
// ─── Create (BR-041, BR-042, BR-045) ─────────────────────────────────────
/**
 * Creates a draft invoice with its line items and optional expense links,
 * all inside one transaction.
 *
 * Business rules:
 *  - BR-041: invoice number is auto-generated (serialized via advisory lock).
 *  - BR-042: net10 payment terms apply a discount percentage read from
 *    systemSettings (`invoice_net10_discount`), defaulting to 2%.
 *  - BR-045: an expense may not be attached if it is already linked to any
 *    non-draft invoice.
 *
 * Side effects after commit: fire-and-forget audit log and a socket
 * 'invoice:created' broadcast to the port room.
 *
 * @throws ConflictError when BR-045 is violated.
 */
export async function createInvoice(
  portId: string,
  data: CreateInvoiceInput,
  meta: ServiceAuditMeta,
) {
  const invoice = await withTransaction(async (tx) => {
    const invoiceNumber = await generateInvoiceNumber(portId, tx);
    // Calculate subtotal from line items
    const lineItemsData = data.lineItems ?? [];
    const subtotal = lineItemsData.reduce(
      (sum, li) => sum + li.quantity * li.unitPrice,
      0,
    );
    // BR-042: net10 discount — read from systemSettings
    let discountPct = 0;
    if (data.paymentTerms === 'net10') {
      const [setting] = await tx
        .select({ value: systemSettings.value })
        .from(systemSettings)
        .where(
          and(
            eq(systemSettings.key, 'invoice_net10_discount'),
            eq(systemSettings.portId, portId),
          ),
        )
        .limit(1);
      if (setting) {
        // `|| 2` also maps a stored 0 / non-numeric value to the 2% default.
        discountPct = Number(setting.value) || 2;
      } else {
        discountPct = 2;
      }
    }
    const discountAmount = (subtotal * discountPct) / 100;
    const feeAmount = 0; // No fee by default
    const feePct = 0;
    const total = subtotal - discountAmount + feeAmount;
    // BR-045: Verify expenses aren't already linked to a non-draft invoice
    const expenseIds = data.expenseIds ?? [];
    if (expenseIds.length > 0) {
      const alreadyLinked = await tx
        .select({ expenseId: invoiceExpenses.expenseId })
        .from(invoiceExpenses)
        .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
        .where(
          and(
            inArray(invoiceExpenses.expenseId, expenseIds),
            sql`${invoices.status} != 'draft'`,
          ),
        )
        .limit(1);
      if (alreadyLinked.length > 0) {
        throw new ConflictError(
          'One or more expenses are already linked to a non-draft invoice',
        );
      }
    }
    // Monetary values are persisted as strings (numeric columns).
    const [newInvoice] = await tx
      .insert(invoices)
      .values({
        portId,
        invoiceNumber,
        clientName: data.clientName,
        billingEmail: data.billingEmail ?? null,
        billingAddress: data.billingAddress ?? null,
        dueDate: data.dueDate,
        paymentTerms: data.paymentTerms ?? 'net30',
        currency: data.currency ?? 'USD',
        subtotal: String(subtotal),
        discountPct: String(discountPct),
        discountAmount: String(discountAmount),
        feePct: String(feePct),
        feeAmount: String(feeAmount),
        total: String(total),
        status: 'draft',
        paymentStatus: 'unpaid',
        notes: data.notes ?? null,
        createdBy: meta.userId,
      })
      .returning();
    if (!newInvoice) throw new Error('Insert failed');
    // Insert line items
    if (lineItemsData.length > 0) {
      await tx.insert(invoiceLineItems).values(
        lineItemsData.map((li, idx) => ({
          invoiceId: newInvoice.id,
          description: li.description,
          quantity: String(li.quantity),
          unitPrice: String(li.unitPrice),
          total: String(li.quantity * li.unitPrice),
          sortOrder: idx,
        })),
      );
    }
    // Link expenses
    if (expenseIds.length > 0) {
      await tx.insert(invoiceExpenses).values(
        expenseIds.map((expenseId) => ({
          invoiceId: newInvoice.id,
          expenseId,
        })),
      );
    }
    return newInvoice;
  });
  // Fire-and-forget audit write after successful commit.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'invoice',
    entityId: invoice.id,
    newValue: invoice as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:created', {
    invoiceId: invoice.id,
    invoiceNumber: invoice.invoiceNumber,
    total: Number(invoice.total),
    clientName: invoice.clientName,
  });
  return invoice;
}
// ─── Update (draft only) ──────────────────────────────────────────────────
/**
 * Updates a DRAFT invoice (non-draft invoices are immutable; ConflictError).
 * Scalar fields are patched individually; when lineItems are provided the
 * whole set is replaced and totals are recalculated (re-reading the BR-042
 * net10 discount); when expenseIds are provided the expense links are
 * replaced after re-checking BR-045 (ignoring links held by this invoice).
 * Runs inside one transaction, then audits a field diff and emits
 * 'invoice:updated' with the changed field names.
 *
 * @throws ConflictError when the invoice is not a draft or BR-045 fails.
 * @throws NotFoundError when the invoice disappears during the update.
 */
export async function updateInvoice(
  id: string,
  portId: string,
  data: UpdateInvoiceInput,
  meta: ServiceAuditMeta,
) {
  const existing = await getInvoiceById(id, portId);
  if (existing.status !== 'draft') {
    throw new ConflictError('Only draft invoices can be updated');
  }
  const updated = await withTransaction(async (tx) => {
    // Patch-style update: only explicitly provided fields are written.
    const updateData: Record<string, unknown> = { updatedAt: new Date() };
    if (data.clientName !== undefined) updateData.clientName = data.clientName;
    if (data.billingEmail !== undefined) updateData.billingEmail = data.billingEmail;
    if (data.billingAddress !== undefined) updateData.billingAddress = data.billingAddress;
    if (data.dueDate !== undefined) updateData.dueDate = data.dueDate;
    if (data.paymentTerms !== undefined) updateData.paymentTerms = data.paymentTerms;
    if (data.currency !== undefined) updateData.currency = data.currency;
    if (data.notes !== undefined) updateData.notes = data.notes;
    // Recalculate totals if line items changed
    if (data.lineItems !== undefined) {
      const lineItemsData = data.lineItems;
      const subtotal = lineItemsData.reduce(
        (sum, li) => sum + li.quantity * li.unitPrice,
        0,
      );
      // Use the incoming terms when provided, otherwise the stored ones.
      const paymentTerms = data.paymentTerms ?? existing.paymentTerms;
      let discountPct = 0;
      if (paymentTerms === 'net10') {
        const [setting] = await tx
          .select({ value: systemSettings.value })
          .from(systemSettings)
          .where(
            and(
              eq(systemSettings.key, 'invoice_net10_discount'),
              eq(systemSettings.portId, portId),
            ),
          )
          .limit(1);
        // `|| 2` also maps a stored 0 / non-numeric value to the 2% default.
        discountPct = setting ? Number(setting.value) || 2 : 2;
      }
      const discountAmount = (subtotal * discountPct) / 100;
      // Fees are carried over from the existing invoice, not recalculated.
      const feeAmount = Number(existing.feeAmount) || 0;
      const feePct = Number(existing.feePct) || 0;
      const total = subtotal - discountAmount + feeAmount;
      updateData.subtotal = String(subtotal);
      updateData.discountPct = String(discountPct);
      updateData.discountAmount = String(discountAmount);
      updateData.feePct = String(feePct);
      updateData.feeAmount = String(feeAmount);
      updateData.total = String(total);
      // Replace line items
      await tx.delete(invoiceLineItems).where(eq(invoiceLineItems.invoiceId, id));
      if (lineItemsData.length > 0) {
        await tx.insert(invoiceLineItems).values(
          lineItemsData.map((li, idx) => ({
            invoiceId: id,
            description: li.description,
            quantity: String(li.quantity),
            unitPrice: String(li.unitPrice),
            total: String(li.quantity * li.unitPrice),
            sortOrder: idx,
          })),
        );
      }
    }
    // Replace expense links if provided
    if (data.expenseIds !== undefined) {
      // BR-045
      if (data.expenseIds.length > 0) {
        // `ne(invoices.id, id)` excludes links held by this very invoice.
        const alreadyLinked = await tx
          .select({ expenseId: invoiceExpenses.expenseId })
          .from(invoiceExpenses)
          .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
          .where(
            and(
              inArray(invoiceExpenses.expenseId, data.expenseIds),
              sql`${invoices.status} != 'draft'`,
              ne(invoices.id, id),
            ),
          )
          .limit(1);
        if (alreadyLinked.length > 0) {
          throw new ConflictError(
            'One or more expenses are already linked to a non-draft invoice',
          );
        }
      }
      await tx.delete(invoiceExpenses).where(eq(invoiceExpenses.invoiceId, id));
      if (data.expenseIds.length > 0) {
        await tx.insert(invoiceExpenses).values(
          data.expenseIds.map((expenseId) => ({ invoiceId: id, expenseId })),
        );
      }
    }
    const [result] = await tx
      .update(invoices)
      .set(updateData as any)
      .where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
      .returning();
    if (!result) throw new NotFoundError('Invoice');
    return result;
  });
  // Audit a structured before/after diff (fire-and-forget).
  const { diff } = diffEntity(
    existing as unknown as Record<string, unknown>,
    updated as unknown as Record<string, unknown>,
  );
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    metadata: { diff },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:updated', {
    invoiceId: id,
    changedFields: Object.keys(diff),
  });
  return updated;
}
// ─── Delete (draft only) ──────────────────────────────────────────────────
/**
 * Hard-deletes a draft invoice along with its child rows (expense links and
 * line items). Non-draft invoices are immutable and cannot be deleted.
 * NOTE(review): emits 'invoice:updated' rather than a dedicated delete
 * event — presumably clients refetch on any update; confirm intended.
 */
export async function deleteInvoice(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const existing = await getInvoiceById(id, portId);
  if (existing.status !== 'draft') {
    throw new ConflictError('Only draft invoices can be deleted');
  }
  await withTransaction(async (tx) => {
    // Children first (FK constraints), then the invoice row itself.
    await tx.delete(invoiceExpenses).where(eq(invoiceExpenses.invoiceId, id));
    await tx.delete(invoiceLineItems).where(eq(invoiceLineItems.invoiceId, id));
    await tx
      .delete(invoices)
      .where(and(eq(invoices.id, id), eq(invoices.portId, portId)));
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'invoice',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:updated', {
    invoiceId: id,
    changedFields: ['status'],
  });
}
// ─── Generate PDF ─────────────────────────────────────────────────────────
/**
 * Renders the invoice as a PDF, uploads it to MinIO, records a file row, and
 * links it to the invoice via pdfFileId. Returns the new file record.
 * NOTE(review): each call produces a fresh object; earlier PDFs are not
 * removed from storage here — confirm orphan cleanup happens elsewhere.
 */
export async function generateInvoicePdf(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const invoice = await getInvoiceById(id, portId);
  const [port] = await db
    .select({ id: ports.id, name: ports.name, slug: ports.slug })
    .from(ports)
    .where(eq(ports.id, portId))
    .limit(1);
  // Render the PDF from the shared invoice template.
  const templateInputs = buildInvoiceInputs(invoice, invoice.lineItems, port);
  const pdfBytes = await generatePdf(invoiceTemplate, [templateInputs]);
  // Upload to object storage first; only then persist the file metadata.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port?.slug ?? portId, 'invoices', id, fileId, 'pdf');
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    Buffer.from(pdfBytes),
    pdfBytes.length,
    { 'Content-Type': 'application/pdf' },
  );
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      filename: `invoice-${invoice.invoiceNumber}.pdf`,
      originalName: `invoice-${invoice.invoiceNumber}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBytes.length),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'invoice',
      uploadedBy: meta.userId,
    })
    .returning();
  if (!fileRecord) throw new Error('File record insert failed');
  // Point the invoice at its freshly generated PDF.
  await db
    .update(invoices)
    .set({ pdfFileId: fileRecord.id, updatedAt: new Date() })
    .where(and(eq(invoices.id, id), eq(invoices.portId, portId)));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    metadata: { action: 'pdf_generated', fileId: fileRecord.id },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return fileRecord;
}
// ─── Send invoice ─────────────────────────────────────────────────────────
/**
 * Sends an invoice: ensures a rendered PDF exists (generating one on demand),
 * enqueues the email job, and flips the status to 'sent'.
 * NOTE(review): no status guard — re-invoking on an already-sent or paid
 * invoice re-queues the email and resets status to 'sent'; confirm intended.
 */
export async function sendInvoice(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const invoice = await getInvoiceById(id, portId);
  // Lazily generate the PDF when the invoice has none yet.
  if (!invoice.pdfFileId) {
    await generateInvoicePdf(id, portId, meta);
  }
  // The email worker resolves recipient and attachment from the invoice id.
  await getQueue('email').add('send-invoice', { invoiceId: id, portId });
  const [updated] = await db
    .update(invoices)
    .set({ status: 'sent', updatedAt: new Date() })
    .where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
    .returning();
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    oldValue: { status: invoice.status },
    newValue: { status: 'sent' },
    metadata: { action: 'invoice_sent' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:sent', {
    invoiceId: id,
    invoiceNumber: invoice.invoiceNumber,
    recipientEmail: invoice.billingEmail ?? '',
  });
  return updated;
}
// ─── Record payment ───────────────────────────────────────────────────────
/**
 * Records a payment against an invoice, marking it fully paid.
 * NOTE(review): partial payments are not modeled — any recorded payment sets
 * both status and paymentStatus to 'paid'; confirm intended.
 */
export async function recordPayment(
  id: string,
  portId: string,
  data: RecordPaymentInput,
  meta: ServiceAuditMeta,
) {
  const existing = await getInvoiceById(id, portId);
  const [updated] = await db
    .update(invoices)
    .set({
      status: 'paid',
      paymentStatus: 'paid',
      paymentDate: data.paymentDate,
      paymentMethod: data.paymentMethod ?? null,
      paymentReference: data.paymentReference ?? null,
      updatedAt: new Date(),
    })
    .where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
    .returning();
  if (!updated) throw new NotFoundError('Invoice');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    oldValue: { status: existing.status, paymentStatus: existing.paymentStatus },
    newValue: { status: 'paid', paymentStatus: 'paid', paymentDate: data.paymentDate },
    metadata: { action: 'payment_recorded' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:paid', {
    invoiceId: id,
    invoiceNumber: existing.invoiceNumber,
    amount: Number(existing.total),
  });
  return updated;
}
// ─── Detect overdue (BR-044) ──────────────────────────────────────────────
export async function detectOverdue(portId: string) {
const today = new Date().toISOString().split('T')[0]!;
const overdueInvoices = await db
.select({ id: invoices.id, invoiceNumber: invoices.invoiceNumber, dueDate: invoices.dueDate })
.from(invoices)
.where(
and(
eq(invoices.portId, portId),
eq(invoices.status, 'sent'),
lt(invoices.dueDate, today),
),
);
if (overdueInvoices.length === 0) return;
for (const inv of overdueInvoices) {
await db
.update(invoices)
.set({ status: 'overdue', updatedAt: new Date() })
.where(eq(invoices.id, inv.id));
const daysPastDue = Math.max(1, Math.ceil(
(Date.now() - new Date(inv.dueDate).getTime()) / (1000 * 60 * 60 * 24),
));
emitToRoom(`port:${portId}`, 'invoice:overdue', {
invoiceId: inv.id,
invoiceNumber: inv.invoiceNumber,
daysPastDue,
});
await getQueue('notifications').add('invoice-overdue-notify', {
invoiceId: inv.id,
portId,
});
logger.info(
{ invoiceId: inv.id, invoiceNumber: inv.invoiceNumber, portId },
'Invoice marked overdue',
);
}
}

View File

@@ -0,0 +1,281 @@
import { eq, and, desc } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clientNotes, clients } from '@/lib/db/schema/clients';
import { interestNotes, interests } from '@/lib/db/schema/interests';
import { userProfiles } from '@/lib/db/schema/users';
import { NotFoundError, ValidationError } from '@/lib/errors';
import type { CreateNoteInput, UpdateNoteInput } from '@/lib/validators/notes';
const EDIT_WINDOW_MS = 15 * 60 * 1000; // 15 minutes
type EntityType = 'clients' | 'interests';
// ─── Helpers ─────────────────────────────────────────────────────────────────
// Maps an entity type to its notes table.
// NOTE(review): appears unused in this module — the service functions below
// branch on entityType directly; confirm before removing.
function getTable(entityType: EntityType) {
  return entityType === 'clients' ? clientNotes : interestNotes;
}
// Maps an entity type to the FK column linking a note to its parent row.
// NOTE(review): also appears unused here — confirm before removing.
function getEntityIdField(entityType: EntityType) {
  return entityType === 'clients' ? clientNotes.clientId : interestNotes.interestId;
}
async function verifyParentBelongsToPort(
entityType: EntityType,
entityId: string,
portId: string,
): Promise<void> {
if (entityType === 'clients') {
const client = await db
.select({ id: clients.id })
.from(clients)
.where(and(eq(clients.id, entityId), eq(clients.portId, portId)))
.limit(1);
if (!client.length) throw new NotFoundError('Client');
} else {
const interest = await db
.select({ id: interests.id })
.from(interests)
.where(and(eq(interests.id, entityId), eq(interests.portId, portId)))
.limit(1);
if (!interest.length) throw new NotFoundError('Interest');
}
}
// ─── Service ─────────────────────────────────────────────────────────────────
/**
 * Lists all notes for a client or interest, newest first, with the author's
 * display name joined in. Verifies port ownership of the parent first.
 */
export async function listForEntity(
  portId: string,
  entityType: EntityType,
  entityId: string,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'clients') {
    return db
      .select({
        id: clientNotes.id,
        clientId: clientNotes.clientId,
        authorId: clientNotes.authorId,
        content: clientNotes.content,
        mentions: clientNotes.mentions,
        isLocked: clientNotes.isLocked,
        createdAt: clientNotes.createdAt,
        updatedAt: clientNotes.updatedAt,
        authorName: userProfiles.displayName,
      })
      .from(clientNotes)
      .leftJoin(userProfiles, eq(userProfiles.userId, clientNotes.authorId))
      .where(eq(clientNotes.clientId, entityId))
      .orderBy(desc(clientNotes.createdAt));
  }
  return db
    .select({
      id: interestNotes.id,
      interestId: interestNotes.interestId,
      authorId: interestNotes.authorId,
      content: interestNotes.content,
      mentions: interestNotes.mentions,
      isLocked: interestNotes.isLocked,
      createdAt: interestNotes.createdAt,
      updatedAt: interestNotes.updatedAt,
      authorName: userProfiles.displayName,
    })
    .from(interestNotes)
    .leftJoin(userProfiles, eq(userProfiles.userId, interestNotes.authorId))
    .where(eq(interestNotes.interestId, entityId))
    .orderBy(desc(interestNotes.createdAt));
}
/**
 * Adds a note to a client or interest and returns it with the author's
 * display name attached. Mentioned users receive a deduplicated in-app
 * notification (fire-and-forget).
 *
 * NOTE(review): the insert persists only `content`, so `note.mentions`
 * should come back empty/null unless the DB populates it — the mention
 * notification branches below therefore look unreachable. If
 * CreateNoteInput carries mentions, they should probably be written on
 * insert; confirm against the validator and schema.
 */
export async function create(
  portId: string,
  entityType: EntityType,
  entityId: string,
  authorId: string,
  data: CreateNoteInput,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'clients') {
    const [note] = await db
      .insert(clientNotes)
      .values({ clientId: entityId, authorId, content: data.content })
      .returning();
    if (!note) throw new Error('Insert failed');
    // Resolve the author's display name for the response payload.
    const profile = await db
      .select({ displayName: userProfiles.displayName })
      .from(userProfiles)
      .where(eq(userProfiles.userId, authorId))
      .limit(1);
    const authorName = profile[0]?.displayName ?? null;
    // Fire mention notifications (fire-and-forget)
    if (note.mentions && note.mentions.length > 0) {
      for (const mentionedUserId of note.mentions) {
        void import('@/lib/services/notifications.service').then(({ createNotification }) =>
          createNotification({
            portId,
            userId: mentionedUserId,
            type: 'mention',
            title: 'You were mentioned in a note',
            description: `${authorName ?? 'Someone'} mentioned you in a note`,
            link: `/clients/${entityId}`,
            entityType: 'client',
            entityId,
            dedupeKey: `note:${note.id}:mention:${mentionedUserId}`,
          }),
        );
      }
    }
    return { ...note, authorName };
  } else {
    const [note] = await db
      .insert(interestNotes)
      .values({ interestId: entityId, authorId, content: data.content })
      .returning();
    if (!note) throw new Error('Insert failed');
    // Resolve the author's display name for the response payload.
    const profile = await db
      .select({ displayName: userProfiles.displayName })
      .from(userProfiles)
      .where(eq(userProfiles.userId, authorId))
      .limit(1);
    const authorName = profile[0]?.displayName ?? null;
    // Fire mention notifications (fire-and-forget)
    if (note.mentions && note.mentions.length > 0) {
      for (const mentionedUserId of note.mentions) {
        void import('@/lib/services/notifications.service').then(({ createNotification }) =>
          createNotification({
            portId,
            userId: mentionedUserId,
            type: 'mention',
            title: 'You were mentioned in a note',
            description: `${authorName ?? 'Someone'} mentioned you in a note`,
            link: `/interests/${entityId}`,
            entityType: 'interest',
            entityId,
            dedupeKey: `note:${note.id}:mention:${mentionedUserId}`,
          }),
        );
      }
    }
    return { ...note, authorName };
  }
}
/**
 * Edits a note's content, allowed only within the 15-minute edit window.
 * Throws NotFoundError for a missing note and ValidationError once the
 * window has expired. Returns the updated note plus the author's name.
 * NOTE(review): the isLocked flag is not checked here — confirm whether
 * locked notes should be editable.
 */
export async function update(
  portId: string,
  entityType: EntityType,
  entityId: string,
  noteId: string,
  data: UpdateNoteInput,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  const windowExpired = (createdAt: Date | string) =>
    Date.now() - new Date(createdAt).getTime() > EDIT_WINDOW_MS;
  if (entityType === 'clients') {
    const [note] = await db
      .select()
      .from(clientNotes)
      .where(and(eq(clientNotes.id, noteId), eq(clientNotes.clientId, entityId)))
      .limit(1);
    if (!note) throw new NotFoundError('Note');
    if (windowExpired(note.createdAt)) {
      throw new ValidationError('Note edit window has expired (15 minutes)');
    }
    const [updated] = await db
      .update(clientNotes)
      .set({ content: data.content, updatedAt: new Date() })
      .where(eq(clientNotes.id, noteId))
      .returning();
    if (!updated) throw new NotFoundError('Note');
    const [profile] = await db
      .select({ displayName: userProfiles.displayName })
      .from(userProfiles)
      .where(eq(userProfiles.userId, updated.authorId))
      .limit(1);
    return { ...updated, authorName: profile?.displayName ?? null };
  }
  const [note] = await db
    .select()
    .from(interestNotes)
    .where(and(eq(interestNotes.id, noteId), eq(interestNotes.interestId, entityId)))
    .limit(1);
  if (!note) throw new NotFoundError('Note');
  if (windowExpired(note.createdAt)) {
    throw new ValidationError('Note edit window has expired (15 minutes)');
  }
  const [updated] = await db
    .update(interestNotes)
    .set({ content: data.content, updatedAt: new Date() })
    .where(eq(interestNotes.id, noteId))
    .returning();
  if (!updated) throw new NotFoundError('Note');
  const [profile] = await db
    .select({ displayName: userProfiles.displayName })
    .from(userProfiles)
    .where(eq(userProfiles.userId, updated.authorId))
    .limit(1);
  return { ...updated, authorName: profile?.displayName ?? null };
}
/**
 * Deletes a note, allowed only within the same 15-minute window as edits.
 * Returns the deleted row.
 */
export async function deleteNote(
  portId: string,
  entityType: EntityType,
  entityId: string,
  noteId: string,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  const windowExpired = (createdAt: Date | string) =>
    Date.now() - new Date(createdAt).getTime() > EDIT_WINDOW_MS;
  if (entityType === 'clients') {
    const [note] = await db
      .select()
      .from(clientNotes)
      .where(and(eq(clientNotes.id, noteId), eq(clientNotes.clientId, entityId)))
      .limit(1);
    if (!note) throw new NotFoundError('Note');
    if (windowExpired(note.createdAt)) {
      throw new ValidationError('Note edit window has expired (15 minutes)');
    }
    await db.delete(clientNotes).where(eq(clientNotes.id, noteId));
    return note;
  }
  const [note] = await db
    .select()
    .from(interestNotes)
    .where(and(eq(interestNotes.id, noteId), eq(interestNotes.interestId, entityId)))
    .limit(1);
  if (!note) throw new NotFoundError('Note');
  if (windowExpired(note.createdAt)) {
    throw new ValidationError('Note edit window has expired (15 minutes)');
  }
  await db.delete(interestNotes).where(eq(interestNotes.id, noteId));
  return note;
}

View File

@@ -0,0 +1,296 @@
import { and, count, eq, gt, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { notifications } from '@/lib/db/schema/operations';
import { userNotificationPreferences } from '@/lib/db/schema/system';
import { userProfiles } from '@/lib/db/schema/users';
import { emitToRoom } from '@/lib/socket/server';
import { getQueue } from '@/lib/queue';
import { NotFoundError } from '@/lib/errors';
import type { ListNotificationsInput, UpdatePreferencesInput } from '@/lib/validators/notifications';
// ─── Types ────────────────────────────────────────────────────────────────────
/**
 * Inputs for createNotification.
 * dedupeKey + cooldownMs suppress duplicates: if a notification of the same
 * type for the same user carrying the same dedupeKey was created within
 * cooldownMs, the new one is dropped.
 */
export interface CreateNotificationParams {
  portId: string;
  userId: string;
  type: string;        // notification type key; 'system_alert' bypasses the in-app preference gate
  title: string;
  description?: string;
  link?: string;       // in-app destination for the toast / list item
  entityType?: string; // related entity kind, e.g. 'interest'
  entityId?: string;
  dedupeKey?: string;  // stored in metadata and matched for cooldown suppression
  cooldownMs?: number; // suppression window; defaults to 300_000 (5 min)
}
// ─── Helpers ─────────────────────────────────────────────────────────────────
/** Returns how many unread notifications a user has within a port. */
async function getUnreadCountValue(userId: string, portId: string): Promise<number> {
  const rows = await db
    .select({ count: count() })
    .from(notifications)
    .where(
      and(
        eq(notifications.userId, userId),
        eq(notifications.portId, portId),
        eq(notifications.isRead, false),
      ),
    );
  const [first] = rows;
  return first?.count ?? 0;
}
// ─── createNotification ───────────────────────────────────────────────────────
/**
 * Creates an in-app notification for a user, honoring per-user/type
 * preferences, a dedupe/cooldown window, and optional email fan-out.
 *
 * Returns the inserted row, or null when suppressed (cooldown hit, user
 * disabled in-app delivery for this type, or the insert returned nothing).
 *
 * Fixes over the previous version (behavior unchanged):
 *  - the preference row is fetched ONCE and reused — it was previously
 *    queried twice (once for the in-app gate, again for the email check);
 *  - the old comment claimed email defaults to ON when no preference row
 *    exists, while the code defaulted to OFF — comments now match the code.
 */
export async function createNotification(
  params: CreateNotificationParams,
): Promise<typeof notifications.$inferSelect | null> {
  const {
    portId,
    userId,
    type,
    title,
    description,
    link,
    entityType,
    entityId,
    dedupeKey,
    cooldownMs = 300_000,
  } = params;
  // 1. Cooldown / deduplication: suppress if an identical (user, type,
  //    dedupeKey) notification exists inside the cooldown window.
  if (dedupeKey) {
    const cutoff = new Date(Date.now() - cooldownMs);
    const [recent] = await db
      .select({ id: notifications.id })
      .from(notifications)
      .where(
        and(
          eq(notifications.userId, userId),
          eq(notifications.type, type),
          gt(notifications.createdAt, cutoff),
          sql`${notifications.metadata}->>'dedupeKey' = ${dedupeKey}`,
        ),
      )
      .limit(1);
    if (recent) {
      return null; // suppressed by cooldown
    }
  }
  // Fetch the user's preference row for this type once; reused for both the
  // in-app gate and the email decision below.
  const [pref] = await db
    .select({
      inApp: userNotificationPreferences.inApp,
      email: userNotificationPreferences.email,
    })
    .from(userNotificationPreferences)
    .where(
      and(
        eq(userNotificationPreferences.userId, userId),
        eq(userNotificationPreferences.portId, portId),
        eq(userNotificationPreferences.notificationType, type),
      ),
    )
    .limit(1);
  // 2. In-app preference gate ('system_alert' bypasses it — always delivered).
  //    When in-app delivery is disabled we skip the insert entirely; the email
  //    path requires a notification id, so email is skipped too.
  if (type !== 'system_alert' && pref && pref.inApp === false) {
    return null;
  }
  // 3. Insert the notification row.
  const [notif] = await db
    .insert(notifications)
    .values({
      portId,
      userId,
      type,
      title,
      description: description ?? null,
      link: link ?? null,
      entityType: entityType ?? null,
      entityId: entityId ?? null,
      isRead: false,
      emailSent: false,
      metadata: dedupeKey ? { dedupeKey } : {},
    })
    .returning();
  if (!notif) return null;
  // 4. Push realtime events: the new notification plus a fresh unread count.
  emitToRoom(`user:${userId}`, 'notification:new', {
    notificationId: notif.id,
    type: notif.type,
    title: notif.title,
    description: notif.description ?? '',
    link: notif.link ?? '',
  });
  const unreadCount = await getUnreadCountValue(userId, portId);
  emitToRoom(`user:${userId}`, 'notification:unreadCount', { count: unreadCount });
  // 5. Email fan-out. No preference row means NO email — email delivery is
  //    opt-in via an explicit preference record.
  const shouldEmail = pref ? pref.email : false;
  if (shouldEmail) {
    const queue = getQueue('notifications');
    await queue.add('send-notification-email', { notificationId: notif.id });
  }
  return notif;
}
// ─── listNotifications ────────────────────────────────────────────────────────
export async function listNotifications(
  userId: string,
  portId: string,
  query: ListNotificationsInput,
): Promise<{ data: (typeof notifications.$inferSelect)[]; total: number }> {
  // Paginated, newest-first notification feed for one user within one port.
  const { page, limit, unreadOnly } = query;
  const filters = [
    eq(notifications.userId, userId),
    eq(notifications.portId, portId),
  ];
  if (unreadOnly) {
    filters.push(eq(notifications.isRead, false));
  }
  const predicate = and(...filters);
  // The total count and the requested page are independent — run them concurrently.
  const [countRows, pageRows] = await Promise.all([
    db.select({ count: count() }).from(notifications).where(predicate),
    db
      .select()
      .from(notifications)
      .where(predicate)
      .orderBy(sql`${notifications.createdAt} DESC`)
      .limit(limit)
      .offset((page - 1) * limit),
  ]);
  return { data: pageRows, total: countRows[0]?.count ?? 0 };
}
// ─── markRead ─────────────────────────────────────────────────────────────────
export async function markRead(notificationId: string, userId: string): Promise<void> {
  // Ownership check: the notification must exist and belong to the caller.
  const rows = await db
    .select({ id: notifications.id, portId: notifications.portId, userId: notifications.userId })
    .from(notifications)
    .where(eq(notifications.id, notificationId))
    .limit(1);
  const target = rows[0];
  if (!target || target.userId !== userId) {
    throw new NotFoundError('Notification');
  }
  await db
    .update(notifications)
    .set({ isRead: true })
    .where(and(eq(notifications.id, notificationId), eq(notifications.userId, userId)));
  // Push the refreshed unread badge count to the user's socket room.
  emitToRoom(`user:${userId}`, 'notification:unreadCount', {
    count: await getUnreadCountValue(userId, target.portId),
  });
}
// ─── markAllRead ──────────────────────────────────────────────────────────────
export async function markAllRead(userId: string, portId: string): Promise<void> {
  // Flip every unread notification for this user/port in a single statement.
  const predicate = and(
    eq(notifications.userId, userId),
    eq(notifications.portId, portId),
    eq(notifications.isRead, false),
  );
  await db.update(notifications).set({ isRead: true }).where(predicate);
  // Everything is read now, so the badge count is unconditionally zero.
  emitToRoom(`user:${userId}`, 'notification:unreadCount', { count: 0 });
}
// ─── getUnreadCount ───────────────────────────────────────────────────────────
export async function getUnreadCount(
  userId: string,
  portId: string,
): Promise<{ count: number }> {
  // Thin wrapper shaping the shared helper's number into an API payload.
  return { count: await getUnreadCountValue(userId, portId) };
}
// ─── getPreferences ───────────────────────────────────────────────────────────
export async function getPreferences(userId: string, portId: string) {
  // All per-type notification preference rows for this user within the port.
  const scope = and(
    eq(userNotificationPreferences.userId, userId),
    eq(userNotificationPreferences.portId, portId),
  );
  return db.select().from(userNotificationPreferences).where(scope);
}
// ─── updatePreferences ────────────────────────────────────────────────────────
export async function updatePreferences(
  userId: string,
  portId: string,
  data: UpdatePreferencesInput,
) {
  // Upsert each preference row, keyed by (user, port, notificationType).
  // The rows are independent of one another, so run the upserts concurrently
  // instead of awaiting them one-by-one in a loop (the original was
  // needlessly sequential).
  await Promise.all(
    data.preferences.map((pref) =>
      db
        .insert(userNotificationPreferences)
        .values({
          userId,
          portId,
          notificationType: pref.notificationType,
          inApp: pref.inApp,
          email: pref.email,
        })
        .onConflictDoUpdate({
          target: [
            userNotificationPreferences.userId,
            userNotificationPreferences.portId,
            userNotificationPreferences.notificationType,
          ],
          set: {
            inApp: pref.inApp,
            email: pref.email,
          },
        }),
    ),
  );
  // Return the full, post-update preference set.
  return getPreferences(userId, portId);
}

View File

@@ -0,0 +1,389 @@
import { and, count, eq, inArray } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { documents, documentSigners, files } from '@/lib/db/schema/documents';
import { invoices } from '@/lib/db/schema/financial';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { createPortalToken } from '@/lib/portal/auth';
import { sendEmail } from '@/lib/email';
import { getPresignedUrl } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
// ─── Magic Link ────────────────────────────────────────────────────────────────
/**
* Requests a magic link for portal access.
* Always returns success — never reveals whether an email exists in the system.
*/
export async function requestMagicLink(email: string): Promise<void> {
  // Normalize so lookups and the outgoing email agree on casing/whitespace.
  const normalizedEmail = email.toLowerCase().trim();
  // Find client contact with matching email
  const contact = await db.query.clientContacts.findFirst({
    where: and(
      eq(clientContacts.channel, 'email'),
      eq(clientContacts.value, normalizedEmail),
    ),
    with: {
      client: true,
    },
  });
  if (!contact || !contact.client) {
    // Don't reveal that the email doesn't exist — silently return
    logger.debug({ email: normalizedEmail }, 'Portal magic link: no matching client contact');
    return;
  }
  const client = contact.client;
  // Build the JWT
  // NOTE(review): the email copy below promises a 24-hour expiry; the actual
  // TTL is set inside createPortalToken — confirm the two agree.
  const token = await createPortalToken({
    clientId: client.id,
    portId: client.portId,
    email: normalizedEmail,
  });
  const magicLinkUrl = `${env.APP_URL}/verify?token=${encodeURIComponent(token)}`;
  // Fetch port name for the email
  const port = await db.query.ports.findFirst({
    where: eq(ports.id, client.portId),
  });
  const portName = port?.name ?? 'Port Nimara';
  const clientName = client.fullName;
  // Inline-styled HTML body (email clients generally ignore <style> blocks).
  const html = `
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: #f5f5f5; padding: 40px 0; margin: 0;">
<div style="max-width: 480px; margin: 0 auto; background: #ffffff; border-radius: 8px; overflow: hidden; box-shadow: 0 2px 8px rgba(0,0,0,0.08);">
<div style="background: #1e2844; padding: 32px 40px; text-align: center;">
<h1 style="color: #ffffff; margin: 0; font-size: 22px; font-weight: 600;">${portName}</h1>
<p style="color: #9ca3af; margin: 6px 0 0; font-size: 14px;">Client Portal</p>
</div>
<div style="padding: 40px;">
<p style="color: #374151; font-size: 16px; margin: 0 0 8px;">Hello, ${clientName}</p>
<p style="color: #6b7280; font-size: 15px; margin: 0 0 32px; line-height: 1.6;">
You requested access to your client portal. Click the button below to sign in. This link expires in 24 hours.
</p>
<div style="text-align: center; margin: 0 0 32px;">
<a href="${magicLinkUrl}" style="display: inline-block; background: #1e2844; color: #ffffff; text-decoration: none; padding: 14px 32px; border-radius: 6px; font-size: 15px; font-weight: 500;">
Access My Portal
</a>
</div>
<p style="color: #9ca3af; font-size: 13px; margin: 0; line-height: 1.6;">
If you didn't request this, you can safely ignore this email. If you're having trouble with the button above, copy and paste this URL into your browser:
<br><br>
<span style="color: #6b7280; word-break: break-all;">${magicLinkUrl}</span>
</p>
</div>
<div style="background: #f9fafb; padding: 20px 40px; text-align: center; border-top: 1px solid #e5e7eb;">
<p style="color: #9ca3af; font-size: 12px; margin: 0;">&copy; ${new Date().getFullYear()} ${portName}. All rights reserved.</p>
</div>
</div>
</body>
</html>
`;
  await sendEmail(
    normalizedEmail,
    `Your ${portName} portal access link`,
    html,
  );
  logger.info({ clientId: client.id, portId: client.portId }, 'Portal magic link sent');
}
// ─── Dashboard ────────────────────────────────────────────────────────────────
/** Payload backing the client portal's landing page. */
export interface PortalDashboard {
  client: {
    id: string;
    fullName: string;
    companyName: string | null;
    yachtName: string | null;
  };
  port: {
    name: string;
    logoUrl: string | null;
  };
  // Card counts; invoices are matched to the client by billing email (see
  // getPortalDashboard), not by a clientId column.
  counts: {
    interests: number;
    documents: number;
    invoices: number;
  };
}
export async function getPortalDashboard(
clientId: string,
portId: string,
): Promise<PortalDashboard | null> {
const [client, port, interestCount, documentCount] = await Promise.all([
db.query.clients.findFirst({
where: and(eq(clients.id, clientId), eq(clients.portId, portId)),
with: { contacts: true },
}),
db.query.ports.findFirst({
where: eq(ports.id, portId),
}),
db
.select({ value: count() })
.from(interests)
.where(and(eq(interests.clientId, clientId), eq(interests.portId, portId))),
db
.select({ value: count() })
.from(documents)
.where(and(eq(documents.clientId, clientId), eq(documents.portId, portId))),
]);
if (!client || !port) return null;
// Count invoices matched by client's billing email addresses
const emailContacts = (client.contacts ?? [])
.filter((c) => c.channel === 'email')
.map((c) => c.value.toLowerCase());
let invoiceCount = 0;
if (emailContacts.length > 0) {
const allPortInvoices = await db
.select({ billingEmail: invoices.billingEmail })
.from(invoices)
.where(eq(invoices.portId, portId));
invoiceCount = allPortInvoices.filter(
(inv) => inv.billingEmail && emailContacts.includes(inv.billingEmail.toLowerCase()),
).length;
}
return {
client: {
id: client.id,
fullName: client.fullName,
companyName: client.companyName ?? null,
yachtName: client.yachtName ?? null,
},
port: {
name: port.name,
logoUrl: port.logoUrl ?? null,
},
counts: {
interests: interestCount[0]?.value ?? 0,
documents: documentCount[0]?.value ?? 0,
invoices: invoiceCount,
},
};
}
// ─── Interests ────────────────────────────────────────────────────────────────
/** Read-only projection of an interest exposed to the client portal. */
export interface PortalInterest {
  id: string;
  pipelineStage: string;
  leadCategory: string | null;
  // Denormalized from the linked berth; null when no berth is linked.
  berthMooringNumber: string | null;
  berthArea: string | null;
  eoiStatus: string | null;
  contractStatus: string | null;
  dateFirstContact: Date | null;
  createdAt: Date;
}
export async function getClientInterests(
clientId: string,
portId: string,
): Promise<PortalInterest[]> {
const rows = await db
.select({
id: interests.id,
pipelineStage: interests.pipelineStage,
leadCategory: interests.leadCategory,
berthId: interests.berthId,
eoiStatus: interests.eoiStatus,
contractStatus: interests.contractStatus,
dateFirstContact: interests.dateFirstContact,
createdAt: interests.createdAt,
})
.from(interests)
.where(
and(
eq(interests.clientId, clientId),
eq(interests.portId, portId),
),
)
.orderBy(interests.createdAt);
// Fetch berth details for interests that have a berth
const berthIds = rows.flatMap((r) => (r.berthId ? [r.berthId] : []));
const berthMap = new Map<string, { mooringNumber: string; area: string | null }>();
if (berthIds.length > 0) {
const berthRows = await db
.select({ id: berths.id, mooringNumber: berths.mooringNumber, area: berths.area })
.from(berths)
.where(eq(berths.portId, portId));
for (const b of berthRows) {
berthMap.set(b.id, { mooringNumber: b.mooringNumber, area: b.area });
}
}
return rows.map((r) => ({
id: r.id,
pipelineStage: r.pipelineStage,
leadCategory: r.leadCategory,
berthMooringNumber: r.berthId ? (berthMap.get(r.berthId)?.mooringNumber ?? null) : null,
berthArea: r.berthId ? (berthMap.get(r.berthId)?.area ?? null) : null,
eoiStatus: r.eoiStatus,
contractStatus: r.contractStatus,
dateFirstContact: r.dateFirstContact,
createdAt: r.createdAt,
}));
}
// ─── Documents ────────────────────────────────────────────────────────────────
/** Read-only projection of a document (plus its signer states) for the portal. */
export interface PortalDocument {
  id: string;
  documentType: string;
  title: string;
  status: string;
  isManualUpload: boolean;
  // True when a signed copy has been attached (signedFileId is set).
  hasSignedFile: boolean;
  signers: Array<{
    signerName: string;
    signerEmail: string;
    signerRole: string;
    status: string;
  }>;
  createdAt: Date;
}
export async function getClientDocuments(
  clientId: string,
  portId: string,
): Promise<PortalDocument[]> {
  // Newest-first documents for this client, with signer rows eagerly loaded.
  const docs = await db.query.documents.findMany({
    where: and(
      eq(documents.clientId, clientId),
      eq(documents.portId, portId),
    ),
    with: { signers: true },
    orderBy: (t, { desc }) => [desc(t.createdAt)],
  });
  return docs.map((doc) => {
    const signerViews = (doc.signers ?? []).map((signer) => ({
      signerName: signer.signerName,
      signerEmail: signer.signerEmail,
      signerRole: signer.signerRole,
      status: signer.status,
    }));
    return {
      id: doc.id,
      documentType: doc.documentType,
      title: doc.title,
      status: doc.status,
      isManualUpload: doc.isManualUpload,
      hasSignedFile: doc.signedFileId != null,
      signers: signerViews,
      createdAt: doc.createdAt,
    };
  });
}
// ─── Invoices ─────────────────────────────────────────────────────────────────
/** Read-only projection of an invoice matched to the client by billing email. */
export interface PortalInvoice {
  id: string;
  invoiceNumber: string;
  status: string;
  currency: string;
  // Amount is passed through from storage unchanged (string, not number).
  total: string;
  dueDate: string;
  paymentStatus: string | null;
  paymentDate: string | null;
  createdAt: Date;
}
export async function getClientInvoices(
  clientId: string,
  portId: string,
): Promise<PortalInvoice[]> {
  // Invoices carry no clientId — matching is done via billing email, so the
  // client's email contacts are needed first.
  const client = await db.query.clients.findFirst({
    where: and(eq(clients.id, clientId), eq(clients.portId, portId)),
    with: { contacts: true },
  });
  if (!client) return [];
  const clientEmails = (client.contacts ?? [])
    .filter((contact) => contact.channel === 'email')
    .map((contact) => contact.value.toLowerCase());
  if (clientEmails.length === 0) return [];
  // Pull the port's invoices and keep only those billed to one of the emails.
  const portInvoices = await db
    .select()
    .from(invoices)
    .where(eq(invoices.portId, portId))
    .orderBy(invoices.createdAt);
  return portInvoices
    .filter(
      (inv) => inv.billingEmail && clientEmails.includes(inv.billingEmail.toLowerCase()),
    )
    .map((inv) => ({
      id: inv.id,
      invoiceNumber: inv.invoiceNumber,
      status: inv.status,
      currency: inv.currency,
      total: inv.total,
      dueDate: inv.dueDate,
      paymentStatus: inv.paymentStatus ?? null,
      paymentDate: inv.paymentDate ?? null,
      createdAt: inv.createdAt,
    }));
}
// ─── Document Download ────────────────────────────────────────────────────────
export async function getDocumentDownloadUrl(
  clientId: string,
  documentId: string,
  portId: string,
): Promise<string | null> {
  // The document must belong to both the client and the port; any mismatch
  // resolves to null rather than an error.
  const doc = await db.query.documents.findFirst({
    where: and(
      eq(documents.id, documentId),
      eq(documents.clientId, clientId),
      eq(documents.portId, portId),
    ),
  });
  if (!doc) return null;
  // Serve the signed copy when present; otherwise fall back to the original.
  const targetFileId = doc.signedFileId ?? doc.fileId;
  if (!targetFileId) return null;
  const file = await db.query.files.findFirst({
    where: eq(files.id, targetFileId),
  });
  return file ? getPresignedUrl(file.storagePath) : null;
}

View File

@@ -0,0 +1,55 @@
import OpenAI from 'openai';
import { logger } from '@/lib/logger';
const openai = new OpenAI(); // uses OPENAI_API_KEY from env

/** Structured fields extracted from a receipt image by the vision model. */
interface ScanResult {
  establishment: string | null;
  // ISO date string as requested in the prompt — not validated here.
  date: string | null;
  amount: number | null;
  // Three-letter currency code as requested in the prompt.
  currency: string | null;
  lineItems: Array<{ description: string; amount: number }>;
  // Model-reported confidence (prompted as 0–1); 0 when the scan fails.
  confidence: number;
}
/** Zero-confidence fallback returned whenever extraction fails. */
function emptyScanResult(): ScanResult {
  return {
    establishment: null,
    date: null,
    amount: null,
    currency: null,
    lineItems: [],
    confidence: 0,
  };
}

/**
 * Coerces the model's JSON (untrusted output) into a well-formed ScanResult.
 * Fields with unexpected types are dropped rather than propagated, and
 * confidence is clamped to [0, 1].
 */
function normalizeScanResult(raw: unknown): ScanResult {
  if (typeof raw !== 'object' || raw === null) return emptyScanResult();
  const obj = raw as Record<string, unknown>;
  const lineItems = Array.isArray(obj.lineItems)
    ? obj.lineItems.flatMap((item) => {
        if (typeof item !== 'object' || item === null) return [];
        const entry = item as Record<string, unknown>;
        return typeof entry.description === 'string' && typeof entry.amount === 'number'
          ? [{ description: entry.description, amount: entry.amount }]
          : [];
      })
    : [];
  return {
    establishment: typeof obj.establishment === 'string' ? obj.establishment : null,
    date: typeof obj.date === 'string' ? obj.date : null,
    amount: typeof obj.amount === 'number' ? obj.amount : null,
    currency: typeof obj.currency === 'string' ? obj.currency : null,
    lineItems,
    confidence:
      typeof obj.confidence === 'number' ? Math.min(1, Math.max(0, obj.confidence)) : 0,
  };
}

/**
 * Extracts structured receipt data from an image using the vision model.
 *
 * FIX: the previous implementation trusted `JSON.parse(...) as ScanResult`,
 * letting arbitrarily-shaped model output flow into callers; the parsed
 * value is now validated field-by-field.
 *
 * @param imageBuffer Raw image bytes.
 * @param mimeType    MIME type used to build the data URL for the model.
 * @returns Extracted fields; a zero-confidence empty result on any failure.
 */
export async function scanReceipt(
  imageBuffer: Buffer,
  mimeType: string,
): Promise<ScanResult> {
  try {
    const base64 = imageBuffer.toString('base64');
    const response = await openai.chat.completions.create({
      model: 'gpt-4o',
      messages: [
        {
          role: 'user',
          content: [
            {
              type: 'text',
              text: 'Extract receipt data as JSON: { establishment, date (ISO), amount (number), currency (3-letter code), lineItems: [{ description, amount }], confidence (0-1) }. Return ONLY valid JSON.',
            },
            {
              type: 'image_url',
              image_url: { url: `data:${mimeType};base64,${base64}` },
            },
          ],
        },
      ],
      max_tokens: 1000,
    });
    const content = response.choices[0]?.message?.content ?? '{}';
    // Strip markdown code fences the model sometimes wraps around the JSON.
    const cleaned = content.replace(/```json\n?|\n?```/g, '').trim();
    return normalizeScanResult(JSON.parse(cleaned) as unknown);
  } catch (err) {
    logger.error({ err }, 'Receipt scan failed');
    return emptyScanResult();
  }
}

View File

@@ -0,0 +1,217 @@
import { and, eq, isNull } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths, berthRecommendations } from '@/lib/db/schema/berths';
import { NotFoundError } from '@/lib/errors';
import { createAuditLog } from '@/lib/audit';
/** Request context threaded into audit-log entries. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Score a single berth ─────────────────────────────────────────────────────
/**
 * Scores how well a berth fits a yacht's dimensions.
 *
 * Each dimension present on BOTH sides contributes a 0–100 sub-score
 * (recorded in `reasons`); the final score is their rounded average.
 * A berth physically too small in any compared dimension scores 0 for it.
 * When no dimension can be compared at all, a neutral 50 is returned.
 *
 * FIX: removed the unused `fit` local from the length branch.
 *
 * @param berth          Berth row (dimension columns are decimal strings).
 * @param yachtLengthFt  Yacht length in feet, or null if unknown.
 * @param yachtWidthFt   Yacht beam in feet, or null if unknown.
 * @param yachtDraftFt   Yacht draft in feet, or null if unknown.
 */
function scoreBerth(
  berth: typeof berths.$inferSelect,
  yachtLengthFt: number | null,
  yachtWidthFt: number | null,
  yachtDraftFt: number | null,
): { score: number; reasons: Record<string, number> } {
  const reasons: Record<string, number> = {};
  const weights: number[] = [];
  if (yachtLengthFt && berth.lengthFt) {
    const berthLen = parseFloat(berth.lengthFt);
    if (berthLen >= yachtLengthFt) {
      // Ideal fit is up to 20% longer than the yacht; beyond that the score
      // decays linearly, floored at 50.
      const score =
        berthLen <= yachtLengthFt * 1.2
          ? 100
          : Math.max(50, 100 - (berthLen / yachtLengthFt - 1.2) * 100);
      reasons['length_fit'] = Math.round(score);
      weights.push(score);
    } else {
      // Berth too short — hard fail on this dimension.
      reasons['length_fit'] = 0;
      weights.push(0);
    }
  }
  if (yachtWidthFt && berth.widthFt) {
    const berthWidth = parseFloat(berth.widthFt);
    if (berthWidth >= yachtWidthFt) {
      // Up to 30% extra beam is ideal; beyond that decay linearly, floored at 40.
      const score =
        berthWidth <= yachtWidthFt * 1.3
          ? 100
          : Math.max(40, 100 - (berthWidth / yachtWidthFt - 1.3) * 80);
      reasons['beam_fit'] = Math.round(score);
      weights.push(score);
    } else {
      reasons['beam_fit'] = 0;
      weights.push(0);
    }
  }
  if (yachtDraftFt && berth.draftFt) {
    const berthDraft = parseFloat(berth.draftFt);
    // Draft is pass/fail: the water must simply be deep enough.
    if (berthDraft >= yachtDraftFt) {
      reasons['draft_fit'] = 100;
      weights.push(100);
    } else {
      reasons['draft_fit'] = 0;
      weights.push(0);
    }
  }
  if (weights.length === 0) {
    // No comparable dimensions on either side: neutral score.
    return { score: 50, reasons: { no_dimensions: 50 } };
  }
  const score = Math.round(weights.reduce((a, b) => a + b, 0) / weights.length);
  return { score, reasons };
}
// ─── Generate Recommendations ─────────────────────────────────────────────────
/**
 * Regenerates the AI-sourced berth recommendations for an interest.
 *
 * Every 'available' berth in the port is scored against the client's yacht
 * dimensions; the top 10 replace any existing source='ai' rows for this
 * interest. Manually-added recommendations are left untouched.
 *
 * @throws NotFoundError when the interest does not exist within this port.
 * @returns The refreshed recommendation list (AI + manual).
 */
export async function generateRecommendations(
  interestId: string,
  portId: string,
  meta: AuditMeta,
) {
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) throw new NotFoundError('Interest');
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, interest.clientId),
  });
  // Yacht dimensions are stored as strings; parse once (null = unknown).
  const yachtLengthFt = client?.yachtLengthFt ? parseFloat(client.yachtLengthFt) : null;
  const yachtWidthFt = client?.yachtWidthFt ? parseFloat(client.yachtWidthFt) : null;
  const yachtDraftFt = client?.yachtDraftFt ? parseFloat(client.yachtDraftFt) : null;
  // Get all available berths for the port
  const availableBerths = await db
    .select()
    .from(berths)
    .where(and(eq(berths.portId, portId), eq(berths.status, 'available')));
  // Score each berth
  const scored = availableBerths.map((berth) => {
    const { score, reasons } = scoreBerth(berth, yachtLengthFt, yachtWidthFt, yachtDraftFt);
    return { berth, score, reasons };
  });
  // Sort by score and take top 10
  scored.sort((a, b) => b.score - a.score);
  const top10 = scored.slice(0, 10);
  // Delete existing AI recommendations for this interest — the delete runs
  // before the insert so the new batch fully replaces the old one.
  await db
    .delete(berthRecommendations)
    .where(
      and(
        eq(berthRecommendations.interestId, interestId),
        eq(berthRecommendations.source, 'ai'),
      ),
    );
  // Insert new recommendations
  if (top10.length > 0) {
    await db.insert(berthRecommendations).values(
      top10.map(({ berth, score, reasons }) => ({
        interestId,
        berthId: berth.id,
        matchScore: String(score),
        matchReasons: reasons,
        source: 'ai' as const,
        createdBy: meta.userId,
      })),
    );
  }
  // Fire-and-forget audit entry (`void`: result intentionally not awaited).
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'berth_recommendation',
    entityId: interestId,
    metadata: { type: 'ai_generated', count: top10.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return listRecommendations(interestId, portId);
}
// ─── List Recommendations ─────────────────────────────────────────────────────
export async function listRecommendations(interestId: string, portId: string) {
const rows = await db
.select({
id: berthRecommendations.id,
interestId: berthRecommendations.interestId,
berthId: berthRecommendations.berthId,
matchScore: berthRecommendations.matchScore,
matchReasons: berthRecommendations.matchReasons,
source: berthRecommendations.source,
createdBy: berthRecommendations.createdBy,
createdAt: berthRecommendations.createdAt,
mooringNumber: berths.mooringNumber,
area: berths.area,
status: berths.status,
lengthFt: berths.lengthFt,
widthFt: berths.widthFt,
draftFt: berths.draftFt,
})
.from(berthRecommendations)
.innerJoin(berths, eq(berthRecommendations.berthId, berths.id))
.where(eq(berthRecommendations.interestId, interestId))
.orderBy(berthRecommendations.matchScore);
return rows.reverse(); // highest score first
}
// ─── Add Manual Recommendation ────────────────────────────────────────────────
export async function addManualRecommendation(
  interestId: string,
  portId: string,
  berthId: string,
  meta: AuditMeta,
) {
  // Both the interest and the berth must exist within the caller's port.
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) throw new NotFoundError('Interest');
  const berth = await db.query.berths.findFirst({
    where: and(eq(berths.id, berthId), eq(berths.portId, portId)),
  });
  if (!berth) throw new NotFoundError('Berth');
  // Manual recommendations carry no match score — only provenance.
  const inserted = await db
    .insert(berthRecommendations)
    .values({
      interestId,
      berthId,
      source: 'manual',
      createdBy: meta.userId,
    })
    .returning();
  const rec = inserted[0];
  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'berth_recommendation',
    entityId: rec!.id,
    metadata: { type: 'manual', interestId, berthId },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return rec!;
}

View File

@@ -0,0 +1,189 @@
import { and, desc, eq, inArray } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths, berthWaitingList, berthMaintenanceLog } from '@/lib/db/schema/berths';
import { auditLogs } from '@/lib/db/schema/system';
import { ports } from '@/lib/db/schema/ports';
import { NotFoundError } from '@/lib/errors';
import { generatePdf } from '@/lib/pdf/generate';
import {
clientSummaryTemplate,
buildClientSummaryInputs,
} from '@/lib/pdf/templates/client-summary-template';
import {
berthSpecTemplate,
buildBerthSpecInputs,
} from '@/lib/pdf/templates/berth-spec-template';
import {
interestSummaryTemplate,
buildInterestSummaryInputs,
} from '@/lib/pdf/templates/interest-summary-template';
// ─── Export Client PDF ────────────────────────────────────────────────────────
export async function exportClientPdf(clientId: string, portId: string): Promise<Uint8Array> {
  // Renders the client summary PDF: profile, contacts, recent interests,
  // and recent audit activity.
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, clientId),
  });
  if (!client || client.portId !== portId) {
    throw new NotFoundError('Client');
  }
  const [contactList, port] = await Promise.all([
    db.query.clientContacts.findMany({
      where: eq(clientContacts.clientId, clientId),
      orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
    }),
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
  ]);
  // Most recently updated 20 interests for this client in this port.
  const interestList = await db
    .select()
    .from(interests)
    .where(and(eq(interests.clientId, clientId), eq(interests.portId, portId)))
    .orderBy(desc(interests.updatedAt))
    .limit(20);
  // Last 20 audit events recorded against the client entity.
  const activity = await db
    .select()
    .from(auditLogs)
    .where(
      and(
        eq(auditLogs.portId, portId),
        eq(auditLogs.entityType, 'client'),
        eq(auditLogs.entityId, clientId),
      ),
    )
    .orderBy(desc(auditLogs.createdAt))
    .limit(20);
  // Attach mooring numbers so the template can print them without a join.
  const linkedBerthIds = interestList.flatMap((i) => (i.berthId ? [i.berthId] : []));
  let mooringByBerthId: Record<string, string> = {};
  if (linkedBerthIds.length > 0) {
    const berthRows = await db
      .select({ id: berths.id, mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(inArray(berths.id, linkedBerthIds));
    mooringByBerthId = Object.fromEntries(berthRows.map((b) => [b.id, b.mooringNumber]));
  }
  const enrichedInterests = interestList.map((i) => ({
    ...i,
    berthMooringNumber: i.berthId ? (mooringByBerthId[i.berthId] ?? null) : null,
  }));
  const inputs = buildClientSummaryInputs(client, contactList, enrichedInterests, activity, port);
  return generatePdf(clientSummaryTemplate, [inputs]);
}
// ─── Export Berth PDF ─────────────────────────────────────────────────────────
export async function exportBerthPdf(berthId: string, portId: string): Promise<Uint8Array> {
  // Renders the berth spec sheet: details, waiting list, maintenance history,
  // and linked interests.
  const berth = await db.query.berths.findFirst({
    where: eq(berths.id, berthId),
  });
  if (!berth || berth.portId !== portId) {
    throw new NotFoundError('Berth');
  }
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Waiting list, ordered by queue position.
  const waitingListRows = await db
    .select({
      id: berthWaitingList.id,
      position: berthWaitingList.position,
      priority: berthWaitingList.priority,
      notes: berthWaitingList.notes,
      clientId: berthWaitingList.clientId,
    })
    .from(berthWaitingList)
    .where(eq(berthWaitingList.berthId, berthId))
    .orderBy(berthWaitingList.position);
  // Resolve client names for the waiting-list entries in one query.
  const waitingClientIds = waitingListRows.map((entry) => entry.clientId);
  let nameByClientId: Record<string, string> = {};
  if (waitingClientIds.length > 0) {
    const clientRows = await db
      .select({ id: clients.id, fullName: clients.fullName })
      .from(clients)
      .where(inArray(clients.id, waitingClientIds));
    nameByClientId = Object.fromEntries(clientRows.map((c) => [c.id, c.fullName]));
  }
  const enrichedWaitingList = waitingListRows.map((entry) => ({
    ...entry,
    clientName: nameByClientId[entry.clientId] ?? 'Unknown',
  }));
  // Most recent 20 maintenance entries.
  const maintenance = await db
    .select()
    .from(berthMaintenanceLog)
    .where(eq(berthMaintenanceLog.berthId, berthId))
    .orderBy(desc(berthMaintenanceLog.performedDate))
    .limit(20);
  // Most recently updated 20 interests linked to this berth.
  const linkedInterests = await db
    .select()
    .from(interests)
    .where(and(eq(interests.berthId, berthId), eq(interests.portId, portId)))
    .orderBy(desc(interests.updatedAt))
    .limit(20);
  const inputs = buildBerthSpecInputs(berth, enrichedWaitingList, maintenance, linkedInterests, port);
  return generatePdf(berthSpecTemplate, [inputs]);
}
// ─── Export Interest PDF ──────────────────────────────────────────────────────
export async function exportInterestPdf(interestId: string, portId: string): Promise<Uint8Array> {
  // Renders the interest summary PDF: client, linked berth, audit timeline.
  const interest = await db.query.interests.findFirst({
    where: eq(interests.id, interestId),
  });
  if (!interest || interest.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  const [client, port] = await Promise.all([
    db.query.clients.findFirst({ where: eq(clients.id, interest.clientId) }),
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
  ]);
  // The berth is optional — an interest may not be linked to one yet.
  let berth = null;
  if (interest.berthId) {
    berth = await db.query.berths.findFirst({ where: eq(berths.id, interest.berthId) });
  }
  // Last 20 audit events recorded against the interest entity.
  const timeline = await db
    .select()
    .from(auditLogs)
    .where(
      and(
        eq(auditLogs.portId, portId),
        eq(auditLogs.entityType, 'interest'),
        eq(auditLogs.entityId, interestId),
      ),
    )
    .orderBy(desc(auditLogs.createdAt))
    .limit(20);
  const inputs = buildInterestSummaryInputs(interest, client, berth, timeline, port);
  return generatePdf(interestSummaryTemplate, [inputs]);
}

View File

@@ -0,0 +1,218 @@
import { and, count, eq, gte, isNull, lte, sql, sum } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { auditLogs } from '@/lib/db/schema/system';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Pipeline report payload: per-stage counts plus top interests by berth price. */
export interface PipelineData {
  stageCounts: Record<string, number>;
  topInterests: Array<{
    id: string;
    clientId: string;
    pipelineStage: string;
    berthPrice: string | null;
  }>;
  // ISO timestamp of when the report data was computed.
  generatedAt: string;
}
/** Revenue report payload: summed linked-berth prices per pipeline stage. */
export interface RevenueData {
  stageRevenue: Record<string, string>;
  totalCompleted: string;
  generatedAt: string;
}
/** Activity report payload: recent audit-log rows plus "action:entityType" tallies. */
export interface ActivityData {
  logs: Array<{
    id: string;
    action: string;
    entityType: string;
    entityId: string | null;
    userId: string | null;
    createdAt: Date;
  }>;
  summary: Record<string, number>;
  generatedAt: string;
}
/** Occupancy report payload: berth counts per status and overall occupancy rate. */
export interface OccupancyData {
  statusCounts: Record<string, number>;
  // Percentage (0–100), rounded to one decimal place.
  occupancyRate: number;
  totalBerths: number;
  generatedAt: string;
}
// ─── Pipeline ─────────────────────────────────────────────────────────────────
/**
 * Builds the pipeline report dataset for a port: a count of non-archived
 * interests per pipeline stage, plus the top 10 interests ranked by their
 * linked berth's price.
 */
export async function fetchPipelineData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<PipelineData> {
  // Count interests per pipeline stage (non-archived).
  // FIX: the aggregate was missing GROUP BY — Postgres rejects
  // `SELECT stage, count(*)` without it (sibling report queries group).
  const stageCounts = await db
    .select({
      stage: interests.pipelineStage,
      count: count(),
    })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  const stageCountMap: Record<string, number> = {};
  for (const row of stageCounts) {
    stageCountMap[row.stage] = row.count;
  }
  // Top 10 interests by linked-berth price (nulls last).
  const topInterestsRows = await db
    .select({
      id: interests.id,
      clientId: interests.clientId,
      pipelineStage: interests.pipelineStage,
      berthPrice: berths.price,
    })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .orderBy(sql`${berths.price} DESC NULLS LAST`)
    .limit(10);
  return {
    stageCounts: stageCountMap,
    topInterests: topInterestsRows.map((r) => ({
      id: r.id,
      clientId: r.clientId,
      pipelineStage: r.pipelineStage,
      berthPrice: r.berthPrice ? String(r.berthPrice) : null,
    })),
    generatedAt: new Date().toISOString(),
  };
}
// ─── Revenue ──────────────────────────────────────────────────────────────────
export async function fetchRevenueData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<RevenueData> {
  // Potential revenue per stage: summed linked-berth prices for live interests.
  const perStage = await db
    .select({
      stage: interests.pipelineStage,
      revenue: sum(berths.price),
    })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  const stageRevenueMap: Record<string, string> = {};
  for (const row of perStage) {
    stageRevenueMap[row.stage] = row.revenue ? String(row.revenue) : '0';
  }
  // Realized revenue: interests that reached the 'completed' stage.
  const completedRows = await db
    .select({ total: sum(berths.price) })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(
      and(
        eq(interests.portId, portId),
        eq(interests.pipelineStage, 'completed'),
        isNull(interests.archivedAt),
      ),
    );
  const completedTotal = completedRows[0]?.total;
  return {
    stageRevenue: stageRevenueMap,
    totalCompleted: completedTotal ? String(completedTotal) : '0',
    generatedAt: new Date().toISOString(),
  };
}
// ─── Activity ─────────────────────────────────────────────────────────────────
export async function fetchActivityData(
  portId: string,
  params: Record<string, unknown>,
): Promise<ActivityData> {
  // Report window defaults to the last 30 days when dateFrom is not supplied.
  const dateFrom = params.dateFrom as string | undefined;
  const dateTo = params.dateTo as string | undefined;
  const defaultFrom = new Date();
  defaultFrom.setDate(defaultFrom.getDate() - 30);
  const conditions = [
    eq(auditLogs.portId, portId),
    gte(auditLogs.createdAt, dateFrom ? new Date(dateFrom) : defaultFrom),
  ];
  if (dateTo) {
    conditions.push(lte(auditLogs.createdAt, new Date(dateTo)));
  }
  // Newest-first, capped at 200 rows.
  const logs = await db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      userId: auditLogs.userId,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(and(...conditions))
    .orderBy(sql`${auditLogs.createdAt} DESC`)
    .limit(200);
  // Tally rows per "action:entityType" for the summary strip.
  const summary: Record<string, number> = {};
  for (const entry of logs) {
    const key = `${entry.action}:${entry.entityType}`;
    summary[key] = (summary[key] ?? 0) + 1;
  }
  return { logs, summary, generatedAt: new Date().toISOString() };
}
// ─── Occupancy ────────────────────────────────────────────────────────────────
export async function fetchOccupancyData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<OccupancyData> {
  // Berth counts grouped by status for this port.
  const grouped = await db
    .select({
      status: berths.status,
      count: count(),
    })
    .from(berths)
    .where(eq(berths.portId, portId))
    .groupBy(berths.status);
  const statusCountMap: Record<string, number> = {};
  let totalBerths = 0;
  for (const row of grouped) {
    statusCountMap[row.status] = row.count;
    totalBerths += row.count;
  }
  // "Occupied" = under offer or sold; rate is a percentage to one decimal.
  const occupied = (statusCountMap['under_offer'] ?? 0) + (statusCountMap['sold'] ?? 0);
  const rate = totalBerths > 0 ? (occupied / totalBerths) * 100 : 0;
  return {
    statusCounts: statusCountMap,
    occupancyRate: Math.round(rate * 10) / 10,
    totalBerths,
    generatedAt: new Date().toISOString(),
  };
}

View File

@@ -0,0 +1,301 @@
import { and, desc, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { generatedReports } from '@/lib/db/schema/operations';
import { notifications } from '@/lib/db/schema/operations';
import { files } from '@/lib/db/schema/documents';
import { ports } from '@/lib/db/schema/ports';
import { generatePdf } from '@/lib/pdf/generate';
import { minioClient, getPresignedUrl, buildStoragePath } from '@/lib/minio/index';
import { emitToRoom } from '@/lib/socket/server';
import { getQueue } from '@/lib/queue';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { NotFoundError } from '@/lib/errors';
import {
fetchPipelineData,
fetchRevenueData,
fetchActivityData,
fetchOccupancyData,
} from '@/lib/services/report-generators';
import {
pipelineReportTemplate,
buildPipelineInputs,
} from '@/lib/pdf/templates/reports/pipeline-report';
import {
revenueReportTemplate,
buildRevenueInputs,
} from '@/lib/pdf/templates/reports/revenue-report';
import {
activityReportTemplate,
buildActivityInputs,
} from '@/lib/pdf/templates/reports/activity-report';
import {
occupancyReportTemplate,
buildOccupancyInputs,
} from '@/lib/pdf/templates/reports/occupancy-report';
import type { RequestReportInput, ListReportsInput } from '@/lib/validators/reports';
// ─── Report Type Map ──────────────────────────────────────────────────────────
// Maps each supported report type to its data fetcher, PDF template, and
// template-input builder. `as const` keeps each entry's function types narrow.
const REPORT_TYPE_MAP = {
  pipeline: {
    fetchData: fetchPipelineData,
    template: pipelineReportTemplate,
    buildInputs: buildPipelineInputs,
  },
  revenue: {
    fetchData: fetchRevenueData,
    template: revenueReportTemplate,
    buildInputs: buildRevenueInputs,
  },
  activity: {
    fetchData: fetchActivityData,
    template: activityReportTemplate,
    buildInputs: buildActivityInputs,
  },
  occupancy: {
    fetchData: fetchOccupancyData,
    template: occupancyReportTemplate,
    buildInputs: buildOccupancyInputs,
  },
} as const;
// Union of supported report type keys: 'pipeline' | 'revenue' | 'activity' | 'occupancy'.
type ReportType = keyof typeof REPORT_TYPE_MAP;
// ─── requestReport ────────────────────────────────────────────────────────────
/**
 * Creates a queued generatedReports row, enqueues a BullMQ job to render it,
 * and notifies the requesting user's socket room.
 *
 * @returns the newly inserted report row.
 * @throws Error when the insert returns no row.
 */
export async function requestReport(
  portId: string,
  userId: string,
  data: RequestReportInput,
) {
  const inserted = await db
    .insert(generatedReports)
    .values({
      portId,
      reportType: data.reportType,
      name: data.name,
      status: 'queued',
      parameters: data.parameters ?? {},
      requestedBy: userId,
    })
    .returning();

  const report = inserted[0];
  if (!report) {
    throw new Error('Failed to create report record');
  }

  // Hand off the actual PDF generation to the background worker.
  await getQueue('reports').add('generate-report', { reportJobId: report.id });

  // Let the requester's open clients show a "queued" indicator immediately.
  emitToRoom(`user:${userId}`, 'report:queued', {
    reportId: report.id,
    reportType: report.reportType,
    name: report.name,
  });

  return report;
}
// ─── listReports ──────────────────────────────────────────────────────────────
/**
 * Lists generated reports for a port, optionally filtered by status,
 * newest first, with page/limit pagination.
 *
 * @returns `{ data, total }` where `total` is the unpaginated row count.
 */
export async function listReports(portId: string, query: ListReportsInput) {
  const filters = [eq(generatedReports.portId, portId)];
  if (query.status) {
    filters.push(eq(generatedReports.status, query.status));
  }
  const whereClause = and(...filters);
  const skip = (query.page - 1) * query.limit;

  const rowsPromise = db
    .select()
    .from(generatedReports)
    .where(whereClause)
    .orderBy(desc(generatedReports.createdAt))
    .limit(query.limit)
    .offset(skip);
  const countPromise = db.$count(generatedReports, whereClause);

  const [rows, total] = await Promise.all([rowsPromise, countPromise]);
  return {
    data: rows,
    total: Number(total),
  };
}
// ─── getReport ────────────────────────────────────────────────────────────────
/**
 * Loads a single report scoped to a port.
 * @throws NotFoundError when no matching row exists.
 */
export async function getReport(reportId: string, portId: string) {
  const found = await db.query.generatedReports.findFirst({
    where: and(eq(generatedReports.id, reportId), eq(generatedReports.portId, portId)),
  });
  if (found) {
    return found;
  }
  throw new NotFoundError('Report');
}
// ─── getDownloadUrl ───────────────────────────────────────────────────────────
/**
 * Returns a presigned download URL for a completed report's PDF.
 *
 * @throws NotFoundError when the report (or its backing file row) is missing.
 * @throws Error when the report has not finished generating.
 */
export async function getDownloadUrl(reportId: string, portId: string) {
  const report = await db.query.generatedReports.findFirst({
    where: and(eq(generatedReports.id, reportId), eq(generatedReports.portId, portId)),
  });
  if (!report) {
    throw new NotFoundError('Report');
  }
  // A report is downloadable only once it is 'ready' and has a stored file.
  if (report.status !== 'ready' || !report.fileId) {
    throw new Error('Report is not ready for download');
  }
  const file = await db.query.files.findFirst({
    where: eq(files.id, report.fileId),
  });
  if (!file) {
    throw new NotFoundError('File');
  }
  return { url: await getPresignedUrl(file.storagePath) };
}
// ─── generateReport ───────────────────────────────────────────────────────────
/**
 * Worker entry point: renders a queued report to PDF and stores it.
 *
 * Loads the generatedReports row, fetches the report dataset, renders the
 * PDF, uploads it to MinIO, records a files row, then marks the report
 * 'ready' and notifies the requesting user (socket event + notification
 * row). On any failure the report is marked 'failed' and a `report:failed`
 * socket event is emitted before rethrowing so BullMQ records the job as
 * failed.
 *
 * @param reportJobId - id of the generatedReports row to process
 * @throws Error when the report row is missing, the report type is unknown,
 *   or any generation/upload/insert step fails.
 */
export async function generateReport(reportJobId: string): Promise<void> {
  // 1. Fetch the generatedReports record
  const report = await db.query.generatedReports.findFirst({
    where: eq(generatedReports.id, reportJobId),
  });
  if (!report) {
    throw new Error(`Report job not found: ${reportJobId}`);
  }
  const { portId, reportType, name, parameters, requestedBy } = report;
  try {
    // 2. Update status = 'processing', startedAt = now
    await db
      .update(generatedReports)
      .set({ status: 'processing', startedAt: new Date() })
      .where(eq(generatedReports.id, reportJobId));
    // 3. Look up REPORT_TYPE_MAP[reportType]
    const typeKey = reportType as ReportType;
    const config = REPORT_TYPE_MAP[typeKey];
    if (!config) {
      throw new Error(`Unknown report type: ${reportType}`);
    }
    const params = (parameters ?? {}) as Record<string, unknown>;
    // 4. Fetch data
    const data = await config.fetchData(portId, params);
    // 5. Get port info for name in PDF
    const port = await db.query.ports.findFirst({
      where: eq(ports.id, portId),
    });
    const portName = port?.name ?? 'Port Nimara';
    const portSlug = port?.slug ?? 'port';
    // 6. Build inputs (pass portName)
    // NOTE(review): this cast erases the per-type `data` typing — each
    // buildInputs expects its own data shape; TODO tighten with a generic map.
    const inputs = (config.buildInputs as (data: any, portName: string) => Record<string, string>[])(data, portName);
    // 7. Generate PDF
    const pdfBytes = await generatePdf(config.template, inputs);
    // 8. Build storage path
    const fileId = crypto.randomUUID();
    const storagePath = buildStoragePath(portSlug, 'reports', reportJobId, fileId, 'pdf');
    // 9. Upload PDF to MinIO
    const buffer = Buffer.from(pdfBytes);
    await minioClient.putObject(
      env.MINIO_BUCKET,
      storagePath,
      buffer,
      buffer.length,
      { 'Content-Type': 'application/pdf', 'report-type': reportType },
    );
    // 10. Insert into files table
    const [fileRecord] = await db
      .insert(files)
      .values({
        id: fileId,
        portId,
        filename: `${name.replace(/[^a-z0-9]/gi, '_').toLowerCase()}_${Date.now()}.pdf`,
        originalName: `${name}.pdf`,
        mimeType: 'application/pdf',
        sizeBytes: String(buffer.length),
        storagePath,
        storageBucket: env.MINIO_BUCKET,
        category: 'misc',
        uploadedBy: requestedBy,
      })
      .returning();
    if (!fileRecord) {
      throw new Error('Failed to insert file record');
    }
    // 11. Update generatedReports: status='ready', fileId, completedAt
    await db
      .update(generatedReports)
      .set({
        status: 'ready',
        fileId: fileRecord.id,
        completedAt: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(generatedReports.id, reportJobId));
    // 12. Emit report:ready socket event
    emitToRoom(`user:${requestedBy}`, 'report:ready', {
      reportId: reportJobId,
      name,
    });
    // 13. Create notification for requestedBy user
    await db.insert(notifications).values({
      portId,
      userId: requestedBy,
      type: 'system_alert',
      title: 'Report Ready',
      description: `Your report "${name}" is ready to download.`,
      entityType: 'report',
      entityId: reportJobId,
    });
    logger.info({ reportJobId, reportType }, 'Report generated successfully');
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : 'Unknown error';
    logger.error({ reportJobId, err }, 'Report generation failed');
    // Mark the row failed so the UI can surface the error message.
    await db
      .update(generatedReports)
      .set({
        status: 'failed',
        errorMessage,
        updatedAt: new Date(),
      })
      .where(eq(generatedReports.id, reportJobId));
    emitToRoom(`user:${requestedBy}`, 'report:failed', {
      reportId: reportJobId,
      name,
      error: errorMessage,
    });
    // Rethrow so BullMQ records the job as failed (and can retry it).
    throw err;
  }
}

View File

@@ -0,0 +1,173 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { savedViews } from '@/lib/db/schema';
import { NotFoundError } from '@/lib/errors';
import type { CreateSavedViewInput, UpdateSavedViewInput } from '@/lib/validators/saved-views';
/**
 * CRUD + default management for per-user saved list views (filters, sort
 * config, column layout). Every operation is scoped to (portId, userId);
 * at most one view per (port, user, entityType) may be flagged default.
 */
export const savedViewsService = {
  /** Lists the user's saved views for a port, optionally filtered by entity type. */
  async list(portId: string, userId: string, entityType?: string) {
    const conditions = [
      eq(savedViews.portId, portId),
      eq(savedViews.userId, userId),
    ];
    if (entityType) {
      conditions.push(eq(savedViews.entityType, entityType));
    }
    return db
      .select()
      .from(savedViews)
      .where(and(...conditions));
  },
  /**
   * Creates a saved view. When the new view is flagged default, any existing
   * default for the same (port, user, entityType) is unset first so only one
   * default remains.
   */
  async create(portId: string, userId: string, data: CreateSavedViewInput) {
    if (data.isDefault) {
      await db
        .update(savedViews)
        .set({ isDefault: false })
        .where(
          and(
            eq(savedViews.portId, portId),
            eq(savedViews.userId, userId),
            eq(savedViews.entityType, data.entityType),
            eq(savedViews.isDefault, true),
          ),
        );
    }
    const [view] = await db
      .insert(savedViews)
      .values({
        portId,
        userId,
        entityType: data.entityType,
        name: data.name,
        filters: data.filters ?? {},
        sortConfig: data.sortConfig ?? null,
        columnConfig: data.columnConfig ?? null,
        isShared: data.isShared ?? false,
        isDefault: data.isDefault ?? false,
      })
      .returning();
    return view;
  },
  /**
   * Partially updates a view owned by the user. If the update flags the view
   * default, the previous default for the view's (possibly updated)
   * entityType is unset first.
   * @throws NotFoundError when the view does not belong to this user/port.
   */
  async update(portId: string, userId: string, viewId: string, data: UpdateSavedViewInput) {
    const existing = await db.query.savedViews.findFirst({
      where: and(
        eq(savedViews.id, viewId),
        eq(savedViews.portId, portId),
        eq(savedViews.userId, userId),
      ),
    });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    if (data.isDefault) {
      // Unset the default on the entityType the view will have AFTER this update.
      const entityType = data.entityType ?? existing.entityType;
      await db
        .update(savedViews)
        .set({ isDefault: false })
        .where(
          and(
            eq(savedViews.portId, portId),
            eq(savedViews.userId, userId),
            eq(savedViews.entityType, entityType),
            eq(savedViews.isDefault, true),
          ),
        );
    }
    const [updated] = await db
      .update(savedViews)
      .set({
        // Spread only the fields the caller explicitly provided.
        ...(data.name !== undefined && { name: data.name }),
        ...(data.entityType !== undefined && { entityType: data.entityType }),
        ...(data.filters !== undefined && { filters: data.filters }),
        ...(data.sortConfig !== undefined && { sortConfig: data.sortConfig }),
        ...(data.columnConfig !== undefined && { columnConfig: data.columnConfig }),
        ...(data.isShared !== undefined && { isShared: data.isShared }),
        ...(data.isDefault !== undefined && { isDefault: data.isDefault }),
        updatedAt: new Date(),
      })
      .where(
        and(
          eq(savedViews.id, viewId),
          eq(savedViews.portId, portId),
          eq(savedViews.userId, userId),
        ),
      )
      .returning();
    return updated;
  },
  /**
   * Deletes a view owned by the user.
   * @throws NotFoundError when the view does not belong to this user/port.
   */
  async delete(portId: string, userId: string, viewId: string) {
    const existing = await db.query.savedViews.findFirst({
      where: and(
        eq(savedViews.id, viewId),
        eq(savedViews.portId, portId),
        eq(savedViews.userId, userId),
      ),
    });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    await db
      .delete(savedViews)
      .where(
        and(
          eq(savedViews.id, viewId),
          eq(savedViews.portId, portId),
          eq(savedViews.userId, userId),
        ),
      );
  },
  /**
   * Marks one view as the user's default for an entity type, unsetting any
   * previous default for that entity type first.
   * @throws NotFoundError when the view does not match user/port/entityType.
   */
  async setDefault(portId: string, userId: string, entityType: string, viewId: string) {
    const existing = await db.query.savedViews.findFirst({
      where: and(
        eq(savedViews.id, viewId),
        eq(savedViews.portId, portId),
        eq(savedViews.userId, userId),
        eq(savedViews.entityType, entityType),
      ),
    });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    // Unset any existing default for this entityType + user + port
    await db
      .update(savedViews)
      .set({ isDefault: false })
      .where(
        and(
          eq(savedViews.portId, portId),
          eq(savedViews.userId, userId),
          eq(savedViews.entityType, entityType),
          eq(savedViews.isDefault, true),
        ),
      );
    const [updated] = await db
      .update(savedViews)
      .set({ isDefault: true, updatedAt: new Date() })
      .where(
        and(
          eq(savedViews.id, viewId),
          eq(savedViews.portId, portId),
          eq(savedViews.userId, userId),
        ),
      )
      .returning();
    return updated;
  },
};

View File

@@ -0,0 +1,139 @@
import { sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { redis } from '@/lib/redis';
// ─── Types ────────────────────────────────────────────────────────────────────
// Minimal client row returned by global search.
interface ClientResult {
  id: string;
  fullName: string;
  companyName: string | null;
}
// Interest hit, denormalized with client/berth display fields for the UI.
interface InterestResult {
  id: string;
  clientName: string;
  berthMooringNumber: string | null;
  pipelineStage: string;
}
// Berth hit, matched on mooring number.
interface BerthResult {
  id: string;
  mooringNumber: string;
  area: string | null;
  status: string;
}
// Combined result set for one global search query.
interface SearchResults {
  clients: ClientResult[];
  interests: InterestResult[];
  berths: BerthResult[];
}
// ─── Search ───────────────────────────────────────────────────────────────────
/**
 * Global search across clients, berths, and interests for one port, all
 * three queries run in parallel, max 10 hits per entity type.
 *
 * - Clients: Postgres full-text search over full_name + company_name,
 *   ranked by ts_rank.
 * - Berths: trigram match on mooring_number — the `%` operator and
 *   similarity() require the pg_trgm extension to be installed.
 * - Interests: ILIKE substring match on client name or berth mooring number.
 */
export async function search(portId: string, query: string): Promise<SearchResults> {
  // Destructuring order matches the Promise.all array: clients, berths, interests.
  const [clientRows, berthRows, interestRows] = await Promise.all([
    // Clients: full-text search via tsvector
    db.execute<{ id: string; full_name: string; company_name: string | null }>(sql`
      SELECT id, full_name, company_name
      FROM clients
      WHERE port_id = ${portId}
        AND archived_at IS NULL
        AND to_tsvector('simple', coalesce(full_name, '') || ' ' || coalesce(company_name, ''))
            @@ plainto_tsquery('simple', ${query})
      ORDER BY ts_rank(
        to_tsvector('simple', coalesce(full_name, '') || ' ' || coalesce(company_name, '')),
        plainto_tsquery('simple', ${query})
      ) DESC
      LIMIT 10
    `),
    // Berths: trigram similarity on mooring_number
    db.execute<{ id: string; mooring_number: string; area: string | null; status: string }>(sql`
      SELECT id, mooring_number, area, status
      FROM berths
      WHERE port_id = ${portId}
        AND mooring_number % ${query}
      ORDER BY similarity(mooring_number, ${query}) DESC
      LIMIT 10
    `),
    // Interests: JOIN to clients and berths, ILIKE search
    db.execute<{
      id: string;
      full_name: string;
      mooring_number: string | null;
      pipeline_stage: string;
    }>(sql`
      SELECT
        i.id,
        c.full_name,
        b.mooring_number,
        i.pipeline_stage
      FROM interests i
      JOIN clients c ON i.client_id = c.id
      LEFT JOIN berths b ON i.berth_id = b.id
      WHERE i.port_id = ${portId}
        AND i.archived_at IS NULL
        AND (
          c.full_name ILIKE ${'%' + query + '%'}
          OR b.mooring_number ILIKE ${'%' + query + '%'}
        )
      LIMIT 10
    `),
  ]);
  // Map snake_case SQL columns onto the camelCase result interfaces.
  return {
    clients: Array.from(clientRows).map((r) => ({
      id: r.id,
      fullName: r.full_name,
      companyName: r.company_name ?? null,
    })),
    berths: Array.from(berthRows).map((r) => ({
      id: r.id,
      mooringNumber: r.mooring_number,
      area: r.area ?? null,
      status: r.status,
    })),
    interests: Array.from(interestRows).map((r) => ({
      id: r.id,
      clientName: r.full_name,
      berthMooringNumber: r.mooring_number ?? null,
      pipelineStage: r.pipeline_stage,
    })),
  };
}
// ─── Recent Searches ──────────────────────────────────────────────────────────
// Retention window for a user's recent-search sorted set (30 days, in seconds).
const RECENT_SEARCH_TTL = 60 * 60 * 24 * 30;
// Maximum number of recent search terms kept per user/port.
const RECENT_SEARCH_MAX = 10;
/** Redis key for one user's recent searches within one port. */
function recentSearchKey(userId: string, portId: string): string {
  return ['recent-search', userId, portId].join(':');
}
/**
 * Fire-and-forget — saves a search term to the user's recent searches sorted set.
 * The set is trimmed to the newest RECENT_SEARCH_MAX entries and its TTL
 * refreshed; any Redis error is deliberately swallowed.
 */
export function saveRecentSearch(userId: string, portId: string, searchTerm: string): void {
  const key = recentSearchKey(userId, portId);
  void (async () => {
    // Score by wall-clock time so zrevrange returns newest-first.
    await redis.zadd(key, Date.now(), searchTerm);
    // Drop everything below the RECENT_SEARCH_MAX newest entries.
    await redis.zremrangebyrank(key, 0, -(RECENT_SEARCH_MAX + 1));
    await redis.expire(key, RECENT_SEARCH_TTL);
  })().catch(() => {
    // Intentionally swallowed — recent searches are non-critical
  });
}
/**
 * Returns the user's most recent searches, newest first.
 */
export async function getRecentSearches(userId: string, portId: string): Promise<string[]> {
  // zrevrange returns highest score (most recent) first.
  return redis.zrevrange(recentSearchKey(userId, portId), 0, RECENT_SEARCH_MAX - 1);
}

View File

@@ -0,0 +1,23 @@
import { buildStoragePath } from '@/lib/minio';
import { MIME_TO_EXT } from '@/lib/constants/file-validation';
/**
 * Builds a unique MinIO object key for an upload, deriving the file
 * extension from the MIME type ("bin" for unknown types).
 */
export function generateStorageKey(
  portSlug: string,
  entity: string,
  entityId: string,
  mimeType: string,
): string {
  const extension = MIME_TO_EXT[mimeType] ?? 'bin';
  return buildStoragePath(portSlug, entity, entityId, crypto.randomUUID(), extension);
}
/**
 * Sanitizes a user-supplied filename: strips path separators/drive markers,
 * null bytes, and other control characters, then trims whitespace and caps
 * the result at 255 characters.
 */
export function sanitizeFilename(name: string): string {
  // Path separators and drive markers go first.
  let cleaned = name.replace(/[/\\:]/g, '');
  // Null bytes, then the remaining C0 control characters and DEL.
  cleaned = cleaned.replace(/\x00/g, '');
  // eslint-disable-next-line no-control-regex
  cleaned = cleaned.replace(/[\x01-\x1f\x7f]/g, '');
  // Conventional max filename length on common filesystems.
  return cleaned.trim().slice(0, 255);
}

View File

@@ -0,0 +1,377 @@
import { db } from '@/lib/db';
import { auditLogs } from '@/lib/db/schema';
import { redis } from '@/lib/redis';
import { minioClient } from '@/lib/minio/index';
import { getQueue, QUEUE_CONFIGS, type QueueName } from '@/lib/queue';
import { createAuditLog } from '@/lib/audit';
import { env } from '@/lib/env';
import { sql, desc, or, eq } from 'drizzle-orm';
import { logger } from '@/lib/logger';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Health-probe result for one infrastructure dependency. */
export interface ServiceStatus {
  name: string;
  status: 'healthy' | 'degraded' | 'down';
  responseTimeMs: number;
  // Error detail, set only when the service is degraded or down.
  details?: string;
}
/** Aggregate health across all probed services. */
export interface HealthStatus {
  // 'down' if any service is down; else 'degraded' if any is degraded; else 'healthy'.
  overall: 'healthy' | 'degraded' | 'down';
  services: ServiceStatus[];
  checkedAt: Date;
}
/** Per-state job counts for one BullMQ queue. */
export interface QueueStatus {
  name: string;
  waiting: number;
  active: number;
  completed: number;
  failed: number;
  delayed: number;
}
/** Admin-facing summary of a single queue job (payload truncated upstream). */
export interface QueueJobSummary {
  id: string;
  name: string;
  data: unknown;
  status: string;
  // Epoch-millisecond timestamps from BullMQ; undefined when the stage was not reached.
  timestamp: number | undefined;
  processedOn: number | undefined;
  finishedOn: number | undefined;
  failedReason: string | undefined;
}
/** One page of queue jobs plus pagination metadata. */
export interface PaginatedQueueJobs {
  jobs: QueueJobSummary[];
  total: number;
  page: number;
  limit: number;
}
/** Snapshot of the Socket.io connection count. */
export interface ConnectionStatus {
  totalConnections: number;
}
/** Recent error surfaced from either the audit log or a failed queue job. */
export interface RecentError {
  id: string;
  source: 'audit' | 'queue';
  message: string;
  timestamp: Date;
  metadata?: Record<string, unknown>;
}
// ─── Timeout helper ───────────────────────────────────────────────────────────
/**
 * Settles with `promise`, or rejects with a timeout Error if it has not
 * settled within `ms` milliseconds.
 *
 * Fix: the original never cleared its setTimeout, so every call left a live
 * timer (keeping the Node event loop awake) for the full `ms` even after the
 * wrapped promise had already settled. The timer is now cleared as soon as
 * the race settles, on both the success and failure paths.
 */
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<T>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`Timed out after ${ms}ms`)), ms);
  });
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}
// ─── healthCheck ──────────────────────────────────────────────────────────────
/**
 * Probes all infrastructure dependencies in parallel and reports a combined
 * status: 'down' if any service is down, else 'degraded' if any is degraded,
 * else 'healthy'.
 */
export async function healthCheck(): Promise<HealthStatus> {
  const settled = await Promise.allSettled([
    checkPostgres(),
    checkRedis(),
    checkMinio(),
    checkDocumenso(),
  ]);
  const services: ServiceStatus[] = settled.map((outcome) =>
    outcome.status === 'fulfilled'
      ? outcome.value
      : {
          // Defensive fallback — each checker catches its own errors.
          name: 'unknown',
          status: 'down' as const,
          responseTimeMs: 0,
          details: String(outcome.reason),
        },
  );
  const overall = services.some((s) => s.status === 'down')
    ? 'down'
    : services.some((s) => s.status === 'degraded')
      ? 'degraded'
      : 'healthy';
  return { overall, services, checkedAt: new Date() };
}
/** Pings PostgreSQL with `SELECT 1` under a 5s timeout. */
async function checkPostgres(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    await withTimeout(db.execute(sql`SELECT 1`), 5000);
    return { name: 'PostgreSQL', status: 'healthy', responseTimeMs: Date.now() - startedAt };
  } catch (err) {
    const details = err instanceof Error ? err.message : 'Unknown error';
    return { name: 'PostgreSQL', status: 'down', responseTimeMs: Date.now() - startedAt, details };
  }
}
/** Pings Redis under a 5s timeout; anything other than 'PONG' counts as degraded. */
async function checkRedis(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    const pong = await withTimeout(redis.ping(), 5000);
    return {
      name: 'Redis',
      status: pong === 'PONG' ? 'healthy' : 'degraded',
      responseTimeMs: Date.now() - startedAt,
    };
  } catch (err) {
    const details = err instanceof Error ? err.message : 'Unknown error';
    return { name: 'Redis', status: 'down', responseTimeMs: Date.now() - startedAt, details };
  }
}
/** Verifies the configured MinIO bucket is reachable, under a 5s timeout. */
async function checkMinio(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    await withTimeout(minioClient.bucketExists(env.MINIO_BUCKET), 5000);
    return { name: 'MinIO', status: 'healthy', responseTimeMs: Date.now() - startedAt };
  } catch (err) {
    const details = err instanceof Error ? err.message : 'Unknown error';
    return { name: 'MinIO', status: 'down', responseTimeMs: Date.now() - startedAt, details };
  }
}
/**
 * Probes the Documenso e-signature service's health endpoint, aborting the
 * request after 5s. A non-2xx response counts as degraded; a network error
 * or abort counts as down.
 *
 * Fix: the original called clearTimeout(timer) twice — once inline after the
 * fetch and again in a `finally` — consolidated so the timer is cleared
 * exactly once on every path.
 */
async function checkDocumenso(): Promise<ServiceStatus> {
  const start = Date.now();
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 5000);
  try {
    const res = await fetch(`${env.DOCUMENSO_API_URL}/api/v1/health`, {
      signal: controller.signal,
      method: 'GET',
    });
    return {
      name: 'Documenso',
      status: res.ok ? 'healthy' : 'degraded',
      responseTimeMs: Date.now() - start,
    };
  } catch (err) {
    return {
      name: 'Documenso',
      status: 'down',
      responseTimeMs: Date.now() - start,
      details: err instanceof Error ? err.message : 'Unreachable',
    };
  } finally {
    clearTimeout(timer);
  }
}
// ─── getQueueDashboard ────────────────────────────────────────────────────────
/**
 * Returns waiting/active/completed/failed/delayed counts for every
 * configured BullMQ queue. Queues whose counts cannot be read are logged
 * and reported as all-zero instead of failing the whole dashboard.
 */
export async function getQueueDashboard(): Promise<QueueStatus[]> {
  const names = Object.keys(QUEUE_CONFIGS) as QueueName[];
  const settled = await Promise.allSettled(
    names.map(async (queueName): Promise<QueueStatus> => {
      const counts = await getQueue(queueName).getJobCounts(
        'waiting',
        'active',
        'completed',
        'failed',
        'delayed',
      );
      return {
        name: queueName,
        waiting: counts.waiting ?? 0,
        active: counts.active ?? 0,
        completed: counts.completed ?? 0,
        failed: counts.failed ?? 0,
        delayed: counts.delayed ?? 0,
      };
    }),
  );
  return settled.map((outcome, idx) => {
    if (outcome.status === 'fulfilled') {
      return outcome.value;
    }
    const queueName = names[idx] ?? 'unknown';
    logger.warn({ queue: queueName, err: outcome.reason }, 'Failed to get queue counts');
    return { name: queueName, waiting: 0, active: 0, completed: 0, failed: 0, delayed: 0 };
  });
}
// ─── getQueueJobs ─────────────────────────────────────────────────────────────
type JobStatus = 'waiting' | 'active' | 'completed' | 'failed' | 'delayed';
/**
 * Returns a page of jobs for one queue in the given state, with each job's
 * data truncated to at most 500 serialized characters.
 *
 * Fix: the original ran JSON.parse over `dataStr.slice(0, 500) + '...(truncated)'`,
 * which is never valid JSON — so every oversized payload threw and collapsed
 * to '[unparseable]'. The truncated string itself is now returned.
 *
 * @param queueName - which BullMQ queue to read
 * @param status - job state to list (defaults to 'failed')
 * @param page - 1-based page number
 * @param limit - page size
 */
export async function getQueueJobs(
  queueName: QueueName,
  status: JobStatus = 'failed',
  page = 1,
  limit = 20,
): Promise<PaginatedQueueJobs> {
  const queue = getQueue(queueName);
  const start = (page - 1) * limit;
  const end = start + limit - 1;
  const jobs = await queue.getJobs([status], start, end);
  const counts = await queue.getJobCounts(status);
  const total = counts[status] ?? 0;
  const summaries: QueueJobSummary[] = jobs.map((job) => {
    // Truncate job data to prevent huge payloads in the admin UI.
    let truncatedData: unknown;
    try {
      const dataStr = JSON.stringify(job.data);
      truncatedData =
        dataStr.length > 500
          ? dataStr.slice(0, 500) + '...(truncated)'
          : job.data;
    } catch {
      // Non-serializable data (circular refs, BigInt, …).
      truncatedData = '[unparseable]';
    }
    return {
      id: job.id ?? '',
      name: job.name,
      data: truncatedData,
      status,
      timestamp: job.timestamp,
      processedOn: job.processedOn ?? undefined,
      finishedOn: job.finishedOn ?? undefined,
      failedReason: job.failedReason ?? undefined,
    };
  });
  return { jobs: summaries, total, page, limit };
}
// ─── retryJob ─────────────────────────────────────────────────────────────────
/**
 * Retries a job in the given queue and records a best-effort audit entry.
 * @throws Error when the job cannot be found in the queue.
 */
export async function retryJob(
  queueName: QueueName,
  jobId: string,
  userId: string,
): Promise<void> {
  const job = await getQueue(queueName).getJob(jobId);
  if (!job) {
    throw new Error(`Job ${jobId} not found in queue ${queueName}`);
  }
  await job.retry();
  // Fire-and-forget audit trail; a logging failure must not fail the retry.
  void createAuditLog({
    userId,
    portId: null,
    action: 'update',
    entityType: 'queue_job',
    entityId: jobId,
    metadata: { queueName, jobName: job.name, action: 'retry' },
    ipAddress: 'system',
    userAgent: 'system',
  });
}
// ─── deleteJob ────────────────────────────────────────────────────────────────
/**
 * Removes a job from the given queue and records a best-effort audit entry.
 * @throws Error when the job cannot be found in the queue.
 */
export async function deleteJob(
  queueName: QueueName,
  jobId: string,
  userId: string,
): Promise<void> {
  const job = await getQueue(queueName).getJob(jobId);
  if (!job) {
    throw new Error(`Job ${jobId} not found in queue ${queueName}`);
  }
  await job.remove();
  // Fire-and-forget audit trail; a logging failure must not fail the delete.
  void createAuditLog({
    userId,
    portId: null,
    action: 'delete',
    entityType: 'queue_job',
    entityId: jobId,
    metadata: { queueName, jobName: job.name, action: 'delete' },
    ipAddress: 'system',
    userAgent: 'system',
  });
}
// ─── getActiveConnections ─────────────────────────────────────────────────────
/**
 * Counts currently connected Socket.io clients. Returns 0 when the socket
 * server is unavailable (e.g. not yet initialized).
 */
export async function getActiveConnections(): Promise<ConnectionStatus> {
  try {
    // Imported lazily so this module stays loadable without a socket server.
    const socketModule = await import('@/lib/socket/server');
    const connected = await socketModule.getIO().fetchSockets();
    return { totalConnections: connected.length };
  } catch {
    return { totalConnections: 0 };
  }
}
// ─── getRecentErrors ──────────────────────────────────────────────────────────
/**
 * Collects recent operational errors from two sources — permission-denied
 * audit-log entries and failed BullMQ jobs (up to 5 sampled per queue) —
 * merged, sorted newest-first, and capped at `limit` entries.
 */
export async function getRecentErrors(limit = 20): Promise<RecentError[]> {
  // Fetch permission-denied audit log entries
  const auditErrors = await db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      metadata: auditLogs.metadata,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(eq(auditLogs.action, 'permission_denied'))
    .orderBy(desc(auditLogs.createdAt))
    .limit(limit);
  const auditResults: RecentError[] = auditErrors.map((row) => ({
    id: row.id,
    source: 'audit' as const,
    message: `Permission denied on ${row.entityType}`,
    timestamp: row.createdAt,
    metadata: (row.metadata as Record<string, unknown>) ?? {},
  }));
  // Fetch failed jobs from all queues (sample — top 5 per queue)
  const queueNames = Object.keys(QUEUE_CONFIGS) as QueueName[];
  const failedJobResults = await Promise.allSettled(
    queueNames.map(async (name) => {
      const queue = getQueue(name);
      const jobs = await queue.getJobs(['failed'], 0, 4);
      return jobs.map((job): RecentError => ({
        id: `${name}:${job.id ?? ''}`,
        source: 'queue',
        // Prefer the finish time; fall back to the enqueue timestamp.
        message: `Queue job failed: ${job.name} in ${name}`,
        timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
        metadata: { queueName: name, failedReason: job.failedReason },
      }));
    }),
  );
  // Unreachable queues are silently skipped — this endpoint is best-effort.
  const queueErrors: RecentError[] = failedJobResults
    .filter((r): r is PromiseFulfilledResult<RecentError[]> => r.status === 'fulfilled')
    .flatMap((r) => r.value);
  // Merge and sort combined list by timestamp descending
  const combined = [...auditResults, ...queueErrors].sort(
    (a, b) => b.timestamp.getTime() - a.timestamp.getTime(),
  );
  return combined.slice(0, limit);
}

View File

@@ -0,0 +1,139 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { tags } from '@/lib/db/schema';
import { createAuditLog } from '@/lib/audit';
import { ConflictError, NotFoundError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import type { CreateTagInput, UpdateTagInput } from '@/lib/validators/tags';
// Request context threaded into audit-log writes for tag mutations:
// the acting user, the port (tenant) scope, and the request's network
// identifiers recorded for the audit trail.
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
/** Returns every tag for a port, ordered alphabetically by name. */
export async function listTags(portId: string) {
  return db.select().from(tags).where(eq(tags.portId, portId)).orderBy(tags.name);
}
/**
 * Creates a tag scoped to a port, enforcing a unique (portId, name) pair.
 * Writes an audit-log entry (fire-and-forget) and broadcasts a port-wide
 * system alert on success.
 *
 * Fix: replaced the repeated non-null assertions on the insert result
 * (`tag!`) with one explicit check that throws a descriptive Error,
 * matching the error-handling style used elsewhere (e.g. requestReport).
 *
 * @throws ConflictError when a tag with the same name exists in the port.
 * @throws Error when the insert unexpectedly returns no row.
 */
export async function createTag(
  portId: string,
  data: CreateTagInput,
  meta: AuditMeta,
) {
  // Enforce unique (portId, name)
  const existing = await db.query.tags.findFirst({
    where: and(eq(tags.portId, portId), eq(tags.name, data.name)),
  });
  if (existing) {
    throw new ConflictError(`A tag named "${data.name}" already exists in this port`);
  }
  const [tag] = await db
    .insert(tags)
    .values({ portId, name: data.name, color: data.color })
    .returning();
  if (!tag) {
    throw new Error('Failed to create tag record');
  }
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'tag',
    entityId: tag.id,
    newValue: { name: tag.name, color: tag.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:created',
    message: `Tag "${tag.name}" created`,
    severity: 'info',
  });
  return tag;
}
/**
 * Updates a tag's name and/or color, enforcing (portId, name) uniqueness
 * when the name changes. Audit-logs old/new values (fire-and-forget) and
 * broadcasts a port-wide system alert.
 *
 * Fix: the update result is checked explicitly instead of relying on
 * repeated non-null assertions (`updated!`).
 *
 * @throws NotFoundError when the tag does not exist in the port (or was
 *   deleted between the lookup and the update).
 * @throws ConflictError when the new name collides with another tag.
 */
export async function updateTag(
  id: string,
  portId: string,
  data: UpdateTagInput,
  meta: AuditMeta,
) {
  const tag = await db.query.tags.findFirst({
    where: and(eq(tags.id, id), eq(tags.portId, portId)),
  });
  if (!tag) throw new NotFoundError('Tag');
  // Check name uniqueness if name is being changed
  if (data.name && data.name !== tag.name) {
    const conflict = await db.query.tags.findFirst({
      where: and(eq(tags.portId, portId), eq(tags.name, data.name)),
    });
    if (conflict) {
      throw new ConflictError(`A tag named "${data.name}" already exists in this port`);
    }
  }
  const [updated] = await db
    .update(tags)
    .set({ ...(data.name ? { name: data.name } : {}), ...(data.color ? { color: data.color } : {}) })
    .where(and(eq(tags.id, id), eq(tags.portId, portId)))
    .returning();
  if (!updated) {
    // Row vanished between the lookup and the update.
    throw new NotFoundError('Tag');
  }
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'tag',
    entityId: id,
    oldValue: { name: tag.name, color: tag.color },
    newValue: { name: updated.name, color: updated.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:updated',
    message: `Tag "${updated.name}" updated`,
    severity: 'info',
  });
  return updated;
}
/**
 * Deletes a port-scoped tag, audit-logging the removal (fire-and-forget)
 * and broadcasting a port-wide system alert.
 * @throws NotFoundError when the tag does not exist in the port.
 */
export async function deleteTag(
  id: string,
  portId: string,
  meta: AuditMeta,
) {
  const scope = and(eq(tags.id, id), eq(tags.portId, portId));
  const tag = await db.query.tags.findFirst({ where: scope });
  if (!tag) throw new NotFoundError('Tag');
  await db.delete(tags).where(scope);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'tag',
    entityId: id,
    oldValue: { name: tag.name, color: tag.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:deleted',
    message: `Tag "${tag.name}" deleted`,
    severity: 'info',
  });
}

View File

@@ -0,0 +1,74 @@
import { and, eq, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { webhooks, webhookDeliveries } from '@/lib/db/schema/system';
import { getQueue } from '@/lib/queue';
import { logger } from '@/lib/logger';
import { INTERNAL_TO_WEBHOOK_MAP } from '@/lib/services/webhook-event-map';
/**
 * Translates an internal socket event to the outbound webhook event name,
 * queries all active webhooks for the given port that are subscribed to that
 * event, and enqueues a BullMQ delivery job for each one.
 *
 * This function is fire-and-forget — callers should use `void dispatchWebhookEvent(...)`.
 *
 * Fix: the delivery-record insert result is now checked explicitly instead
 * of a non-null assertion (`delivery!`); a failed insert for one webhook is
 * logged and skipped rather than throwing out of the whole dispatch loop.
 */
export async function dispatchWebhookEvent(
  portId: string,
  internalEvent: string,
  payload: Record<string, unknown>,
): Promise<void> {
  const webhookEvent = INTERNAL_TO_WEBHOOK_MAP[internalEvent];
  if (!webhookEvent) {
    // No mapping for this event — skip silently
    return;
  }
  try {
    // Query active webhooks for this port that subscribe to this event
    const matchingWebhooks = await db
      .select({ id: webhooks.id })
      .from(webhooks)
      .where(
        and(
          eq(webhooks.portId, portId),
          eq(webhooks.isActive, true),
          // Check if events array contains the webhook event
          sql`${webhooks.events} @> ARRAY[${webhookEvent}]::text[]`,
        ),
      );
    if (matchingWebhooks.length === 0) {
      return;
    }
    const queue = getQueue('webhooks');
    for (const webhook of matchingWebhooks) {
      // Create a pending delivery record before enqueueing
      const [delivery] = await db
        .insert(webhookDeliveries)
        .values({
          webhookId: webhook.id,
          eventType: webhookEvent,
          payload,
          status: 'pending',
        })
        .returning({ id: webhookDeliveries.id });
      if (!delivery) {
        logger.error(
          { portId, webhookId: webhook.id, webhookEvent },
          'Failed to create webhook delivery record',
        );
        continue;
      }
      await queue.add('deliver', {
        webhookId: webhook.id,
        portId,
        event: webhookEvent,
        deliveryId: delivery.id,
        payload,
      });
    }
  } catch (err) {
    // Never block callers — log and swallow
    logger.error(
      { portId, internalEvent, webhookEvent, err },
      'Failed to dispatch webhook event',
    );
  }
}

View File

@@ -0,0 +1,53 @@
// ─── Webhook Event Map ────────────────────────────────────────────────────────
// Defines the canonical set of outbound webhook event names and provides a
// translation map from internal camelCase socket events to dot-style webhook
// event names.
/**
 * Canonical outbound event names, in `<entity>.<action>` dot notation.
 * Serves both as the list of events a webhook may subscribe to and as the
 * value set of INTERNAL_TO_WEBHOOK_MAP below.
 */
export const WEBHOOK_EVENTS = [
  // clients
  'client.created',
  'client.updated',
  'client.archived',
  'client.merged',
  // interests (pipeline)
  'interest.created',
  'interest.stage_changed',
  'interest.berth_linked',
  // berths
  'berth.status_changed',
  'berth.updated',
  // documents
  'document.sent',
  'document.signed',
  'document.completed',
  'document.expired',
  // expenses / invoices
  'expense.created',
  'expense.updated',
  'invoice.created',
  'invoice.sent',
  'invoice.paid',
  'invoice.overdue',
  // registrations
  'registration.new',
] as const;
/** Union type of every valid outbound webhook event name. */
export type WebhookEvent = (typeof WEBHOOK_EVENTS)[number];
/**
 * Maps internal socket event names to outbound webhook event names.
 * Keys follow the internal `entity:camelCase` convention; values follow the
 * public `entity.snake_case` convention. Internal events with no entry here
 * are silently skipped by the webhook dispatcher.
 */
export const INTERNAL_TO_WEBHOOK_MAP: Record<string, WebhookEvent> = {
  'client:created': 'client.created',
  'client:updated': 'client.updated',
  'client:archived': 'client.archived',
  'client:merged': 'client.merged',
  'interest:created': 'interest.created',
  'interest:stageChanged': 'interest.stage_changed',
  'interest:berthLinked': 'interest.berth_linked',
  'berth:statusChanged': 'berth.status_changed',
  'berth:updated': 'berth.updated',
  'document:sent': 'document.sent',
  'document:signed': 'document.signed',
  'document:completed': 'document.completed',
  'document:expired': 'document.expired',
  'expense:created': 'expense.created',
  'expense:updated': 'expense.updated',
  'invoice:created': 'invoice.created',
  'invoice:sent': 'invoice.sent',
  'invoice:paid': 'invoice.paid',
  'invoice:overdue': 'invoice.overdue',
  'registration:new': 'registration.new',
};

View File

@@ -0,0 +1,331 @@
import { randomBytes } from 'node:crypto';
import { and, desc, eq, count } from 'drizzle-orm';
import { db } from '@/lib/db';
import { webhooks, webhookDeliveries } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { encrypt, decrypt } from '@/lib/utils/encryption';
import { NotFoundError } from '@/lib/errors';
import { getQueue } from '@/lib/queue';
import type {
CreateWebhookInput,
UpdateWebhookInput,
ListDeliveriesInput,
} from '@/lib/validators/webhooks';
import type { WebhookEvent } from '@/lib/services/webhook-event-map';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request context recorded alongside every audit-log entry for webhook CRUD. */
interface AuditMeta {
  userId: string; // actor performing the change
  portId: string; // tenant (port) the action is scoped to
  ipAddress: string; // request origin, kept for the audit trail
  userAgent: string; // request user agent, kept for the audit trail
}
// ─── Helpers ─────────────────────────────────────────────────────────────────
/** Produces a cryptographically random signing secret: 32 bytes as 64 hex chars. */
function generateSecret(): string {
  const raw = randomBytes(32);
  return raw.toString('hex');
}
/**
 * Builds a display-safe preview of a plaintext secret, keeping only the first
 * 5 and last 3 characters around an ellipsis (e.g. `wh_ab...xyz`).
 * Secrets shorter than 10 characters are hidden entirely as `***`.
 */
function maskSecret(plaintext: string): string {
  if (plaintext.length < 10) {
    return '***';
  }
  const head = plaintext.slice(0, 5);
  const tail = plaintext.slice(-3);
  return `${head}...${tail}`;
}
// ─── Create ───────────────────────────────────────────────────────────────────
export async function createWebhook(
portId: string,
userId: string,
data: CreateWebhookInput,
meta: AuditMeta,
) {
const plaintextSecret = generateSecret();
const encryptedSecret = encrypt(plaintextSecret);
const [webhook] = await db
.insert(webhooks)
.values({
portId,
name: data.name,
url: data.url,
secret: encryptedSecret,
events: data.events,
isActive: data.isActive,
createdBy: userId,
})
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'webhook',
entityId: webhook!.id,
newValue: { name: data.name, url: data.url, events: data.events },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
// Return with plaintext secret — shown ONCE only on creation
return {
...webhook!,
secret: plaintextSecret,
secretMasked: maskSecret(plaintextSecret),
};
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists all webhooks for a port, newest first. Stored secrets are never
 * returned; each row carries only a masked preview.
 */
export async function listWebhooks(portId: string) {
  const rows = await db
    .select()
    .from(webhooks)
    .where(eq(webhooks.portId, portId))
    .orderBy(desc(webhooks.createdAt));
  return rows.map((row) => {
    // Decrypt only to derive the mask; on any failure fall back to '***'.
    let masked = '***';
    if (row.secret) {
      try {
        masked = maskSecret(decrypt(row.secret));
      } catch {
        // undecryptable ciphertext — keep the opaque fallback mask
      }
    }
    return { ...row, secret: undefined, secretMasked: masked };
  });
}
// ─── Get Single ───────────────────────────────────────────────────────────────
/**
 * Fetches one webhook scoped to a port. The stored secret is stripped and
 * replaced with a masked preview.
 * @throws NotFoundError when the id is unknown or belongs to another port.
 */
export async function getWebhook(portId: string, webhookId: string) {
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  // A cross-port id is treated the same as a missing one.
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  let secretMasked = '***';
  if (webhook.secret) {
    try {
      secretMasked = maskSecret(decrypt(webhook.secret));
    } catch {
      // undecryptable ciphertext — keep the opaque fallback mask
    }
  }
  return { ...webhook, secret: undefined, secretMasked };
}
// ─── Update ───────────────────────────────────────────────────────────────────
export async function updateWebhook(
portId: string,
webhookId: string,
data: UpdateWebhookInput,
meta: AuditMeta,
) {
const existing = await db.query.webhooks.findFirst({
where: eq(webhooks.id, webhookId),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Webhook');
}
const [updated] = await db
.update(webhooks)
.set({ ...data, updatedAt: new Date() })
.where(and(eq(webhooks.id, webhookId), eq(webhooks.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'webhook',
entityId: webhookId,
oldValue: {
name: existing.name,
url: existing.url,
events: existing.events,
isActive: existing.isActive,
},
newValue: data as Record<string, unknown>,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
return { ...updated!, secret: undefined };
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Permanently removes a webhook (its delivery history goes with it via the
 * FK CASCADE) and records the deletion in the audit log.
 * @throws NotFoundError when the id is unknown or belongs to another port.
 */
export async function deleteWebhook(
  portId: string,
  webhookId: string,
  meta: AuditMeta,
) {
  const target = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!target || target.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  // webhook_deliveries rows are removed by the CASCADE constraint.
  await db
    .delete(webhooks)
    .where(and(eq(webhooks.portId, portId), eq(webhooks.id, webhookId)));
  // Audit logging is fire-and-forget; a failure must not block the response.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'webhook',
    entityId: webhookId,
    oldValue: { name: target.name, url: target.url },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Regenerate Secret ────────────────────────────────────────────────────────
/**
 * Rotates a webhook's signing secret. The replacement is stored encrypted and
 * the plaintext is returned a single time for the caller to copy.
 * @throws NotFoundError when the id is unknown or belongs to another port.
 */
export async function regenerateSecret(
  portId: string,
  webhookId: string,
  meta: AuditMeta,
) {
  const current = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!current || current.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  const secret = generateSecret();
  await db
    .update(webhooks)
    .set({ secret: encrypt(secret), updatedAt: new Date() })
    .where(and(eq(webhooks.id, webhookId), eq(webhooks.portId, portId)));
  // Audit logging is fire-and-forget; a failure must not block the response.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'webhook',
    entityId: webhookId,
    metadata: { type: 'secret_regenerated' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  // Plaintext secret exposed exactly once, alongside its masked preview.
  return { webhookId, secret, secretMasked: maskSecret(secret) };
}
// ─── List Deliveries ─────────────────────────────────────────────────────────
/**
 * Returns one page of delivery attempts for a webhook, newest first, together
 * with the total count for pagination. Optionally filtered by delivery status.
 *
 * The count query and the page query are independent, so they are issued
 * concurrently with Promise.all instead of sequentially.
 *
 * @throws NotFoundError when the webhook is unknown or belongs to another port.
 */
export async function listDeliveries(
  portId: string,
  webhookId: string,
  query: ListDeliveriesInput,
) {
  // Tenancy check: the webhook must belong to the caller's port.
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  const { page, limit, status } = query;
  const offset = (page - 1) * limit;
  const filters = [eq(webhookDeliveries.webhookId, webhookId)];
  if (status) {
    filters.push(eq(webhookDeliveries.status, status));
  }
  // Independent queries — run the count and the page fetch in parallel.
  const [[countRow], data] = await Promise.all([
    db
      .select({ total: count() })
      .from(webhookDeliveries)
      .where(and(...filters)),
    db
      .select()
      .from(webhookDeliveries)
      .where(and(...filters))
      .orderBy(desc(webhookDeliveries.createdAt))
      .limit(limit)
      .offset(offset),
  ]);
  return { data, total: countRow?.total ?? 0 };
}
// ─── Send Test Webhook ────────────────────────────────────────────────────────
/**
 * Queues a synthetic delivery so an operator can verify a webhook endpoint.
 * Creates a pending delivery record, then enqueues the standard delivery job.
 * @throws NotFoundError when the id is unknown or belongs to another port.
 */
export async function sendTestWebhook(
  portId: string,
  webhookId: string,
  eventType: WebhookEvent,
) {
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  // The payload is flagged with `test: true` so receivers can distinguish it.
  const testPayload = {
    test: true,
    event: eventType,
    port_id: portId,
    data: { message: 'This is a test webhook delivery' },
  };
  // Record the pending delivery before enqueueing.
  const [delivery] = await db
    .insert(webhookDeliveries)
    .values({
      webhookId,
      eventType,
      payload: testPayload,
      status: 'pending',
    })
    .returning();
  // Reuse the standard delivery pipeline.
  await getQueue('webhooks').add('deliver', {
    webhookId,
    portId,
    event: eventType,
    deliveryId: delivery!.id,
    payload: delivery!.payload,
  });
  return { deliveryId: delivery!.id, status: 'queued' };
}