Initial commit: Port Nimara CRM (Layers 0-4)
Some checks failed
Build & Push Docker Images / build-and-push (push) Has been cancelled
Build & Push Docker Images / deploy (push) Has been cancelled
Build & Push Docker Images / lint (push) Has been cancelled

Full CRM rebuild with Next.js 15, TypeScript, Tailwind, Drizzle ORM,
PostgreSQL, Redis, BullMQ, MinIO, and Socket.io. Includes 461 source
files covering clients, berths, interests/pipeline, documents/EOI,
expenses/invoices, email, notifications, dashboard, admin, and
client portal. CI/CD via Gitea Actions with Docker builds.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-26 11:52:51 +01:00
commit 67d7e6e3d5
572 changed files with 86496 additions and 0 deletions

45
src/lib/api/client.ts Normal file
View File

@@ -0,0 +1,45 @@
'use client';
import { useUIStore } from '@/stores/ui-store';
/**
 * Options accepted by `apiFetch` — standard `RequestInit`, except `body`
 * may be any JSON-serializable value (it is stringified before sending).
 */
export interface ApiFetchOptions extends Omit<RequestInit, 'body'> {
  body?: unknown;
}
/**
 * Client-side fetch wrapper that attaches the `X-Port-Id` header from the
 * UI store to every request. Used by all queryFn/mutationFn callbacks.
 *
 * JSON-encodes `opts.body`, always sends cookies, and on any non-2xx
 * response throws an `Error` enriched with `status`, `code` and `details`
 * taken from the server's JSON error payload (when parseable).
 */
export async function apiFetch<T = unknown>(
  url: string,
  opts: ApiFetchOptions = {},
): Promise<T> {
  const { body, ...init } = opts;

  const headers = new Headers(init.headers);
  const activePortId = useUIStore.getState().currentPortId;
  if (activePortId) {
    headers.set('X-Port-Id', activePortId);
  }

  const hasBody = body !== undefined;
  // Default to JSON unless the caller already chose a content type.
  if (hasBody && !headers.has('Content-Type')) {
    headers.set('Content-Type', 'application/json');
  }

  const res = await fetch(url, {
    ...init,
    headers,
    credentials: 'include',
    body: hasBody ? JSON.stringify(body) : undefined,
  });

  if (!res.ok) {
    // Fall back to the HTTP status text when the error body isn't JSON.
    const payload = await res.json().catch(() => ({ error: res.statusText }));
    throw Object.assign(new Error(payload.error ?? 'Request failed'), {
      status: res.status,
      code: payload.code,
      details: payload.details,
    });
  }

  // 204 No Content has no body to parse.
  if (res.status === 204) return undefined as T;
  return res.json() as Promise<T>;
}

241
src/lib/api/helpers.ts Normal file
View File

@@ -0,0 +1,241 @@
import { and, eq } from 'drizzle-orm';
import { NextRequest, NextResponse } from 'next/server';
import { auth } from '@/lib/auth';
import { db } from '@/lib/db';
import {
portRoleOverrides,
ports,
userPortRoles,
userProfiles,
} from '@/lib/db/schema';
import { type RolePermissions } from '@/lib/db/schema/users';
import { createAuditLog } from '@/lib/audit';
import { errorResponse } from '@/lib/errors';
import { logger } from '@/lib/logger';
// ─── Types ────────────────────────────────────────────────────────────────────
/**
 * Authenticated request context resolved by `withAuth`.
 * Passed as the second argument to every wrapped route handler.
 */
export interface AuthContext {
  userId: string;
  /** Resolved port ID; '' when a super admin operates without a port context. */
  portId: string;
  /** Slug of the resolved port; '' when no port was resolved. */
  portSlug: string;
  /** true for super_admin users — bypasses all permission checks. */
  isSuperAdmin: boolean;
  /**
   * Effective permissions after role + port override deep-merge.
   * null when isSuperAdmin is true (super admins bypass permission checks).
   */
  permissions: RolePermissions | null;
  user: {
    email: string;
    name: string;
  };
  /** Client IP extracted from X-Forwarded-For header. */
  ipAddress: string;
  userAgent: string;
}

/**
 * Handler signature used by `withAuth`/`withPermission`: receives the raw
 * request, the resolved auth context, and the already-awaited dynamic route
 * params.
 */
type RouteHandler<T = unknown> = (
  req: NextRequest,
  ctx: AuthContext,
  params: Record<string, string>,
) => Promise<NextResponse<T>>;
// ─── deepMerge ───────────────────────────────────────────────────────────────
/**
 * Recursively merges `source` into `target` and returns a new object.
 * Used to apply port-level role permission overrides on top of the base role.
 *
 * - Plain nested objects are merged key-by-key; arrays and scalars from
 *   `source` replace the target value wholesale.
 * - Neither input is mutated.
 * - The keys `__proto__`, `constructor` and `prototype` are skipped so a
 *   malicious or corrupt override payload cannot tamper with the prototype
 *   chain of the merged permissions object (permission lookups would
 *   otherwise see injected properties via the chain).
 */
export function deepMerge(
  target: Record<string, unknown>,
  source: Record<string, unknown>,
): Record<string, unknown> {
  const result = { ...target };
  for (const key of Object.keys(source)) {
    // Guard against prototype pollution through hostile keys.
    if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
      continue;
    }
    const sourceVal = source[key];
    const targetVal = result[key];
    const bothPlainObjects =
      typeof sourceVal === 'object' &&
      sourceVal !== null &&
      !Array.isArray(sourceVal) &&
      typeof targetVal === 'object' &&
      targetVal !== null &&
      !Array.isArray(targetVal);
    if (bothPlainObjects) {
      result[key] = deepMerge(
        targetVal as Record<string, unknown>,
        sourceVal as Record<string, unknown>,
      );
    } else {
      result[key] = sourceVal;
    }
  }
  return result;
}
// ─── withAuth ────────────────────────────────────────────────────────────────
/**
 * Validates the session, loads the user profile, resolves port context and
 * applies port-level role overrides before calling the inner handler.
 *
 * Response codes produced here: 401 (no session), 403 (disabled account or
 * no role at the requested port), 400 (non-super-admin without a port).
 *
 * Usage:
 * ```ts
 * export const GET = withAuth(handler);
 * export const POST = withAuth(withPermission('clients', 'create', handler));
 * ```
 */
export function withAuth(
  handler: RouteHandler,
): (
  req: NextRequest,
  routeContext: { params: Promise<Record<string, string>> },
) => Promise<NextResponse> {
  return async (req, routeContext) => {
    try {
      // 1. Validate session via Better Auth.
      const session = await auth.api.getSession({ headers: req.headers });
      if (!session?.user) {
        return NextResponse.json({ error: 'Authentication required' }, { status: 401 });
      }
      // 2. Load the CRM user profile (keyed on Better Auth user ID).
      const profile = await db.query.userProfiles.findFirst({
        where: eq(userProfiles.userId, session.user.id),
      });
      // A missing profile and a disabled one both yield the same 403 response.
      if (!profile || !profile.isActive) {
        return NextResponse.json({ error: 'Account disabled' }, { status: 403 });
      }
      // 3. Resolve port context.
      // Port ID comes from the X-Port-Id header (set by the client after port
      // selection), falling back to the user's default port from preferences.
      // It NEVER comes from the request body — SECURITY-GUIDELINES.md §2.1.
      const portIdFromHeader = req.headers.get('X-Port-Id');
      const portId =
        portIdFromHeader ??
        (profile.preferences as { defaultPortId?: string } | null)?.defaultPortId ??
        null;
      // Only super admins may proceed without any port context.
      if (!portId && !profile.isSuperAdmin) {
        return NextResponse.json({ error: 'Port context required' }, { status: 400 });
      }
      // 4. Resolve effective permissions.
      let permissions: RolePermissions | null = null;
      let portSlug = '';
      if (!profile.isSuperAdmin && portId) {
        // The header-supplied portId is only trusted after this membership
        // lookup confirms the user actually holds a role at that port.
        const portRole = await db.query.userPortRoles.findFirst({
          where: and(
            eq(userPortRoles.userId, profile.userId),
            eq(userPortRoles.portId, portId),
          ),
          with: {
            role: true,
            port: true,
          },
        });
        if (!portRole) {
          return NextResponse.json({ error: 'No access to this port' }, { status: 403 });
        }
        // Shallow copy so the override merge below never mutates the role row.
        permissions = { ...(portRole.role.permissions as RolePermissions) };
        portSlug = (portRole.port as { slug: string } | null)?.slug ?? '';
        // Apply port-specific role overrides (deep-merge on top of base role).
        const override = await db.query.portRoleOverrides.findFirst({
          where: and(
            eq(portRoleOverrides.portId, portId),
            eq(portRoleOverrides.roleId, portRole.roleId),
          ),
        });
        if (override?.permissionOverrides) {
          permissions = deepMerge(
            permissions as unknown as Record<string, unknown>,
            override.permissionOverrides as Record<string, unknown>,
          ) as RolePermissions;
        }
      } else if (profile.isSuperAdmin && portId) {
        // Super admin still needs portSlug for response context.
        const port = await db.query.ports.findFirst({
          where: eq(ports.id, portId),
        });
        portSlug = port?.slug ?? '';
      }
      const ctx: AuthContext = {
        userId: profile.userId,
        portId: portId ?? '',
        portSlug,
        isSuperAdmin: profile.isSuperAdmin,
        permissions,
        user: {
          email: session.user.email,
          name: session.user.name,
        },
        // NOTE(review): X-Forwarded-For is client-spoofable unless the proxy
        // strips inbound values — confirm the deployment does so.
        ipAddress:
          req.headers.get('x-forwarded-for')?.split(',')[0]?.trim() ?? 'unknown',
        userAgent: req.headers.get('user-agent') ?? 'unknown',
      };
      // Route params arrive as a Promise (Next.js 15 signature) — await first.
      const params = await routeContext.params;
      return await handler(req, ctx, params);
    } catch (error) {
      // Centralized error-to-response mapping (see @/lib/errors).
      return errorResponse(error);
    }
  };
}
// ─── withPermission ──────────────────────────────────────────────────────────
/**
 * Wraps a route handler with a permission gate.
 * Denied attempts are logged to the audit trail.
 *
 * Compose inside withAuth:
 * ```ts
 * export const DELETE = withAuth(withPermission('clients', 'delete', handler));
 * ```
 */
export function withPermission(
  resource: keyof RolePermissions,
  action: string,
  handler: RouteHandler,
): RouteHandler {
  return async (req, ctx, params) => {
    // Super admins bypass every permission gate.
    if (ctx.isSuperAdmin) {
      return handler(req, ctx, params);
    }
    const grants = ctx.permissions?.[resource] as Record<string, boolean> | undefined;
    if (grants && grants[action]) {
      return handler(req, ctx, params);
    }
    logger.warn({ userId: ctx.userId, resource, action }, 'Permission denied');
    // Log the denied attempt — fire-and-forget; audit must never block response.
    void createAuditLog({
      userId: ctx.userId,
      portId: ctx.portId,
      action: 'permission_denied',
      entityType: resource,
      entityId: '',
      metadata: { attemptedAction: action },
      ipAddress: ctx.ipAddress,
      userAgent: ctx.userAgent,
    });
    return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 });
  };
}

View File

@@ -0,0 +1,43 @@
import { NextRequest } from 'next/server';
import { z, ZodSchema } from 'zod';
/**
 * Base list query schema shared by all paginated list endpoints.
 *
 * All values arrive as URL-encoded strings, hence the numeric coercions and
 * the string-boolean handling for `includeArchived`.
 */
export const baseListQuerySchema = z.object({
  // 1-based page number.
  page: z.coerce.number().int().min(1).default(1),
  // Page size, capped at 100 to bound per-request query cost.
  limit: z.coerce.number().int().min(1).max(100).default(25),
  // Sort column key; interpretation is endpoint-specific.
  sort: z.string().optional(),
  order: z.enum(['asc', 'desc']).default('desc'),
  search: z.string().optional(),
  // Arrives as the literal strings 'true'/'false'; transformed to a boolean.
  includeArchived: z
    .enum(['true', 'false'])
    .transform((v) => v === 'true')
    .default('false'),
});

export type BaseListQuery = z.infer<typeof baseListQuerySchema>;
/**
 * Parses URL search params against a Zod schema.
 * Throws a ZodError on validation failure (caught by `errorResponse`).
 */
export function parseQuery<T extends ZodSchema>(
  req: NextRequest,
  schema: T,
): z.infer<T> {
  // URLSearchParams iterates as [key, value] pairs; repeated keys collapse
  // to the last occurrence, which suits the flat schemas used here.
  const raw = Object.fromEntries(req.nextUrl.searchParams);
  return schema.parse(raw);
}
/**
 * Parses the JSON request body against a Zod schema.
 * Throws a ZodError on validation failure (caught by `errorResponse`).
 */
export async function parseBody<T extends ZodSchema>(
  req: NextRequest,
  schema: T,
): Promise<z.infer<T>> {
  const payload: unknown = await req.json();
  return schema.parse(payload);
}

116
src/lib/audit.ts Normal file
View File

@@ -0,0 +1,116 @@
import { db } from '@/lib/db';
import { auditLogs } from '@/lib/db/schema';
import { logger } from '@/lib/logger';
/** Every action verb the audit trail records. */
export type AuditAction =
  | 'create'
  | 'update'
  | 'delete'
  | 'archive'
  | 'restore'
  | 'merge'
  | 'login'
  | 'logout'
  | 'permission_denied'
  | 'revert';

/** Input for `createAuditLog`. */
export interface AuditLogParams {
  /** Null for system-generated events. */
  userId: string | null;
  /** Null for system-level events not tied to a port. */
  portId: string | null;
  action: AuditAction;
  entityType: string;
  entityId: string;
  /** Name of the affected field, when the entry concerns a single field. */
  fieldChanged?: string;
  /** Prior state — masked by `maskSensitiveFields` before persisting. */
  oldValue?: Record<string, unknown>;
  /** New state — masked by `maskSensitiveFields` before persisting. */
  newValue?: Record<string, unknown>;
  metadata?: Record<string, unknown>;
  ipAddress: string;
  userAgent: string;
}
// Field names whose string values must never be stored verbatim.
const SENSITIVE_FIELDS = new Set([
  'email',
  'phone',
  'password',
  'credentials_enc',
  'token',
]);

/**
 * Masks sensitive field values to prevent PII or secrets from being stored
 * verbatim in the audit log (SECURITY-GUIDELINES.md §5.2).
 *
 * Strings are replaced with a partial mask — first 2 chars + *** + last 2
 * chars, or just `***` when the value has 4 characters or fewer.
 *
 * Sensitive keys are matched case-insensitively, and plain nested objects
 * (e.g. JSONB payloads) are masked recursively so deep PII is caught too.
 * Arrays and non-string sensitive values pass through unchanged. Returns a
 * new object; the input is never mutated.
 */
export function maskSensitiveFields(
  data?: Record<string, unknown>,
): Record<string, unknown> | undefined {
  if (!data) return undefined;
  const masked: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(data)) {
    if (SENSITIVE_FIELDS.has(key.toLowerCase()) && typeof value === 'string') {
      masked[key] =
        value.length > 4 ? `${value.slice(0, 2)}***${value.slice(-2)}` : '***';
    } else if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
      // Recurse into nested objects so PII inside JSONB columns is masked.
      masked[key] = maskSensitiveFields(value as Record<string, unknown>);
    } else {
      masked[key] = value;
    }
  }
  return masked;
}
/**
 * Computes a field-level diff between two records.
 * Returns one entry per changed field with the old and new values.
 *
 * Only keys present in `newRecord` are inspected, so a partial patch can be
 * diffed against a full record without flagging untouched fields.
 */
export function diffFields(
  oldRecord: Record<string, unknown>,
  newRecord: Record<string, unknown>,
): Array<{ field: string; oldValue: unknown; newValue: unknown }> {
  // JSON serialization gives a cheap structural equality check.
  const serialize = (v: unknown) => JSON.stringify(v);
  return Object.keys(newRecord)
    .filter((field) => serialize(oldRecord[field]) !== serialize(newRecord[field]))
    .map((field) => ({
      field,
      oldValue: oldRecord[field],
      newValue: newRecord[field],
    }));
}
/**
 * Inserts an audit log entry into the database.
 *
 * This function NEVER throws — errors are caught and logged so that an audit
 * failure never rolls back or disrupts the parent operation.
 */
export async function createAuditLog(params: AuditLogParams): Promise<void> {
  try {
    // PII/secrets are partially masked before they ever reach the table.
    const row = {
      portId: params.portId,
      userId: params.userId,
      action: params.action,
      entityType: params.entityType,
      entityId: params.entityId,
      fieldChanged: params.fieldChanged ?? null,
      oldValue: maskSensitiveFields(params.oldValue) ?? null,
      newValue: maskSensitiveFields(params.newValue) ?? null,
      metadata: params.metadata ?? null,
      ipAddress: params.ipAddress,
      userAgent: params.userAgent,
    };
    await db.insert(auditLogs).values(row);
  } catch (err) {
    // Strip old/new values from the log to avoid secondary exposure of the data
    // that just failed to persist.
    const { userId, portId, action, entityType, entityId } = params;
    logger.error(
      {
        err,
        audit: { userId, portId, action, entityType, entityId },
      },
      'Failed to write audit log',
    );
  }
}

9
src/lib/auth/client.ts Normal file
View File

@@ -0,0 +1,9 @@
'use client';
import { createAuthClient } from 'better-auth/react';
/**
 * Better Auth browser client. `baseURL` comes from NEXT_PUBLIC_APP_URL so
 * requests target the correct origin in each environment.
 */
export const authClient = createAuthClient({
  baseURL: process.env.NEXT_PUBLIC_APP_URL,
});

// Hooks/actions re-exported for direct use in client components.
export const { useSession, signIn, signOut, getSession } = authClient;

53
src/lib/auth/index.ts Normal file
View File

@@ -0,0 +1,53 @@
import { betterAuth } from 'better-auth';
import { drizzleAdapter } from 'better-auth/adapters/drizzle';
import { db } from '@/lib/db';
import { logger } from '@/lib/logger';
/**
 * Better Auth server configuration.
 *
 * Sessions are stored in PostgreSQL (not Redis) per SECURITY-GUIDELINES.md §1.2.
 * The drizzle adapter handles session persistence via the existing `sessions` table.
 */
export const auth = betterAuth({
  database: drizzleAdapter(db, {
    provider: 'pg',
  }),
  emailAndPassword: {
    enabled: true,
    minPasswordLength: 12,
    // Accounts are admin-created only — no self-service email verification flow.
    requireEmailVerification: false,
  },
  session: {
    // Enable cookie-level session caching to reduce DB reads (5-minute cache).
    cookieCache: {
      enabled: true,
      maxAge: 5 * 60,
    },
    // Absolute session lifetime: 24 hours.
    expiresIn: 60 * 60 * 24,
    // Refresh the session whenever the user is active in the last 25% of its lifetime (6h).
    updateAge: 60 * 60 * 6,
  },
  advanced: {
    // Namespaced cookie prefix to avoid collisions with other apps on the host.
    cookiePrefix: 'pn-crm',
    defaultCookieAttributes: {
      httpOnly: true,
      // Secure cookies only over HTTPS (production); allows plain HTTP in dev.
      secure: process.env.NODE_ENV === 'production',
      sameSite: 'strict' as const,
    },
  },
  logger: {
    disabled: false,
    // Only error-level output from the auth library.
    level: 'error' as const,
  },
});

// Session/user types inferred from the runtime config — single source of truth.
export type Session = typeof auth.$Infer.Session;
export type User = typeof auth.$Infer.Session.user;

View File

@@ -0,0 +1,50 @@
import { ForbiddenError } from '@/lib/errors';
import { type RolePermissions } from '@/lib/db/schema/users';
import type { AuthContext } from '@/lib/api/helpers';
// Re-export the canonical permission types for consumers of this module.
export type { RolePermissions };
/** Top-level permission resource keys (e.g. 'clients'). */
export type PermissionResource = keyof RolePermissions;
/** Action keys valid for a given permission resource. */
export type PermissionAction<R extends PermissionResource> = keyof RolePermissions[R];
/**
 * Checks whether a permissions map grants a specific resource/action pair.
 *
 * Returns `true` automatically when `permissions` is `null`, which signals a
 * super-admin context that bypasses all permission checks.
 */
export function hasPermission<R extends PermissionResource>(
  permissions: RolePermissions | null,
  resource: R,
  action: PermissionAction<R>,
): boolean {
  // A null map means super admin: everything is implicitly granted.
  if (permissions === null) return true;
  const grants = permissions[resource] as Record<string, boolean> | undefined;
  // Only an explicit `true` counts as granted.
  return grants?.[action as string] === true;
}
/**
 * Throws a `ForbiddenError` if the auth context does not have the required
 * resource/action permission.
 *
 * For use inside API route handlers or service functions after `withAuth` has
 * resolved the context.
 *
 * @example
 * requirePermission(ctx, 'clients', 'delete');
 */
export function requirePermission<R extends PermissionResource>(
  ctx: Pick<AuthContext, 'isSuperAdmin' | 'permissions'>,
  resource: R,
  action: PermissionAction<R>,
): void {
  const allowed =
    ctx.isSuperAdmin || hasPermission(ctx.permissions, resource, action);
  if (!allowed) {
    throw new ForbiddenError('Insufficient permissions');
  }
}

128
src/lib/constants.ts Normal file
View File

@@ -0,0 +1,128 @@
// Shared domain enumerations. Each list is `as const` so the derived union
// type stays in lockstep with the runtime array.
// ─── Pipeline Stages ─────────────────────────────────────────────────────────
export const PIPELINE_STAGES = [
  'open',
  'details_sent',
  'in_communication',
  'visited',
  'signed_eoi_nda',
  'deposit_10pct',
  'contract',
  'completed',
] as const;
export type PipelineStage = (typeof PIPELINE_STAGES)[number];
// ─── Berth Statuses ──────────────────────────────────────────────────────────
export const BERTH_STATUSES = ['available', 'under_offer', 'sold'] as const;
export type BerthStatus = (typeof BERTH_STATUSES)[number];
// ─── Lead Categories ─────────────────────────────────────────────────────────
export const LEAD_CATEGORIES = [
  'general_interest',
  'specific_qualified',
  'hot_lead',
] as const;
export type LeadCategory = (typeof LEAD_CATEGORIES)[number];
// ─── Document Types ──────────────────────────────────────────────────────────
export const DOCUMENT_TYPES = [
  'eoi',
  'contract',
  'nda',
  'reservation_agreement',
  'other',
] as const;
export type DocumentType = (typeof DOCUMENT_TYPES)[number];
// ─── Document Statuses ───────────────────────────────────────────────────────
export const DOCUMENT_STATUSES = [
  'draft',
  'sent',
  'partially_signed',
  'completed',
  'expired',
  'cancelled',
] as const;
export type DocumentStatus = (typeof DOCUMENT_STATUSES)[number];
// ─── Expense Categories ──────────────────────────────────────────────────────
export const EXPENSE_CATEGORIES = [
  'fuel',
  'maintenance',
  'cleaning',
  'docking',
  'insurance',
  'utilities',
  'marina_fees',
  'repairs',
  'equipment',
  'crew',
  'administration',
  'marketing',
  'travel',
  'entertainment',
  'other',
] as const;
export type ExpenseCategory = (typeof EXPENSE_CATEGORIES)[number];
// ─── Payment Methods ─────────────────────────────────────────────────────────
export const PAYMENT_METHODS = [
  'bank_transfer',
  'credit_card',
  'debit_card',
  'cash',
  'cheque',
  'crypto',
  'other',
] as const;
export type PaymentMethod = (typeof PAYMENT_METHODS)[number];
// ─── Notification Types ──────────────────────────────────────────────────────
export const NOTIFICATION_TYPES = [
  // Interest / pipeline
  'interest_stage_changed',
  'interest_created',
  'interest_assigned',
  // Documents
  'document_sent',
  'document_signed',
  'document_completed',
  'document_expired',
  'document_reminder',
  // Reminders
  'reminder_due',
  'reminder_overdue',
  'reminder_assigned',
  // Financial
  'invoice_sent',
  'invoice_paid',
  'invoice_overdue',
  // Notes
  'mention',
  // Email
  'email_received',
  // System
  'system_alert',
  'job_failed',
  'bulk_operation_complete',
  'export_ready',
  // Berths
  'berth_status_changed',
  'berth_waiting_list_update',
] as const;
export type NotificationType = (typeof NOTIFICATION_TYPES)[number];

View File

@@ -0,0 +1,37 @@
/** MIME type → canonical file extension for every upload type we accept. */
export const MIME_TO_EXT: Record<string, string> = {
  'image/jpeg': 'jpg',
  'image/png': 'png',
  'image/gif': 'gif',
  'image/webp': 'webp',
  'application/pdf': 'pdf',
  'application/msword': 'doc',
  'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'docx',
  'application/vnd.ms-excel': 'xls',
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'xlsx',
  'text/plain': 'txt',
  'text/csv': 'csv',
};

/**
 * Whitelist of upload MIME types, derived from MIME_TO_EXT so the set and the
 * extension map can never drift apart.
 */
export const ALLOWED_MIME_TYPES = new Set<string>(Object.keys(MIME_TO_EXT));

/** Maximum upload size in bytes. */
export const MAX_FILE_SIZE = 52_428_800; // 50MB

/** Subset of allowed types a browser can render inline for preview. */
export const PREVIEWABLE_MIMES = new Set<string>([
  'image/jpeg',
  'image/png',
  'image/gif',
  'image/webp',
  'application/pdf',
]);

20
src/lib/db/index.ts Normal file
View File

@@ -0,0 +1,20 @@
import { drizzle } from 'drizzle-orm/postgres-js';
import postgres from 'postgres';
import * as schema from './schema';
// Fail fast with an explicit message instead of letting postgres-js fall back
// to defaults (or fail obscurely) when the env var is missing.
const connectionString = process.env.DATABASE_URL;
if (!connectionString) {
  throw new Error('DATABASE_URL environment variable is not set');
}

// Connection pool for queries.
const queryClient = postgres(connectionString, {
  max: 20,             // pool size
  idle_timeout: 20,    // seconds before an idle connection is closed
  connect_timeout: 10, // seconds to wait when establishing a connection
});

/** Shared Drizzle handle; SQL query logging is enabled in development only. */
export const db = drizzle(queryClient, {
  schema,
  logger: process.env.NODE_ENV === 'development',
});

export type Database = typeof db;

109
src/lib/db/query-builder.ts Normal file
View File

@@ -0,0 +1,109 @@
import {
and,
asc,
desc,
eq,
ilike,
isNull,
or,
sql,
type SQL,
} from 'drizzle-orm';
import type { PgTable, PgColumn } from 'drizzle-orm/pg-core';
import { db } from './index';
/** Options for `buildListQuery`. */
export interface BuildListQueryOptions {
  table: PgTable;
  /** Column holding the owning port ID — always the first AND condition. */
  portIdColumn: PgColumn;
  portId: string;
  /** Primary-key column used as the final deterministic sort tiebreaker. */
  idColumn: PgColumn;
  updatedAtColumn: PgColumn;
  /** Extra caller-supplied WHERE conditions, ANDed after the built-ins. */
  filters?: SQL[];
  sort?: { column: PgColumn; direction: 'asc' | 'desc' };
  /** 1-based page number. */
  page: number;
  pageSize: number;
  /** Columns to ILIKE-match against `searchTerm` (ORed together). */
  searchColumns?: PgColumn[];
  searchTerm?: string;
  /** When false (default), rows with a non-null `archivedAtColumn` are excluded. */
  includeArchived?: boolean;
  archivedAtColumn?: PgColumn;
}

/** One page of rows plus the unpaginated total count. */
export interface ListResult<T> {
  data: T[];
  total: number;
}
/**
 * Generic Drizzle paginated query builder with port-scoping.
 *
 * - Port scoping is always the first condition in the AND chain.
 * - `archivedAt IS NULL` by default (unless `includeArchived` is true).
 * - Deterministic secondary sort: `updatedAt DESC, id DESC`.
 * - Search terms are matched literally: LIKE wildcards (`%`, `_`) and the
 *   escape character (`\`) in user input are escaped before interpolation
 *   into the ILIKE pattern.
 */
export async function buildListQuery<T>(
  opts: BuildListQueryOptions,
): Promise<ListResult<T>> {
  const {
    table,
    portIdColumn,
    portId,
    idColumn,
    updatedAtColumn,
    filters = [],
    sort,
    page,
    pageSize,
    searchColumns = [],
    searchTerm,
    includeArchived = false,
    archivedAtColumn,
  } = opts;
  const conditions: SQL[] = [eq(portIdColumn, portId)];
  // Exclude archived by default
  if (!includeArchived && archivedAtColumn) {
    conditions.push(isNull(archivedAtColumn));
  }
  // Multi-column search via ILIKE, with wildcard characters escaped so user
  // input cannot widen the match (Postgres LIKE escape char is backslash).
  if (searchTerm && searchColumns.length > 0) {
    const escaped = searchTerm.replace(/[\\%_]/g, '\\$&');
    const searchConditions = searchColumns.map((col) =>
      ilike(col, `%${escaped}%`),
    );
    conditions.push(or(...searchConditions)!);
  }
  // Append caller-supplied filters
  conditions.push(...filters);
  const where = and(...conditions);
  // Build order by: user sort + deterministic secondary sort
  const orderClauses: SQL[] = [];
  if (sort) {
    orderClauses.push(
      sort.direction === 'asc' ? asc(sort.column) : desc(sort.column),
    );
  }
  orderClauses.push(desc(updatedAtColumn), desc(idColumn));
  const offset = (page - 1) * pageSize;
  // Count and page fetch are independent — run them in parallel on the pool.
  const [countResult, data] = await Promise.all([
    db
      .select({ count: sql<number>`count(*)::int` })
      .from(table)
      .where(where),
    db
      .select()
      .from(table)
      .where(where)
      .orderBy(...orderClauses)
      .limit(pageSize)
      .offset(offset),
  ]);
  return { data: data as T[], total: countResult[0]?.count ?? 0 };
}

178
src/lib/db/schema/berths.ts Normal file
View File

@@ -0,0 +1,178 @@
import {
pgTable,
text,
boolean,
integer,
numeric,
timestamp,
date,
jsonb,
index,
uniqueIndex,
primaryKey,
} from 'drizzle-orm/pg-core';
import { ports } from './ports';
import { clients } from './clients';
/**
 * Core berth inventory, scoped per port.
 *
 * Dimensions are stored in both feet and metres as separate columns, and
 * `mooringNumber` is unique within a port (idx_berths_mooring).
 */
export const berths = pgTable(
  'berths',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    mooringNumber: text('mooring_number').notNull(),
    area: text('area'),
    status: text('status').notNull().default('available'), // available, under_offer, sold
    lengthFt: numeric('length_ft'),
    widthFt: numeric('width_ft'),
    draftFt: numeric('draft_ft'),
    lengthM: numeric('length_m'),
    widthM: numeric('width_m'),
    draftM: numeric('draft_m'),
    widthIsMinimum: boolean('width_is_minimum').default(false),
    nominalBoatSize: text('nominal_boat_size'),
    nominalBoatSizeM: text('nominal_boat_size_m'),
    waterDepth: numeric('water_depth'),
    waterDepthM: numeric('water_depth_m'),
    waterDepthIsMinimum: boolean('water_depth_is_minimum').default(false),
    sidePontoon: text('side_pontoon'),
    powerCapacity: text('power_capacity'),
    voltage: text('voltage'),
    mooringType: text('mooring_type'),
    cleatType: text('cleat_type'),
    cleatCapacity: text('cleat_capacity'),
    bollardType: text('bollard_type'),
    bollardCapacity: text('bollard_capacity'),
    access: text('access'),
    price: numeric('price'),
    priceCurrency: text('price_currency').notNull().default('USD'),
    bowFacing: text('bow_facing'),
    berthApproved: boolean('berth_approved').default(false),
    tenureType: text('tenure_type').notNull().default('permanent'), // permanent, fixed_term
    tenureYears: integer('tenure_years'),
    tenureStartDate: date('tenure_start_date'),
    tenureEndDate: date('tenure_end_date'),
    // Audit trail for the last status transition.
    statusLastChangedBy: text('status_last_changed_by'), // user ID
    statusLastChangedReason: text('status_last_changed_reason'),
    statusLastModified: timestamp('status_last_modified', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_berths_port').on(table.portId),
    index('idx_berths_status').on(table.portId, table.status),
    index('idx_berths_area').on(table.portId, table.area),
    // Mooring numbers are unique within a port, not globally.
    uniqueIndex('idx_berths_mooring').on(table.portId, table.mooringNumber),
  ],
);
/**
 * SVG placement data for a single berth (1:1 via the unique berthId).
 * Rows are removed automatically when the berth is deleted (cascade).
 */
export const berthMapData = pgTable(
  'berth_map_data',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    berthId: text('berth_id')
      .notNull()
      .unique()
      .references(() => berths.id, { onDelete: 'cascade' }),
    svgPath: text('svg_path'),
    x: numeric('x'),
    y: numeric('y'),
    transform: text('transform'),
    fontSize: numeric('font_size'),
    extraData: jsonb('extra_data').default({}),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [uniqueIndex('berth_map_data_berth_id_idx').on(table.berthId)],
);
/**
 * Berth suggestions for an interest, either AI-scored or manually added.
 * Unique per (interest, berth) pair.
 */
export const berthRecommendations = pgTable(
  'berth_recommendations',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    interestId: text('interest_id').notNull(), // references interests.id
    berthId: text('berth_id')
      .notNull()
      .references(() => berths.id, { onDelete: 'cascade' }),
    matchScore: numeric('match_score'), // 0-100
    matchReasons: jsonb('match_reasons'), // { "dimensional_fit": 95, "power_match": 80, ... }
    source: text('source').notNull().default('ai'), // ai, manual
    createdBy: text('created_by'), // user ID for manual recommendations
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('berth_rec_interest_berth_idx').on(table.interestId, table.berthId),
    index('idx_br_interest').on(table.interestId),
  ],
);
/**
 * Ordered per-berth waiting list of clients (`position` gives the order).
 * A client may appear at most once per berth.
 */
export const berthWaitingList = pgTable(
  'berth_waiting_list',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    berthId: text('berth_id')
      .notNull()
      .references(() => berths.id, { onDelete: 'cascade' }),
    clientId: text('client_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    position: integer('position').notNull(),
    priority: text('priority').notNull().default('normal'), // normal, high
    notifyPref: text('notify_pref').default('email'), // email, in_app, both
    notes: text('notes'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('berth_waiting_list_berth_client_idx').on(table.berthId, table.clientId),
    // Supports ordered retrieval of a berth's queue.
    index('idx_bwl_berth').on(table.berthId, table.position),
  ],
);
/**
 * Maintenance history entries for a berth, with optional cost and photo
 * file references. Denormalizes portId for port-scoped queries.
 */
export const berthMaintenanceLog = pgTable(
  'berth_maintenance_log',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    berthId: text('berth_id')
      .notNull()
      .references(() => berths.id, { onDelete: 'cascade' }),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    category: text('category').notNull(), // routine, repair, inspection, upgrade
    description: text('description').notNull(),
    cost: numeric('cost'),
    costCurrency: text('cost_currency').default('USD'),
    responsibleParty: text('responsible_party'),
    performedDate: date('performed_date').notNull(),
    photoFileIds: text('photo_file_ids').array(), // references to files table
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_bml_berth').on(table.berthId),
    index('idx_bml_port').on(table.portId),
  ],
);
/** Join table linking berths to tags (composite primary key, no surrogate id). */
export const berthTags = pgTable(
  'berth_tags',
  {
    berthId: text('berth_id')
      .notNull()
      .references(() => berths.id, { onDelete: 'cascade' }),
    tagId: text('tag_id').notNull(), // references tags.id
  },
  (table) => [primaryKey({ columns: [table.berthId, table.tagId] })],
);

// Row types inferred from the table definitions (select vs insert shapes).
export type Berth = typeof berths.$inferSelect;
export type NewBerth = typeof berths.$inferInsert;
export type BerthMapData = typeof berthMapData.$inferSelect;
export type NewBerthMapData = typeof berthMapData.$inferInsert;
export type BerthRecommendation = typeof berthRecommendations.$inferSelect;
export type NewBerthRecommendation = typeof berthRecommendations.$inferInsert;
export type BerthWaitingList = typeof berthWaitingList.$inferSelect;
export type NewBerthWaitingList = typeof berthWaitingList.$inferInsert;
export type BerthMaintenanceLog = typeof berthMaintenanceLog.$inferSelect;
export type NewBerthMaintenanceLog = typeof berthMaintenanceLog.$inferInsert;

View File

@@ -0,0 +1,150 @@
import {
pgTable,
text,
boolean,
timestamp,
numeric,
jsonb,
index,
uniqueIndex,
primaryKey,
} from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';
import { ports } from './ports';
/**
 * Port-scoped client records, including proxy/representative details and
 * yacht dimensions (stored in both feet and metres).
 * Soft-deleted via `archivedAt` rather than removed.
 */
export const clients = pgTable(
  'clients',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    fullName: text('full_name').notNull(),
    companyName: text('company_name'),
    nationality: text('nationality'),
    // When true, this record represents someone acting on behalf of the owner.
    isProxy: boolean('is_proxy').notNull().default(false),
    proxyType: text('proxy_type'), // broker, representative, family_member, legal_counsel, other
    actualOwnerName: text('actual_owner_name'),
    relationshipNotes: text('relationship_notes'),
    yachtName: text('yacht_name'),
    yachtLengthFt: numeric('yacht_length_ft'),
    yachtWidthFt: numeric('yacht_width_ft'),
    yachtDraftFt: numeric('yacht_draft_ft'),
    yachtLengthM: numeric('yacht_length_m'),
    yachtWidthM: numeric('yacht_width_m'),
    yachtDraftM: numeric('yacht_draft_m'),
    berthSizeDesired: text('berth_size_desired'),
    preferredContactMethod: text('preferred_contact_method'), // email, phone, whatsapp
    preferredLanguage: text('preferred_language'),
    timezone: text('timezone'),
    source: text('source'), // website, manual, referral, broker
    sourceDetails: text('source_details'),
    // Soft-delete marker; null means active.
    archivedAt: timestamp('archived_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_clients_port').on(table.portId),
    index('idx_clients_name').on(table.portId, table.fullName),
    index('idx_clients_archived').on(table.portId, table.archivedAt),
  ],
);
/**
 * Contact entries per client — a client can have any number of channels
 * (email, phone, whatsapp, other), each with an optional label.
 * Partial indexes accelerate reverse lookups by email and phone value.
 */
export const clientContacts = pgTable(
  'client_contacts',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    clientId: text('client_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    channel: text('channel').notNull(), // email, phone, whatsapp, other
    value: text('value').notNull(),
    label: text('label'), // primary, secondary, work, personal, broker, assistant
    isPrimary: boolean('is_primary').notNull().default(false),
    notes: text('notes'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_cc_client').on(table.clientId),
    index('idx_cc_email').on(table.channel, table.value).where(sql`${table.channel} = 'email'`),
    index('idx_cc_phone').on(table.channel, table.value).where(sql`${table.channel} = 'phone'`),
  ],
);
/**
 * Typed link between two clients within a port (A → B with a relationship
 * type such as referred_by or broker_for).
 */
export const clientRelationships = pgTable(
  'client_relationships',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    clientAId: text('client_a_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    clientBId: text('client_b_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    relationshipType: text('relationship_type').notNull(), // referred_by, broker_for, family_member, same_vessel, custom
    description: text('description'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_cr_port').on(table.portId)],
);
/**
 * Free-form notes attached to a client. `mentions` carries user IDs referenced
 * in the note body; `isLocked` marks a note as non-editable.
 */
export const clientNotes = pgTable(
  'client_notes',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    clientId: text('client_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    authorId: text('author_id').notNull(), // user ID
    content: text('content').notNull(),
    mentions: text('mentions').array(), // array of mentioned user IDs
    isLocked: boolean('is_locked').notNull().default(false),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_cn_client').on(table.clientId)],
);
/**
 * Join table: many-to-many between clients and tags.
 * `tagId` intentionally has no FK — the tags table lives in system.ts, which
 * is defined later in the module load order.
 */
export const clientTags = pgTable(
  'client_tags',
  {
    clientId: text('client_id')
      .notNull()
      .references(() => clients.id, { onDelete: 'cascade' }),
    tagId: text('tag_id').notNull(), // references tags.id — defined later in system.ts
  },
  (table) => [primaryKey({ columns: [table.clientId, table.tagId] })],
);
/**
 * Audit record of client merges. The surviving client keeps a real FK; the
 * merged-away client is stored as a bare ID because its row may be deleted.
 */
export const clientMergeLog = pgTable(
  'client_merge_log',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    survivingClientId: text('surviving_client_id')
      .notNull()
      .references(() => clients.id),
    mergedClientId: text('merged_client_id').notNull(), // the client that was merged away (may no longer exist)
    mergedBy: text('merged_by').notNull(), // user ID
    mergeDetails: jsonb('merge_details').notNull(), // which fields were kept from which record
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_cml_port').on(table.portId)],
);
export type Client = typeof clients.$inferSelect;
export type NewClient = typeof clients.$inferInsert;
export type ClientContact = typeof clientContacts.$inferSelect;
export type NewClientContact = typeof clientContacts.$inferInsert;
export type ClientRelationship = typeof clientRelationships.$inferSelect;
export type NewClientRelationship = typeof clientRelationships.$inferInsert;
export type ClientNote = typeof clientNotes.$inferSelect;
export type NewClientNote = typeof clientNotes.$inferInsert;
export type ClientMergeLog = typeof clientMergeLog.$inferSelect;
export type NewClientMergeLog = typeof clientMergeLog.$inferInsert;

View File

@@ -0,0 +1,184 @@
import {
pgTable,
text,
boolean,
integer,
timestamp,
jsonb,
index,
uniqueIndex,
} from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';
import { ports } from './ports';
import { clients } from './clients';
/**
 * Uploaded file metadata. Binary content lives in object storage
 * (`storageBucket`/`storagePath`); this table only tracks metadata.
 */
export const files = pgTable(
  'files',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    clientId: text('client_id').references(() => clients.id), // optional owner
    filename: text('filename').notNull(),
    originalName: text('original_name').notNull(),
    mimeType: text('mime_type'),
    sizeBytes: text('size_bytes'), // stored as text to avoid bigint issues; parse as number in app
    storagePath: text('storage_path').notNull(),
    storageBucket: text('storage_bucket').notNull().default('crm-files'),
    category: text('category'), // eoi, contract, image, receipt, correspondence, misc
    uploadedBy: text('uploaded_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_files_port').on(table.portId),
    index('idx_files_client').on(table.clientId),
  ],
);
/**
 * Signable/trackable documents (EOIs, contracts, etc.).
 * `interestId` is a bare ID (no FK) — interests.ts is defined elsewhere in
 * the load order; the link is enforced at the application layer.
 * `fileId` is the draft file, `signedFileId` the completed signed copy.
 */
export const documents = pgTable(
  'documents',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    interestId: text('interest_id'), // references interests.id
    clientId: text('client_id').references(() => clients.id),
    documentType: text('document_type').notNull(), // eoi, contract, nda, reservation_agreement, other
    title: text('title').notNull(),
    status: text('status').notNull().default('draft'), // draft, sent, partially_signed, completed, expired, cancelled
    documensoId: text('documenso_id'), // external e-signing provider ID
    fileId: text('file_id').references(() => files.id),
    signedFileId: text('signed_file_id').references(() => files.id),
    isManualUpload: boolean('is_manual_upload').notNull().default(false),
    notes: text('notes'),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_docs_port').on(table.portId),
    index('idx_docs_interest').on(table.interestId),
    index('idx_docs_client').on(table.clientId),
    index('idx_docs_type').on(table.portId, table.documentType),
  ],
);
/**
 * Individual signers of a document, ordered by `signingOrder`.
 * Signing URLs come from the external e-signing provider.
 */
export const documentSigners = pgTable(
  'document_signers',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    documentId: text('document_id')
      .notNull()
      .references(() => documents.id, { onDelete: 'cascade' }),
    signerName: text('signer_name').notNull(),
    signerEmail: text('signer_email').notNull(),
    signerRole: text('signer_role').notNull(), // client, developer, sales, approver, other
    signingOrder: integer('signing_order').notNull(),
    status: text('status').notNull().default('pending'), // pending, signed, declined
    signedAt: timestamp('signed_at', { withTimezone: true }),
    signingUrl: text('signing_url'),
    embeddedUrl: text('embedded_url'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_ds_doc').on(table.documentId)],
);
/**
 * Event log for a document's lifecycle. The partial unique index on
 * (documentId, signatureHash) deduplicates repeated webhook deliveries
 * that carry the same signature hash, while allowing any number of rows
 * with a NULL hash.
 */
export const documentEvents = pgTable(
  'document_events',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    documentId: text('document_id')
      .notNull()
      .references(() => documents.id, { onDelete: 'cascade' }),
    eventType: text('event_type').notNull(), // created, sent, viewed, signed, completed, expired, reminder_sent
    signerId: text('signer_id').references(() => documentSigners.id),
    eventData: jsonb('event_data').default({}),
    signatureHash: text('signature_hash'), // deduplication
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_de_doc').on(table.documentId),
    uniqueIndex('idx_de_dedup').on(table.documentId, table.signatureHash).where(
      sql`${table.signatureHash} IS NOT NULL`
    ),
  ],
);
/**
 * Reusable HTML document templates per port, with merge fields substituted
 * at generation time.
 */
export const documentTemplates = pgTable(
  'document_templates',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    name: text('name').notNull(),
    description: text('description'),
    templateType: text('template_type').notNull(), // welcome_letter, handover_checklist, acknowledgment, correspondence, custom
    bodyHtml: text('body_html').notNull(),
    mergeFields: jsonb('merge_fields').notNull().default([]),
    isActive: boolean('is_active').notNull().default(true),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_dt_port').on(table.portId),
    index('idx_dt_type').on(table.portId, table.templateType),
  ],
);
/**
 * Definitions of client-facing forms. `fields` is the JSON field schema;
 * `branding` holds per-form presentation overrides.
 */
export const formTemplates = pgTable(
  'form_templates',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    name: text('name').notNull(),
    description: text('description'),
    fields: jsonb('fields').notNull(),
    branding: jsonb('branding').default({}),
    isActive: boolean('is_active').notNull().default(true),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_ft_port').on(table.portId)],
);
/**
 * One instance of a form sent to a recipient, addressed by an opaque `token`.
 * NOTE(review): `.unique()` on `token` and the explicit `idx_fs_token`
 * uniqueIndex both generate unique constraints — one of them is redundant;
 * confirm which the migrations expect before removing.
 */
export const formSubmissions = pgTable(
  'form_submissions',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    formTemplateId: text('form_template_id')
      .notNull()
      .references(() => formTemplates.id),
    clientId: text('client_id').references(() => clients.id),
    interestId: text('interest_id'), // references interests.id
    token: text('token').notNull().unique(), // public access token for the form link
    prefilledData: jsonb('prefilled_data').default({}),
    submittedData: jsonb('submitted_data'), // null until the form is submitted
    status: text('status').notNull().default('pending'), // pending, submitted, expired
    expiresAt: timestamp('expires_at', { withTimezone: true }),
    submittedAt: timestamp('submitted_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [uniqueIndex('idx_fs_token').on(table.token)],
);
// Inferred row/insert types for the documents schema.
// NOTE(review): the exported `File` type shadows the DOM/global `File` in
// importing modules — consider whether callers ever need both.
export type File = typeof files.$inferSelect;
export type NewFile = typeof files.$inferInsert;
export type Document = typeof documents.$inferSelect;
export type NewDocument = typeof documents.$inferInsert;
export type DocumentSigner = typeof documentSigners.$inferSelect;
export type NewDocumentSigner = typeof documentSigners.$inferInsert;
export type DocumentEvent = typeof documentEvents.$inferSelect;
export type NewDocumentEvent = typeof documentEvents.$inferInsert;
export type DocumentTemplate = typeof documentTemplates.$inferSelect;
export type NewDocumentTemplate = typeof documentTemplates.$inferInsert;
export type FormTemplate = typeof formTemplates.$inferSelect;
export type NewFormTemplate = typeof formTemplates.$inferInsert;
export type FormSubmission = typeof formSubmissions.$inferSelect;
export type NewFormSubmission = typeof formSubmissions.$inferInsert;

View File

@@ -0,0 +1,95 @@
import {
pgTable,
text,
boolean,
integer,
timestamp,
index,
uniqueIndex,
} from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';
import { ports } from './ports';
import { clients } from './clients';
import { files } from './documents';
/**
 * Per-user SMTP/IMAP mailbox configuration. Credentials are encrypted at the
 * application layer before being stored in `credentialsEnc`.
 */
export const emailAccounts = pgTable(
  'email_accounts',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull(), // references Better Auth user ID
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    provider: text('provider').notNull(), // google, outlook, custom
    emailAddress: text('email_address').notNull(),
    smtpHost: text('smtp_host').notNull(),
    smtpPort: integer('smtp_port').notNull(),
    imapHost: text('imap_host').notNull(),
    imapPort: integer('imap_port').notNull(),
    // credentials_enc stored as base64-encoded text (encrypted at application layer)
    credentialsEnc: text('credentials_enc').notNull(),
    isActive: boolean('is_active').notNull().default(true),
    lastSyncAt: timestamp('last_sync_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_ea_user').on(table.userId),
    index('idx_ea_port').on(table.portId),
  ],
);
/**
 * Conversation grouping for email messages; `messageCount`/`lastMessageAt`
 * are denormalized summaries maintained by the application.
 */
export const emailThreads = pgTable(
  'email_threads',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    clientId: text('client_id').references(() => clients.id),
    subject: text('subject'),
    lastMessageAt: timestamp('last_message_at', { withTimezone: true }),
    messageCount: integer('message_count').notNull().default(0),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_et_client').on(table.clientId),
    index('idx_et_port').on(table.portId),
  ],
);
/**
 * Individual email messages within a thread. The partial unique index on
 * the Message-ID header deduplicates re-synced messages while allowing
 * multiple rows without a header.
 */
export const emailMessages = pgTable(
  'email_messages',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    threadId: text('thread_id')
      .notNull()
      .references(() => emailThreads.id, { onDelete: 'cascade' }),
    messageIdHeader: text('message_id_header'), // email Message-ID header
    fromAddress: text('from_address').notNull(),
    toAddresses: text('to_addresses').array().notNull(),
    ccAddresses: text('cc_addresses').array(),
    subject: text('subject'),
    bodyText: text('body_text'),
    bodyHtml: text('body_html'),
    direction: text('direction').notNull(), // inbound, outbound
    sentAt: timestamp('sent_at', { withTimezone: true }).notNull(),
    attachmentFileIds: text('attachment_file_ids').array(), // references to files table
    rawFileId: text('raw_file_id').references(() => files.id), // original raw MIME file
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_em_thread').on(table.threadId),
    uniqueIndex('idx_em_message_id').on(table.messageIdHeader).where(
      sql`${table.messageIdHeader} IS NOT NULL`
    ),
  ],
);
// Inferred row/insert types for the email schema.
export type EmailAccount = typeof emailAccounts.$inferSelect;
export type NewEmailAccount = typeof emailAccounts.$inferInsert;
export type EmailThread = typeof emailThreads.$inferSelect;
export type NewEmailThread = typeof emailThreads.$inferInsert;
export type EmailMessage = typeof emailMessages.$inferSelect;
export type NewEmailMessage = typeof emailMessages.$inferInsert;

View File

@@ -0,0 +1,125 @@
import {
pgTable,
text,
numeric,
integer,
timestamp,
date,
index,
uniqueIndex,
primaryKey,
} from 'drizzle-orm/pg-core';
import { ports } from './ports';
import { files } from './documents';
/**
 * Port expenses. Monetary values use `numeric` (surfaced as strings by the
 * driver); `amountUsd`/`exchangeRate` capture the USD conversion at
 * record time.
 */
export const expenses = pgTable(
  'expenses',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    establishmentName: text('establishment_name'),
    amount: numeric('amount').notNull(),
    currency: text('currency').notNull().default('USD'),
    amountUsd: numeric('amount_usd'),
    exchangeRate: numeric('exchange_rate'),
    paymentMethod: text('payment_method'),
    category: text('category'),
    payer: text('payer'),
    expenseDate: timestamp('expense_date', { withTimezone: true }).notNull(),
    description: text('description'),
    receiptFileIds: text('receipt_file_ids').array(), // references to files table
    paymentStatus: text('payment_status').default('unpaid'), // unpaid, paid, partial
    paymentDate: date('payment_date'),
    paymentReference: text('payment_reference'),
    paymentNotes: text('payment_notes'),
    createdBy: text('created_by').notNull(),
    archivedAt: timestamp('archived_at', { withTimezone: true }), // soft delete
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_expenses_port').on(table.portId),
    index('idx_expenses_date').on(table.portId, table.expenseDate),
    index('idx_expenses_category').on(table.portId, table.category),
  ],
);
/**
 * Invoices. `invoiceNumber` is unique per port (not globally) via the
 * composite unique index. Totals/discounts/fees are `numeric` strings.
 */
export const invoices = pgTable(
  'invoices',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    invoiceNumber: text('invoice_number').notNull(), // INV-YYYYMM-### auto-generated
    clientName: text('client_name').notNull(),
    billingEmail: text('billing_email'),
    billingAddress: text('billing_address'),
    dueDate: date('due_date').notNull(),
    paymentTerms: text('payment_terms').notNull().default('net30'), // immediate, net10, net15, net30, net45, net60
    currency: text('currency').notNull().default('USD'),
    subtotal: numeric('subtotal').notNull(),
    discountPct: numeric('discount_pct').default('0'),
    discountAmount: numeric('discount_amount').default('0'),
    feePct: numeric('fee_pct').default('0'),
    feeAmount: numeric('fee_amount').default('0'),
    total: numeric('total').notNull(),
    status: text('status').notNull().default('draft'), // draft, sent, paid, overdue, cancelled
    paymentStatus: text('payment_status').default('unpaid'),
    paymentDate: date('payment_date'),
    paymentMethod: text('payment_method'),
    paymentReference: text('payment_reference'),
    pdfFileId: text('pdf_file_id').references(() => files.id), // rendered PDF
    notes: text('notes'),
    createdBy: text('created_by').notNull(),
    archivedAt: timestamp('archived_at', { withTimezone: true }), // soft delete
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('idx_invoices_number').on(table.portId, table.invoiceNumber),
    index('idx_invoices_port').on(table.portId),
    index('idx_invoices_status').on(table.portId, table.status),
  ],
);
/**
 * Line items belonging to an invoice, displayed in `sortOrder`.
 */
export const invoiceLineItems = pgTable(
  'invoice_line_items',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    invoiceId: text('invoice_id')
      .notNull()
      .references(() => invoices.id, { onDelete: 'cascade' }),
    description: text('description').notNull(),
    quantity: numeric('quantity').notNull().default('1'),
    unitPrice: numeric('unit_price').notNull(),
    total: numeric('total').notNull(), // quantity * unitPrice, computed by the app
    sortOrder: integer('sort_order').notNull().default(0),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_ili_invoice').on(table.invoiceId)],
);
/**
 * Join table: expenses billed on an invoice (many-to-many). Cascades on
 * both sides.
 */
export const invoiceExpenses = pgTable(
  'invoice_expenses',
  {
    invoiceId: text('invoice_id')
      .notNull()
      .references(() => invoices.id, { onDelete: 'cascade' }),
    expenseId: text('expense_id')
      .notNull()
      .references(() => expenses.id, { onDelete: 'cascade' }),
  },
  (table) => [primaryKey({ columns: [table.invoiceId, table.expenseId] })],
);
// Inferred row/insert types for the financial schema.
export type Expense = typeof expenses.$inferSelect;
export type NewExpense = typeof expenses.$inferInsert;
export type Invoice = typeof invoices.$inferSelect;
export type NewInvoice = typeof invoices.$inferInsert;
export type InvoiceLineItem = typeof invoiceLineItems.$inferSelect;
export type NewInvoiceLineItem = typeof invoiceLineItems.$inferInsert;
export type InvoiceExpense = typeof invoiceExpenses.$inferSelect;
export type NewInvoiceExpense = typeof invoiceExpenses.$inferInsert;

View File

@@ -0,0 +1,32 @@
// Barrel file: re-exports every schema module so the whole database schema
// can be imported from a single entry point.
// Ports
export * from './ports';
// Users & Auth
export * from './users';
// Clients
export * from './clients';
// Interests
export * from './interests';
// Berths
export * from './berths';
// Documents & Files
export * from './documents';
// Financial
export * from './financial';
// Email
export * from './email';
// Operations
export * from './operations';
// System
export * from './system';
// Relations (must come last — references all tables)
export * from './relations';

View File

@@ -0,0 +1,89 @@
import {
pgTable,
text,
boolean,
integer,
timestamp,
primaryKey,
index,
} from 'drizzle-orm/pg-core';
import { ports } from './ports';
import { clients } from './clients';
// Pipeline stages: open, details_sent, in_communication, visited, signed_eoi_nda, deposit_10pct, contract, completed
// Pipeline stages: open, details_sent, in_communication, visited, signed_eoi_nda, deposit_10pct, contract, completed
/**
 * A client's interest in a berth — the core pipeline record. Milestone
 * `date*` columns track progression; `reminder*` columns drive automated
 * follow-ups.
 */
export const interests = pgTable(
  'interests',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    clientId: text('client_id')
      .notNull()
      .references(() => clients.id),
    berthId: text('berth_id'), // nullable — FK to berths defined in berths.ts, added via relation
    pipelineStage: text('pipeline_stage').notNull().default('open'),
    leadCategory: text('lead_category'), // general_interest, specific_qualified, hot_lead
    source: text('source'), // website, manual, referral, broker
    eoiStatus: text('eoi_status'), // null, waiting_for_signatures, signed, expired
    documensoId: text('documenso_id'), // external e-signing provider ID
    contractStatus: text('contract_status'),
    depositStatus: text('deposit_status'),
    reservationStatus: text('reservation_status'),
    dateFirstContact: timestamp('date_first_contact', { withTimezone: true }),
    dateLastContact: timestamp('date_last_contact', { withTimezone: true }),
    dateEoiSent: timestamp('date_eoi_sent', { withTimezone: true }),
    dateEoiSigned: timestamp('date_eoi_signed', { withTimezone: true }),
    dateContractSent: timestamp('date_contract_sent', { withTimezone: true }),
    dateContractSigned: timestamp('date_contract_signed', { withTimezone: true }),
    dateDepositReceived: timestamp('date_deposit_received', { withTimezone: true }),
    reminderEnabled: boolean('reminder_enabled').notNull().default(false),
    reminderDays: integer('reminder_days'),
    reminderLastFired: timestamp('reminder_last_fired', { withTimezone: true }),
    notes: text('notes'),
    archivedAt: timestamp('archived_at', { withTimezone: true }), // soft delete
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_interests_port').on(table.portId),
    index('idx_interests_client').on(table.clientId),
    index('idx_interests_berth').on(table.berthId),
    index('idx_interests_stage').on(table.portId, table.pipelineStage),
    index('idx_interests_archived').on(table.portId, table.archivedAt),
  ],
);
/**
 * Free-form notes on an interest; mirrors the structure of clientNotes.
 */
export const interestNotes = pgTable(
  'interest_notes',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    interestId: text('interest_id')
      .notNull()
      .references(() => interests.id, { onDelete: 'cascade' }),
    authorId: text('author_id').notNull(), // user ID
    content: text('content').notNull(),
    mentions: text('mentions').array(), // array of mentioned user IDs
    isLocked: boolean('is_locked').notNull().default(false),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_in_interest').on(table.interestId)],
);
/**
 * Join table: many-to-many between interests and tags. `tagId` has no FK —
 * the tags table is declared in system.ts, later in the load order.
 */
export const interestTags = pgTable(
  'interest_tags',
  {
    interestId: text('interest_id')
      .notNull()
      .references(() => interests.id, { onDelete: 'cascade' }),
    tagId: text('tag_id').notNull(), // references tags.id
  },
  (table) => [primaryKey({ columns: [table.interestId, table.tagId] })],
);
export type Interest = typeof interests.$inferSelect;
export type NewInterest = typeof interests.$inferInsert;
export type InterestNote = typeof interestNotes.$inferSelect;
export type NewInterestNote = typeof interestNotes.$inferInsert;

View File

@@ -0,0 +1,193 @@
import {
pgTable,
text,
boolean,
timestamp,
jsonb,
index,
uniqueIndex,
} from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';
import { ports } from './ports';
import { clients } from './clients';
import { files } from './documents';
/**
 * Task reminders, optionally linked to a client/interest/berth and optionally
 * synced to Google Calendar. The partial index `idx_reminders_due` covers
 * only actionable rows (pending/snoozed) for the due-soon queries.
 */
export const reminders = pgTable(
  'reminders',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    title: text('title').notNull(),
    note: text('note'),
    dueAt: timestamp('due_at', { withTimezone: true }).notNull(),
    priority: text('priority').notNull().default('medium'), // low, medium, high, urgent
    status: text('status').notNull().default('pending'), // pending, snoozed, completed, dismissed
    assignedTo: text('assigned_to'), // user ID
    createdBy: text('created_by').notNull(),
    clientId: text('client_id').references(() => clients.id),
    interestId: text('interest_id'), // references interests.id
    berthId: text('berth_id'), // references berths.id
    autoGenerated: boolean('auto_generated').notNull().default(false), // created by automation, not a user
    googleCalendarEventId: text('google_calendar_event_id'),
    googleCalendarSynced: boolean('google_calendar_synced').notNull().default(false),
    snoozedUntil: timestamp('snoozed_until', { withTimezone: true }),
    completedAt: timestamp('completed_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_reminders_port').on(table.portId),
    index('idx_reminders_assigned').on(table.assignedTo, table.status),
    index('idx_reminders_due').on(table.portId, table.dueAt).where(
      sql`${table.status} IN ('pending', 'snoozed')`
    ),
  ],
);
/**
 * OAuth tokens for Google Calendar sync, one row per user.
 * NOTE(review): `.unique()` on userId and the `gcal_tokens_user_id_idx`
 * uniqueIndex both enforce uniqueness — one is redundant; confirm which the
 * migrations expect before removing.
 */
export const googleCalendarTokens = pgTable(
  'google_calendar_tokens',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull().unique(),
    accessToken: text('access_token').notNull(), // encrypted
    refreshToken: text('refresh_token').notNull(), // encrypted
    tokenExpiry: timestamp('token_expiry', { withTimezone: true }).notNull(),
    calendarId: text('calendar_id').notNull().default('primary'),
    connectedAt: timestamp('connected_at', { withTimezone: true }).notNull().defaultNow(),
    lastSyncAt: timestamp('last_sync_at', { withTimezone: true }),
    syncEnabled: boolean('sync_enabled').notNull().default(true),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [uniqueIndex('gcal_tokens_user_id_idx').on(table.userId)],
);
/**
 * Local cache of fetched Google Calendar events, unique per (user, event).
 * `isCrmPushed` marks events that originated from a CRM reminder.
 */
export const googleCalendarCache = pgTable(
  'google_calendar_cache',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull(),
    eventId: text('event_id').notNull(), // Google Calendar event ID
    title: text('title').notNull(),
    startAt: timestamp('start_at', { withTimezone: true }).notNull(),
    endAt: timestamp('end_at', { withTimezone: true }),
    location: text('location'),
    description: text('description'),
    isCrmPushed: boolean('is_crm_pushed').notNull().default(false),
    reminderId: text('reminder_id').references(() => reminders.id),
    fetchedAt: timestamp('fetched_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('gcal_cache_user_event_idx').on(table.userId, table.eventId),
    index('idx_gcal_cache_user').on(table.userId, table.startAt),
  ],
);
/**
 * In-app notifications per user. `link`/`entityType`/`entityId` let the UI
 * deep-link to the subject; `emailSent` tracks whether an email copy went out.
 */
export const notifications = pgTable(
  'notifications',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    userId: text('user_id').notNull(),
    type: text('type').notNull(), // reminder_due, reminder_overdue, new_registration, eoi_signed, eoi_completed, email_received, duplicate_alert, invoice_overdue, waiting_list, system_alert, follow_up_created, tenure_expiring
    title: text('title').notNull(),
    description: text('description'),
    link: text('link'),
    entityType: text('entity_type'), // client, interest, berth, invoice, etc.
    entityId: text('entity_id'),
    isRead: boolean('is_read').notNull().default(false),
    emailSent: boolean('email_sent').notNull().default(false),
    metadata: jsonb('metadata').$type<Record<string, unknown>>().default({}),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_notif_user').on(table.userId, table.isRead),
    index('idx_notif_port').on(table.portId),
    index('idx_notifications_user_type').on(table.userId, table.type, table.createdAt),
  ],
);
/**
 * Recurring report definitions. `schedule` is a cron expression; the worker
 * updates `lastRunAt`/`nextRunAt` as it executes.
 */
export const scheduledReports = pgTable(
  'scheduled_reports',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    name: text('name').notNull(),
    reportType: text('report_type').notNull(), // pipeline_summary, expense_summary, berth_occupancy, activity_log, overdue_items, revenue_forecast
    schedule: text('schedule').notNull(), // cron expression
    lastRunAt: timestamp('last_run_at', { withTimezone: true }),
    nextRunAt: timestamp('next_run_at', { withTimezone: true }),
    isActive: boolean('is_active').notNull().default(true),
    config: jsonb('config').default({}),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_sr_port').on(table.portId)],
);
/**
 * Recipients of a scheduled report, unique per (report, email).
 * `userId` is null for external (non-CRM) recipients.
 */
export const reportRecipients = pgTable(
  'report_recipients',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    reportId: text('report_id')
      .notNull()
      .references(() => scheduledReports.id, { onDelete: 'cascade' }),
    email: text('email').notNull(),
    userId: text('user_id'), // null for external recipients
  },
  (table) => [
    uniqueIndex('report_recipients_report_email_idx').on(table.reportId, table.email),
    index('idx_rr_report').on(table.reportId),
  ],
);
/**
 * Concrete report runs (scheduled or ad hoc). `scheduledReportId` is null
 * for on-demand runs; the output PDF/file lands in `fileId` when status
 * reaches 'ready'. The partial index only covers scheduled runs.
 */
export const generatedReports = pgTable(
  'generated_reports',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    scheduledReportId: text('scheduled_report_id').references(() => scheduledReports.id),
    reportType: text('report_type').notNull(),
    name: text('name').notNull(),
    status: text('status').notNull().default('queued'), // queued, processing, ready, failed
    parameters: jsonb('parameters').default({}),
    fileId: text('file_id').references(() => files.id),
    errorMessage: text('error_message'), // populated when status = 'failed'
    requestedBy: text('requested_by').notNull(),
    startedAt: timestamp('started_at', { withTimezone: true }),
    completedAt: timestamp('completed_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index('idx_gr_port_created').on(table.portId, table.createdAt),
    index('idx_gr_port_status').on(table.portId, table.status),
    index('idx_gr_scheduled').on(table.scheduledReportId).where(
      sql`${table.scheduledReportId} IS NOT NULL`
    ),
  ],
);
// Inferred row/insert types for the operations schema.
export type Reminder = typeof reminders.$inferSelect;
export type NewReminder = typeof reminders.$inferInsert;
export type GoogleCalendarToken = typeof googleCalendarTokens.$inferSelect;
export type NewGoogleCalendarToken = typeof googleCalendarTokens.$inferInsert;
export type GoogleCalendarCache = typeof googleCalendarCache.$inferSelect;
export type NewGoogleCalendarCache = typeof googleCalendarCache.$inferInsert;
export type Notification = typeof notifications.$inferSelect;
export type NewNotification = typeof notifications.$inferInsert;
export type ScheduledReport = typeof scheduledReports.$inferSelect;
export type NewScheduledReport = typeof scheduledReports.$inferInsert;
export type ReportRecipient = typeof reportRecipients.$inferSelect;
export type NewReportRecipient = typeof reportRecipients.$inferInsert;
export type GeneratedReport = typeof generatedReports.$inferSelect;
export type NewGeneratedReport = typeof generatedReports.$inferInsert;

View File

@@ -0,0 +1,50 @@
import { pgTable, text, boolean, timestamp, jsonb, uniqueIndex } from 'drizzle-orm/pg-core';
// Port settings type for JSONB column
/**
 * Typed shape of the `ports.settings` JSONB column. The index signature keeps
 * the column open for forward-compatible keys not yet modeled here.
 */
export type PortSettings = {
  // Rules that change a berth's status when a trigger fires; mode controls
  // whether the change is applied automatically, suggested, or disabled.
  berth_status_rules?: Array<{
    trigger: string;
    mode: 'auto' | 'suggest' | 'off';
    target_status: string;
  }>;
  follow_up_defaults?: {
    reminder_days: number;
    send_window_hours: number[];
    cooldown_days: number;
  };
  eoi_reminder_settings?: {
    schedule: string[];
    cooldown_days: number;
    send_window_hours: number[];
  };
  [key: string]: unknown;
};
/**
 * Branding configuration shape.
 * NOTE(review): not referenced by any column in this file — presumably used
 * elsewhere (or intended for a future `branding` JSONB column); verify.
 */
export type PortBranding = {
  logo_url?: string;
  primary_color?: string;
  secondary_color?: string;
  font_family?: string;
  [key: string]: unknown;
};
/**
 * Tenant root table: every other schema scopes rows by `portId`.
 * `slug` is the unique URL-safe identifier.
 */
export const ports = pgTable(
  'ports',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    name: text('name').notNull(),
    slug: text('slug').notNull(),
    logoUrl: text('logo_url'),
    primaryColor: text('primary_color'),
    defaultCurrency: text('default_currency').notNull().default('USD'),
    timezone: text('timezone').notNull().default('America/Anguilla'),
    settings: jsonb('settings').$type<PortSettings>().notNull().default({}),
    isActive: boolean('is_active').notNull().default(true),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [uniqueIndex('ports_slug_idx').on(table.slug)],
);
// Inferred row/insert types for the ports table.
export type Port = typeof ports.$inferSelect;
export type NewPort = typeof ports.$inferInsert;

View File

@@ -0,0 +1,644 @@
import { relations } from 'drizzle-orm';
// Ports
import { ports } from './ports';
// Users
import { userProfiles, roles, portRoleOverrides, userPortRoles } from './users';
// Clients
import {
clients,
clientContacts,
clientRelationships,
clientNotes,
clientTags,
clientMergeLog,
} from './clients';
// Interests
import { interests, interestNotes, interestTags } from './interests';
// Berths
import {
berths,
berthMapData,
berthRecommendations,
berthWaitingList,
berthMaintenanceLog,
berthTags,
} from './berths';
// Documents
import {
files,
documents,
documentSigners,
documentEvents,
documentTemplates,
formTemplates,
formSubmissions,
} from './documents';
// Financial
import { expenses, invoices, invoiceLineItems, invoiceExpenses } from './financial';
// Email
import { emailAccounts, emailThreads, emailMessages } from './email';
// Operations
import {
reminders,
googleCalendarCache,
googleCalendarTokens,
notifications,
scheduledReports,
reportRecipients,
generatedReports,
} from './operations';
// System
import {
auditLogs,
tags,
webhooks,
webhookDeliveries,
systemSettings,
savedViews,
scratchpadNotes,
userNotificationPreferences,
currencyRates,
customFieldDefinitions,
customFieldValues,
} from './system';
// ─── Ports ────────────────────────────────────────────────────────────────────

/** Tenant root: every port-scoped table listed here points back at ports.id. */
export const portsRelations = relations(ports, ({ many }) => ({
  userPortRoles: many(userPortRoles),
  portRoleOverrides: many(portRoleOverrides),
  clients: many(clients),
  interests: many(interests),
  berths: many(berths),
  documents: many(documents),
  documentTemplates: many(documentTemplates),
  formTemplates: many(formTemplates),
  expenses: many(expenses),
  invoices: many(invoices),
  emailAccounts: many(emailAccounts),
  emailThreads: many(emailThreads),
  reminders: many(reminders),
  notifications: many(notifications),
  scheduledReports: many(scheduledReports),
  auditLogs: many(auditLogs),
  tags: many(tags),
  files: many(files),
  webhooks: many(webhooks),
  systemSettings: many(systemSettings),
  savedViews: many(savedViews),
  userNotificationPreferences: many(userNotificationPreferences),
  customFieldDefinitions: many(customFieldDefinitions),
  berthMaintenanceLogs: many(berthMaintenanceLog),
  clientMergeLogs: many(clientMergeLog),
  clientRelationships: many(clientRelationships),
}));

// ─── Users ────────────────────────────────────────────────────────────────────

/** A single auth user can hold roles in many ports via userPortRoles. */
export const userProfilesRelations = relations(userProfiles, ({ many }) => ({
  userPortRoles: many(userPortRoles),
}));

export const rolesRelations = relations(roles, ({ many }) => ({
  userPortRoles: many(userPortRoles),
  portRoleOverrides: many(portRoleOverrides),
}));

/** Per-port permission tweaks layered on top of a role's base permissions. */
export const portRoleOverridesRelations = relations(portRoleOverrides, ({ one }) => ({
  port: one(ports, {
    fields: [portRoleOverrides.portId],
    references: [ports.id],
  }),
  role: one(roles, {
    fields: [portRoleOverrides.roleId],
    references: [roles.id],
  }),
}));

/** Join table: (user, port, role) assignment. */
export const userPortRolesRelations = relations(userPortRoles, ({ one }) => ({
  port: one(ports, {
    fields: [userPortRoles.portId],
    references: [ports.id],
  }),
  role: one(roles, {
    fields: [userPortRoles.roleId],
    references: [roles.id],
  }),
}));
// ─── Clients ──────────────────────────────────────────────────────────────────

export const clientsRelations = relations(clients, ({ one, many }) => ({
  port: one(ports, {
    fields: [clients.portId],
    references: [ports.id],
  }),
  contacts: many(clientContacts),
  notes: many(clientNotes),
  tags: many(clientTags),
  interests: many(interests),
  // A client↔client link is visible from both sides; relationName picks
  // which FK (clientAId vs clientBId) each list follows. The names must
  // match the one() sides declared in clientRelationshipsRelations below.
  relationships_a: many(clientRelationships, { relationName: 'client_a' }),
  relationships_b: many(clientRelationships, { relationName: 'client_b' }),
  mergeLogsAsSurvivor: many(clientMergeLog),
  documents: many(documents),
  emailThreads: many(emailThreads),
  reminders: many(reminders),
  files: many(files),
  waitingListEntries: many(berthWaitingList),
  scratchpadNotes: many(scratchpadNotes),
  formSubmissions: many(formSubmissions),
}));

export const clientContactsRelations = relations(clientContacts, ({ one }) => ({
  client: one(clients, {
    fields: [clientContacts.clientId],
    references: [clients.id],
  }),
}));

export const clientRelationshipsRelations = relations(clientRelationships, ({ one }) => ({
  port: one(ports, {
    fields: [clientRelationships.portId],
    references: [ports.id],
  }),
  clientA: one(clients, {
    fields: [clientRelationships.clientAId],
    references: [clients.id],
    relationName: 'client_a',
  }),
  clientB: one(clients, {
    fields: [clientRelationships.clientBId],
    references: [clients.id],
    relationName: 'client_b',
  }),
}));

export const clientNotesRelations = relations(clientNotes, ({ one }) => ({
  client: one(clients, {
    fields: [clientNotes.clientId],
    references: [clients.id],
  }),
}));

/** Join table clients↔tags. */
export const clientTagsRelations = relations(clientTags, ({ one }) => ({
  client: one(clients, {
    fields: [clientTags.clientId],
    references: [clients.id],
  }),
  tag: one(tags, {
    fields: [clientTags.tagId],
    references: [tags.id],
  }),
}));

/** Merge audit: only the surviving client is linked as a relation. */
export const clientMergeLogRelations = relations(clientMergeLog, ({ one }) => ({
  port: one(ports, {
    fields: [clientMergeLog.portId],
    references: [ports.id],
  }),
  survivingClient: one(clients, {
    fields: [clientMergeLog.survivingClientId],
    references: [clients.id],
  }),
}));
// ─── Interests ────────────────────────────────────────────────────────────────

/** Pipeline entries linking a client to an (optional) berth. */
export const interestsRelations = relations(interests, ({ one, many }) => ({
  port: one(ports, {
    fields: [interests.portId],
    references: [ports.id],
  }),
  client: one(clients, {
    fields: [interests.clientId],
    references: [clients.id],
  }),
  berth: one(berths, {
    fields: [interests.berthId],
    references: [berths.id],
  }),
  notes: many(interestNotes),
  tags: many(interestTags),
  documents: many(documents),
  reminders: many(reminders),
  berthRecommendations: many(berthRecommendations),
  formSubmissions: many(formSubmissions),
}));

export const interestNotesRelations = relations(interestNotes, ({ one }) => ({
  interest: one(interests, {
    fields: [interestNotes.interestId],
    references: [interests.id],
  }),
}));

/** Join table interests↔tags. */
export const interestTagsRelations = relations(interestTags, ({ one }) => ({
  interest: one(interests, {
    fields: [interestTags.interestId],
    references: [interests.id],
  }),
  tag: one(tags, {
    fields: [interestTags.tagId],
    references: [tags.id],
  }),
}));

// ─── Berths ───────────────────────────────────────────────────────────────────

export const berthsRelations = relations(berths, ({ one, many }) => ({
  port: one(ports, {
    fields: [berths.portId],
    references: [ports.id],
  }),
  // One-to-one: the FK lives on berthMapData.berthId, so no fields/references
  // are declared on this side (see berthMapDataRelations below).
  mapData: one(berthMapData),
  recommendations: many(berthRecommendations),
  waitingList: many(berthWaitingList),
  maintenanceLogs: many(berthMaintenanceLog),
  tags: many(berthTags),
  interests: many(interests),
  reminders: many(reminders),
}));

export const berthMapDataRelations = relations(berthMapData, ({ one }) => ({
  berth: one(berths, {
    fields: [berthMapData.berthId],
    references: [berths.id],
  }),
}));

export const berthRecommendationsRelations = relations(berthRecommendations, ({ one }) => ({
  interest: one(interests, {
    fields: [berthRecommendations.interestId],
    references: [interests.id],
  }),
  berth: one(berths, {
    fields: [berthRecommendations.berthId],
    references: [berths.id],
  }),
}));

export const berthWaitingListRelations = relations(berthWaitingList, ({ one }) => ({
  berth: one(berths, {
    fields: [berthWaitingList.berthId],
    references: [berths.id],
  }),
  client: one(clients, {
    fields: [berthWaitingList.clientId],
    references: [clients.id],
  }),
}));

export const berthMaintenanceLogRelations = relations(berthMaintenanceLog, ({ one }) => ({
  berth: one(berths, {
    fields: [berthMaintenanceLog.berthId],
    references: [berths.id],
  }),
  port: one(ports, {
    fields: [berthMaintenanceLog.portId],
    references: [ports.id],
  }),
}));

/** Join table berths↔tags. */
export const berthTagsRelations = relations(berthTags, ({ one }) => ({
  berth: one(berths, {
    fields: [berthTags.berthId],
    references: [berths.id],
  }),
  tag: one(tags, {
    fields: [berthTags.tagId],
    references: [tags.id],
  }),
}));
// ─── Documents ────────────────────────────────────────────────────────────────

/** Stored files; a file can back a document as original or as signed copy. */
export const filesRelations = relations(files, ({ one, many }) => ({
  port: one(ports, {
    fields: [files.portId],
    references: [ports.id],
  }),
  client: one(clients, {
    fields: [files.clientId],
    references: [clients.id],
  }),
  // Two FKs from documents into files; relationName disambiguates them and
  // must match the one() sides in documentsRelations below.
  documentAsFile: many(documents, { relationName: 'file' }),
  documentAsSignedFile: many(documents, { relationName: 'signed_file' }),
}));

export const documentsRelations = relations(documents, ({ one, many }) => ({
  port: one(ports, {
    fields: [documents.portId],
    references: [ports.id],
  }),
  interest: one(interests, {
    fields: [documents.interestId],
    references: [interests.id],
  }),
  client: one(clients, {
    fields: [documents.clientId],
    references: [clients.id],
  }),
  file: one(files, {
    fields: [documents.fileId],
    references: [files.id],
    relationName: 'file',
  }),
  signedFile: one(files, {
    fields: [documents.signedFileId],
    references: [files.id],
    relationName: 'signed_file',
  }),
  signers: many(documentSigners),
  events: many(documentEvents),
}));

export const documentSignersRelations = relations(documentSigners, ({ one, many }) => ({
  document: one(documents, {
    fields: [documentSigners.documentId],
    references: [documents.id],
  }),
  events: many(documentEvents),
}));

/** Signing-lifecycle events; signer is optional (document-level events). */
export const documentEventsRelations = relations(documentEvents, ({ one }) => ({
  document: one(documents, {
    fields: [documentEvents.documentId],
    references: [documents.id],
  }),
  signer: one(documentSigners, {
    fields: [documentEvents.signerId],
    references: [documentSigners.id],
  }),
}));

export const documentTemplatesRelations = relations(documentTemplates, ({ one }) => ({
  port: one(ports, {
    fields: [documentTemplates.portId],
    references: [ports.id],
  }),
}));

export const formTemplatesRelations = relations(formTemplates, ({ one, many }) => ({
  port: one(ports, {
    fields: [formTemplates.portId],
    references: [ports.id],
  }),
  submissions: many(formSubmissions),
}));

export const formSubmissionsRelations = relations(formSubmissions, ({ one }) => ({
  formTemplate: one(formTemplates, {
    fields: [formSubmissions.formTemplateId],
    references: [formTemplates.id],
  }),
  client: one(clients, {
    fields: [formSubmissions.clientId],
    references: [clients.id],
  }),
  interest: one(interests, {
    fields: [formSubmissions.interestId],
    references: [interests.id],
  }),
}));
// ─── Financial ────────────────────────────────────────────────────────────────

export const expensesRelations = relations(expenses, ({ one, many }) => ({
  port: one(ports, {
    fields: [expenses.portId],
    references: [ports.id],
  }),
  // Many-to-many with invoices via the invoiceExpenses join table.
  invoiceExpenses: many(invoiceExpenses),
}));

export const invoicesRelations = relations(invoices, ({ one, many }) => ({
  port: one(ports, {
    fields: [invoices.portId],
    references: [ports.id],
  }),
  pdfFile: one(files, {
    fields: [invoices.pdfFileId],
    references: [files.id],
  }),
  lineItems: many(invoiceLineItems),
  invoiceExpenses: many(invoiceExpenses),
}));

export const invoiceLineItemsRelations = relations(invoiceLineItems, ({ one }) => ({
  invoice: one(invoices, {
    fields: [invoiceLineItems.invoiceId],
    references: [invoices.id],
  }),
}));

/** Join table invoices↔expenses. */
export const invoiceExpensesRelations = relations(invoiceExpenses, ({ one }) => ({
  invoice: one(invoices, {
    fields: [invoiceExpenses.invoiceId],
    references: [invoices.id],
  }),
  expense: one(expenses, {
    fields: [invoiceExpenses.expenseId],
    references: [expenses.id],
  }),
}));

// ─── Email ────────────────────────────────────────────────────────────────────

export const emailAccountsRelations = relations(emailAccounts, ({ one }) => ({
  port: one(ports, {
    fields: [emailAccounts.portId],
    references: [ports.id],
  }),
}));

export const emailThreadsRelations = relations(emailThreads, ({ one, many }) => ({
  port: one(ports, {
    fields: [emailThreads.portId],
    references: [ports.id],
  }),
  client: one(clients, {
    fields: [emailThreads.clientId],
    references: [clients.id],
  }),
  messages: many(emailMessages),
}));

export const emailMessagesRelations = relations(emailMessages, ({ one }) => ({
  thread: one(emailThreads, {
    fields: [emailMessages.threadId],
    references: [emailThreads.id],
  }),
  // Raw MIME source stored as a file.
  rawFile: one(files, {
    fields: [emailMessages.rawFileId],
    references: [files.id],
  }),
}));
// ─── Operations ───────────────────────────────────────────────────────────────

/** Reminders can attach to a client, interest, and/or berth (all optional FKs). */
export const remindersRelations = relations(reminders, ({ one, many }) => ({
  port: one(ports, {
    fields: [reminders.portId],
    references: [ports.id],
  }),
  client: one(clients, {
    fields: [reminders.clientId],
    references: [clients.id],
  }),
  interest: one(interests, {
    fields: [reminders.interestId],
    references: [interests.id],
  }),
  berth: one(berths, {
    fields: [reminders.berthId],
    references: [berths.id],
  }),
  calendarCacheEntries: many(googleCalendarCache),
}));

// NOTE(review): this declares many(googleCalendarCache), but
// googleCalendarCacheRelations below only declares the `reminder` side —
// there is no inverse one(googleCalendarTokens). Confirm the cache table
// actually carries a token FK and whether the inverse relation is needed.
export const googleCalendarTokensRelations = relations(googleCalendarTokens, ({ many }) => ({
  cacheEntries: many(googleCalendarCache),
}));

export const googleCalendarCacheRelations = relations(googleCalendarCache, ({ one }) => ({
  reminder: one(reminders, {
    fields: [googleCalendarCache.reminderId],
    references: [reminders.id],
  }),
}));

export const notificationsRelations = relations(notifications, ({ one }) => ({
  port: one(ports, {
    fields: [notifications.portId],
    references: [ports.id],
  }),
}));

export const scheduledReportsRelations = relations(scheduledReports, ({ one, many }) => ({
  port: one(ports, {
    fields: [scheduledReports.portId],
    references: [ports.id],
  }),
  recipients: many(reportRecipients),
  generatedReports: many(generatedReports),
}));

export const reportRecipientsRelations = relations(reportRecipients, ({ one }) => ({
  report: one(scheduledReports, {
    fields: [reportRecipients.reportId],
    references: [scheduledReports.id],
  }),
}));

/** A generated report may be ad-hoc (scheduledReport nullable) and has a file. */
export const generatedReportsRelations = relations(generatedReports, ({ one }) => ({
  port: one(ports, {
    fields: [generatedReports.portId],
    references: [ports.id],
  }),
  scheduledReport: one(scheduledReports, {
    fields: [generatedReports.scheduledReportId],
    references: [scheduledReports.id],
  }),
  file: one(files, {
    fields: [generatedReports.fileId],
    references: [files.id],
  }),
}));
// ─── System ───────────────────────────────────────────────────────────────────

export const auditLogsRelations = relations(auditLogs, ({ one }) => ({
  port: one(ports, {
    fields: [auditLogs.portId],
    references: [ports.id],
  }),
  // Self-reference: a revert entry points back at the log row it reverted.
  // NOTE(review): relationName 'revert_of' has no paired many() side —
  // confirm Drizzle resolves this single-sided self-relation as intended.
  revertOfLog: one(auditLogs, {
    fields: [auditLogs.revertOf],
    references: [auditLogs.id],
    relationName: 'revert_of',
  }),
}));

export const tagsRelations = relations(tags, ({ one, many }) => ({
  port: one(ports, {
    fields: [tags.portId],
    references: [ports.id],
  }),
  // A tag may be applied to clients, interests, and berths via join tables.
  clientTags: many(clientTags),
  interestTags: many(interestTags),
  berthTags: many(berthTags),
}));

export const webhooksRelations = relations(webhooks, ({ one, many }) => ({
  port: one(ports, {
    fields: [webhooks.portId],
    references: [ports.id],
  }),
  deliveries: many(webhookDeliveries),
}));

export const webhookDeliveriesRelations = relations(webhookDeliveries, ({ one }) => ({
  webhook: one(webhooks, {
    fields: [webhookDeliveries.webhookId],
    references: [webhooks.id],
  }),
}));

export const systemSettingsRelations = relations(systemSettings, ({ one }) => ({
  port: one(ports, {
    fields: [systemSettings.portId],
    references: [ports.id],
  }),
}));

export const savedViewsRelations = relations(savedViews, ({ one }) => ({
  port: one(ports, {
    fields: [savedViews.portId],
    references: [ports.id],
  }),
}));

/** Scratchpad notes can optionally be promoted/linked to a client. */
export const scratchpadNotesRelations = relations(scratchpadNotes, ({ one }) => ({
  linkedClient: one(clients, {
    fields: [scratchpadNotes.linkedClientId],
    references: [clients.id],
  }),
}));

export const userNotificationPreferencesRelations = relations(
  userNotificationPreferences,
  ({ one }) => ({
    port: one(ports, {
      fields: [userNotificationPreferences.portId],
      references: [ports.id],
    }),
  }),
);

export const customFieldDefinitionsRelations = relations(
  customFieldDefinitions,
  ({ one, many }) => ({
    port: one(ports, {
      fields: [customFieldDefinitions.portId],
      references: [ports.id],
    }),
    values: many(customFieldValues),
  }),
);

export const customFieldValuesRelations = relations(customFieldValues, ({ one }) => ({
  definition: one(customFieldDefinitions, {
    fields: [customFieldValues.fieldId],
    references: [customFieldDefinitions.id],
  }),
}));

243
src/lib/db/schema/system.ts Normal file
View File

@@ -0,0 +1,243 @@
import {
  pgTable,
  text,
  boolean,
  integer,
  numeric,
  timestamp,
  jsonb,
  index,
  uniqueIndex,
  primaryKey,
  type AnyPgColumn,
} from 'drizzle-orm/pg-core';
import { ports } from './ports';
import { clients } from './clients';
export const auditLogs = pgTable(
'audit_logs',
{
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
portId: text('port_id').references(() => ports.id), // null for system-level events
userId: text('user_id'), // null for system-generated events
action: text('action').notNull(), // create, update, delete, archive, restore, merge, login, logout, revert
entityType: text('entity_type').notNull(), // client, interest, berth, expense, invoice, file, user, role, etc.
entityId: text('entity_id'),
fieldChanged: text('field_changed'),
oldValue: jsonb('old_value'),
newValue: jsonb('new_value'),
ipAddress: text('ip_address'),
userAgent: text('user_agent'),
revertedBy: text('reverted_by'), // user ID if this change was reverted
revertedAt: timestamp('reverted_at', { withTimezone: true }),
revertOf: text('revert_of').references((): any => auditLogs.id),
metadata: jsonb('metadata').default({}),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
},
(table) => [
index('idx_al_port').on(table.portId, table.createdAt),
index('idx_al_entity').on(table.entityType, table.entityId),
index('idx_al_user').on(table.userId, table.createdAt),
index('idx_al_created').on(table.createdAt),
],
);
/** Port-scoped labels; names are unique per port. */
export const tags = pgTable(
  'tags',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    name: text('name').notNull(),
    color: text('color').notNull().default('#6B7280'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('tags_port_name_idx').on(table.portId, table.name),
    index('idx_tags_port').on(table.portId),
  ],
);

/** Outbound webhook endpoints, filtered by subscribed event names. */
export const webhooks = pgTable(
  'webhooks',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    name: text('name').notNull(),
    url: text('url').notNull(),
    // Optional shared secret, presumably for payload signing — confirm usage.
    secret: text('secret'),
    events: text('events').array().notNull(),
    isActive: boolean('is_active').notNull().default(true),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_webhooks_port').on(table.portId)],
);

/** Delivery attempts per webhook; rows cascade-delete with their webhook. */
export const webhookDeliveries = pgTable(
  'webhook_deliveries',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    webhookId: text('webhook_id')
      .notNull()
      .references(() => webhooks.id, { onDelete: 'cascade' }),
    eventType: text('event_type').notNull(),
    payload: jsonb('payload').notNull(),
    responseStatus: integer('response_status'),
    responseBody: text('response_body'),
    attempt: integer('attempt').notNull().default(1),
    status: text('status').notNull().default('pending'), // pending, success, failed, dead_letter
    deliveredAt: timestamp('delivered_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_wd_webhook').on(table.webhookId, table.createdAt)],
);
/**
 * Key/value settings, global (portId null) or per-port.
 *
 * NOTE(review): no `.primaryKey()` is declared on any column even though the
 * inline comment below claims `key` is the PK per the SQL schema — confirm
 * against the actual migration. Also note Postgres unique indexes treat NULLs
 * as distinct, so multiple (key, NULL) rows are possible for global settings;
 * verify that is intended.
 */
export const systemSettings = pgTable(
  'system_settings',
  {
    key: text('key').notNull(),
    value: jsonb('value').notNull(),
    portId: text('port_id').references(() => ports.id), // null for global settings
    updatedBy: text('updated_by'),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('system_settings_key_port_idx').on(table.key, table.portId),
    // Note: the PRIMARY KEY is `key` alone based on schema, but unique on (key, port_id)
    // We use key as primary key per SQL schema
  ],
);
/** Per-user saved list configurations (filters/sort/columns) per entity type. */
export const savedViews = pgTable(
  'saved_views',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    userId: text('user_id').notNull(),
    entityType: text('entity_type').notNull(), // clients, interests, berths, expenses, invoices
    name: text('name').notNull(),
    filters: jsonb('filters').notNull(),
    sortConfig: jsonb('sort_config'),
    columnConfig: jsonb('column_config'),
    isShared: boolean('is_shared').notNull().default(false),
    isDefault: boolean('is_default').notNull().default(false),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_sv_user').on(table.userId, table.entityType)],
);

/** Free-form per-user notes, optionally linked to a client later. */
export const scratchpadNotes = pgTable(
  'scratchpad_notes',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull(),
    content: text('content').notNull(),
    linkedClientId: text('linked_client_id').references(() => clients.id),
    linkedAt: timestamp('linked_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_sp_user').on(table.userId)],
);

/** Per-(user, port, notification type) channel toggles; both default on. */
export const userNotificationPreferences = pgTable(
  'user_notification_preferences',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull(),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    notificationType: text('notification_type').notNull(),
    inApp: boolean('in_app').notNull().default(true),
    email: boolean('email').notNull().default(true),
  },
  (table) => [
    uniqueIndex('unp_user_port_type_idx').on(table.userId, table.portId, table.notificationType),
  ],
);

/** Latest FX rate per (base, target) pair; upserted on refresh. */
export const currencyRates = pgTable(
  'currency_rates',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    baseCurrency: text('base_currency').notNull(),
    targetCurrency: text('target_currency').notNull(),
    rate: numeric('rate').notNull(),
    source: text('source').notNull().default('frankfurter'), // frankfurter, manual
    fetchedAt: timestamp('fetched_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('currency_rates_base_target_idx').on(table.baseCurrency, table.targetCurrency),
  ],
);
/** Admin-defined extra fields per entity type; field names unique per (port, entity). */
export const customFieldDefinitions = pgTable(
  'custom_field_definitions',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    entityType: text('entity_type').notNull(), // client, interest, berth
    fieldName: text('field_name').notNull(),
    fieldLabel: text('field_label').notNull(),
    fieldType: text('field_type').notNull(), // text, number, date, boolean, select
    selectOptions: jsonb('select_options'), // for select type: ["option1", "option2"]
    isRequired: boolean('is_required').notNull().default(false),
    sortOrder: integer('sort_order').notNull().default(0),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('cfd_port_entity_name_idx').on(table.portId, table.entityType, table.fieldName),
    index('idx_cfd_port').on(table.portId),
  ],
);

/**
 * Values for custom fields. `entityId` is a soft reference (no FK) into
 * whichever table the definition's entityType names; one value per
 * (field, entity) enforced by cfv_field_entity_idx.
 */
export const customFieldValues = pgTable(
  'custom_field_values',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    fieldId: text('field_id')
      .notNull()
      .references(() => customFieldDefinitions.id, { onDelete: 'cascade' }),
    entityId: text('entity_id').notNull(), // references the client/interest/berth ID
    value: jsonb('value').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('cfv_field_entity_idx').on(table.fieldId, table.entityId),
    index('idx_cfv_entity').on(table.entityId),
  ],
);
// ─── Inferred row types (select/insert) for each table above ─────────────────
export type AuditLog = typeof auditLogs.$inferSelect;
export type NewAuditLog = typeof auditLogs.$inferInsert;
export type Tag = typeof tags.$inferSelect;
export type NewTag = typeof tags.$inferInsert;
export type Webhook = typeof webhooks.$inferSelect;
export type NewWebhook = typeof webhooks.$inferInsert;
export type WebhookDelivery = typeof webhookDeliveries.$inferSelect;
export type NewWebhookDelivery = typeof webhookDeliveries.$inferInsert;
export type SystemSetting = typeof systemSettings.$inferSelect;
export type NewSystemSetting = typeof systemSettings.$inferInsert;
export type SavedView = typeof savedViews.$inferSelect;
export type NewSavedView = typeof savedViews.$inferInsert;
export type ScratchpadNote = typeof scratchpadNotes.$inferSelect;
export type NewScratchpadNote = typeof scratchpadNotes.$inferInsert;
export type UserNotificationPreference = typeof userNotificationPreferences.$inferSelect;
export type NewUserNotificationPreference = typeof userNotificationPreferences.$inferInsert;
export type CurrencyRate = typeof currencyRates.$inferSelect;
export type NewCurrencyRate = typeof currencyRates.$inferInsert;
export type CustomFieldDefinition = typeof customFieldDefinitions.$inferSelect;
export type NewCustomFieldDefinition = typeof customFieldDefinitions.$inferInsert;
export type CustomFieldValue = typeof customFieldValues.$inferSelect;
export type NewCustomFieldValue = typeof customFieldValues.$inferInsert;

265
src/lib/db/schema/users.ts Normal file
View File

@@ -0,0 +1,265 @@
import {
pgTable,
text,
boolean,
timestamp,
jsonb,
index,
uniqueIndex,
} from 'drizzle-orm/pg-core';
import { ports } from './ports';
// ─── Permission Types ─────────────────────────────────────────────────────────

/**
 * Full permission matrix stored as JSONB on `roles.permissions`, keyed by
 * feature area. Every flag is a required boolean; port-specific deviations
 * live in `portRoleOverrides` as a Partial of this shape.
 */
export type RolePermissions = {
  clients: {
    view: boolean;
    create: boolean;
    edit: boolean;
    delete: boolean;
    merge: boolean;
    export: boolean;
  };
  interests: {
    view: boolean;
    create: boolean;
    edit: boolean;
    delete: boolean;
    change_stage: boolean;
    generate_eoi: boolean;
    export: boolean;
  };
  berths: {
    view: boolean;
    edit: boolean;
    import: boolean;
    manage_waiting_list: boolean;
  };
  documents: {
    view: boolean;
    create: boolean;
    send_for_signing: boolean;
    upload_signed: boolean;
    delete: boolean;
  };
  expenses: {
    view: boolean;
    create: boolean;
    edit: boolean;
    delete: boolean;
    export: boolean;
    scan_receipt: boolean;
  };
  invoices: {
    view: boolean;
    create: boolean;
    edit: boolean;
    delete: boolean;
    send: boolean;
    record_payment: boolean;
    export: boolean;
  };
  files: {
    view: boolean;
    upload: boolean;
    delete: boolean;
    manage_folders: boolean;
  };
  email: {
    view: boolean;
    send: boolean;
    configure_account: boolean;
  };
  // Reminders distinguish own-vs-all visibility and editability.
  reminders: {
    view_own: boolean;
    view_all: boolean;
    create: boolean;
    edit_own: boolean;
    edit_all: boolean;
    assign_others: boolean;
  };
  calendar: {
    connect: boolean;
    view_events: boolean;
  };
  reports: {
    view_dashboard: boolean;
    view_analytics: boolean;
    export: boolean;
  };
  document_templates: {
    view: boolean;
    generate: boolean;
    manage: boolean;
  };
  admin: {
    manage_users: boolean;
    view_audit_log: boolean;
    manage_settings: boolean;
    manage_webhooks: boolean;
    manage_reports: boolean;
    manage_custom_fields: boolean;
    manage_forms: boolean;
    manage_tags: boolean;
    system_backup: boolean;
  };
};

/** Per-user UI preferences stored as JSONB; unknown keys are preserved. */
export type UserPreferences = {
  dark_mode?: boolean;
  locale?: string;
  timezone?: string;
  [key: string]: unknown;
};
// ─── Better Auth Core Tables ─────────────────────────────────────────────────

/**
 * Core user table managed by Better Auth.
 * Do NOT modify directly — Better Auth handles CRUD via its adapter.
 */
export const user = pgTable('user', {
  id: text('id').primaryKey(),
  name: text('name').notNull(),
  email: text('email').notNull().unique(),
  emailVerified: boolean('email_verified').notNull().default(false),
  image: text('image'),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
});

/**
 * Linked auth providers / credentials per user (Better Auth managed).
 * `password` is populated for the credentials provider; token columns for OAuth.
 */
export const account = pgTable('account', {
  id: text('id').primaryKey(),
  accountId: text('account_id').notNull(),
  providerId: text('provider_id').notNull(),
  userId: text('user_id').notNull().references(() => user.id),
  accessToken: text('access_token'),
  refreshToken: text('refresh_token'),
  idToken: text('id_token'),
  accessTokenExpiresAt: timestamp('access_token_expires_at', { withTimezone: true }),
  refreshTokenExpiresAt: timestamp('refresh_token_expires_at', { withTimezone: true }),
  scope: text('scope'),
  password: text('password'),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
});

/** Short-lived verification tokens (Better Auth managed). */
export const verification = pgTable('verification', {
  id: text('id').primaryKey(),
  identifier: text('identifier').notNull(),
  value: text('value').notNull(),
  expiresAt: timestamp('expires_at', { withTimezone: true }).notNull(),
  createdAt: timestamp('created_at', { withTimezone: true }),
  updatedAt: timestamp('updated_at', { withTimezone: true }),
});
// ─── CRM Extension Tables ───────────────────────────────────────────────────

/**
 * Extension table for Better Auth users.
 * Better Auth manages the core `user` table.
 * We extend with CRM-specific fields here.
 */
export const userProfiles = pgTable(
  'user_profiles',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull().unique(), // references Better Auth user ID
    displayName: text('display_name').notNull(),
    avatarUrl: text('avatar_url'),
    phone: text('phone'),
    // Super admins bypass per-port role checks.
    isSuperAdmin: boolean('is_super_admin').notNull().default(false),
    isActive: boolean('is_active').notNull().default(true),
    lastLoginAt: timestamp('last_login_at', { withTimezone: true }),
    preferences: jsonb('preferences').$type<UserPreferences>().notNull().default({}),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [uniqueIndex('user_profiles_user_id_idx').on(table.userId)],
);

/**
 * Named permission bundles, global or port-scoped; `isSystem` roles are seeded.
 *
 * NOTE(review): `default({} as RolePermissions)` asserts an empty object to a
 * type with many required keys — readers of `role.permissions` on a row that
 * kept the default may find no flags present. Confirm all code treats missing
 * areas/flags as false before relying on the type.
 */
export const roles = pgTable('roles', {
  id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
  name: text('name').notNull(),
  description: text('description'),
  permissions: jsonb('permissions').$type<RolePermissions>().notNull().default({} as RolePermissions),
  isGlobal: boolean('is_global').notNull().default(true),
  isSystem: boolean('is_system').notNull().default(false),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
});
/** Per-port partial overrides of a role's permission matrix; one row per (port, role). */
export const portRoleOverrides = pgTable(
  'port_role_overrides',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    portId: text('port_id')
      .notNull()
      .references(() => ports.id, { onDelete: 'cascade' }),
    roleId: text('role_id')
      .notNull()
      .references(() => roles.id, { onDelete: 'cascade' }),
    // Sparse: only the flags that differ from the base role are stored.
    permissionOverrides: jsonb('permission_overrides')
      .$type<Partial<RolePermissions>>()
      .notNull()
      .default({}),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('port_role_overrides_port_role_idx').on(table.portId, table.roleId),
    index('port_role_overrides_port_idx').on(table.portId),
  ],
);

/** Assignment of a role to a user within a port; unique per (user, port, role). */
export const userPortRoles = pgTable(
  'user_port_roles',
  {
    id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
    userId: text('user_id').notNull(), // references Better Auth user ID
    portId: text('port_id')
      .notNull()
      .references(() => ports.id, { onDelete: 'cascade' }),
    roleId: text('role_id')
      .notNull()
      .references(() => roles.id, { onDelete: 'cascade' }),
    assignedBy: text('assigned_by'), // user ID of who assigned this
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('user_port_roles_user_port_role_idx').on(table.userId, table.portId, table.roleId),
    index('idx_upr_user').on(table.userId),
    index('idx_upr_port').on(table.portId),
  ],
);
/**
 * Sessions table — Better Auth compatibility.
 * Better Auth manages session creation/validation.
 */
export const session = pgTable(
  'session',
  {
    id: text('id').primaryKey(),
    userId: text('user_id').notNull(),
    token: text('token').notNull().unique(),
    expiresAt: timestamp('expires_at', { withTimezone: true }).notNull(),
    ipAddress: text('ip_address'),
    userAgent: text('user_agent'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    uniqueIndex('sessions_token_idx').on(table.token),
    index('sessions_user_id_idx').on(table.userId),
  ],
);

// Inferred row types (select/insert) for the CRM extension tables.
export type UserProfile = typeof userProfiles.$inferSelect;
export type NewUserProfile = typeof userProfiles.$inferInsert;
export type Role = typeof roles.$inferSelect;
export type NewRole = typeof roles.$inferInsert;
export type PortRoleOverride = typeof portRoleOverrides.$inferSelect;
export type NewPortRoleOverride = typeof portRoleOverrides.$inferInsert;
export type UserPortRole = typeof userPortRoles.$inferSelect;
export type NewUserPortRole = typeof userPortRoles.$inferInsert;

219
src/lib/db/seed.ts Normal file
View File

@@ -0,0 +1,219 @@
/**
* Seed script for Port Nimara CRM.
*
* Seeds:
* - 1 Port: Port Nimara
* - 5 System roles with full permission maps
* - 1 Super admin user profile (matt@portnimara.com)
*
* Run with: npm run db:seed
*/
import 'dotenv/config';
import { db } from './index';
import { ports } from './schema/ports';
import { roles, userProfiles } from './schema/users';
import type { RolePermissions } from './schema/users';
// ─── Permission Maps ─────────────────────────────────────────────────────────
//
// One RolePermissions object per seeded system role. The maps are stored
// verbatim on the role rows; keep all five structurally identical (same
// resources and keys) so only the boolean values differ between roles.

// Every permission enabled — assigned to super_admin.
const ALL_PERMISSIONS: RolePermissions = {
  clients: { view: true, create: true, edit: true, delete: true, merge: true, export: true },
  interests: { view: true, create: true, edit: true, delete: true, change_stage: true, generate_eoi: true, export: true },
  berths: { view: true, edit: true, import: true, manage_waiting_list: true },
  documents: { view: true, create: true, send_for_signing: true, upload_signed: true, delete: true },
  expenses: { view: true, create: true, edit: true, delete: true, export: true, scan_receipt: true },
  invoices: { view: true, create: true, edit: true, delete: true, send: true, record_payment: true, export: true },
  files: { view: true, upload: true, delete: true, manage_folders: true },
  email: { view: true, send: true, configure_account: true },
  reminders: { view_own: true, view_all: true, create: true, edit_own: true, edit_all: true, assign_others: true },
  calendar: { connect: true, view_events: true },
  reports: { view_dashboard: true, view_analytics: true, export: true },
  document_templates: { view: true, generate: true, manage: true },
  admin: { manage_users: true, view_audit_log: true, manage_settings: true, manage_webhooks: true, manage_reports: true, manage_custom_fields: true, manage_forms: true, manage_tags: true, system_backup: true },
};

// Identical to ALL_PERMISSIONS except admin.system_backup is disabled.
const DIRECTOR_PERMISSIONS: RolePermissions = {
  clients: { view: true, create: true, edit: true, delete: true, merge: true, export: true },
  interests: { view: true, create: true, edit: true, delete: true, change_stage: true, generate_eoi: true, export: true },
  berths: { view: true, edit: true, import: true, manage_waiting_list: true },
  documents: { view: true, create: true, send_for_signing: true, upload_signed: true, delete: true },
  expenses: { view: true, create: true, edit: true, delete: true, export: true, scan_receipt: true },
  invoices: { view: true, create: true, edit: true, delete: true, send: true, record_payment: true, export: true },
  files: { view: true, upload: true, delete: true, manage_folders: true },
  email: { view: true, send: true, configure_account: true },
  reminders: { view_own: true, view_all: true, create: true, edit_own: true, edit_all: true, assign_others: true },
  calendar: { connect: true, view_events: true },
  reports: { view_dashboard: true, view_analytics: true, export: true },
  document_templates: { view: true, generate: true, manage: true },
  admin: { manage_users: true, view_audit_log: true, manage_settings: true, manage_webhooks: true, manage_reports: true, manage_custom_fields: true, manage_forms: true, manage_tags: true, system_backup: false },
};

// Full sales access (view-all reminders, merging, exporting) but no deletes,
// no berth editing/import, and no admin access except tag management.
const SALES_MANAGER_PERMISSIONS: RolePermissions = {
  clients: { view: true, create: true, edit: true, delete: false, merge: true, export: true },
  interests: { view: true, create: true, edit: true, delete: false, change_stage: true, generate_eoi: true, export: true },
  berths: { view: true, edit: false, import: false, manage_waiting_list: true },
  documents: { view: true, create: true, send_for_signing: true, upload_signed: true, delete: false },
  expenses: { view: true, create: true, edit: true, delete: false, export: true, scan_receipt: true },
  invoices: { view: true, create: true, edit: true, delete: false, send: true, record_payment: true, export: true },
  files: { view: true, upload: true, delete: false, manage_folders: true },
  email: { view: true, send: true, configure_account: true },
  reminders: { view_own: true, view_all: true, create: true, edit_own: true, edit_all: true, assign_others: true },
  calendar: { connect: true, view_events: true },
  reports: { view_dashboard: true, view_analytics: true, export: true },
  document_templates: { view: true, generate: true, manage: false },
  admin: { manage_users: false, view_audit_log: false, manage_settings: false, manage_webhooks: false, manage_reports: false, manage_custom_fields: false, manage_forms: false, manage_tags: true, system_backup: false },
};

// Like sales_manager but without client merging, folder management, or
// visibility into other users' reminders.
const SALES_AGENT_PERMISSIONS: RolePermissions = {
  clients: { view: true, create: true, edit: true, delete: false, merge: false, export: true },
  interests: { view: true, create: true, edit: true, delete: false, change_stage: true, generate_eoi: true, export: true },
  berths: { view: true, edit: false, import: false, manage_waiting_list: true },
  documents: { view: true, create: true, send_for_signing: true, upload_signed: true, delete: false },
  expenses: { view: true, create: true, edit: true, delete: false, export: true, scan_receipt: true },
  invoices: { view: true, create: true, edit: true, delete: false, send: true, record_payment: true, export: true },
  files: { view: true, upload: true, delete: false, manage_folders: false },
  email: { view: true, send: true, configure_account: true },
  reminders: { view_own: true, view_all: false, create: true, edit_own: true, edit_all: false, assign_others: false },
  calendar: { connect: true, view_events: true },
  reports: { view_dashboard: true, view_analytics: true, export: true },
  document_templates: { view: true, generate: true, manage: false },
  admin: { manage_users: false, view_audit_log: false, manage_settings: false, manage_webhooks: false, manage_reports: false, manage_custom_fields: false, manage_forms: false, manage_tags: true, system_backup: false },
};

// Read-only: view access everywhere (plus own reminders and the dashboard),
// no creation, editing, or exporting of any kind.
const VIEWER_PERMISSIONS: RolePermissions = {
  clients: { view: true, create: false, edit: false, delete: false, merge: false, export: false },
  interests: { view: true, create: false, edit: false, delete: false, change_stage: false, generate_eoi: false, export: false },
  berths: { view: true, edit: false, import: false, manage_waiting_list: false },
  documents: { view: true, create: false, send_for_signing: false, upload_signed: false, delete: false },
  expenses: { view: true, create: false, edit: false, delete: false, export: false, scan_receipt: false },
  invoices: { view: true, create: false, edit: false, delete: false, send: false, record_payment: false, export: false },
  files: { view: true, upload: false, delete: false, manage_folders: false },
  email: { view: true, send: false, configure_account: false },
  reminders: { view_own: true, view_all: false, create: false, edit_own: false, edit_all: false, assign_others: false },
  calendar: { connect: false, view_events: true },
  reports: { view_dashboard: true, view_analytics: false, export: false },
  document_templates: { view: true, generate: false, manage: false },
  admin: { manage_users: false, view_audit_log: false, manage_settings: false, manage_webhooks: false, manage_reports: false, manage_custom_fields: false, manage_forms: false, manage_tags: false, system_backup: false },
};
// ─── Seed Function ────────────────────────────────────────────────────────────

/**
 * Idempotent seed entry point: inserts the port, the five system roles, and
 * the super-admin profile, skipping anything that already exists via
 * `onConflictDoNothing`. Exits the process with code 0 when done.
 */
async function seed() {
  console.log('Seeding Port Nimara CRM...');

  // ── 1. Port ─────────────────────────────────────────────────────────────────
  console.log('Creating Port Nimara...');
  const [port] = await db
    .insert(ports)
    .values({
      id: crypto.randomUUID(),
      name: 'Port Nimara',
      slug: 'port-nimara',
      logoUrl: null,
      primaryColor: '#0F4C81',
      defaultCurrency: 'USD',
      timezone: 'America/Anguilla',
      settings: {},
      isActive: true,
    })
    .onConflictDoNothing()
    .returning();
  // `.returning()` yields no rows when the insert was skipped by the conflict
  // clause, so `port` is undefined on re-runs.
  const portId = port?.id;
  if (!portId) {
    console.log('Port already exists, skipping...');
  } else {
    console.log(`Port created: ${portId}`);
  }

  // ── 2. System Roles ─────────────────────────────────────────────────────────
  console.log('Creating system roles...');
  // NOTE(review): role ids are fresh UUIDs on every run, so re-run idempotency
  // depends on a unique constraint on another column (presumably roles.name)
  // for onConflictDoNothing to fire — confirm against the roles schema.
  const systemRoles = [
    {
      id: crypto.randomUUID(),
      name: 'super_admin',
      description: 'Full system access. Bypasses all permission checks.',
      permissions: ALL_PERMISSIONS,
      isGlobal: true,
      isSystem: true,
    },
    {
      id: crypto.randomUUID(),
      name: 'director',
      description: 'Operational admin within assigned port(s). Can manage users and settings.',
      permissions: DIRECTOR_PERMISSIONS,
      isGlobal: true,
      isSystem: true,
    },
    {
      id: crypto.randomUUID(),
      name: 'sales_manager',
      description: 'Full sales access. Can view all reminders, assign tasks, and export reports.',
      permissions: SALES_MANAGER_PERMISSIONS,
      isGlobal: true,
      isSystem: true,
    },
    {
      id: crypto.randomUUID(),
      name: 'sales_agent',
      description: 'Standard sales role. View/create/edit clients and interests, manage own reminders.',
      permissions: SALES_AGENT_PERMISSIONS,
      isGlobal: true,
      isSystem: true,
    },
    {
      id: crypto.randomUUID(),
      name: 'viewer',
      description: 'Read-only access to all records.',
      permissions: VIEWER_PERMISSIONS,
      isGlobal: true,
      isSystem: true,
    },
  ];
  // Insert one at a time so each role's success is logged individually.
  for (const role of systemRoles) {
    await db.insert(roles).values(role).onConflictDoNothing();
    console.log(`Role created: ${role.name}`);
  }

  // ── 3. Super Admin User Profile ─────────────────────────────────────────────
  // Note: Better Auth creates the actual `user` record on first login.
  // We create the profile extension now, linked to a known user_id.
  // The Better Auth user_id for matt@portnimara.com must match this value
  // once Better Auth is configured. Use a stable placeholder ID here.
  console.log('Creating super admin user profile...');
  const superAdminUserId = 'super-admin-matt-portnimara';
  await db
    .insert(userProfiles)
    .values({
      id: crypto.randomUUID(),
      userId: superAdminUserId,
      displayName: 'Matt',
      avatarUrl: null,
      phone: null,
      isSuperAdmin: true,
      isActive: true,
      lastLoginAt: null,
      preferences: {},
    })
    .onConflictDoNothing();
  console.log(`Super admin profile created for user_id: ${superAdminUserId}`);
  console.log('');
  console.log('Seed complete!');
  console.log('');
  console.log('NOTE: The Better Auth user for matt@portnimara.com must be created');
  console.log(`separately. Once created, update user_profiles.user_id to match`);
  console.log(`the actual Better Auth user ID (currently placeholder: ${superAdminUserId})`);
  process.exit(0);
}

// Top-level runner: any rejection aborts the process with a non-zero exit.
seed().catch((err) => {
  console.error('Seed failed:', err);
  process.exit(1);
});

56
src/lib/db/utils.ts Normal file
View File

@@ -0,0 +1,56 @@
import { eq, sql } from 'drizzle-orm';
import type { PgTable, PgColumn } from 'drizzle-orm/pg-core';
import { db } from './index';
/**
 * Wraps a database operation in a transaction.
 * Rolls back automatically on error.
 *
 * NOTE(review): the `as any` cast papers over the difference between drizzle's
 * transaction-client type and `typeof db`; the `tx` handed to the callback is
 * transaction-scoped even though it is typed as the root client.
 *
 * @example
 * const result = await withTransaction(async (tx) => {
 *   await tx.insert(clients).values({ ... });
 *   await tx.insert(interests).values({ ... });
 *   return result;
 * });
 */
export async function withTransaction<T>(
  callback: (tx: typeof db) => Promise<T>,
): Promise<T> {
  return db.transaction(callback as any) as Promise<T>;
}
/**
 * Soft-deletes a record by setting `archived_at` to now.
 * The table must have an `archived_at` column.
 *
 * NOTE(review): the update payload uses the snake_case key `archived_at`, but
 * drizzle's `.set()` matches keys against the table's TypeScript property
 * names (typically camelCase `archivedAt`); the `as any` cast hides any
 * mismatch. Confirm the target tables declare the column under the key
 * `archived_at`, otherwise this update silently does nothing.
 *
 * @example
 * await softDelete(clients, clients.id, clientId);
 */
export async function softDelete<TTable extends PgTable>(
  table: TTable,
  idColumn: PgColumn,
  id: string,
): Promise<void> {
  await db
    .update(table)
    .set({ archived_at: sql`now()` } as any)
    .where(eq(idColumn, id));
}
/**
 * Restores a soft-deleted record by clearing `archived_at`.
 * The table must have an `archived_at` column.
 *
 * NOTE(review): same caveat as `softDelete` — the snake_case key `archived_at`
 * combined with `as any` bypasses drizzle's column-name checking; confirm it
 * matches the table's declared property key.
 *
 * @example
 * await restore(clients, clients.id, clientId);
 */
export async function restore<TTable extends PgTable>(
  table: TTable,
  idColumn: PgColumn,
  id: string,
): Promise<void> {
  await db
    .update(table)
    .set({ archived_at: null } as any)
    .where(eq(idColumn, id));
}

57
src/lib/email/index.ts Normal file
View File

@@ -0,0 +1,57 @@
import nodemailer, { type Transporter } from 'nodemailer';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
/**
 * Creates and returns a new Nodemailer SMTP transporter.
 *
 * A new instance is created on each call so the factory can be used in
 * contexts where connection pooling is managed externally (e.g. per-request
 * in serverless, or once at worker startup).
 *
 * NOTE(review): no `auth` option is supplied, so the SMTP relay must accept
 * unauthenticated connections from this host — confirm this matches the
 * deployment's mail setup.
 */
export function createTransporter(): Transporter {
  return nodemailer.createTransport({
    host: env.SMTP_HOST,
    port: env.SMTP_PORT,
    // Implicitly secure when port is 465; STARTTLS for all other ports.
    secure: env.SMTP_PORT === 465,
  });
}
/**
 * Options shape for an outgoing email.
 *
 * NOTE(review): `sendEmail` below takes positional arguments rather than this
 * object — confirm whether this interface is consumed elsewhere or is dead.
 */
export interface SendEmailOptions {
  to: string | string[];
  subject: string;
  html: string;
  // Optional sender override; sendEmail falls back to a noreply address.
  from?: string;
}
/**
 * Sends a single email via SMTP.
 *
 * Returns the nodemailer info object on success. Propagates errors to the
 * caller — callers in background jobs should wrap in try/catch and handle
 * retries via BullMQ.
 *
 * Creates a fresh transporter per call (no pooling, never closed) — fine for
 * low volume; revisit if bulk sending is added.
 *
 * NOTE(review): the default `from` domain is derived from SMTP_HOST, which is
 * usually an SMTP hostname (e.g. smtp.example.com) rather than a mail domain —
 * confirm the intended default sender address.
 */
export async function sendEmail(
  to: string | string[],
  subject: string,
  html: string,
  from?: string,
): Promise<nodemailer.SentMessageInfo> {
  const transporter = createTransporter();
  const info = await transporter.sendMail({
    from: from ?? `Port Nimara CRM <noreply@${env.SMTP_HOST}>`,
    // Nodemailer accepts a comma-separated recipient list.
    to: Array.isArray(to) ? to.join(', ') : to,
    subject,
    html,
  });
  logger.debug(
    { messageId: info.messageId, to, subject },
    'Email sent',
  );
  return info;
}

31
src/lib/entity-diff.ts Normal file
View File

@@ -0,0 +1,31 @@
import { diffFields } from '@/lib/audit';
// Metadata fields that never count as meaningful changes.
const SKIP_FIELDS = new Set(['createdAt', 'updatedAt', 'portId']);

/**
 * Wraps `diffFields` with automatic exclusion of metadata fields
 * (createdAt, updatedAt, portId).
 *
 * Only keys present in `newRecord` are compared; everything else on the old
 * record is ignored.
 */
export function diffEntity<T extends Record<string, unknown>>(
  oldRecord: T,
  newRecord: Partial<T>,
): { changed: boolean; diff: Record<string, { old: unknown; new: unknown }> } {
  const comparableKeys = Object.keys(newRecord).filter((key) => !SKIP_FIELDS.has(key));

  const previous: Record<string, unknown> = {};
  const next: Record<string, unknown> = {};
  for (const key of comparableKeys) {
    previous[key] = oldRecord[key];
    next[key] = newRecord[key];
  }

  const fieldChanges = diffFields(previous, next);

  const diff: Record<string, { old: unknown; new: unknown }> = {};
  for (const { field, oldValue, newValue } of fieldChanges) {
    diff[field] = { old: oldValue, new: newValue };
  }

  return { changed: fieldChanges.length > 0, diff };
}

66
src/lib/env.ts Normal file
View File

@@ -0,0 +1,66 @@
import { z } from 'zod';
/**
 * Schema for all environment variables the app depends on.
 * Validated once at module load; startup aborts on failure (see validateEnv).
 */
const envSchema = z.object({
  // Database
  // NOTE(review): `startsWith('postgresql://')` rejects the equally valid
  // `postgres://` scheme — confirm this strictness is intentional.
  DATABASE_URL: z.string().url().startsWith('postgresql://'),
  // Redis
  // NOTE(review): rejects TLS `rediss://` URLs — confirm intentional.
  REDIS_URL: z.string().url().startsWith('redis://'),
  // Auth
  BETTER_AUTH_SECRET: z.string().min(32),
  BETTER_AUTH_URL: z.string().url(),
  CSRF_SECRET: z.string().min(32),
  // MinIO
  MINIO_ENDPOINT: z.string().min(1),
  MINIO_PORT: z.coerce.number().int().positive(),
  MINIO_ACCESS_KEY: z.string().min(1),
  MINIO_SECRET_KEY: z.string().min(1),
  MINIO_BUCKET: z.string().min(1),
  // Supplied as the literal strings 'true'/'false'; transformed to a boolean.
  MINIO_USE_SSL: z.enum(['true', 'false']).transform((v) => v === 'true'),
  // Documenso
  DOCUMENSO_API_URL: z.string().url(),
  DOCUMENSO_API_KEY: z.string().min(1),
  DOCUMENSO_WEBHOOK_SECRET: z.string().min(16),
  // Email
  SMTP_HOST: z.string().min(1),
  SMTP_PORT: z.coerce.number().int().positive(),
  // Encryption — a 32-byte key encoded as 64 hex characters.
  EMAIL_CREDENTIAL_KEY: z
    .string()
    .length(64)
    .regex(/^[0-9a-f]+$/i, 'Must be a 64-character hex string'),
  // Google OAuth (optional)
  GOOGLE_CLIENT_ID: z.string().optional(),
  GOOGLE_CLIENT_SECRET: z.string().optional(),
  // OpenAI (optional)
  OPENAI_API_KEY: z.string().optional(),
  // App
  APP_URL: z.string().url(),
  PUBLIC_SITE_URL: z.string().url(),
  NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
  LOG_LEVEL: z.enum(['fatal', 'error', 'warn', 'info', 'debug', 'trace']).default('info'),
});

/** Fully validated and transformed environment shape. */
export type Env = z.infer<typeof envSchema>;
/**
 * Parses and validates `process.env` against the schema.
 * On failure, prints every issue and terminates with exit code 1 so the app
 * never runs with a broken configuration.
 */
function validateEnv(): Env {
  const result = envSchema.safeParse(process.env);
  if (!result.success) {
    console.error('Invalid environment variables:');
    for (const issue of result.error.issues) {
      console.error(`  ${issue.path.join('.')}: ${issue.message}`);
    }
    process.exit(1);
  }
  return result.data;
}

/** Validated environment, resolved once at module load. */
export const env = validateEnv();

86
src/lib/errors.ts Normal file
View File

@@ -0,0 +1,86 @@
import { NextResponse } from 'next/server';
import { ZodError } from 'zod';
import { logger } from '@/lib/logger';
/**
 * Base class for application-level HTTP errors.
 * Carries an HTTP status code and an optional machine-readable error code.
 */
export class AppError extends Error {
  public statusCode: number;
  public code?: string;

  constructor(statusCode: number, message: string, code?: string) {
    super(message);
    this.statusCode = statusCode;
    this.code = code;
    this.name = 'AppError';
  }
}
/** 404 — the requested entity does not exist. */
export class NotFoundError extends AppError {
  constructor(entity: string) {
    const description = `${entity} not found`;
    super(404, description, 'NOT_FOUND');
  }
}
/** 403 — the caller lacks permission; message defaults to a generic denial. */
export class ForbiddenError extends AppError {
  constructor(message: string = 'Insufficient permissions') {
    super(403, message, 'FORBIDDEN');
  }
}
/** 400 — request payload failed validation; optional per-field details. */
export class ValidationError extends AppError {
  public details?: Array<{ field: string; message: string }>;

  constructor(
    message: string,
    details?: Array<{ field: string; message: string }>,
  ) {
    super(400, message, 'VALIDATION_ERROR');
    this.details = details;
  }
}
/** 409 — the request conflicts with existing state. */
export class ConflictError extends AppError {
  constructor(message: string) {
    const status = 409;
    super(status, message, 'CONFLICT');
  }
}
/** 429 — too many requests; `retryAfter` is echoed back to the client. */
export class RateLimitError extends AppError {
  public retryAfter: number;

  constructor(retryAfter: number) {
    super(429, 'Too many requests', 'RATE_LIMITED');
    this.retryAfter = retryAfter;
  }
}
/**
* Converts any thrown value into a sanitised NextResponse.
* Never leaks stack traces, internal paths, or database error details to the client.
*/
export function errorResponse(error: unknown): NextResponse {
if (error instanceof AppError) {
const body: Record<string, unknown> = {
error: error.message,
code: error.code,
};
if (error instanceof ValidationError && error.details) {
body.details = error.details;
}
if (error instanceof RateLimitError) {
body.retryAfter = error.retryAfter;
}
return NextResponse.json(body, { status: error.statusCode });
}
if (error instanceof ZodError) {
return NextResponse.json(
{
error: 'Validation failed',
code: 'VALIDATION_ERROR',
details: error.errors.map((e) => ({
field: e.path.join('.'),
message: e.message,
})),
},
{ status: 400 },
);
}
// Log full details server-side; never send them to the client.
logger.error({ err: error }, 'Unhandled error');
return NextResponse.json({ error: 'Internal server error' }, { status: 500 });
}

30
src/lib/logger.ts Normal file
View File

@@ -0,0 +1,30 @@
import pino from 'pino';
/**
 * Application-wide pino logger.
 * - Redacts common credential fields at the top level and one level deep.
 * - Pretty-prints in non-production; emits raw JSON lines in production.
 */
export const logger = pino({
  level: process.env.LOG_LEVEL ?? 'info',
  redact: {
    paths: [
      'password',
      'token',
      'secret',
      'accessKey',
      'secretKey',
      'creditCard',
      // Wildcards cover the same keys nested one level deep.
      '*.password',
      '*.token',
      '*.secret',
      '*.accessKey',
      '*.secretKey',
    ],
    censor: '[REDACTED]',
  },
  // pino-pretty is a dev-only transport; production keeps machine-readable output.
  transport:
    process.env.NODE_ENV !== 'production'
      ? { target: 'pino-pretty', options: { colorize: true } }
      : undefined,
  // Standard serializers for errors and HTTP request/response objects.
  serializers: {
    err: pino.stdSerializers.err,
    req: pino.stdSerializers.req,
    res: pino.stdSerializers.res,
  },
});

63
src/lib/minio/index.ts Normal file
View File

@@ -0,0 +1,63 @@
import { Client } from 'minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
/** Shared MinIO client, configured from validated environment variables. */
export const minioClient = new Client({
  endPoint: env.MINIO_ENDPOINT,
  port: env.MINIO_PORT,
  useSSL: env.MINIO_USE_SSL,
  accessKey: env.MINIO_ACCESS_KEY,
  secretKey: env.MINIO_SECRET_KEY,
});

// All objects in this deployment live in a single configured bucket.
const BUCKET = env.MINIO_BUCKET;
/**
 * Ensures the configured bucket exists, creating it if not.
 * Should be called once at application startup.
 *
 * Logs and rethrows on failure so startup can abort.
 */
export async function ensureBucket(): Promise<void> {
  try {
    if (await minioClient.bucketExists(BUCKET)) {
      logger.debug({ bucket: BUCKET }, 'MinIO bucket exists');
      return;
    }
    await minioClient.makeBucket(BUCKET);
    logger.info({ bucket: BUCKET }, 'MinIO bucket created');
  } catch (err) {
    logger.error({ err, bucket: BUCKET }, 'Failed to ensure MinIO bucket');
    throw err;
  }
}
/**
 * Generates a pre-signed GET URL for an object.
 *
 * Default expiry is 15 minutes (900 seconds) per SECURITY-GUIDELINES.md §7.1.
 */
export async function getPresignedUrl(
  objectKey: string,
  expirySeconds: number = 900,
): Promise<string> {
  const url = await minioClient.presignedGetObject(BUCKET, objectKey, expirySeconds);
  return url;
}
/**
 * Constructs a storage path from typed components.
 *
 * Format: `{portSlug}/{entity}/{entityId}/{fileId}.{extension}`
 *
 * No user-supplied input should ever be used as path components — only UUIDs
 * and controlled slugs (SECURITY-GUIDELINES.md §3.4, §7.1).
 */
export function buildStoragePath(
  portSlug: string,
  entity: string,
  entityId: string,
  fileId: string,
  extension: string,
): string {
  const objectName = `${fileId}.${extension}`;
  return [portSlug, entity, entityId, objectName].join('/');
}

24
src/lib/pdf/generate.ts Normal file
View File

@@ -0,0 +1,24 @@
import { generate } from '@pdfme/generator';
import type { Template } from '@pdfme/common';
import { logger } from '@/lib/logger';
/**
 * Generates a PDF from a @pdfme template and input data.
 *
 * @param template The @pdfme template definition (basePdf + schemas)
 * @param inputs Array of input objects matching the template schema fields
 * @returns Raw PDF bytes
 * @throws Error with a generic message; full failure details are logged.
 */
export async function generatePdf(
  template: Template,
  inputs: Record<string, string>[],
): Promise<Uint8Array> {
  try {
    // `return await` keeps rejections inside this try block.
    return await generate({ template, inputs });
  } catch (err) {
    logger.error({ err }, 'PDF generation failed');
    throw new Error('Failed to generate PDF');
  }
}

View File

@@ -0,0 +1,113 @@
import type { Template } from '@pdfme/common';
/**
 * Single-page @pdfme template for the berth specification PDF.
 * Field positions/sizes follow @pdfme's coordinate convention (y up to 275
 * fits one portrait page).
 *
 * NOTE(review): `basePdf: 'BLANK_PDF' as any` is a string placeholder — pdfme
 * expects the `BLANK_PDF` constant (or base64 PDF data) from `@pdfme/common`.
 * Confirm this is substituted before generation, otherwise it will fail.
 */
export const berthSpecTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      { name: 'portName', type: 'text', position: { x: 20, y: 15 }, width: 100, height: 10, fontSize: 16 },
      { name: 'title', type: 'text', position: { x: 20, y: 30 }, width: 170, height: 8, fontSize: 14 },
      { name: 'berthInfo', type: 'text', position: { x: 20, y: 45 }, width: 80, height: 25, fontSize: 9 },
      { name: 'dimensions', type: 'text', position: { x: 110, y: 45 }, width: 80, height: 25, fontSize: 9 },
      { name: 'pricing', type: 'text', position: { x: 20, y: 75 }, width: 80, height: 20, fontSize: 9 },
      { name: 'tenure', type: 'text', position: { x: 110, y: 75 }, width: 80, height: 20, fontSize: 9 },
      { name: 'infrastructure', type: 'text', position: { x: 20, y: 100 }, width: 170, height: 25, fontSize: 9 },
      { name: 'waitingList', type: 'text', position: { x: 20, y: 130 }, width: 170, height: 50, fontSize: 8 },
      { name: 'maintenanceLog', type: 'text', position: { x: 20, y: 185 }, width: 170, height: 75, fontSize: 8 },
      { name: 'generatedAt', type: 'text', position: { x: 20, y: 275 }, width: 170, height: 6, fontSize: 7 },
    ],
  ],
};
/**
 * Flattens berth, waiting-list, maintenance, and port records into the string
 * inputs expected by `berthSpecTemplate`. Optional fields are dropped
 * (`.filter(Boolean)`) rather than rendered as blanks.
 *
 * NOTE(review): `linkedInterests` is accepted but never rendered — confirm
 * whether a linked-interests section was intended.
 */
export function buildBerthSpecInputs(
  berth: any,
  waitingList: any[],
  maintenance: any[],
  linkedInterests: any[],
  port: any,
): Record<string, string> {
  // Identity block: mooring number always shown, status defaults to 'available'.
  const berthInfo = [
    `Mooring: ${berth.mooringNumber}`,
    berth.area ? `Area: ${berth.area}` : null,
    `Status: ${berth.status ?? 'available'}`,
    berth.nominalBoatSize ? `Nominal boat size: ${berth.nominalBoatSize}` : null,
    berth.bowFacing ? `Bow facing: ${berth.bowFacing}` : null,
  ]
    .filter(Boolean)
    .join('\n');
  // Each dimension shows feet, with metres appended when available.
  const dimensions = [
    berth.lengthFt
      ? `Length: ${berth.lengthFt}ft${berth.lengthM ? ` / ${berth.lengthM}m` : ''}`
      : null,
    berth.widthFt
      ? `Beam: ${berth.widthFt}ft${berth.widthM ? ` / ${berth.widthM}m` : ''}${berth.widthIsMinimum ? ' (min)' : ''}`
      : null,
    berth.draftFt
      ? `Draft: ${berth.draftFt}ft${berth.draftM ? ` / ${berth.draftM}m` : ''}`
      : null,
    berth.waterDepth
      ? `Water depth: ${berth.waterDepth}ft${berth.waterDepthM ? ` / ${berth.waterDepthM}m` : ''}${berth.waterDepthIsMinimum ? ' (min)' : ''}`
      : null,
  ]
    .filter(Boolean)
    .join('\n') || 'Dimensions not specified';
  const pricing = berth.price
    ? `Price: ${berth.priceCurrency ?? 'USD'} ${Number(berth.price).toLocaleString()}`
    : 'Price: TBD';
  const tenure = [
    `Tenure type: ${berth.tenureType ?? 'permanent'}`,
    berth.tenureYears ? `Tenure years: ${berth.tenureYears}` : null,
    berth.tenureStartDate ? `Start: ${berth.tenureStartDate}` : null,
    berth.tenureEndDate ? `End: ${berth.tenureEndDate}` : null,
  ]
    .filter(Boolean)
    .join('\n');
  const infrastructure = [
    berth.mooringType ? `Mooring type: ${berth.mooringType}` : null,
    berth.powerCapacity ? `Power: ${berth.powerCapacity}${berth.voltage ? ` / ${berth.voltage}V` : ''}` : null,
    berth.cleatType ? `Cleat: ${berth.cleatType}${berth.cleatCapacity ? ` (${berth.cleatCapacity})` : ''}` : null,
    berth.bollardType
      ? `Bollard: ${berth.bollardType}${berth.bollardCapacity ? ` (${berth.bollardCapacity})` : ''}`
      : null,
    berth.sidePontoon ? `Side pontoon: ${berth.sidePontoon}` : null,
    berth.access ? `Access: ${berth.access}` : null,
  ]
    .filter(Boolean)
    .join(' | ') || 'Infrastructure details not specified';
  // One line per waiting-list entry, ordered by their stored position.
  // NOTE(review): notes are appended directly with no separator after the name/
  // [HIGH] marker — possibly a lost " — " delimiter; confirm intended output.
  const waitingListText =
    waitingList.length > 0
      ? waitingList
          .map(
            (w) =>
              `${w.position}. ${w.clientName ?? 'Unknown'}${w.priority === 'high' ? ' [HIGH]' : ''}${w.notes ? `${w.notes}` : ''}`,
          )
          .join('\n')
      : 'No clients on waiting list';
  const maintenanceText =
    maintenance.length > 0
      ? maintenance
          .map(
            (m) =>
              `${m.performedDate} [${m.category}] ${m.description}${m.cost ? ` Cost: ${m.costCurrency ?? 'USD'} ${Number(m.cost).toLocaleString()}` : ''}`,
          )
          .join('\n')
      : 'No maintenance records';
  return {
    portName: port?.name ?? 'Port Nimara',
    title: `Berth Specification — Mooring ${berth.mooringNumber}`,
    berthInfo,
    dimensions,
    pricing,
    tenure,
    infrastructure,
    waitingList: `Waiting List (${waitingList.length}):\n${waitingListText}`,
    maintenanceLog: `Maintenance Log (last ${maintenance.length}):\n${maintenanceText}`,
    generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
  };
}

View File

@@ -0,0 +1,92 @@
import type { Template } from '@pdfme/common';
/**
 * Single-page @pdfme template for the client summary PDF.
 *
 * NOTE(review): `basePdf: 'BLANK_PDF' as any` is a string placeholder — pdfme
 * expects the `BLANK_PDF` constant (or base64 PDF data) from `@pdfme/common`.
 * Confirm this is substituted before generation.
 */
export const clientSummaryTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      { name: 'portName', type: 'text', position: { x: 20, y: 15 }, width: 100, height: 10, fontSize: 16 },
      { name: 'title', type: 'text', position: { x: 20, y: 30 }, width: 170, height: 8, fontSize: 14 },
      { name: 'clientInfo', type: 'text', position: { x: 20, y: 45 }, width: 80, height: 40, fontSize: 9 },
      { name: 'contacts', type: 'text', position: { x: 110, y: 45 }, width: 80, height: 40, fontSize: 9 },
      { name: 'vesselInfo', type: 'text', position: { x: 20, y: 90 }, width: 170, height: 20, fontSize: 9 },
      { name: 'interests', type: 'text', position: { x: 20, y: 115 }, width: 170, height: 80, fontSize: 8 },
      { name: 'recentActivity', type: 'text', position: { x: 20, y: 200 }, width: 170, height: 60, fontSize: 8 },
      { name: 'generatedAt', type: 'text', position: { x: 20, y: 275 }, width: 170, height: 6, fontSize: 7 },
    ],
  ],
};
/**
 * Flattens client, contact, interest, and audit-activity records into the
 * string inputs expected by `clientSummaryTemplate`. Optional fields are
 * dropped (`.filter(Boolean)`) rather than rendered as blanks.
 */
export function buildClientSummaryInputs(
  client: any,
  contacts: any[],
  interestList: any[],
  activity: any[],
  port: any,
): Record<string, string> {
  const clientInfo = [
    `Name: ${client.fullName ?? 'N/A'}`,
    client.companyName ? `Company: ${client.companyName}` : null,
    client.nationality ? `Nationality: ${client.nationality}` : null,
    client.source ? `Source: ${client.source}` : null,
    client.isProxy ? `Proxy: Yes${client.proxyType ? ` (${client.proxyType})` : ''}` : null,
    `Added: ${new Date(client.createdAt).toLocaleDateString('en-GB')}`,
  ]
    .filter(Boolean)
    .join('\n');
  // One line per contact channel, capitalised, flagging the primary contact.
  const contactsText = contacts.length > 0
    ? contacts
        .map(
          (c) =>
            `${c.channel.charAt(0).toUpperCase() + c.channel.slice(1)}${c.isPrimary ? ' (primary)' : ''}: ${c.value}${c.label ? ` [${c.label}]` : ''}`,
        )
        .join('\n')
    : 'No contacts on file';
  // Vessel dimensions show feet with metres appended when available.
  const vesselInfo = [
    client.yachtName ? `Yacht: ${client.yachtName}` : null,
    client.yachtLengthFt
      ? `Length: ${client.yachtLengthFt}ft${client.yachtLengthM ? ` / ${client.yachtLengthM}m` : ''}`
      : null,
    client.yachtWidthFt
      ? `Beam: ${client.yachtWidthFt}ft${client.yachtWidthM ? ` / ${client.yachtWidthM}m` : ''}`
      : null,
    client.yachtDraftFt
      ? `Draft: ${client.yachtDraftFt}ft${client.yachtDraftM ? ` / ${client.yachtDraftM}m` : ''}`
      : null,
    client.berthSizeDesired ? `Desired berth size: ${client.berthSizeDesired}` : null,
  ]
    .filter(Boolean)
    .join(' | ') || 'No vessel information on file';
  // One line per pipeline interest: stage, optional berth/category, created date.
  const interestsText =
    interestList.length > 0
      ? interestList
          .map(
            (i) =>
              `${i.pipelineStage ?? 'open'}${i.berthMooringNumber ? ` — Berth ${i.berthMooringNumber}` : ''}${i.leadCategory ? ` [${i.leadCategory}]` : ''} (${new Date(i.createdAt).toLocaleDateString('en-GB')})`,
          )
          .join('\n')
      : 'No pipeline interests on file';
  // One line per audit-log entry: date, action, entity, optional field changed.
  const activityText =
    activity.length > 0
      ? activity
          .map(
            (a) =>
              `${new Date(a.createdAt).toLocaleDateString('en-GB')} ${a.action} ${a.entityType}${a.fieldChanged ? ` (${a.fieldChanged})` : ''}`,
          )
          .join('\n')
      : 'No recent activity';
  return {
    portName: port?.name ?? 'Port Nimara',
    title: `Client Summary — ${client.fullName ?? ''}`,
    clientInfo,
    contacts: contactsText,
    vesselInfo,
    interests: `Pipeline Interests:\n${interestsText}`,
    recentActivity: `Recent Activity:\n${activityText}`,
    generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
  };
}

View File

@@ -0,0 +1,45 @@
import type { Template } from '@pdfme/common';
/**
 * Single-page @pdfme template for the Expression of Interest PDF.
 * Client fields on the left column, berth fields on the right, terms below.
 *
 * NOTE(review): `basePdf: 'BLANK_PDF' as any` is a string placeholder — pdfme
 * expects the `BLANK_PDF` constant (or base64 PDF data) from `@pdfme/common`.
 * Confirm this is substituted before generation.
 */
export const eoiTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      { name: 'portName', type: 'text', position: { x: 20, y: 20 }, width: 170, height: 10, fontSize: 18 },
      { name: 'title', type: 'text', position: { x: 20, y: 40 }, width: 170, height: 8, fontSize: 14 },
      { name: 'clientName', type: 'text', position: { x: 20, y: 60 }, width: 80, height: 6 },
      { name: 'clientEmail', type: 'text', position: { x: 20, y: 68 }, width: 80, height: 6 },
      { name: 'yachtName', type: 'text', position: { x: 20, y: 80 }, width: 80, height: 6 },
      { name: 'yachtDimensions', type: 'text', position: { x: 20, y: 88 }, width: 80, height: 6 },
      { name: 'berthNumber', type: 'text', position: { x: 110, y: 60 }, width: 80, height: 6 },
      { name: 'berthDimensions', type: 'text', position: { x: 110, y: 68 }, width: 80, height: 6 },
      { name: 'berthPrice', type: 'text', position: { x: 110, y: 76 }, width: 80, height: 6 },
      { name: 'date', type: 'text', position: { x: 20, y: 110 }, width: 80, height: 6 },
      { name: 'terms', type: 'text', position: { x: 20, y: 130 }, width: 170, height: 100, fontSize: 9 },
    ],
  ],
};
/**
 * Assembles the input map for `eoiTemplate` from interest/client/berth/port
 * records. All values are pre-formatted display strings; missing optional
 * fields fall back to 'N/A', '?', or 'TBD' placeholders.
 */
export function buildEoiInputs(
  interest: Record<string, unknown>,
  client: Record<string, unknown>,
  berth: Record<string, unknown>,
  port: Record<string, unknown>,
): Record<string, string> {
  const contactList =
    (client.contacts as Array<{ channel: string; value: string }> | undefined) ?? [];
  const emailEntry = contactList.find((entry) => entry.channel === 'email');

  const loa = (client.yachtLengthFt as string) ?? '?';
  const beam = (client.yachtWidthFt as string) ?? '?';
  const draft = (client.yachtDraftFt as string) ?? '?';

  return {
    portName: (port.name as string) ?? 'Port Nimara',
    title: 'Expression of Interest',
    clientName: `Client: ${client.fullName as string}`,
    clientEmail: `Email: ${emailEntry?.value ?? 'N/A'}`,
    yachtName: `Yacht: ${(client.yachtName as string) ?? 'N/A'}`,
    yachtDimensions: `LOA: ${loa}ft × Beam: ${beam}ft × Draft: ${draft}ft`,
    berthNumber: `Berth: ${berth.mooringNumber as string}`,
    berthDimensions: `${(berth.lengthFt as string) ?? '?'}ft × ${(berth.widthFt as string) ?? '?'}ft`,
    berthPrice: `Price: ${(berth.priceCurrency as string) ?? 'USD'} ${(berth.price as string) ?? 'TBD'}`,
    date: `Date: ${new Date().toLocaleDateString('en-GB')}`,
    terms:
      "This Expression of Interest confirms the above-named client's interest in the specified berth. This document is non-binding until signed by all parties. Upon signing, the client agrees to proceed with the berth acquisition process as outlined in the full terms and conditions provided separately.",
  };
}

View File

@@ -0,0 +1,101 @@
import type { Template } from '@pdfme/common';
/**
 * Single-page @pdfme template for the pipeline-interest summary PDF.
 *
 * NOTE(review): `basePdf: 'BLANK_PDF' as any` is a string placeholder — pdfme
 * expects the `BLANK_PDF` constant (or base64 PDF data) from `@pdfme/common`.
 * Confirm this is substituted before generation.
 */
export const interestSummaryTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      { name: 'portName', type: 'text', position: { x: 20, y: 15 }, width: 100, height: 10, fontSize: 16 },
      { name: 'title', type: 'text', position: { x: 20, y: 30 }, width: 170, height: 8, fontSize: 14 },
      { name: 'clientInfo', type: 'text', position: { x: 20, y: 45 }, width: 80, height: 30, fontSize: 9 },
      { name: 'berthInfo', type: 'text', position: { x: 110, y: 45 }, width: 80, height: 30, fontSize: 9 },
      { name: 'stageAndCategory', type: 'text', position: { x: 20, y: 80 }, width: 170, height: 15, fontSize: 9 },
      { name: 'milestones', type: 'text', position: { x: 20, y: 100 }, width: 170, height: 40, fontSize: 8 },
      { name: 'notes', type: 'text', position: { x: 20, y: 145 }, width: 170, height: 30, fontSize: 9 },
      { name: 'recentTimeline', type: 'text', position: { x: 20, y: 180 }, width: 170, height: 85, fontSize: 8 },
      { name: 'generatedAt', type: 'text', position: { x: 20, y: 275 }, width: 170, height: 6, fontSize: 7 },
    ],
  ],
};
/** Renders a date (Date object or parseable string) as dd/mm/yyyy (en-GB);
 * any falsy input — null, undefined, '' — yields 'N/A'. */
function formatDate(d: Date | string | null | undefined): string {
  return d ? new Date(d).toLocaleDateString('en-GB') : 'N/A';
}
/**
 * Assembles the flat key→string map that fills interestSummaryTemplate.
 *
 * @param interest - interest row (pipeline stage, milestone dates, notes, …)
 * @param client   - client row; only name/company/yacht fields are read
 * @param berth    - linked berth row, or null/undefined when none is linked
 * @param timeline - activity events, newest-first per the caller's query
 * @param port     - port row; only `name` is read (falls back to 'Port Nimara')
 * @returns one value per template field name
 */
export function buildInterestSummaryInputs(
  interest: any,
  client: any,
  berth: any,
  timeline: any[],
  port: any,
): Record<string, string> {
  // Local dd/mm/yyyy formatter ('N/A' for missing values).
  const fmt = (d: Date | string | null | undefined): string =>
    d ? new Date(d).toLocaleDateString('en-GB') : 'N/A';
  // Drops null entries and joins the rest with newlines.
  const joinLines = (parts: Array<string | null>): string =>
    parts.filter(Boolean).join('\n');

  const clientInfo = joinLines([
    `Name: ${client?.fullName ?? 'N/A'}`,
    client?.companyName ? `Company: ${client.companyName}` : null,
    client?.yachtName ? `Yacht: ${client.yachtName}` : null,
    client?.yachtLengthFt
      ? `Length: ${client.yachtLengthFt}ft${client.yachtLengthM ? ` / ${client.yachtLengthM}m` : ''}`
      : null,
  ]);

  const berthInfo = berth
    ? joinLines([
        `Mooring: ${berth.mooringNumber}`,
        berth.area ? `Area: ${berth.area}` : null,
        berth.lengthFt ? `Length: ${berth.lengthFt}ft` : null,
        berth.price ? `Price: ${berth.priceCurrency ?? 'USD'} ${Number(berth.price).toLocaleString()}` : null,
        `Status: ${berth.status ?? 'available'}`,
      ])
    : 'No berth linked';

  // Pipe-separated one-liner summarising pipeline position and doc statuses.
  const stageAndCategory = [
    `Stage: ${interest.pipelineStage ?? 'open'}`,
    interest.leadCategory ? `Category: ${interest.leadCategory}` : null,
    interest.source ? `Source: ${interest.source}` : null,
    interest.eoiStatus ? `EOI status: ${interest.eoiStatus}` : null,
    interest.contractStatus ? `Contract: ${interest.contractStatus}` : null,
    interest.depositStatus ? `Deposit: ${interest.depositStatus}` : null,
  ]
    .filter(Boolean)
    .join(' | ');

  const milestones = [
    `First contact: ${fmt(interest.dateFirstContact)}`,
    `Last contact: ${fmt(interest.dateLastContact)}`,
    `EOI sent: ${fmt(interest.dateEoiSent)}`,
    `EOI signed: ${fmt(interest.dateEoiSigned)}`,
    `Contract sent: ${fmt(interest.dateContractSent)}`,
    `Contract signed: ${fmt(interest.dateContractSigned)}`,
    `Deposit received: ${fmt(interest.dateDepositReceived)}`,
  ].join('\n');

  // One compact line per timeline event: date, action, entity, changed field.
  const timelineText =
    timeline.length === 0
      ? 'No timeline events'
      : timeline
          .map(
            (e) =>
              `${fmt(e.createdAt)} ${e.action ?? e.eventType ?? 'event'} ${e.entityType ?? e.type ?? ''}${e.fieldChanged ? ` [${e.fieldChanged}]` : ''}`,
          )
          .join('\n');

  return {
    portName: port?.name ?? 'Port Nimara',
    title: `Interest Summary — ${client?.fullName ?? 'Unknown Client'}`,
    clientInfo,
    berthInfo,
    stageAndCategory,
    milestones: `Milestones:\n${milestones}`,
    notes: interest.notes ? `Notes:\n${interest.notes}` : 'No notes',
    recentTimeline: `Recent Timeline (last ${timeline.length}):\n${timelineText}`,
    generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
  };
}

View File

@@ -0,0 +1,120 @@
import type { Template } from '@pdfme/common';
// Single-page A4 invoice layout. Positions/sizes are in millimetres; each
// field is filled by buildInvoiceInputs() below, keyed by `name`.
// NOTE(review): basePdf is the placeholder 'BLANK_PDF' cast to any — presumably
// replaced with the real base PDF by the generator; confirm at the call site.
export const invoiceTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      // Header fields: port name top-left, invoice title/number/dates top-right
      {
        name: 'portName',
        type: 'text',
        position: { x: 20, y: 15 },
        width: 100,
        height: 10,
        fontSize: 16,
      },
      {
        name: 'invoiceTitle',
        type: 'text',
        position: { x: 140, y: 15 },
        width: 50,
        height: 10,
        fontSize: 16,
      },
      {
        name: 'invoiceNumber',
        type: 'text',
        position: { x: 140, y: 27 },
        width: 50,
        height: 6,
        fontSize: 10,
      },
      {
        name: 'invoiceDate',
        type: 'text',
        position: { x: 140, y: 35 },
        width: 50,
        height: 6,
        fontSize: 10,
      },
      {
        name: 'dueDate',
        type: 'text',
        position: { x: 140, y: 43 },
        width: 50,
        height: 6,
        fontSize: 10,
      },
      // Client info (name / billing email / billing address)
      {
        name: 'clientInfo',
        type: 'text',
        position: { x: 20, y: 55 },
        width: 100,
        height: 20,
        fontSize: 10,
      },
      // Line items as a single pre-formatted text block (one item per line)
      {
        name: 'lineItems',
        type: 'text',
        position: { x: 20, y: 85 },
        width: 170,
        height: 120,
        fontSize: 9,
      },
      // Totals block (subtotal, optional discount/fee, grand total), right side
      {
        name: 'totals',
        type: 'text',
        position: { x: 110, y: 215 },
        width: 80,
        height: 30,
        fontSize: 10,
      },
      // Free-text notes at the foot of the page
      {
        name: 'notes',
        type: 'text',
        position: { x: 20, y: 250 },
        width: 170,
        height: 20,
        fontSize: 8,
      },
    ],
  ],
};
/**
 * Builds the field-name → string map that fills invoiceTemplate.
 *
 * @param invoice   - invoice row (currency, amounts, number, dates, notes)
 * @param lineItems - line-item rows; description/quantity/unitPrice/total read
 * @param port      - port row; only `name` is read (falls back to 'Port Nimara')
 */
export function buildInvoiceInputs(
  invoice: any,
  lineItems: any[],
  port: any,
): Record<string, string> {
  // Fixed two-decimal rendering for all monetary values.
  const money = (v: unknown): string => Number(v).toFixed(2);
  const cur = invoice.currency;

  // One numbered line per item: description, quantity, unit price, line total.
  const itemLines = lineItems
    .map(
      (li, idx) =>
        `${idx + 1}. ${li.description} | Qty: ${li.quantity} | Unit: ${cur} ${money(li.unitPrice)} | Total: ${cur} ${money(li.total)}`,
    )
    .join('\n');

  // Totals block: discount/fee lines only appear when their amount is > 0.
  const totalLines = [`Subtotal: ${cur} ${money(invoice.subtotal)}`];
  if (Number(invoice.discountAmount) > 0) {
    totalLines.push(`Discount (${invoice.discountPct}%): -${cur} ${money(invoice.discountAmount)}`);
  }
  if (Number(invoice.feeAmount) > 0) {
    totalLines.push(`Fee (${invoice.feePct}%): +${cur} ${money(invoice.feeAmount)}`);
  }
  totalLines.push(`─────────────\nTOTAL: ${cur} ${money(invoice.total)}`);

  return {
    portName: port?.name ?? 'Port Nimara',
    invoiceTitle: 'INVOICE',
    invoiceNumber: invoice.invoiceNumber,
    invoiceDate: `Date: ${new Date(invoice.createdAt).toLocaleDateString('en-GB')}`,
    dueDate: `Due: ${invoice.dueDate}`,
    clientInfo: `${invoice.clientName}\n${invoice.billingEmail ?? ''}\n${invoice.billingAddress ?? ''}`.trim(),
    lineItems: itemLines || 'No line items',
    totals: totalLines.join('\n'),
    notes: invoice.notes ? `Notes: ${invoice.notes}` : '',
  };
}

View File

@@ -0,0 +1,93 @@
import type { Template } from '@pdfme/common';
import type { ActivityData } from '@/lib/services/report-generators';
// Single-page A4 layout for the Activity Report. Positions/sizes in mm;
// fields are filled by buildActivityInputs() below.
// NOTE(review): basePdf placeholder 'BLANK_PDF' is presumably swapped for the
// real base PDF by the generator — confirm at the call site.
export const activityReportTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      // Title and header row (port name left, generation timestamp right)
      {
        name: 'reportTitle',
        type: 'text',
        position: { x: 20, y: 15 },
        width: 170,
        height: 12,
        fontSize: 20,
      },
      {
        name: 'portName',
        type: 'text',
        position: { x: 20, y: 30 },
        width: 130,
        height: 8,
        fontSize: 11,
      },
      {
        name: 'generatedAt',
        type: 'text',
        position: { x: 140, y: 30 },
        width: 50,
        height: 8,
        fontSize: 9,
      },
      // Aggregated counts block, then the detailed recent-activity log
      {
        name: 'activitySummary',
        type: 'text',
        position: { x: 20, y: 50 },
        width: 170,
        height: 80,
        fontSize: 10,
      },
      {
        name: 'activityDetails',
        type: 'text',
        position: { x: 20, y: 140 },
        width: 170,
        height: 120,
        fontSize: 9,
      },
    ],
  ],
};
/**
 * Builds the single-page pdfme input record for activityReportTemplate.
 * Summary shows the 15 most frequent action keys; details list the 30 most
 * recent log rows (as ordered by the caller).
 */
export function buildActivityInputs(
  data: ActivityData,
  portName?: string,
): Record<string, string>[] {
  const summaryLines: string[] = [
    `Activity Summary (${data.logs.length} events)`,
    '─────────────────────',
  ];
  // Rank action keys by count, highest first; cap at 15 lines.
  const ranked = Object.entries(data.summary).sort(([, a], [, b]) => b - a);
  if (ranked.length === 0) {
    summaryLines.push('No activity recorded in the selected period.');
  } else {
    ranked.slice(0, 15).forEach(([key, cnt]) => summaryLines.push(`${key}: ${cnt}`));
  }

  const detailLines = ['Recent Activity Log', '─────────────────────'];
  const recent = data.logs.slice(0, 30);
  if (recent.length === 0) {
    detailLines.push('No activity logs found.');
  } else {
    recent.forEach((log) => {
      const when = new Date(log.createdAt).toLocaleDateString('en-GB');
      // Entity IDs are truncated to 8 chars to keep the line width bounded.
      detailLines.push(
        `${when} ${log.action} ${log.entityType}${log.entityId ? ` (${log.entityId.slice(0, 8)}...)` : ''}`,
      );
    });
  }

  return [
    {
      reportTitle: 'Activity Report',
      portName: portName ?? 'Port Nimara',
      generatedAt: `Generated: ${new Date(data.generatedAt).toLocaleString('en-GB')}`,
      activitySummary: summaryLines.join('\n'),
      activityDetails: detailLines.join('\n'),
    },
  ];
}

View File

@@ -0,0 +1,87 @@
import type { Template } from '@pdfme/common';
import type { OccupancyData } from '@/lib/services/report-generators';
// Single-page A4 layout for the Berth Occupancy Report. Positions/sizes in
// mm; fields are filled by buildOccupancyInputs() below.
// NOTE(review): basePdf placeholder 'BLANK_PDF' is presumably swapped for the
// real base PDF by the generator — confirm at the call site.
export const occupancyReportTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      // Title and header row (port name left, generation timestamp right)
      {
        name: 'reportTitle',
        type: 'text',
        position: { x: 20, y: 15 },
        width: 170,
        height: 12,
        fontSize: 20,
      },
      {
        name: 'portName',
        type: 'text',
        position: { x: 20, y: 30 },
        width: 130,
        height: 8,
        fontSize: 11,
      },
      {
        name: 'generatedAt',
        type: 'text',
        position: { x: 140, y: 30 },
        width: 50,
        height: 8,
        fontSize: 9,
      },
      // Headline occupancy percentage, then per-status berth counts
      {
        name: 'occupancyRate',
        type: 'text',
        position: { x: 20, y: 50 },
        width: 170,
        height: 20,
        fontSize: 16,
      },
      {
        name: 'statusBreakdown',
        type: 'text',
        position: { x: 20, y: 80 },
        width: 170,
        height: 80,
        fontSize: 10,
      },
    ],
  ],
};
/**
 * Builds the single-page pdfme input record for occupancyReportTemplate.
 * Known statuses are listed in a fixed order (with friendly labels); any
 * unexpected status keys in the data are appended after them, title-cased.
 */
export function buildOccupancyInputs(
  data: OccupancyData,
  portName?: string,
): Record<string, string>[] {
  const KNOWN_STATUSES = ['available', 'under_offer', 'sold'];
  const friendlyLabel: Record<string, string> = {
    available: 'Available',
    under_offer: 'Under Offer',
    sold: 'Sold / Occupied',
  };
  const titleCase = (s: string): string =>
    s.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase());

  const extras = Object.keys(data.statusCounts).filter(
    (s) => !KNOWN_STATUSES.includes(s),
  );
  const breakdownLines = ['Berth Status Breakdown', '─────────────────────'];
  for (const status of KNOWN_STATUSES.concat(extras)) {
    const cnt = data.statusCounts[status] ?? 0;
    // Percentage guards against divide-by-zero when there are no berths.
    const pct =
      data.totalBerths > 0 ? ((cnt / data.totalBerths) * 100).toFixed(1) : '0.0';
    breakdownLines.push(
      `${friendlyLabel[status] ?? titleCase(status)}: ${cnt} berth(s) (${pct}%)`,
    );
  }
  breakdownLines.push('─────────────────────', `Total Berths: ${data.totalBerths}`);

  return [
    {
      reportTitle: 'Berth Occupancy Report',
      portName: portName ?? 'Port Nimara',
      generatedAt: `Generated: ${new Date(data.generatedAt).toLocaleString('en-GB')}`,
      occupancyRate: `Occupancy Rate: ${data.occupancyRate}%`,
      statusBreakdown: breakdownLines.join('\n'),
    },
  ];
}

View File

@@ -0,0 +1,112 @@
import type { Template } from '@pdfme/common';
import type { PipelineData } from '@/lib/services/report-generators';
// Single-page A4 layout for the Pipeline Summary Report. Positions/sizes in
// mm; fields are filled by buildPipelineInputs() below.
// NOTE(review): basePdf placeholder 'BLANK_PDF' is presumably swapped for the
// real base PDF by the generator — confirm at the call site.
export const pipelineReportTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      // Title and header row (port name left, generation timestamp right)
      {
        name: 'reportTitle',
        type: 'text',
        position: { x: 20, y: 15 },
        width: 170,
        height: 12,
        fontSize: 20,
      },
      {
        name: 'portName',
        type: 'text',
        position: { x: 20, y: 30 },
        width: 130,
        height: 8,
        fontSize: 11,
      },
      {
        name: 'generatedAt',
        type: 'text',
        position: { x: 140, y: 30 },
        width: 50,
        height: 8,
        fontSize: 9,
      },
      // Per-stage counts, then the top interests ranked by berth value
      {
        name: 'summaryText',
        type: 'text',
        position: { x: 20, y: 50 },
        width: 170,
        height: 100,
        fontSize: 10,
      },
      {
        name: 'detailsText',
        type: 'text',
        position: { x: 20, y: 160 },
        width: 170,
        height: 100,
        fontSize: 9,
      },
    ],
  ],
};
/**
 * Builds the single-page pdfme input record for pipelineReportTemplate.
 * Stage counts follow the canonical stage ordering (zero-count known stages
 * are omitted); stages not in the canonical list are appended verbatim.
 */
export function buildPipelineInputs(
  data: PipelineData,
  portName?: string,
): Record<string, string>[] {
  const STAGE_ORDER = [
    'open',
    'details_sent',
    'in_communication',
    'visited',
    'signed_eoi_nda',
    'deposit_10pct',
    'contract',
    'completed',
  ];
  const titleCase = (s: string): string =>
    s.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase());

  const knownLines = STAGE_ORDER.filter(
    (stage) => (data.stageCounts[stage] ?? 0) > 0,
  ).map((stage) => `${titleCase(stage)}: ${data.stageCounts[stage] ?? 0} interest(s)`);
  const extraLines = Object.keys(data.stageCounts)
    .filter((stage) => !STAGE_ORDER.includes(stage))
    .map((stage) => `${stage}: ${data.stageCounts[stage]} interest(s)`);
  const totalInterests = Object.values(data.stageCounts).reduce(
    (acc, n) => acc + n,
    0,
  );
  const summaryLines = [
    '─────────────────────',
    'Pipeline Stage Breakdown',
    `Total Active Interests: ${totalInterests}`,
    ...knownLines,
    ...extraLines,
  ];

  const detailLines = ['Top Interests by Value', '─────────────────────'];
  if (data.topInterests.length === 0) {
    detailLines.push('No interests with linked berths found.');
  } else {
    data.topInterests.forEach((interest, idx) => {
      const priceText = interest.berthPrice
        ? `Berth Price: ${Number(interest.berthPrice).toLocaleString()}`
        : 'No berth linked';
      detailLines.push(
        `${idx + 1}. Stage: ${titleCase(interest.pipelineStage)} | ${priceText}`,
      );
    });
  }

  return [
    {
      reportTitle: 'Pipeline Summary Report',
      portName: portName ?? 'Port Nimara',
      generatedAt: `Generated: ${new Date(data.generatedAt).toLocaleString('en-GB')}`,
      summaryText: summaryLines.join('\n'),
      detailsText: detailLines.join('\n'),
    },
  ];
}

View File

@@ -0,0 +1,102 @@
import type { Template } from '@pdfme/common';
import type { RevenueData } from '@/lib/services/report-generators';
// Single-page A4 layout for the Revenue Report. Positions/sizes in mm;
// fields are filled by buildRevenueInputs() below.
// NOTE(review): basePdf placeholder 'BLANK_PDF' is presumably swapped for the
// real base PDF by the generator — confirm at the call site.
export const revenueReportTemplate: Template = {
  basePdf: 'BLANK_PDF' as any,
  schemas: [
    [
      // Title and header row (port name left, generation timestamp right)
      {
        name: 'reportTitle',
        type: 'text',
        position: { x: 20, y: 15 },
        width: 170,
        height: 12,
        fontSize: 20,
      },
      {
        name: 'portName',
        type: 'text',
        position: { x: 20, y: 30 },
        width: 130,
        height: 8,
        fontSize: 11,
      },
      {
        name: 'generatedAt',
        type: 'text',
        position: { x: 140, y: 30 },
        width: 50,
        height: 8,
        fontSize: 9,
      },
      // Per-stage revenue lines, then the completed-revenue headline figure
      {
        name: 'revenueBreakdown',
        type: 'text',
        position: { x: 20, y: 50 },
        width: 170,
        height: 120,
        fontSize: 10,
      },
      {
        name: 'totalText',
        type: 'text',
        position: { x: 20, y: 180 },
        width: 170,
        height: 20,
        fontSize: 12,
      },
    ],
  ],
};
/**
 * Builds the single-page pdfme input record for revenueReportTemplate.
 * Stages appear in canonical order (only those present in the data), followed
 * by any non-canonical stage keys; all amounts render with two decimals.
 */
export function buildRevenueInputs(
  data: RevenueData,
  portName?: string,
): Record<string, string>[] {
  const STAGE_ORDER = [
    'open',
    'details_sent',
    'in_communication',
    'visited',
    'signed_eoi_nda',
    'deposit_10pct',
    'contract',
    'completed',
  ];
  const titleCase = (s: string): string =>
    s.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase());
  // Locale-grouped amount with exactly two decimal places.
  const money = (v: unknown): string =>
    Number(v).toLocaleString(undefined, {
      minimumFractionDigits: 2,
      maximumFractionDigits: 2,
    });

  const orderedStages = [
    ...STAGE_ORDER.filter((s) => data.stageRevenue[s] !== undefined),
    ...Object.keys(data.stageRevenue).filter((s) => !STAGE_ORDER.includes(s)),
  ];
  const breakdownLines = ['Revenue by Pipeline Stage', '─────────────────────'];
  if (orderedStages.length === 0) {
    breakdownLines.push('No revenue data available.');
  } else {
    for (const stage of orderedStages) {
      breakdownLines.push(`${titleCase(stage)}: ${money(data.stageRevenue[stage] ?? 0)}`);
    }
  }

  return [
    {
      reportTitle: 'Revenue Report',
      portName: portName ?? 'Port Nimara',
      generatedAt: `Generated: ${new Date(data.generatedAt).toLocaleString('en-GB')}`,
      revenueBreakdown: breakdownLines.join('\n'),
      totalText: `TOTAL COMPLETED REVENUE: ${money(data.totalCompleted)}`,
    },
  ];
}

View File

@@ -0,0 +1,581 @@
/**
* TipTap JSON → @pdfme Template Serializer
*
* Converts a TipTap document JSON into a @pdfme Template suitable for PDF generation.
* Supports a constrained formatting subset; unsupported nodes are rejected at validation time.
*
* Supported nodes:
* paragraph, heading (h1-h3), bulletList, orderedList, listItem,
* table, tableRow, tableCell, tableHeader, image, hardBreak,
* text (with marks: bold, italic, underline)
*
* Unsupported (rejected at save time):
* blockquote, codeBlock, horizontalRule, taskList
*/
import type { Template } from '@pdfme/common';
// ─── Types ────────────────────────────────────────────────────────────────────
// A formatting mark attached to a TipTap text node (e.g. bold, italic).
export interface TipTapMark {
  type: string;
  attrs?: Record<string, unknown>;
}
// A node in the TipTap document tree. `text`/`marks` only appear on 'text'
// nodes; `content` holds child nodes for container node types.
export interface TipTapNode {
  type: string;
  content?: TipTapNode[];
  text?: string;
  marks?: TipTapMark[];
  attrs?: Record<string, unknown>;
}
// @pdfme schema field shape (matches pdfme text plugin schema)
// We use an index signature to satisfy @pdfme/common's Schema type requirement
interface SchemaField {
  name: string; // unique per page; generated via nextFieldName()
  type: 'text';
  position: { x: number; y: number }; // mm from top-left of the page
  width: number; // mm
  height: number; // mm
  fontSize: number; // pt
  fontName?: string;
  fontColor?: string;
  alignment?: 'left' | 'center' | 'right';
  lineHeight?: number;
  [key: string]: unknown;
}
// ─── Constants ────────────────────────────────────────────────────────────────
const PAGE_WIDTH_MM = 170; // A4 content width (210 - 20mm margins each side)
const PAGE_BREAK_THRESHOLD = 250; // y position (mm from page top) to start new logical page
const MARGIN_X_MM = 20; // Left margin
const MARGIN_TOP_MM = 20; // Top margin
// Node types rejected by validateTipTapDocument() before serialization.
const UNSUPPORTED_NODES = new Set([
  'blockquote',
  'codeBlock',
  'horizontalRule',
  'taskList',
  'taskItem',
]);
// Line heights per node type (mm)
const PARAGRAPH_HEIGHT = 6;
const H1_HEIGHT = 12;
const H2_HEIGHT = 9;
const H3_HEIGHT = 7;
const LIST_ITEM_HEIGHT = 6;
const TABLE_ROW_HEIGHT = 8;
// Font sizes (pt)
const PARAGRAPH_FONT_SIZE = 10;
const H1_FONT_SIZE = 20;
const H2_FONT_SIZE = 16;
const H3_FONT_SIZE = 14;
const LIST_FONT_SIZE = 10;
const TABLE_FONT_SIZE = 9;
// ─── Template Variables ───────────────────────────────────────────────────────
// The {{key}} tokens document authors may embed; substituteVariables() resolves
// them. `label`/`example` drive the editor's variable-picker UI (presumably —
// confirm against the editor component).
export const TEMPLATE_VARIABLES: Array<{ key: string; label: string; example: string }> = [
  { key: 'client.name', label: 'Client Full Name', example: 'John Smith' },
  { key: 'client.company', label: 'Company Name', example: 'Smith Holdings' },
  { key: 'client.email', label: 'Client Email', example: 'john@smithholdings.com' },
  { key: 'client.phone', label: 'Client Phone', example: '+61 400 000 000' },
  { key: 'interest.stage', label: 'Pipeline Stage', example: 'Signed EOI/NDA' },
  { key: 'interest.berthNumber', label: 'Berth Number (from interest)', example: 'A-23' },
  { key: 'berth.mooring_number', label: 'Berth Number', example: 'A-23' },
  { key: 'berth.price', label: 'Berth Price', example: '$45,000' },
  { key: 'berth.tenure_type', label: 'Tenure Type', example: 'Freehold' },
  { key: 'port.name', label: 'Port Name', example: 'Port Nimara' },
  { key: 'port.currency', label: 'Port Currency', example: 'AUD' },
  { key: 'date.today', label: "Today's Date", example: '2026-03-15' },
  { key: 'date.year', label: 'Current Year', example: '2026' },
];
// ─── Validation ───────────────────────────────────────────────────────────────
/**
 * Walks the whole TipTap tree and collects every node type that the PDF
 * serializer cannot render. An empty result means the document is safe to
 * convert; a non-empty result should block the save.
 */
export function validateTipTapDocument(doc: TipTapNode): string[] {
  const found = new Set<string>();
  const visit = (node: TipTapNode): void => {
    if (UNSUPPORTED_NODES.has(node.type)) {
      found.add(node.type);
    }
    (node.content ?? []).forEach(visit);
  };
  visit(doc);
  return [...found];
}
// ─── Text extraction helpers ──────────────────────────────────────────────────
/**
 * Flattens a TipTap node (and all descendants) to plain text.
 * 'hardBreak' nodes become '\n'; nodes with no children contribute nothing.
 */
function extractText(node: TipTapNode): string {
  if (node.type === 'text') return node.text ?? '';
  if (node.type === 'hardBreak') return '\n';
  let result = '';
  for (const child of node.content ?? []) {
    result += extractText(child);
  }
  return result;
}
/**
 * Extracts a paragraph's inline content as plain text plus a single `bold`
 * flag. pdfme's text schema can't mix inline styles, so a paragraph is bolded
 * as a whole if ANY of its text children carries a bold mark.
 */
function extractParagraphContent(node: TipTapNode): { text: string; bold: boolean } {
  let bold = false;
  const pieces: string[] = [];
  for (const child of node.content ?? []) {
    if (child.type === 'text') {
      pieces.push(child.text ?? '');
      bold = bold || (child.marks ?? []).some((m) => m.type === 'bold');
    } else if (child.type === 'hardBreak') {
      pieces.push('\n');
    } else {
      // Fall back to recursive plain-text extraction for nested inline nodes.
      pieces.push(extractText(child));
    }
  }
  return { text: pieces.join(''), bold };
}
// ─── Field name generation ────────────────────────────────────────────────────
// Module-level counter giving each generated schema field a unique name.
// Reset to 0 by tipTapToPdfmeTemplate() at the start of every conversion, so
// field names are deterministic for a given document (NOT safe for two
// concurrent conversions in the same process).
let fieldCounter = 0;
// Returns names like 'para_0', 'h1_1' — the prefix identifies the node kind.
function nextFieldName(prefix: string): string {
  return `${prefix}_${fieldCounter++}`;
}
// ─── Serializer State ─────────────────────────────────────────────────────────
// Mutable accumulator threaded through the node processors.
interface SerializerState {
  fields: SchemaField[]; // schema fields emitted so far, in document order
  y: number; // current vertical cursor (mm from top of the logical page)
  pageIndex: number; // logical page counter (advanced by ensurePageSpace)
}
/** Starts a new logical page (resetting the y cursor to the top margin) when
 * the next element of `needed` mm would cross the page-break threshold. */
function ensurePageSpace(state: SerializerState, needed: number): void {
  const overflows = state.y + needed > PAGE_BREAK_THRESHOLD;
  if (!overflows) return;
  state.pageIndex += 1;
  state.y = MARGIN_TOP_MM;
}
// ─── Node Processors ──────────────────────────────────────────────────────────
/** Emits one text field for a paragraph; height scales with embedded '\n'
 * line breaks, and the whole field is bolded if any child text was bold. */
function processParagraph(node: TipTapNode, state: SerializerState): void {
  const { text, bold } = extractParagraphContent(node);
  const newlineCount = text.match(/\n/g)?.length ?? 0;
  const height = PARAGRAPH_HEIGHT * Math.max(1, newlineCount + 1);
  ensurePageSpace(state, height);
  state.fields.push({
    name: nextFieldName('para'),
    type: 'text',
    position: { x: MARGIN_X_MM, y: state.y },
    width: PAGE_WIDTH_MM,
    height,
    fontSize: PARAGRAPH_FONT_SIZE,
    fontName: bold ? 'Helvetica-Bold' : 'Helvetica',
  });
  state.y += height + 1; // 1mm inter-paragraph gap
}
/** Emits one bold text field for a heading. Levels 1 and 2 have dedicated
 * metrics; level 3 and anything deeper share the h3 sizing. */
function processHeading(node: TipTapNode, state: SerializerState): void {
  const level = (node.attrs?.level as number) ?? 1;
  const metrics: Record<number, { height: number; fontSize: number }> = {
    1: { height: H1_HEIGHT, fontSize: H1_FONT_SIZE },
    2: { height: H2_HEIGHT, fontSize: H2_FONT_SIZE },
  };
  const { height, fontSize } =
    metrics[level] ?? { height: H3_HEIGHT, fontSize: H3_FONT_SIZE };
  ensurePageSpace(state, height);
  state.fields.push({
    name: nextFieldName(`h${level}`),
    type: 'text',
    position: { x: MARGIN_X_MM, y: state.y },
    width: PAGE_WIDTH_MM,
    height,
    fontSize,
    fontName: 'Helvetica-Bold',
  });
  state.y += height + 2; // 2mm gap below headings
}
/** Emits one indented text field per listItem of a bullet list. */
function processBulletList(node: TipTapNode, state: SerializerState): void {
  if (!node.content) return;
  for (const item of node.content) {
    if (item.type !== 'listItem') continue;
    ensurePageSpace(state, LIST_ITEM_HEIGHT);
    state.fields.push({
      name: nextFieldName('bullet'),
      type: 'text',
      position: { x: MARGIN_X_MM + 5, y: state.y }, // 5mm list indent
      width: PAGE_WIDTH_MM - 5,
      height: LIST_ITEM_HEIGHT,
      fontSize: LIST_FONT_SIZE,
      fontName: 'Helvetica',
    });
    state.y += LIST_ITEM_HEIGHT + 0.5;
  }
  state.y += 2; // gap after the list
}
/** Emits one indented text field per listItem of an ordered list. The running
 * ordinal (starting from attrs.start, default 1) is baked into the field-name
 * prefix so items remain distinguishable. */
function processOrderedList(node: TipTapNode, state: SerializerState): void {
  if (!node.content) return;
  let ordinal = (node.attrs?.start as number) ?? 1;
  for (const item of node.content) {
    if (item.type !== 'listItem') continue;
    ensurePageSpace(state, LIST_ITEM_HEIGHT);
    state.fields.push({
      name: nextFieldName(`ol_${ordinal}`),
      type: 'text',
      position: { x: MARGIN_X_MM + 5, y: state.y }, // 5mm list indent
      width: PAGE_WIDTH_MM - 5,
      height: LIST_ITEM_HEIGHT,
      fontSize: LIST_FONT_SIZE,
      fontName: 'Helvetica',
    });
    state.y += LIST_ITEM_HEIGHT + 0.5;
    ordinal += 1;
  }
  state.y += 2; // gap after the list
}
/** Emits one text field per table cell, laid out on a uniform grid. Column
 * count (and therefore column width) is taken from the first row. */
function processTable(node: TipTapNode, state: SerializerState): void {
  if (!node.content) return;
  const rows = node.content.filter((r) => r.type === 'tableRow');
  if (rows.length === 0) return;
  const columnCount = rows[0]?.content?.length ?? 1;
  const columnWidth = PAGE_WIDTH_MM / columnCount;
  for (const row of rows) {
    if (!row.content) continue;
    ensurePageSpace(state, TABLE_ROW_HEIGHT);
    row.content.forEach((cell, colIdx) => {
      const isHeader = cell.type === 'tableHeader';
      state.fields.push({
        name: nextFieldName(isHeader ? 'th' : 'td'),
        type: 'text',
        position: { x: MARGIN_X_MM + colIdx * columnWidth, y: state.y },
        width: columnWidth - 0.5, // 0.5mm gutter between columns
        height: TABLE_ROW_HEIGHT,
        fontSize: TABLE_FONT_SIZE,
        fontName: isHeader ? 'Helvetica-Bold' : 'Helvetica',
      });
    });
    state.y += TABLE_ROW_HEIGHT + 0.5;
  }
  state.y += 3; // gap after the table
}
// ─── Top-level Node Dispatch ──────────────────────────────────────────────────
/** Routes a top-level node to its processor. Images and hard breaks only
 * advance the cursor (images get a fixed 20mm slot); validated-out and
 * unknown node types are silently skipped. */
function processNode(node: TipTapNode, state: SerializerState): void {
  const handlers: Record<string, () => void> = {
    paragraph: () => processParagraph(node, state),
    heading: () => processHeading(node, state),
    bulletList: () => processBulletList(node, state),
    orderedList: () => processOrderedList(node, state),
    table: () => processTable(node, state),
    image: () => {
      state.y += 20;
    },
    hardBreak: () => {
      state.y += 3;
    },
  };
  handlers[node.type]?.();
}
// ─── Main Serializer ──────────────────────────────────────────────────────────
/**
 * Converts a TipTap JSON document to a @pdfme Template.
 * Variables like {{client.name}} are left as-is in text content.
 * Call buildContentInputsFromDoc to get inputs with actual values.
 */
export function tipTapToPdfmeTemplate(doc: TipTapNode): Template {
  fieldCounter = 0; // deterministic field names per conversion
  const state: SerializerState = {
    fields: [],
    y: MARGIN_TOP_MM,
    pageIndex: 0,
  };
  const topLevel = doc.type === 'doc' ? (doc.content ?? []) : [doc];
  topLevel.forEach((node) => processNode(node, state));
  // basePdf is a placeholder token swapped at render time.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const basePdf = 'BLANK_PDF' as any;
  // pdfme's Schema type has a string index signature we satisfy via the
  // [key: string]: unknown on SchemaField
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const schemas = [state.fields] as any;
  return { basePdf, schemas } as Template;
}
// ─── Input Builder ────────────────────────────────────────────────────────────
/**
 * Given a @pdfme Template and a flat key→value data record, builds one input
 * record per page, keyed by field name (missing keys default to '').
 * Use buildContentInputsFromDoc for content-populated inputs.
 */
export function buildTemplateInputs(
  template: Template,
  data: Record<string, string>,
): Record<string, string>[] {
  return template.schemas.map((pageSchema) => {
    const fields = pageSchema as SchemaField[];
    const entries = fields.map((field) => [field.name, data[field.name] ?? ''] as const);
    return Object.fromEntries(entries) as Record<string, string>;
  });
}
/**
 * Replaces every {{variable.key}} token in a string with its value from the
 * data map. Keys are trimmed before lookup, so {{ client.name }} works too.
 * Tokens with no matching key are left untouched.
 */
export function substituteVariables(
  text: string,
  data: Record<string, string>,
): string {
  const TOKEN = /\{\{([^}]+)\}\}/g;
  return text.replace(TOKEN, (match, rawKey: string) => {
    const value = data[rawKey.trim()];
    return value ?? match;
  });
}
/**
 * Public alias for the internal buildContentInputs (defined below).
 * Builds pdfme input records by extracting text content from the TipTap doc
 * and mapping it to generated field names (in schema order).
 *
 * Call this with the SAME (already variable-substituted) doc you passed to
 * tipTapToPdfmeTemplate — field order must match the template's field order.
 */
export function buildContentInputsFromDoc(
  doc: TipTapNode,
  template: Template,
): Record<string, string>[] {
  return buildContentInputs(doc, template);
}
/**
* Full pipeline: validate → substitute variables → convert to pdfme template + inputs.
* Returns { template, inputs, errors }.
*/
export function tiptapDocumentToTemplateWithData(
doc: TipTapNode,
data: Record<string, string> = {},
): {
template: Template | null;
inputs: Record<string, string>[];
errors: string[];
} {
const errors = validateTipTapDocument(doc);
if (errors.length > 0) {
return { template: null, inputs: [], errors };
}
const substitutedDoc = substituteInDoc(doc, data);
const template = tipTapToPdfmeTemplate(substitutedDoc);
const inputs = buildContentInputs(substitutedDoc, template);
return { template, inputs, errors: [] };
}
// ─── Internals ────────────────────────────────────────────────────────────────
/**
 * Returns a copy of the TipTap tree with {{variable}} tokens resolved in
 * every text node. Untouched subtrees are returned by reference (the
 * function never mutates its input).
 */
function substituteInDoc(
  node: TipTapNode,
  data: Record<string, string>,
): TipTapNode {
  if (node.type === 'text' && node.text) {
    return { ...node, text: substituteVariables(node.text, data) };
  }
  if (!node.content) {
    return node;
  }
  const content = node.content.map((child) => substituteInDoc(child, data));
  return { ...node, content };
}
/**
 * Builds pdfme input records by extracting text content from the TipTap doc
 * and mapping it to generated field names (in schema order).
 *
 * The extracted texts and the template's fields are both produced in document
 * order (extractNodeContent mirrors processNode), so a single running index
 * pairs them up. This replaces the previous per-field `slice().reduce()`
 * prefix-sum, which recomputed the offset for every field (O(pages²)).
 */
function buildContentInputs(
  doc: TipTapNode,
  template: Template,
): Record<string, string>[] {
  const textContents = extractAllTextContents(doc);
  let globalIdx = 0; // running position across all pages' fields
  return template.schemas.map((pageSchema) => {
    const record: Record<string, string> = {};
    for (const field of pageSchema as SchemaField[]) {
      record[field.name] = textContents[globalIdx] ?? '';
      globalIdx++;
    }
    return record;
  });
}
/**
 * Extracts text content for each schema field in document order.
 * Order must mirror the order that processNode() creates fields.
 */
function extractAllTextContents(doc: TipTapNode): string[] {
  const topLevel = doc.type === 'doc' ? (doc.content ?? []) : [doc];
  const contents: string[] = [];
  topLevel.forEach((node) => extractNodeContent(node, contents));
  return contents;
}
/**
 * Appends one text entry to `out` for every schema field that processNode()
 * would create for this node, in the same order. Nodes that create no field
 * (hardBreak, unknown types) append nothing; images append an empty slot to
 * keep alignment.
 */
function extractNodeContent(node: TipTapNode, out: string[]): void {
  if (node.type === 'paragraph') {
    out.push(extractParagraphContent(node).text);
    return;
  }
  if (node.type === 'heading') {
    out.push(extractText(node));
    return;
  }
  if (node.type === 'bulletList') {
    for (const item of node.content ?? []) {
      if (item.type === 'listItem') {
        out.push('• ' + extractText(item).replace(/\n/g, ' '));
      }
    }
    return;
  }
  if (node.type === 'orderedList') {
    let ordinal = (node.attrs?.start as number) ?? 1;
    for (const item of node.content ?? []) {
      if (item.type === 'listItem') {
        out.push(`${ordinal}. ` + extractText(item).replace(/\n/g, ' '));
        ordinal += 1;
      }
    }
    return;
  }
  if (node.type === 'table') {
    for (const row of node.content ?? []) {
      if (row.type !== 'tableRow' || !row.content) continue;
      for (const cell of row.content) {
        out.push(extractText(cell));
      }
    }
    return;
  }
  if (node.type === 'image') {
    out.push(''); // image fields carry no text; keep slot alignment
  }
  // hardBreak and unrecognised nodes create no field → no content entry
}

35
src/lib/portal/auth.ts Normal file
View File

@@ -0,0 +1,35 @@
import { SignJWT, jwtVerify } from 'jose';
import { cookies } from 'next/headers';
// HMAC key for client-portal JWTs, derived from the shared auth secret.
// NOTE(review): if BETTER_AUTH_SECRET is unset, TextEncoder().encode(undefined)
// yields an empty key (tokens would be signed with a trivial secret) —
// confirm this env var is validated at startup.
const PORTAL_SECRET = new TextEncoder().encode(process.env.BETTER_AUTH_SECRET);
// Name of the cookie carrying the portal session JWT.
export const PORTAL_COOKIE = 'portal_session';
// Claims embedded in the portal JWT, identifying a client-portal login.
export interface PortalSession {
  clientId: string; // the client the session belongs to
  portId: string; // port (tenant) scope of the session
  email: string; // email used at login
}
/** Signs a 24-hour HS256 JWT whose claims are the portal session fields. */
export async function createPortalToken(session: PortalSession): Promise<string> {
  const claims = session as unknown as Record<string, unknown>;
  const jwt = new SignJWT(claims)
    .setProtectedHeader({ alg: 'HS256' })
    .setIssuedAt()
    .setExpirationTime('24h');
  return jwt.sign(PORTAL_SECRET);
}
/** Verifies a portal JWT; returns its claims, or null when the token is
 * invalid, expired, or signed with a different key. */
export async function verifyPortalToken(token: string): Promise<PortalSession | null> {
  try {
    const verified = await jwtVerify(token, PORTAL_SECRET);
    return verified.payload as unknown as PortalSession;
  } catch {
    // Any verification failure (bad signature, expiry, malformed) → no session.
    return null;
  }
}
/** Reads the portal cookie from the current request and returns the verified
 * session, or null when the cookie is absent or the token invalid. */
export async function getPortalSession(): Promise<PortalSession | null> {
  const cookieStore = await cookies();
  const token = cookieStore.get(PORTAL_COOKIE)?.value;
  return token ? verifyPortalToken(token) : null;
}

20
src/lib/portal/helpers.ts Normal file
View File

@@ -0,0 +1,20 @@
import { NextRequest, NextResponse } from 'next/server';
import { getPortalSession, type PortalSession } from './auth';
// Shape of a portal API route handler: receives the verified portal session
// and the already-awaited dynamic route params alongside the request.
type PortalRouteHandler = (
  req: NextRequest,
  session: PortalSession,
  params: Record<string, string>,
) => Promise<NextResponse>;
/**
 * Wraps a portal route handler with session enforcement: rejects with 401
 * when no valid portal session cookie is present, otherwise awaits the
 * route's dynamic params and delegates to the handler.
 */
export function withPortalAuth(handler: PortalRouteHandler) {
  return async (
    req: NextRequest,
    routeContext: { params: Promise<Record<string, string>> },
  ) => {
    const session = await getPortalSession();
    if (session === null) {
      return NextResponse.json(
        { error: 'Portal authentication required' },
        { status: 401 },
      );
    }
    return handler(req, session, await routeContext.params);
  };
}

40
src/lib/queue/index.ts Normal file
View File

@@ -0,0 +1,40 @@
import { Queue, type ConnectionOptions } from 'bullmq';
// Redis connection string for BullMQ.
// NOTE(review): the non-null assertion means an unset REDIS_URL only fails
// when the first Queue connects — consider validating at startup; confirm.
const redisUrl = process.env.REDIS_URL!;
// 10 queues matching 11-REALTIME-AND-BACKGROUND-JOBS.md Section 3.1
// maxAttempts feeds each queue's default retry count (see getQueue below);
// concurrency is not used here — presumably consumed by the worker processes.
const QUEUE_CONFIGS = {
  email: { concurrency: 5, maxAttempts: 5 },
  documents: { concurrency: 3, maxAttempts: 5 },
  notifications: { concurrency: 10, maxAttempts: 3 },
  import: { concurrency: 1, maxAttempts: 1 },
  export: { concurrency: 2, maxAttempts: 3 },
  reports: { concurrency: 1, maxAttempts: 3 },
  webhooks: { concurrency: 5, maxAttempts: 3 },
  maintenance: { concurrency: 1, maxAttempts: 3 },
  ai: { concurrency: 2, maxAttempts: 3 },
  bulk: { concurrency: 2, maxAttempts: 3 },
} as const;
// Union of valid queue names, derived from the config keys.
export type QueueName = keyof typeof QUEUE_CONFIGS;
// Lazily-created singleton Queue instance per name (see getQueue).
const queues = new Map<QueueName, Queue>();
export function getQueue(name: QueueName): Queue {
let queue = queues.get(name);
if (!queue) {
queue = new Queue(name, {
connection: { url: redisUrl } as ConnectionOptions,
defaultJobOptions: {
attempts: QUEUE_CONFIGS[name].maxAttempts,
backoff: { type: 'exponential', delay: 1000 },
removeOnComplete: { age: 24 * 3600 }, // keep completed jobs 24 hours
removeOnFail: { age: 7 * 24 * 3600 }, // keep failed jobs 7 days
},
});
queues.set(name, queue);
}
return queue;
}
export { QUEUE_CONFIGS };

View File

@@ -0,0 +1,63 @@
import { getQueue, type QueueName } from './index';
import { logger } from '@/lib/logger';
// One recurring-job registration: which queue it runs on, the scheduler/job
// name (unique per queue), and its cron pattern.
interface RecurringJobDef {
  queue: QueueName;
  name: string;
  pattern: string; // standard 5-field cron expression
}
/**
 * Register all recurring jobs from 11-REALTIME-AND-BACKGROUND-JOBS.md
 * Section 3.2. Called once on server startup. Uses upsertJobScheduler, so
 * re-running on every boot is idempotent (pattern changes are applied).
 */
export async function registerRecurringJobs(): Promise<void> {
  const recurring: RecurringJobDef[] = [
    // Documenso signature fallback poll — primary is webhooks, this is safety net
    { queue: 'documents', name: 'signature-poll', pattern: '0 */6 * * *' },
    // Reminder checks
    { queue: 'notifications', name: 'reminder-check', pattern: '0 * * * *' },
    { queue: 'notifications', name: 'reminder-overdue-check', pattern: '*/15 * * * *' },
    // Google Calendar background sync
    { queue: 'maintenance', name: 'calendar-sync', pattern: '*/30 * * * *' },
    // Daily checks at 08:00
    { queue: 'notifications', name: 'invoice-overdue-check', pattern: '0 8 * * *' },
    { queue: 'notifications', name: 'tenure-expiry-check', pattern: '0 8 * * *' },
    // Exchange rate refresh every 6 hours
    { queue: 'maintenance', name: 'currency-refresh', pattern: '0 */6 * * *' },
    // Database backup / cleanup
    { queue: 'maintenance', name: 'database-backup', pattern: '0 2 * * *' },
    { queue: 'maintenance', name: 'backup-cleanup', pattern: '0 3 * * 0' }, // Sunday 03:00
    // Session cleanup
    { queue: 'maintenance', name: 'session-cleanup', pattern: '0 4 * * *' },
    // Report scheduler — checks every minute for reports due to run
    { queue: 'reports', name: 'report-scheduler', pattern: '* * * * *' },
    // Notification digest — configurable per user; placeholder fires hourly
    // TODO(L2): make per-user schedule configurable (read from user_settings)
    { queue: 'email', name: 'notification-digest', pattern: '0 * * * *' },
    // Cleanup jobs
    { queue: 'maintenance', name: 'temp-file-cleanup', pattern: '0 5 * * *' },
    { queue: 'maintenance', name: 'form-expiry-check', pattern: '0 * * * *' },
  ];

  // Register sequentially so log ordering matches the list above.
  for (const def of recurring) {
    await getQueue(def.queue).upsertJobScheduler(
      def.name,
      { pattern: def.pattern },
      { data: {}, name: def.name },
    );
    logger.info({ queue: def.queue, job: def.name, pattern: def.pattern }, 'Registered recurring job');
  }
  logger.info({ count: recurring.length }, 'All recurring jobs registered');
}

234
src/lib/queue/workers/ai.ts Normal file
View File

@@ -0,0 +1,234 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// ─── Email draft generation ───────────────────────────────────────────────────
const MAX_OUTPUT_BYTES = 10 * 1024; // 10 KB
const OPENAI_TIMEOUT_MS = 30_000; // 30 s
// Payload of a 'generate-email-draft' job. Carries only IDs — the worker
// re-fetches all client data from the database (see generateEmailDraft).
interface GenerateEmailDraftPayload {
  interestId: string;
  clientId: string;
  portId: string;
  // Which kind of email to draft.
  context: 'follow_up' | 'introduction' | 'stage_update' | 'general';
  // Free-text extra guidance forwarded to the prompt, if provided.
  additionalInstructions?: string;
  // User id of the requester (carried for traceability; not used in generation).
  requestedBy: string;
}
// Draft returned as the job's result.
interface DraftResult {
  subject: string;
  body: string;
  generatedAt: string; // ISO-8601 timestamp
}
/**
 * Generates an email draft for an interest/client pair.
 *
 * All client data is re-fetched from the database by ID — the queue payload
 * is never trusted to carry PII. When OPENAI_API_KEY is unset, or the OpenAI
 * call fails, times out, or exceeds the output cap, falls back to a
 * deterministic template draft (buildTemplateDraft).
 *
 * @throws Error when the interest or client cannot be found for the port.
 */
async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise<DraftResult> {
  const { interestId, clientId, portId, context, additionalInstructions } = payload;
  // Fetch data by IDs in the worker — never trust PII from the queue payload
  const { db } = await import('@/lib/db');
  const { interests, interestNotes } = await import('@/lib/db/schema/interests');
  const { clients } = await import('@/lib/db/schema/clients');
  const { berths } = await import('@/lib/db/schema/berths');
  const { emailThreads } = await import('@/lib/db/schema/email');
  const { and, eq, desc } = await import('drizzle-orm');
  // Fetch interest (scoped to the port) and client in parallel.
  const [interest, client] = await Promise.all([
    db.query.interests.findFirst({
      where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
    }),
    db.query.clients.findFirst({ where: eq(clients.id, clientId) }),
  ]);
  if (!interest || !client) {
    throw new Error('Interest or client not found');
  }
  // Resolve the linked berth's mooring number, if any.
  let berthMooring: string | null = null;
  if (interest.berthId) {
    const berth = await db.query.berths.findFirst({
      where: eq(berths.id, interest.berthId),
    });
    berthMooring = berth?.mooringNumber ?? null;
  }
  // Fetch last 5 notes
  const recentNotes = await db
    .select({ content: interestNotes.content, createdAt: interestNotes.createdAt })
    .from(interestNotes)
    .where(eq(interestNotes.interestId, interestId))
    .orderBy(desc(interestNotes.createdAt))
    .limit(5);
  // Fetch last 5 email subjects (via threads linked to client)
  const recentThreads = await db
    .select({ subject: emailThreads.subject, lastMessageAt: emailThreads.lastMessageAt })
    .from(emailThreads)
    .where(and(eq(emailThreads.clientId, clientId), eq(emailThreads.portId, portId)))
    .orderBy(desc(emailThreads.lastMessageAt))
    .limit(5);
  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) {
    // Fallback: template-based draft
    return buildTemplateDraft({ clientName: client.fullName, context, berthMooring, pipelineStage: interest.pipelineStage });
  }
  // Build prompt. Notes are truncated to 200 chars each to bound prompt size.
  const contextDescriptions: Record<string, string> = {
    follow_up: 'a friendly follow-up email',
    introduction: 'an initial introduction email',
    stage_update: `an email informing the client about their pipeline progression to stage "${interest.pipelineStage}"`,
    general: 'a general communication email',
  };
  const prompt = [
    `Write ${contextDescriptions[context] ?? 'an email'} to a marina berth client.`,
    '',
    `Client name: ${client.fullName}`,
    client.companyName ? `Company: ${client.companyName}` : null,
    client.yachtName ? `Yacht: ${client.yachtName}` : null,
    berthMooring ? `Berth: ${berthMooring}` : 'Berth: not yet assigned',
    `Pipeline stage: ${interest.pipelineStage}`,
    '',
    recentNotes.length > 0
      ? `Recent notes:\n${recentNotes.map((n) => `- ${n.content.slice(0, 200)}`).join('\n')}`
      : null,
    recentThreads.length > 0
      ? `Recent email subjects:\n${recentThreads.map((t) => `- ${t.subject ?? '(no subject)'}`).join('\n')}`
      : null,
    additionalInstructions ? `Additional instructions: ${additionalInstructions}` : null,
    '',
    'Return JSON with keys: subject (string) and body (string, plain text).',
  ]
    .filter(Boolean)
    .join('\n');
  // Call OpenAI with timeout (AbortController cancels the fetch after 30s).
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), OPENAI_TIMEOUT_MS);
  let subject: string;
  let body: string;
  try {
    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${apiKey}`,
      },
      body: JSON.stringify({
        model: 'gpt-4o-mini',
        messages: [
          {
            role: 'system',
            content:
              'You are an expert marina sales and relationship manager. Generate professional, concise emails. Always return valid JSON with "subject" and "body" keys only.',
          },
          { role: 'user', content: prompt },
        ],
        max_tokens: 800,
        temperature: 0.7,
        response_format: { type: 'json_object' },
      }),
      signal: controller.signal,
    });
    clearTimeout(timeoutId);
    if (!response.ok) {
      const errorText = await response.text().catch(() => '');
      throw new Error(`OpenAI API error ${response.status}: ${errorText}`);
    }
    const data = (await response.json()) as {
      choices: Array<{ message: { content: string } }>;
    };
    const content = data.choices[0]?.message?.content ?? '{}';
    // Enforce output size cap — measure UTF-8 bytes rather than UTF-16 code
    // units so multi-byte characters cannot slip past the 10 KB limit.
    if (Buffer.byteLength(content, 'utf8') > MAX_OUTPUT_BYTES) {
      throw new Error('AI output exceeded 10 KB cap');
    }
    const parsed = JSON.parse(content) as { subject?: string; body?: string };
    subject = parsed.subject ?? `Follow-up: ${client.fullName}`;
    body = parsed.body ?? '';
  } catch (err) {
    // Covers timeout/abort, HTTP errors, the size cap, and JSON parse failures.
    clearTimeout(timeoutId);
    logger.warn({ err, interestId }, 'OpenAI call failed, falling back to template draft');
    return buildTemplateDraft({ clientName: client.fullName, context, berthMooring, pipelineStage: interest.pipelineStage });
  }
  return { subject, body, generatedAt: new Date().toISOString() };
}
// ─── Template fallback ────────────────────────────────────────────────────────
/**
 * Deterministic, non-AI draft used when no API key is configured or the
 * OpenAI call fails. Unknown contexts fall back to the 'general' wording.
 */
function buildTemplateDraft(opts: {
  clientName: string;
  context: string;
  berthMooring: string | null;
  pipelineStage: string;
}): DraftResult {
  const name = opts.clientName;
  const berthText = opts.berthMooring ? `berth ${opts.berthMooring}` : 'your requested berth';

  let subject: string;
  let body: string;
  switch (opts.context) {
    case 'introduction':
      subject = `Welcome to Port Nimara ${name}`;
      body = `Dear ${name},\n\nThank you for your interest in Port Nimara. We are delighted to introduce our marina facilities and look forward to discussing how we can accommodate your needs for ${berthText}.\n\nPlease feel free to reach out at any time.\n\nKind regards,\nPort Nimara Team`;
      break;
    case 'follow_up':
      subject = `Following up ${name}`;
      body = `Dear ${name},\n\nI wanted to follow up regarding your interest in ${berthText}. Please let us know if you have any questions or if there is anything we can assist you with.\n\nWe look forward to hearing from you.\n\nKind regards,\nPort Nimara Team`;
      break;
    case 'stage_update':
      subject = `Update on your application ${name}`;
      body = `Dear ${name},\n\nWe are pleased to inform you that your application for ${berthText} has progressed to the "${opts.pipelineStage.replace(/_/g, ' ')}" stage.\n\nWe will be in touch shortly with the next steps.\n\nKind regards,\nPort Nimara Team`;
      break;
    default:
      subject = `Message from Port Nimara ${name}`;
      body = `Dear ${name},\n\nThank you for your continued interest in Port Nimara. We appreciate your patience and look forward to assisting you with ${berthText}.\n\nKind regards,\nPort Nimara Team`;
      break;
  }
  return { subject, body, generatedAt: new Date().toISOString() };
}
// ─── Worker ───────────────────────────────────────────────────────────────────
/** Worker for the 'ai' queue; concurrency comes from QUEUE_CONFIGS. */
export const aiWorker = new Worker(
  'ai',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing AI job');
    if (job.name === 'generate-email-draft') {
      return generateEmailDraft(job.data as GenerateEmailDraftPayload);
    }
    logger.warn({ jobName: job.name }, 'Unknown AI job');
    return undefined;
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.ai.concurrency,
  },
);

// Log permanently failed jobs (after all retries) for observability.
aiWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'AI job failed');
});

View File

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// Worker for the 'bulk' queue. Currently a stub: jobs are accepted and
// logged but perform no work yet (see TODO below).
export const bulkWorker = new Worker(
  'bulk',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing bulk job');
    // TODO(L2): implement bulk operation job handlers
    // - bulk status change across multiple records
    // - bulk tag assignment / removal
    // - bulk delete with soft-delete support
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.bulk.concurrency,
  },
);
// Log permanently failed jobs (after all retries) for observability.
bulkWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Bulk job failed');
});

View File

@@ -0,0 +1,29 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
/** Worker for the 'documents' queue; concurrency comes from QUEUE_CONFIGS. */
export const documentsWorker = new Worker(
  'documents',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing documents job');
    if (job.name !== 'signature-poll') {
      logger.warn({ jobName: job.name }, 'Unknown documents job');
      return;
    }
    // Safety-net poll of Documenso signature status; the primary signal
    // path is webhooks (see recurring-job registration).
    const { processDocumensoPoll } = await import('@/jobs/processors/documenso-poll');
    await processDocumensoPoll();
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.documents.concurrency,
  },
);

// Log permanently failed jobs (after all retries) for observability.
documentsWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Documents job failed');
});

View File

@@ -0,0 +1,30 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
/** Worker for the 'email' queue; concurrency comes from QUEUE_CONFIGS. */
export const emailWorker = new Worker(
  'email',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing email job');
    if (job.name === 'inbox-sync') {
      const { accountId } = job.data as { accountId: string };
      const { syncInbox } = await import('@/lib/services/email-threads.service');
      await syncInbox(accountId);
      return;
    }
    // NOTE(review): 'notification-digest' is scheduled on this queue but has
    // no handler here and falls through to the warning — confirm it is still
    // pending implementation.
    logger.warn({ jobName: job.name }, 'Unknown email job');
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.email.concurrency,
  },
);

// Log permanently failed jobs (after all retries) for observability.
emailWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Email job failed');
});

View File

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// Worker for the 'export' queue. Currently a stub: jobs are accepted and
// logged but perform no work yet (see TODO below).
export const exportWorker = new Worker(
  'export',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing export job');
    // TODO(L2): implement export job handlers
    // - CSV data export
    // - PDF export
    // - Parent company report export
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.export.concurrency,
  },
);
// Log permanently failed jobs (after all retries) for observability.
exportWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Export job failed');
});

View File

@@ -0,0 +1,24 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// Worker for the 'import' queue. Currently a stub: jobs are accepted and
// logged but perform no work yet (see TODO below).
export const importWorker = new Worker(
  'import',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing import job');
    // TODO(L2): implement import job handlers
    // - CSV client import
    // - Excel berth spec import
    // - Note: maxAttempts=1 — imports are idempotent, user retries manually
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.import.concurrency,
  },
);
// Log permanently failed jobs for observability (with maxAttempts=1 any
// failure is immediately final).
importWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Import job failed');
});

View File

@@ -0,0 +1,34 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
/** Worker for the 'maintenance' queue; concurrency comes from QUEUE_CONFIGS. */
export const maintenanceWorker = new Worker(
  'maintenance',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing maintenance job');
    if (job.name === 'currency-refresh') {
      const { refreshRates } = await import('@/lib/services/currency');
      await refreshRates();
    } else if (job.name === 'form-expiry-check') {
      // TODO(L3): mark expired form submissions
      logger.info('Form expiry check — not yet implemented');
    } else {
      // NOTE(review): recurring jobs also schedule calendar-sync,
      // database-backup, backup-cleanup, session-cleanup and
      // temp-file-cleanup on this queue; they currently land here —
      // confirm their handlers are planned.
      logger.warn({ jobName: job.name }, 'Unknown maintenance job');
    }
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.maintenance.concurrency,
  },
);

// Log permanently failed jobs (after all retries) for observability.
maintenanceWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Maintenance job failed');
});

View File

@@ -0,0 +1,84 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
/**
 * Minimal HTML escape for text interpolated into notification emails.
 * Notification titles/descriptions can contain user-entered text, so they
 * must never be injected into HTML markup unescaped.
 */
function escapeHtml(value: string): string {
  return value
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}

/** Worker for the 'notifications' queue; concurrency comes from QUEUE_CONFIGS. */
export const notificationsWorker = new Worker(
  'notifications',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing notifications job');
    switch (job.name) {
      case 'invoice-overdue-check': {
        // Daily sweep: run overdue detection for every port. One port's
        // failure must not block the others, so errors are logged per port.
        const { detectOverdue } = await import('@/lib/services/invoices');
        const { db } = await import('@/lib/db');
        const { ports } = await import('@/lib/db/schema/ports');
        const allPorts = await db.select({ id: ports.id }).from(ports);
        for (const port of allPorts) {
          try {
            await detectOverdue(port.id);
          } catch (err) {
            logger.error({ err, portId: port.id }, 'Overdue detection failed');
          }
        }
        break;
      }
      case 'reminder-check': {
        const { processDocumentReminders } = await import(
          '@/jobs/processors/document-reminder'
        );
        await processDocumentReminders();
        break;
      }
      case 'send-notification-email': {
        // Delivers a single in-app notification as an email, then flags the
        // notification row as emailed.
        const { notificationId } = job.data as { notificationId: string };
        const { db } = await import('@/lib/db');
        const { notifications } = await import('@/lib/db/schema/operations');
        const { user } = await import('@/lib/db/schema/users');
        const { eq } = await import('drizzle-orm');
        const { sendEmail } = await import('@/lib/email/index');
        const [notif] = await db
          .select()
          .from(notifications)
          .where(eq(notifications.id, notificationId))
          .limit(1);
        if (!notif) break; // notification deleted since enqueue — nothing to do
        // Get user email from the Better Auth user table
        const [authUser] = await db
          .select({ email: user.email, name: user.name })
          .from(user)
          .where(eq(user.id, notif.userId))
          .limit(1);
        if (!authUser?.email) break;
        // Escape notification text before HTML interpolation, and avoid a
        // literal "undefined" link prefix when APP_URL is unset.
        const appUrl = process.env.APP_URL ?? '';
        await sendEmail(
          authUser.email,
          `[Port Nimara] ${notif.title}`,
          `<p>${escapeHtml(notif.description ?? notif.title)}</p>${
            notif.link
              ? `<p><a href="${escapeHtml(`${appUrl}${notif.link}`)}">View in CRM</a></p>`
              : ''
          }`,
        );
        await db
          .update(notifications)
          .set({ emailSent: true })
          .where(eq(notifications.id, notificationId));
        break;
      }
      default:
        // NOTE(review): recurring jobs also schedule 'reminder-overdue-check'
        // and 'tenure-expiry-check' on this queue; they currently land here —
        // confirm their handlers are still pending implementation.
        logger.warn({ jobName: job.name }, 'Unknown notifications job');
    }
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.notifications.concurrency,
  },
);
// Log permanently failed jobs (after all retries) for observability.
notificationsWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Notifications job failed');
});

View File

@@ -0,0 +1,74 @@
import { Worker, type Job } from 'bullmq';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
/**
 * Worker for the 'reports' queue (concurrency from QUEUE_CONFIGS).
 *
 * Handles two job types:
 *  - 'report-scheduler': per-minute tick that materialises due scheduled
 *    reports into generated_reports rows and enqueues their generation.
 *  - 'generate-report': renders a single queued report by id.
 */
export const reportsWorker = new Worker(
  'reports',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing reports job');
    switch (job.name) {
      case 'report-scheduler': {
        // Check scheduled_reports for reports due to run
        const { db } = await import('@/lib/db');
        const { scheduledReports, generatedReports } = await import(
          '@/lib/db/schema/operations'
        );
        const { eq, and, lte } = await import('drizzle-orm');
        // Hoisted out of the per-report loop: one dynamic import regardless
        // of how many reports are due.
        const { getQueue } = await import('@/lib/queue');
        const dueReports = await db
          .select()
          .from(scheduledReports)
          .where(
            and(
              eq(scheduledReports.isActive, true),
              lte(scheduledReports.nextRunAt, new Date()),
            ),
          );
        // NOTE(review): nothing here advances nextRunAt; presumably the
        // generation service does — confirm, otherwise a due report would be
        // re-enqueued every minute.
        for (const report of dueReports) {
          const [genReport] = await db
            .insert(generatedReports)
            .values({
              portId: report.portId,
              scheduledReportId: report.id,
              reportType: report.reportType,
              name: `${report.name} - ${new Date().toISOString().split('T')[0]}`,
              status: 'queued',
              parameters: (report.config as Record<string, unknown>) ?? {},
              requestedBy: report.createdBy,
            })
            .returning();
          if (genReport) {
            await getQueue('reports').add('generate-report', {
              reportJobId: genReport.id,
            });
          }
        }
        break;
      }
      case 'generate-report': {
        const { reportJobId } = job.data as { reportJobId: string };
        const { generateReport } = await import('@/lib/services/reports.service');
        await generateReport(reportJobId);
        break;
      }
      default:
        logger.warn({ jobName: job.name }, 'Unknown reports job');
    }
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.reports.concurrency,
  },
);
// Log permanently failed jobs (after all retries) for observability.
reportsWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Reports job failed');
});

View File

@@ -0,0 +1,205 @@
import { Worker, type Job } from 'bullmq';
import { createHmac } from 'node:crypto';
import type { ConnectionOptions } from 'bullmq';
import { logger } from '@/lib/logger';
import { QUEUE_CONFIGS } from '@/lib/queue';
// ─── Job Payload ─────────────────────────────────────────────────────────────
// Payload of a 'deliver' job. `deliveryId` references a webhook_deliveries
// row created by the enqueuer; `payload` is the event-specific data object
// forwarded to the subscriber.
interface WebhookDeliverPayload {
  webhookId: string;
  portId: string;
  event: string;
  deliveryId: string;
  payload: Record<string, unknown>;
}
// ─── Worker ──────────────────────────────────────────────────────────────────
/**
 * Worker for the 'webhooks' queue: delivers signed webhook payloads to
 * subscriber URLs with retry/dead-letter bookkeeping.
 *
 * Delivery state machine (per attempt):
 *  - 2xx response → webhook_deliveries row marked 'success'.
 *  - Non-2xx / network error, attempts remaining → row marked 'failed' and
 *    an Error is thrown so BullMQ retries with backoff.
 *  - Non-2xx / network error on the final attempt → row marked 'dead_letter',
 *    super admins are notified, and an Error is thrown so BullMQ records the
 *    job as failed (no further retries remain).
 */
export const webhooksWorker = new Worker(
  'webhooks',
  async (job: Job) => {
    logger.info({ jobId: job.id, jobName: job.name }, 'Processing webhooks job');
    if (job.name !== 'deliver') {
      logger.warn({ jobName: job.name }, 'Unknown webhooks job');
      return;
    }
    const { webhookId, portId, event, deliveryId, payload } =
      job.data as WebhookDeliverPayload;
    const { db } = await import('@/lib/db');
    const { webhooks, webhookDeliveries } = await import('@/lib/db/schema/system');
    const { userProfiles } = await import('@/lib/db/schema/users');
    const { decrypt } = await import('@/lib/utils/encryption');
    const { createNotification } = await import('@/lib/services/notifications.service');
    const { eq, and } = await import('drizzle-orm');
    // 1. Fetch webhook — skip if deleted (the pending delivery row is removed
    //    so it does not linger as an orphan)
    const webhook = await db.query.webhooks.findFirst({
      where: eq(webhooks.id, webhookId),
    });
    if (!webhook) {
      logger.info({ webhookId }, 'Webhook deleted — skipping delivery');
      await db
        .delete(webhookDeliveries)
        .where(eq(webhookDeliveries.id, deliveryId));
      return;
    }
    // 2. Decrypt secret (empty string means the webhook is unsigned)
    let secret: string;
    try {
      secret = webhook.secret ? decrypt(webhook.secret) : '';
    } catch (err) {
      logger.error({ webhookId, err }, 'Failed to decrypt webhook secret');
      throw err; // Let BullMQ retry
    }
    // 3. Build final payload (standard envelope around the event data)
    const finalPayload = {
      id: deliveryId,
      event,
      timestamp: new Date().toISOString(),
      port_id: portId,
      data: payload,
    };
    const bodyString = JSON.stringify(finalPayload);
    // 4. Sign with HMAC-SHA256 over the exact body bytes; empty when unsigned
    const signature = secret
      ? `sha256=${createHmac('sha256', secret).update(bodyString).digest('hex')}`
      : '';
    // attemptsMade counts prior attempts, so this attempt is +1 (1-based)
    const attempt = (job.attemptsMade ?? 0) + 1;
    // 5. POST to webhook URL with 10s timeout
    let responseStatus: number | null = null;
    let responseBody: string | null = null;
    let success = false;
    try {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), 10_000);
      const response = await fetch(webhook.url, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'User-Agent': 'PortNimara-Webhook/1.0',
          'X-Webhook-Id': webhookId,
          'X-Webhook-Event': event,
          'X-Webhook-Signature': signature,
          'X-Webhook-Delivery': deliveryId,
        },
        body: bodyString,
        signal: controller.signal,
      });
      clearTimeout(timeoutId);
      responseStatus = response.status;
      // Read up to 1KB of response body (stored for debugging)
      const rawBody = await response.text();
      responseBody = rawBody.slice(0, 1024);
      success = response.status >= 200 && response.status < 300;
    } catch (err) {
      // Network error or timeout — responseStatus stays null, the error
      // message (truncated to 1KB) is stored in place of the body
      logger.warn({ webhookId, deliveryId, err }, 'Webhook delivery network error');
      responseStatus = null;
      responseBody = err instanceof Error ? err.message.slice(0, 1024) : String(err).slice(0, 1024);
      success = false;
    }
    const maxAttempts = QUEUE_CONFIGS.webhooks.maxAttempts;
    const isFinalAttempt = attempt >= maxAttempts;
    if (success) {
      // 6a. Record success
      await db
        .update(webhookDeliveries)
        .set({
          status: 'success',
          responseStatus,
          responseBody,
          attempt,
          deliveredAt: new Date(),
        })
        .where(eq(webhookDeliveries.id, deliveryId));
      logger.info({ webhookId, deliveryId, event }, 'Webhook delivered successfully');
    } else if (!success && isFinalAttempt) {
      // 6b. Final failure → dead_letter + system alert
      await db
        .update(webhookDeliveries)
        .set({
          status: 'dead_letter',
          responseStatus,
          responseBody,
          attempt,
        })
        .where(eq(webhookDeliveries.id, deliveryId));
      logger.error(
        { webhookId, deliveryId, event, attempt },
        'Webhook delivery permanently failed — dead_letter',
      );
      // Notify all super admins; notification failures are logged but must
      // not mask the delivery failure itself
      try {
        const superAdmins = await db
          .select({ userId: userProfiles.userId })
          .from(userProfiles)
          .where(and(eq(userProfiles.isSuperAdmin, true), eq(userProfiles.isActive, true)));
        for (const admin of superAdmins) {
          // Fire-and-forget: dedupeKey prevents duplicate alerts per delivery
          void createNotification({
            portId,
            userId: admin.userId,
            type: 'system_alert',
            title: 'Webhook delivery failed permanently',
            description: `Webhook "${webhook.name}" failed to deliver event "${event}" after ${maxAttempts} attempts.`,
            link: `/admin/webhooks/${webhookId}`,
            entityType: 'webhook',
            entityId: webhookId,
            dedupeKey: `webhook:dead_letter:${deliveryId}`,
            cooldownMs: 0,
          });
        }
      } catch (notifyErr) {
        logger.error({ notifyErr }, 'Failed to send dead_letter notification');
      }
      // Throw to let BullMQ mark job as failed (it won't retry since it's the final attempt)
      throw new Error(
        `Webhook delivery failed after ${attempt} attempts. Status: ${responseStatus ?? 'network error'}`,
      );
    } else {
      // 6c. Non-final failure → update record then throw to trigger retry
      await db
        .update(webhookDeliveries)
        .set({
          status: 'failed',
          responseStatus,
          responseBody,
          attempt,
        })
        .where(eq(webhookDeliveries.id, deliveryId));
      throw new Error(
        `Webhook delivery attempt ${attempt} failed. Status: ${responseStatus ?? 'network error'}. Retrying...`,
      );
    }
  },
  {
    connection: { url: process.env.REDIS_URL! } as ConnectionOptions,
    concurrency: QUEUE_CONFIGS.webhooks.concurrency,
  },
);
// Logs both retryable failures and final dead-letter failures.
webhooksWorker.on('failed', (job, err) => {
  logger.error({ jobId: job?.id, jobName: job?.name, err }, 'Webhooks job failed');
});

80
src/lib/rate-limit.ts Normal file
View File

@@ -0,0 +1,80 @@
import { redis } from '@/lib/redis';
export interface RateLimitConfig {
  /** Duration of the sliding window in milliseconds. */
  windowMs: number;
  /** Maximum number of requests allowed per window. */
  max: number;
  /** Redis key prefix distinguishing different limit types. */
  keyPrefix: string;
}
export interface RateLimitResult {
  allowed: boolean;
  limit: number;
  remaining: number;
  /**
   * Unix timestamp (ms) when the limit resets. Computed as `now + windowMs`,
   * i.e. when the entry recorded for THIS request leaves the window — not
   * when the oldest entry in the window expires.
   */
  resetAt: number;
}
/**
 * Redis sliding-window rate limiter.
 *
 * Uses a sorted set per identifier where each member is a unique request
 * timestamp. Old entries outside the window are pruned on each call.
 *
 * Note: the request is recorded even when it ends up rejected, so a client
 * that keeps retrying while blocked keeps extending its own window.
 */
export async function checkRateLimit(
  identifier: string,
  config: RateLimitConfig,
): Promise<RateLimitResult> {
  const { windowMs, max, keyPrefix } = config;
  const key = `rl:${keyPrefix}:${identifier}`;
  const now = Date.now();

  // Member combines timestamp with a random suffix so two requests in the
  // same millisecond still produce distinct sorted-set entries.
  const member = `${now}:${Math.random().toString(36).slice(2)}`;

  const pipe = redis.pipeline();
  pipe.zremrangebyscore(key, '-inf', now - windowMs); // prune expired entries
  pipe.zadd(key, now, member); // record this request
  pipe.zcard(key); // count entries in the current window
  pipe.pexpire(key, windowMs); // GC key once idle for a full window
  const replies = await pipe.exec();

  // Reply index 2 is the ZCARD result; fall back to 0 if the pipeline failed.
  const count = (replies?.[2]?.[1] as number) ?? 0;

  return {
    allowed: count <= max,
    limit: max,
    remaining: Math.max(0, max - count),
    resetAt: now + windowMs,
  };
}
/**
 * Builds the standard X-RateLimit-* response headers for a limiter result.
 * The reset header is expressed in whole seconds (Unix time), rounded up.
 */
export function rateLimitHeaders(result: RateLimitResult): Record<string, string> {
  const { limit, remaining, resetAt } = result;
  return {
    'X-RateLimit-Limit': String(limit),
    'X-RateLimit-Remaining': String(remaining),
    'X-RateLimit-Reset': String(Math.ceil(resetAt / 1000)),
  };
}
/**
 * Pre-configured rate limiters matching SECURITY-GUIDELINES.md §6.1.
 * `satisfies` validates each entry against RateLimitConfig while keeping
 * the literal key names for call sites.
 */
export const rateLimiters = {
  /** Auth endpoints: 5 attempts per 15 minutes per identifier. */
  auth: { windowMs: 15 * 60 * 1000, max: 5, keyPrefix: 'auth' },
  /** Authenticated API: 120 requests per minute. */
  api: { windowMs: 60 * 1000, max: 120, keyPrefix: 'api' },
  /** File uploads: 10 per minute. */
  upload: { windowMs: 60 * 1000, max: 10, keyPrefix: 'upload' },
  /** Bulk operations: 5 per minute. */
  bulk: { windowMs: 60 * 1000, max: 5, keyPrefix: 'bulk' },
} as const satisfies Record<string, RateLimitConfig>;

18
src/lib/redis.ts Normal file
View File

@@ -0,0 +1,18 @@
import Redis from 'ioredis';
import { logger } from '@/lib/logger';
// Redis connection string. The non-null assertion defers failure to first
// use — NOTE(review): confirm env validation happens earlier at startup.
const redisUrl = process.env.REDIS_URL!;

/**
 * Shared ioredis client for the application (rate limiting, caching, etc.).
 * `lazyConnect` means importing this module does not open a connection;
 * the first command does.
 */
export const redis = new Redis(redisUrl, {
  maxRetriesPerRequest: 3,
  // Linear reconnect backoff: 200ms per attempt, capped at 2s.
  retryStrategy: (times) => Math.min(times * 200, 2000),
  lazyConnect: true,
});

// Connection lifecycle logging.
redis.on('error', (err) => logger.error({ err }, 'Redis connection error'));
redis.on('connect', () => logger.info('Redis connected'));
redis.on('reconnecting', () => logger.warn('Redis reconnecting'));

View File

@@ -0,0 +1,144 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { systemSettings } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { emitToRoom } from '@/lib/socket/server';
// ─── Types ────────────────────────────────────────────────────────────────────
// Lifecycle events that may drive an automatic berth status change.
export type BerthRuleTrigger =
  | 'eoi_sent'
  | 'eoi_signed'
  | 'deposit_received'
  | 'contract_signed'
  | 'interest_archived'
  | 'interest_completed'
  | 'berth_unlinked';
// 'auto' applies the status change immediately, 'suggest' only proposes it
// to the caller, 'off' disables the rule entirely.
export type BerthRuleMode = 'auto' | 'suggest' | 'off';
// Outcome of evaluateRule: whether a status change was applied, suggested,
// or nothing happened.
export interface BerthRuleResult {
  action: 'applied' | 'suggested' | 'none';
  newStatus?: string;
  message?: string;
}
// Per-trigger rule configuration (defaults in DEFAULT_RULES, overridable via
// the 'berth_rules' system setting).
interface RuleConfig {
  mode: BerthRuleMode;
  targetStatus: string;
}
// Request context recorded in the audit log for rule-driven changes.
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Defaults ────────────────────────────────────────────────────────────────
// Built-in rule set. Per-port overrides from the 'berth_rules' system
// setting are merged over these in getRulesConfig.
const DEFAULT_RULES: Record<BerthRuleTrigger, RuleConfig> = {
  eoi_sent: { mode: 'suggest', targetStatus: 'under_offer' },
  eoi_signed: { mode: 'auto', targetStatus: 'under_offer' },
  deposit_received: { mode: 'auto', targetStatus: 'sold' },
  contract_signed: { mode: 'auto', targetStatus: 'sold' },
  interest_archived: { mode: 'suggest', targetStatus: 'available' },
  interest_completed: { mode: 'auto', targetStatus: 'sold' },
  berth_unlinked: { mode: 'off', targetStatus: 'available' },
};
// ─── Config ───────────────────────────────────────────────────────────────────
/**
 * Loads the per-port berth-rule configuration from the 'berth_rules' system
 * setting and overlays it on DEFAULT_RULES, so triggers without a stored
 * override keep their defaults.
 */
async function getRulesConfig(
  portId: string,
): Promise<Record<BerthRuleTrigger, RuleConfig>> {
  const setting = await db.query.systemSettings.findFirst({
    where: and(
      eq(systemSettings.key, 'berth_rules'),
      eq(systemSettings.portId, portId),
    ),
  });

  const merged: Record<BerthRuleTrigger, RuleConfig> = { ...DEFAULT_RULES };
  if (!setting?.value) {
    return merged;
  }

  const stored = setting.value as Partial<Record<BerthRuleTrigger, RuleConfig>>;
  (Object.keys(merged) as BerthRuleTrigger[]).forEach((trigger) => {
    const override = stored[trigger];
    if (override) {
      merged[trigger] = override;
    }
  });
  return merged;
}
// ─── Evaluate Rule ────────────────────────────────────────────────────────────
/**
 * Evaluates the berth-status rule for `trigger` against an interest.
 *
 * - Returns { action: 'none' } when the interest has no linked berth or the
 *   rule is configured 'off'.
 * - In 'auto' mode: updates the linked berth's status, writes an audit log
 *   entry (fire-and-forget) and broadcasts 'berth:statusChanged' to the
 *   port's socket room.
 * - In 'suggest' mode: returns the proposed status without changing
 *   anything; the caller is expected to surface the suggestion to the user.
 */
export async function evaluateRule(
  trigger: BerthRuleTrigger,
  interestId: string,
  portId: string,
  meta: AuditMeta,
): Promise<BerthRuleResult> {
  // Interest lookup is port-scoped; a missing interest or one without a
  // linked berth short-circuits to 'none'.
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest?.berthId) {
    return { action: 'none' };
  }
  const rulesConfig = await getRulesConfig(portId);
  const rule = rulesConfig[trigger];
  if (rule.mode === 'off') {
    return { action: 'none' };
  }
  if (rule.mode === 'auto') {
    // Apply the status change directly, recording who/why for traceability.
    await db
      .update(berths)
      .set({
        status: rule.targetStatus,
        statusLastChangedBy: meta.userId,
        statusLastChangedReason: `Auto-applied by rule: ${trigger}`,
        statusLastModified: new Date(),
        updatedAt: new Date(),
      })
      .where(and(eq(berths.id, interest.berthId), eq(berths.portId, portId)));
    // Fire-and-forget: audit logging must not delay or fail the rule result.
    void createAuditLog({
      userId: meta.userId,
      portId,
      action: 'update',
      entityType: 'berth',
      entityId: interest.berthId,
      newValue: { status: rule.targetStatus },
      metadata: { type: 'berth_rule_auto', trigger, interestId },
      ipAddress: meta.ipAddress,
      userAgent: meta.userAgent,
    });
    // Broadcast the change so connected clients refresh their berth views.
    emitToRoom(`port:${portId}`, 'berth:statusChanged', {
      berthId: interest.berthId,
      newStatus: rule.targetStatus,
      triggeredBy: meta.userId,
      trigger,
    });
    return { action: 'applied', newStatus: rule.targetStatus };
  }
  // suggest mode
  return {
    action: 'suggested',
    newStatus: rule.targetStatus,
    message: `Suggested status change to "${rule.targetStatus}" based on trigger "${trigger}"`,
  };
}

View File

@@ -0,0 +1,471 @@
import { and, eq, gte, lte, inArray, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
berths,
berthTags,
berthWaitingList,
berthMaintenanceLog,
berthMapData,
} from '@/lib/db/schema/berths';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError } from '@/lib/errors';
import { buildListQuery } from '@/lib/db/query-builder';
import { emitToRoom } from '@/lib/socket/server';
import type {
UpdateBerthInput,
UpdateBerthStatusInput,
ListBerthsQuery,
AddMaintenanceLogInput,
UpdateWaitingListInput,
} from '@/lib/validators/berths';
// Acting-user metadata threaded into every mutation for audit logging.
interface AuditMeta {
  userId: string;    // ID of the user performing the action
  portId: string;    // port scope the action applies to
  ipAddress: string; // request IP, recorded in the audit log
  userAgent: string; // request user agent, recorded in the audit log
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists berths for a port with filtering (status, area, dimensions, price,
 * tenure, tags), free-text search over mooring number/area, sorting and
 * pagination. Each returned row carries a `tags` array.
 *
 * @returns `{ data, total }` — `data` is the page of berths with tags attached.
 */
export async function listBerths(portId: string, query: ListBerthsQuery) {
  const filters = [];
  if (query.status) {
    filters.push(eq(berths.status, query.status));
  }
  if (query.area) {
    filters.push(eq(berths.area, query.area));
  }
  // Numeric columns are Drizzle `numeric` (string-typed), hence String(...).
  if (query.minLength !== undefined) {
    filters.push(gte(berths.lengthM, String(query.minLength)));
  }
  if (query.maxLength !== undefined) {
    filters.push(lte(berths.lengthM, String(query.maxLength)));
  }
  if (query.minPrice !== undefined) {
    filters.push(gte(berths.price, String(query.minPrice)));
  }
  if (query.maxPrice !== undefined) {
    filters.push(lte(berths.price, String(query.maxPrice)));
  }
  if (query.tenureType) {
    filters.push(eq(berths.tenureType, query.tenureType));
  }
  // Tag filter: join against berthTags (berth matches if it has ANY of the tags)
  if (query.tagIds && query.tagIds.length > 0) {
    const tagIds = query.tagIds;
    const berthsWithTags = await db
      .selectDistinct({ berthId: berthTags.berthId })
      .from(berthTags)
      .where(inArray(berthTags.tagId, tagIds));
    const matchingIds = berthsWithTags.map((r) => r.berthId);
    if (matchingIds.length === 0) {
      // No berth carries any requested tag — short-circuit with an empty page.
      return { data: [], total: 0 };
    }
    filters.push(inArray(berths.id, matchingIds));
  }
  // Map the requested sort key to a concrete column; default to updatedAt.
  const sortColumn = (() => {
    switch (query.sort) {
      case 'mooringNumber': return berths.mooringNumber;
      case 'area': return berths.area;
      case 'price': return berths.price;
      case 'status': return berths.status;
      case 'lengthM': return berths.lengthM;
      default: return berths.updatedAt;
    }
  })();
  const result = await buildListQuery({
    table: berths,
    portIdColumn: berths.portId,
    portId,
    idColumn: berths.id,
    updatedAtColumn: berths.updatedAt,
    filters,
    sort: { column: sortColumn, direction: query.order },
    page: query.page,
    pageSize: query.limit,
    searchColumns: [berths.mooringNumber, berths.area],
    searchTerm: query.search,
    // No archivedAt column on berths
    includeArchived: true,
  });
  // Attach tags for list items (single join query, grouped in memory)
  const berthIds = (result.data as Array<{ id: string }>).map((b) => b.id);
  const tagsByBerthId: Record<string, Array<{ id: string; name: string; color: string }>> = {};
  if (berthIds.length > 0) {
    const tagRows = await db
      .select({
        berthId: berthTags.berthId,
        id: tags.id,
        name: tags.name,
        color: tags.color,
      })
      .from(berthTags)
      .innerJoin(tags, eq(berthTags.tagId, tags.id))
      .where(inArray(berthTags.berthId, berthIds))
    for (const row of tagRows) {
      if (!tagsByBerthId[row.berthId]) tagsByBerthId[row.berthId] = [];
      tagsByBerthId[row.berthId]!.push({ id: row.id, name: row.name, color: row.color });
    }
  }
  const data = (result.data as Array<Record<string, unknown>>).map((b) => ({
    ...b,
    tags: tagsByBerthId[b.id as string] ?? [],
  }));
  return { data, total: result.total };
}
// ─── Get By ID ────────────────────────────────────────────────────────────────
export async function getBerthById(id: string, portId: string) {
const berth = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
with: {
mapData: true,
},
});
if (!berth) throw new NotFoundError('Berth');
// Fetch tags
const tagRows = await db
.select({ id: tags.id, name: tags.name, color: tags.color })
.from(berthTags)
.innerJoin(tags, eq(berthTags.tagId, tags.id))
.where(eq(berthTags.berthId, id));
return { ...berth, tags: tagRows };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a berth's editable attributes. Returns the existing row unchanged
 * when the diff against the current record is empty; otherwise writes the
 * update, fires an audit entry, emits a socket event and dispatches a webhook.
 *
 * @throws NotFoundError when the berth does not exist in the port.
 */
export async function updateBerth(
  id: string,
  portId: string,
  data: UpdateBerthInput,
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  const { changed, diff } = diffEntity(existing as Record<string, unknown>, data as Record<string, unknown>);
  if (!changed) return existing;
  // Drizzle numeric columns expect string | null — coerce numbers to strings
  // (undefined is passed through so untouched fields are not overwritten).
  const n = (v: number | undefined) => (v !== undefined ? String(v) : undefined);
  const [updated] = await db
    .update(berths)
    .set({
      area: data.area,
      lengthFt: n(data.lengthFt),
      lengthM: n(data.lengthM),
      widthFt: n(data.widthFt),
      widthM: n(data.widthM),
      draftFt: n(data.draftFt),
      draftM: n(data.draftM),
      widthIsMinimum: data.widthIsMinimum,
      nominalBoatSize: data.nominalBoatSize,
      nominalBoatSizeM: data.nominalBoatSizeM,
      waterDepth: n(data.waterDepth),
      waterDepthM: n(data.waterDepthM),
      waterDepthIsMinimum: data.waterDepthIsMinimum,
      sidePontoon: data.sidePontoon,
      powerCapacity: data.powerCapacity,
      voltage: data.voltage,
      mooringType: data.mooringType,
      cleatType: data.cleatType,
      cleatCapacity: data.cleatCapacity,
      bollardType: data.bollardType,
      bollardCapacity: data.bollardCapacity,
      access: data.access,
      price: n(data.price),
      priceCurrency: data.priceCurrency,
      bowFacing: data.bowFacing,
      berthApproved: data.berthApproved,
      tenureType: data.tenureType,
      tenureYears: data.tenureYears,
      tenureStartDate: data.tenureStartDate,
      tenureEndDate: data.tenureEndDate,
      updatedAt: new Date(),
    })
    .where(and(eq(berths.id, id), eq(berths.portId, portId)))
    .returning();
  // Fire-and-forget: the request does not wait on audit persistence.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    oldValue: diff as unknown as Record<string, unknown>,
    newValue: data as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:updated', {
    berthId: id,
    changedFields: Object.keys(diff),
  });
  // Dynamic import — presumably to avoid a module cycle with the webhook
  // dispatcher; confirm before converting to a static import.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'berth:updated', { berthId: id }),
  );
  return updated!;
}
// ─── Update Status ────────────────────────────────────────────────────────────
/**
 * Sets a berth's status with an operator-supplied reason, records who changed
 * it and when, then audit-logs, emits `berth:statusChanged` to the port room,
 * and dispatches the matching webhook with old/new status.
 *
 * @throws NotFoundError when the berth does not exist in the port.
 */
export async function updateBerthStatus(
  id: string,
  portId: string,
  data: UpdateBerthStatusInput,
  meta: AuditMeta,
) {
  const current = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (current === undefined) throw new NotFoundError('Berth');
  const rows = await db
    .update(berths)
    .set({
      status: data.status,
      statusLastChangedBy: meta.userId,
      statusLastChangedReason: data.reason,
      statusLastModified: new Date(),
      updatedAt: new Date(),
    })
    .where(and(eq(berths.id, id), eq(berths.portId, portId)))
    .returning();
  // Fire-and-forget audit write.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    oldValue: { status: current.status },
    newValue: { status: data.status, reason: data.reason },
    metadata: { type: 'status_change', reason: data.reason },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:statusChanged', {
    berthId: id,
    oldStatus: current.status,
    newStatus: data.status,
    triggeredBy: meta.userId,
  });
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'berth:statusChanged', {
      berthId: id,
      oldStatus: current.status,
      newStatus: data.status,
    }),
  );
  return rows[0]!;
}
// ─── Set Tags ─────────────────────────────────────────────────────────────────
/**
 * Replaces a berth's tag set with the given tag ids. The delete + insert now
 * run in one transaction so a failed insert cannot leave the berth with no
 * tags (previously they were two independent statements).
 *
 * @throws NotFoundError when the berth does not exist in the port.
 */
export async function setBerthTags(
  id: string,
  portId: string,
  tagIds: string[],
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  // Replace the tag set atomically.
  await db.transaction(async (tx) => {
    await tx.delete(berthTags).where(eq(berthTags.berthId, id));
    if (tagIds.length > 0) {
      await tx.insert(berthTags).values(tagIds.map((tagId) => ({ berthId: id, tagId })));
    }
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    metadata: { type: 'tags_updated', tagIds },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:updated', {
    berthId: id,
    changedFields: ['tags'],
  });
  return { berthId: id, tagIds };
}
// ─── Add Maintenance Log ──────────────────────────────────────────────────────
export async function addMaintenanceLog(
id: string,
portId: string,
data: AddMaintenanceLogInput,
meta: AuditMeta,
) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
const rows = await db
.insert(berthMaintenanceLog)
.values({
berthId: id,
portId,
category: data.category,
description: data.description,
cost: data.cost !== undefined ? String(data.cost) : undefined,
costCurrency: data.costCurrency,
responsibleParty: data.responsibleParty,
performedDate: data.performedDate,
photoFileIds: data.photoFileIds,
createdBy: meta.userId,
})
.returning();
const log = rows[0]!;
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'berth_maintenance_log',
entityId: log.id,
metadata: { berthId: id, category: data.category },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'berth:maintenanceAdded', {
berthId: id,
logEntry: log,
});
return log;
}
// ─── Get Maintenance Logs ─────────────────────────────────────────────────────
export async function getMaintenanceLogs(id: string, portId: string) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
return db
.select()
.from(berthMaintenanceLog)
.where(and(eq(berthMaintenanceLog.berthId, id), eq(berthMaintenanceLog.portId, portId)))
.orderBy(berthMaintenanceLog.performedDate);
}
// ─── Get Waiting List ─────────────────────────────────────────────────────────
export async function getWaitingList(id: string, portId: string) {
const existing = await db.query.berths.findFirst({
where: and(eq(berths.id, id), eq(berths.portId, portId)),
});
if (!existing) throw new NotFoundError('Berth');
return db
.select()
.from(berthWaitingList)
.where(eq(berthWaitingList.berthId, id))
.orderBy(berthWaitingList.position);
}
// ─── Update Waiting List ──────────────────────────────────────────────────────
/**
 * Replaces a berth's waiting list wholesale with the supplied entries. The
 * delete + insert now run inside a single transaction so a failed insert can
 * no longer wipe the list (previously two independent statements). Audit-logs
 * and emits `berth:waitingListChanged`.
 *
 * @throws NotFoundError when the berth does not exist in the port.
 * @returns The entries as supplied by the caller.
 */
export async function updateWaitingList(
  id: string,
  portId: string,
  data: UpdateWaitingListInput,
  meta: AuditMeta,
) {
  const existing = await db.query.berths.findFirst({
    where: and(eq(berths.id, id), eq(berths.portId, portId)),
  });
  if (!existing) throw new NotFoundError('Berth');
  // Replace entire waiting list atomically
  await db.transaction(async (tx) => {
    await tx.delete(berthWaitingList).where(eq(berthWaitingList.berthId, id));
    if (data.entries.length > 0) {
      await tx.insert(berthWaitingList).values(
        data.entries.map((entry) => ({
          berthId: id,
          clientId: entry.clientId,
          position: entry.position,
          priority: entry.priority ?? 'normal',
          notifyPref: entry.notifyPref ?? 'email',
          notes: entry.notes,
        })),
      );
    }
  });
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'berth',
    entityId: id,
    metadata: { type: 'waiting_list_updated', count: data.entries.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'berth:waitingListChanged', {
    berthId: id,
    action: 'replaced',
    entry: data.entries,
  });
  return data.entries;
}
// ─── Options ──────────────────────────────────────────────────────────────────
/**
 * Lightweight berth list (id, mooring number, area, status) for dropdowns and
 * comboboxes, ordered by mooring number.
 */
export async function getBerthOptions(portId: string) {
  const projection = {
    id: berths.id,
    mooringNumber: berths.mooringNumber,
    area: berths.area,
    status: berths.status,
  };
  return db
    .select(projection)
    .from(berths)
    .where(eq(berths.portId, portId))
    .orderBy(berths.mooringNumber);
}

// ═══ Next file: client service (scrape residue "View File / @@ -0,0 +1,489 @@" replaced) ═══
import { and, eq, ilike, inArray, or } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
clients,
clientContacts,
clientRelationships,
clientTags,
} from '@/lib/db/schema/clients';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { buildListQuery } from '@/lib/db/query-builder';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore, withTransaction } from '@/lib/db/utils';
import type {
CreateClientInput,
UpdateClientInput,
ListClientsInput,
} from '@/lib/validators/clients';
// ─── Types ────────────────────────────────────────────────────────────────────
// Acting-user metadata threaded into every mutation for audit logging.
interface AuditMeta {
  userId: string;    // ID of the user performing the action
  portId: string;    // port scope the action applies to
  ipAddress: string; // request IP, recorded in the audit log
  userAgent: string; // request user agent, recorded in the audit log
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists clients for a port with filtering (source, nationality substring,
 * proxy flag, tags), free-text search over full/company name, sorting and
 * pagination via the shared list-query builder.
 *
 * @returns `{ data, total }` page of clients.
 */
export async function listClients(portId: string, query: ListClientsInput) {
  const { page, limit, sort, order, search, includeArchived, source, nationality, isProxy, tagIds } = query;
  const filters = [];
  if (source) {
    filters.push(eq(clients.source, source));
  }
  if (nationality) {
    // Case-insensitive substring match on nationality.
    filters.push(ilike(clients.nationality, `%${nationality}%`));
  }
  if (isProxy !== undefined) {
    filters.push(eq(clients.isProxy, isProxy));
  }
  // Tag filter: client matches if it has ANY of the requested tags.
  if (tagIds && tagIds.length > 0) {
    const clientsWithTags = await db
      .selectDistinct({ clientId: clientTags.clientId })
      .from(clientTags)
      .where(inArray(clientTags.tagId, tagIds));
    const matchingIds = clientsWithTags.map((r) => r.clientId);
    if (matchingIds.length > 0) {
      filters.push(inArray(clients.id, matchingIds));
    } else {
      // No clients match these tags — return empty
      return { data: [], total: 0 };
    }
  }
  // Map the requested sort key to a column; default to updatedAt.
  let sortColumn: typeof clients.fullName | typeof clients.createdAt | typeof clients.updatedAt =
    clients.updatedAt;
  if (sort === 'fullName') sortColumn = clients.fullName;
  else if (sort === 'createdAt') sortColumn = clients.createdAt;
  const result = await buildListQuery({
    table: clients,
    portIdColumn: clients.portId,
    portId,
    idColumn: clients.id,
    updatedAtColumn: clients.updatedAt,
    searchColumns: [clients.fullName, clients.companyName],
    searchTerm: search,
    filters,
    sort: sort ? { column: sortColumn, direction: order } : undefined,
    page,
    pageSize: limit,
    includeArchived,
    archivedAtColumn: clients.archivedAt,
  });
  return result;
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
export async function getClientById(id: string, portId: string) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, id),
});
if (!client || client.portId !== portId) {
throw new NotFoundError('Client');
}
const contacts = await db.query.clientContacts.findMany({
where: eq(clientContacts.clientId, id),
orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
});
const clientTagRows = await db
.select({ tag: tags })
.from(clientTags)
.innerJoin(tags, eq(clientTags.tagId, tags.id))
.where(eq(clientTags.clientId, id));
return {
...client,
contacts,
tags: clientTagRows.map((r) => r.tag),
};
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a client together with its initial contacts and tags in a single
 * transaction, then audit-logs, emits `client:created` and dispatches the
 * matching webhook.
 *
 * @returns The inserted client row.
 */
export async function createClient(
  portId: string,
  data: CreateClientInput,
  meta: AuditMeta,
) {
  const result = await withTransaction(async (tx) => {
    // Split nested inputs from the flat client columns.
    const { contacts: contactsInput, tagIds, ...clientData } = data;
    const [client] = await tx
      .insert(clients)
      .values({ portId, ...clientData })
      .returning();
    if (contactsInput.length > 0) {
      await tx.insert(clientContacts).values(
        contactsInput.map((c) => ({ clientId: client!.id, ...c })),
      );
    }
    if (tagIds && tagIds.length > 0) {
      await tx.insert(clientTags).values(
        tagIds.map((tagId) => ({ clientId: client!.id, tagId })),
      );
    }
    return client!;
  });
  // Fire-and-forget: the request does not wait on audit persistence.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'client',
    entityId: result.id,
    newValue: { fullName: result.fullName, companyName: result.companyName },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'client:created', { clientId: result.id, clientName: result.fullName ?? '', source: result.source ?? '' });
  // Dynamic import — presumably to avoid a module cycle with the webhook
  // dispatcher; confirm before converting to a static import.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'client:created', { clientId: result.id }),
  );
  return result;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a client's flat attributes. Skips the write (and the audit/socket/
 * webhook noise) when the diff against the current record is empty, matching
 * updateBerth's behavior. Previously this always wrote and audit-logged, and
 * used `as any` casts for the audit payloads.
 *
 * @throws NotFoundError when the client is missing or belongs to another port.
 */
export async function updateClient(
  id: string,
  portId: string,
  data: UpdateClientInput,
  meta: AuditMeta,
) {
  const existing = await db.query.clients.findFirst({
    where: eq(clients.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Client');
  }
  const { changed, diff } = diffEntity(existing as Record<string, unknown>, data as Record<string, unknown>);
  if (!changed) return existing;
  const [updated] = await db
    .update(clients)
    .set({ ...data, updatedAt: new Date() })
    .where(and(eq(clients.id, id), eq(clients.portId, portId)))
    .returning();
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'client',
    entityId: id,
    oldValue: diff as unknown as Record<string, unknown>,
    newValue: data as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'client:updated', { clientId: id, changedFields: Object.keys(diff) });
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'client:updated', { clientId: id }),
  );
  return updated;
}
// ─── Archive / Restore ────────────────────────────────────────────────────────
/**
 * Soft-deletes a client (sets its archive timestamp via `softDelete`), then
 * audit-logs, emits `client:archived` and dispatches the matching webhook.
 *
 * @throws NotFoundError when the client is missing or belongs to another port.
 */
export async function archiveClient(id: string, portId: string, meta: AuditMeta) {
  const record = await db.query.clients.findFirst({
    where: eq(clients.id, id),
  });
  if (!record || record.portId !== portId) {
    throw new NotFoundError('Client');
  }
  await softDelete(clients, clients.id, id);
  // Fire-and-forget audit write.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'archive',
    entityType: 'client',
    entityId: id,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'client:archived', { clientId: id });
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'client:archived', { clientId: id }),
  );
}
/**
 * Restores a previously archived client (clears its archive timestamp via
 * `restore`), then audit-logs and emits `client:restored`.
 *
 * @throws NotFoundError when the client is missing or belongs to another port.
 */
export async function restoreClient(id: string, portId: string, meta: AuditMeta) {
  const record = await db.query.clients.findFirst({
    where: eq(clients.id, id),
  });
  if (!record || record.portId !== portId) {
    throw new NotFoundError('Client');
  }
  await restore(clients, clients.id, id);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'restore',
    entityType: 'client',
    entityId: id,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'client:restored', { clientId: id });
}
// ─── Contacts ─────────────────────────────────────────────────────────────────
export async function listContacts(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
return db.query.clientContacts.findMany({
where: eq(clientContacts.clientId, clientId),
orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
});
}
export async function addContact(
clientId: string,
portId: string,
data: { channel: string; value: string; label?: string; isPrimary?: boolean; notes?: string },
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const [contact] = await db
.insert(clientContacts)
.values({ clientId, ...data })
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'clientContact',
entityId: contact!.id,
newValue: { clientId, channel: contact!.channel },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
return contact!;
}
export async function updateContact(
contactId: string,
clientId: string,
portId: string,
data: Partial<{ channel: string; value: string; label: string; isPrimary: boolean; notes: string }>,
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const contact = await db.query.clientContacts.findFirst({
where: and(eq(clientContacts.id, contactId), eq(clientContacts.clientId, clientId)),
});
if (!contact) throw new NotFoundError('Contact');
const [updated] = await db
.update(clientContacts)
.set({ ...data, updatedAt: new Date() })
.where(eq(clientContacts.id, contactId))
.returning();
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
return updated;
}
export async function removeContact(
contactId: string,
clientId: string,
portId: string,
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const contact = await db.query.clientContacts.findFirst({
where: and(eq(clientContacts.id, contactId), eq(clientContacts.clientId, clientId)),
});
if (!contact) throw new NotFoundError('Contact');
await db.delete(clientContacts).where(eq(clientContacts.id, contactId));
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['contacts'] });
}
// ─── Tags ─────────────────────────────────────────────────────────────────────
export async function setClientTags(
clientId: string,
portId: string,
tagIds: string[],
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
await db.delete(clientTags).where(eq(clientTags.clientId, clientId));
if (tagIds.length > 0) {
await db.insert(clientTags).values(tagIds.map((tagId) => ({ clientId, tagId })));
}
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'client',
entityId: clientId,
newValue: { tagIds },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'client:updated', { clientId, changedFields: ['tags'] });
}
// ─── Relationships ────────────────────────────────────────────────────────────
export async function listRelationships(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
return db.query.clientRelationships.findMany({
where: (r, { or, eq }) =>
or(eq(r.clientAId, clientId), eq(r.clientBId, clientId)),
});
}
export async function createRelationship(
clientId: string,
portId: string,
data: { clientBId: string; relationshipType: string; description?: string },
meta: AuditMeta,
) {
const client = await db.query.clients.findFirst({
where: eq(clients.id, clientId),
});
if (!client || client.portId !== portId) throw new NotFoundError('Client');
const [rel] = await db
.insert(clientRelationships)
.values({ portId, clientAId: clientId, ...data })
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'clientRelationship',
entityId: rel!.id,
newValue: { clientAId: clientId, clientBId: data.clientBId, type: data.relationshipType },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
return rel!;
}
/**
 * Deletes a relationship after verifying it belongs to the port, then
 * audit-logs the deletion. `clientId` is accepted for route symmetry but is
 * not part of the ownership check.
 *
 * @throws NotFoundError when the relationship is missing or out of port.
 */
export async function deleteRelationship(
  relId: string,
  clientId: string,
  portId: string,
  meta: AuditMeta,
) {
  const relationship = await db.query.clientRelationships.findFirst({
    where: eq(clientRelationships.id, relId),
  });
  if (!relationship || relationship.portId !== portId) throw new NotFoundError('Relationship');
  await db.delete(clientRelationships).where(eq(clientRelationships.id, relId));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'clientRelationship',
    entityId: relId,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Find Duplicates ──────────────────────────────────────────────────────────
/**
 * Fuzzy duplicate lookup: up to five clients in the port whose full name
 * contains the given string (case-insensitive).
 */
export async function findDuplicates(portId: string, fullName: string) {
  const pattern = `%${fullName}%`;
  return db.query.clients.findMany({
    where: (c, { and, eq }) => and(eq(c.portId, portId), ilike(c.fullName, pattern)),
    limit: 5,
  });
}
// ─── Options (for comboboxes) ─────────────────────────────────────────────────
/**
 * Lightweight client list (id, full name) for comboboxes, optionally filtered
 * by a case-insensitive substring match on full or company name, ordered by
 * full name and capped at 50 rows. The previous version silenced Drizzle's
 * `or(...)` nullability with a non-null assertion; a guard is used instead.
 */
export async function listClientOptions(portId: string, search?: string) {
  const conditions = [eq(clients.portId, portId)];
  if (search) {
    const pattern = `%${search}%`;
    const searchCondition = or(
      ilike(clients.fullName, pattern),
      ilike(clients.companyName, pattern),
    );
    // or() is typed as possibly undefined; with two args it is always defined,
    // but guard rather than assert.
    if (searchCondition) conditions.push(searchCondition);
  }
  return db
    .select({ id: clients.id, fullName: clients.fullName })
    .from(clients)
    .where(and(...conditions))
    .orderBy(clients.fullName)
    .limit(50);
}

// ═══ Next file: currency-rates service (scrape residue "View File / @@ -0,0 +1,69 @@" replaced) ═══
import { db } from '@/lib/db';
import { currencyRates } from '@/lib/db/schema/system';
import { eq, and } from 'drizzle-orm';
import { logger } from '@/lib/logger';
/**
 * Returns the stored conversion rate from `from` to `to`, or null when no
 * rate row exists. Identical currencies short-circuit to 1 without a query.
 */
export async function getRate(from: string, to: string): Promise<number | null> {
  if (from === to) return 1;
  const row = await db.query.currencyRates.findFirst({
    where: and(eq(currencyRates.baseCurrency, from), eq(currencyRates.targetCurrency, to)),
  });
  if (!row) return null;
  return Number(row.rate);
}
/**
 * Converts `amount` between currencies using the stored rate, rounding the
 * result to two decimal places. Returns null when no usable rate exists.
 */
export async function convert(
  amount: number,
  from: string,
  to: string,
): Promise<{ result: number; rate: number } | null> {
  const rate = await getRate(from, to);
  if (!rate) return null;
  const rounded = Number((amount * rate).toFixed(2));
  return { result: rounded, rate };
}
/**
 * Fetches the latest USD-based rates from the Frankfurter API and upserts
 * both the forward (USD→X) and inverse (X→USD) rows in `currencyRates`.
 * Failures are logged and swallowed — callers treat this as best-effort.
 *
 * Fixes vs. previous version: the JSON payload is validated before use
 * (`data.rates` missing previously crashed inside Object.entries), zero or
 * non-finite rates are skipped instead of storing an `Infinity` inverse, the
 * two passes are merged into one loop, and the redundant
 * `String(x.toFixed(6))` double-conversion is removed (toFixed already
 * returns a string).
 */
export async function refreshRates(): Promise<void> {
  try {
    const res = await fetch('https://api.frankfurter.dev/v1/latest?base=USD');
    if (!res.ok) throw new Error(`Frankfurter API error: ${res.status}`);
    const data: unknown = await res.json();
    const rawRates = (data as { rates?: unknown }).rates;
    if (rawRates === null || typeof rawRates !== 'object') {
      throw new Error('Frankfurter API returned an unexpected payload (missing "rates")');
    }
    const rates = rawRates as Record<string, number>;
    const fetchedAt = new Date();
    for (const [currency, rate] of Object.entries(rates)) {
      // Guard against non-numeric / zero rates — a zero would yield an
      // Infinity inverse row.
      if (typeof rate !== 'number' || !Number.isFinite(rate) || rate <= 0) {
        logger.warn({ currency, rate }, 'Skipping invalid currency rate');
        continue;
      }
      // Forward rate: USD -> currency.
      await db
        .insert(currencyRates)
        .values({
          baseCurrency: 'USD',
          targetCurrency: currency,
          rate: String(rate),
          source: 'frankfurter',
          fetchedAt,
        })
        .onConflictDoUpdate({
          target: [currencyRates.baseCurrency, currencyRates.targetCurrency],
          set: { rate: String(rate), fetchedAt, source: 'frankfurter' },
        });
      // Inverse rate: currency -> USD, 6 decimal places.
      const inverse = (1 / rate).toFixed(6);
      await db
        .insert(currencyRates)
        .values({
          baseCurrency: currency,
          targetCurrency: 'USD',
          rate: inverse,
          source: 'frankfurter',
          fetchedAt,
        })
        .onConflictDoUpdate({
          target: [currencyRates.baseCurrency, currencyRates.targetCurrency],
          set: { rate: inverse, fetchedAt, source: 'frankfurter' },
        });
    }
    logger.info({ rateCount: Object.keys(rates).length }, 'Currency rates refreshed');
  } catch (err) {
    logger.error({ err }, 'Failed to refresh currency rates');
  }
}

// ═══ Next file: custom-fields service (scrape residue "View File / @@ -0,0 +1,323 @@" replaced) ═══
import { and, eq, count } from 'drizzle-orm';
import { db } from '@/lib/db';
import { customFieldDefinitions, customFieldValues } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ValidationError, ConflictError } from '@/lib/errors';
import type { CreateFieldInput, UpdateFieldInput } from '@/lib/validators/custom-fields';
import type { CustomFieldDefinition } from '@/lib/db/schema/system';
// ─── Types ────────────────────────────────────────────────────────────────────
// Acting-user metadata for audit logging. Note: portId/userId are also passed
// as separate parameters to several functions below — presumably redundant;
// verify before consolidating.
interface AuditMeta {
  userId: string;    // ID of the user performing the action
  portId: string;    // port scope the action applies to
  ipAddress: string; // request IP, recorded in the audit log
  userAgent: string; // request user agent, recorded in the audit log
}
// ─── Value Validation ─────────────────────────────────────────────────────────
/**
 * Validates a raw value against a custom-field definition.
 *
 * Uses Number.isNaN (no implicit coercion) instead of the global isNaN, and
 * flattens the nested ternaries for readability. Error strings are unchanged.
 *
 * @returns An error message, or null when the value is acceptable.
 */
function validateCustomFieldValue(
  definition: CustomFieldDefinition,
  value: unknown,
): string | null {
  // Null/undefined fails only for required fields.
  if (value === null || value === undefined) {
    return definition.isRequired ? 'This field is required' : null;
  }
  switch (definition.fieldType) {
    case 'text':
      if (typeof value !== 'string') return 'Must be text';
      return value.length > 1000 ? 'Max 1000 chars' : null;
    case 'number':
      return typeof value !== 'number' || Number.isNaN(value) ? 'Must be a number' : null;
    case 'date':
      // Date.parse returns NaN for unparseable strings.
      return typeof value !== 'string' || Number.isNaN(Date.parse(value))
        ? 'Must be a valid date'
        : null;
    case 'boolean':
      return typeof value !== 'boolean' ? 'Must be true or false' : null;
    case 'select': {
      const options = (definition.selectOptions as string[] | null) ?? [];
      return options.includes(value as string)
        ? null
        : `Must be one of: ${options.join(', ')}`;
    }
    default:
      return 'Unknown field type';
  }
}
// ─── Definitions ──────────────────────────────────────────────────────────────
/**
 * Lists custom-field definitions for a port, optionally restricted to a
 * single entity type, ordered by sortOrder then creation time.
 */
export async function listDefinitions(portId: string, entityType?: string) {
  const where = entityType
    ? and(
        eq(customFieldDefinitions.portId, portId),
        eq(customFieldDefinitions.entityType, entityType),
      )
    : and(eq(customFieldDefinitions.portId, portId));
  return db.query.customFieldDefinitions.findMany({
    where,
    orderBy: (fields, { asc }) => [asc(fields.sortOrder), asc(fields.createdAt)],
  });
}
/**
 * Creates a custom-field definition, enforcing uniqueness of fieldName per
 * (port, entityType), then audit-logs the creation.
 *
 * @throws ConflictError when a definition with the same name already exists
 *   for this port and entity type.
 * @returns The inserted definition row.
 */
export async function createDefinition(
  portId: string,
  userId: string,
  data: CreateFieldInput,
  meta: AuditMeta,
) {
  // Check for duplicate fieldName within portId + entityType
  const existing = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.portId, portId),
      eq(customFieldDefinitions.entityType, data.entityType),
      eq(customFieldDefinitions.fieldName, data.fieldName),
    ),
  });
  if (existing) {
    throw new ConflictError(
      `A custom field named "${data.fieldName}" already exists for ${data.entityType}`,
    );
  }
  const rows = await db
    .insert(customFieldDefinitions)
    .values({
      portId,
      entityType: data.entityType,
      fieldName: data.fieldName,
      fieldLabel: data.fieldLabel,
      fieldType: data.fieldType,
      selectOptions: data.selectOptions ?? null,
      isRequired: data.isRequired,
      sortOrder: data.sortOrder,
    })
    .returning();
  const created = rows[0];
  if (!created) throw new Error('Insert failed — no row returned');
  // Fire-and-forget audit write.
  void createAuditLog({
    userId,
    portId,
    action: 'create',
    entityType: 'custom_field_definition',
    entityId: created.id,
    newValue: {
      fieldName: created.fieldName,
      fieldLabel: created.fieldLabel,
      fieldType: created.fieldType,
      entityType: created.entityType,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return created;
}
/**
 * Updates a custom-field definition's mutable attributes (label, select
 * options, required flag, sort order) and audit-logs old vs new values.
 * The field's type is immutable after creation.
 *
 * @throws ValidationError when the caller attempts to change fieldType.
 * @throws NotFoundError when the definition does not exist in the port.
 * @returns The updated definition row.
 */
export async function updateDefinition(
  portId: string,
  fieldId: string,
  userId: string,
  data: UpdateFieldInput & { fieldType?: unknown },
  meta: AuditMeta,
) {
  // Immutability guard — fieldType must never change
  if ('fieldType' in data && data.fieldType !== undefined) {
    throw new ValidationError('Field type cannot be changed after creation');
  }
  const existing = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.id, fieldId),
      eq(customFieldDefinitions.portId, portId),
    ),
  });
  if (!existing) {
    throw new NotFoundError('Custom field definition');
  }
  // Only set keys the caller actually supplied.
  const updateRows = await db
    .update(customFieldDefinitions)
    .set({
      ...(data.fieldLabel !== undefined && { fieldLabel: data.fieldLabel }),
      ...(data.selectOptions !== undefined && { selectOptions: data.selectOptions }),
      ...(data.isRequired !== undefined && { isRequired: data.isRequired }),
      ...(data.sortOrder !== undefined && { sortOrder: data.sortOrder }),
    })
    .where(eq(customFieldDefinitions.id, fieldId))
    .returning();
  const updated = updateRows[0];
  if (!updated) throw new Error('Update failed — no row returned');
  // Fire-and-forget audit write with full before/after of mutable fields.
  void createAuditLog({
    userId,
    portId,
    action: 'update',
    entityType: 'custom_field_definition',
    entityId: fieldId,
    oldValue: {
      fieldLabel: existing.fieldLabel,
      selectOptions: existing.selectOptions,
      isRequired: existing.isRequired,
      sortOrder: existing.sortOrder,
    },
    newValue: {
      fieldLabel: updated.fieldLabel,
      selectOptions: updated.selectOptions,
      isRequired: updated.isRequired,
      sortOrder: updated.sortOrder,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return updated;
}
/**
 * Deletes a custom-field definition. Stored values are removed via the
 * foreign-key CASCADE; their count is captured first so the audit entry
 * records how much data was destroyed.
 *
 * @throws NotFoundError when the definition does not exist in the port.
 * @returns `{ deletedValueCount }` — number of values removed with the field.
 */
export async function deleteDefinition(
  portId: string,
  fieldId: string,
  userId: string,
  meta: AuditMeta,
) {
  const existing = await db.query.customFieldDefinitions.findFirst({
    where: and(
      eq(customFieldDefinitions.id, fieldId),
      eq(customFieldDefinitions.portId, portId),
    ),
  });
  if (!existing) {
    throw new NotFoundError('Custom field definition');
  }
  // Count associated values before deletion
  const countResult = await db
    .select({ count: count() })
    .from(customFieldValues)
    .where(eq(customFieldValues.fieldId, fieldId));
  const valueCount = countResult[0]?.count ?? 0;
  // Delete definition — CASCADE handles values
  await db
    .delete(customFieldDefinitions)
    .where(eq(customFieldDefinitions.id, fieldId));
  // Fire-and-forget audit write.
  void createAuditLog({
    userId,
    portId,
    action: 'delete',
    entityType: 'custom_field_definition',
    entityId: fieldId,
    oldValue: {
      fieldName: existing.fieldName,
      fieldLabel: existing.fieldLabel,
      fieldType: existing.fieldType,
      entityType: existing.entityType,
    },
    metadata: { deletedValueCount: Number(valueCount) },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return { deletedValueCount: Number(valueCount) };
}
// ─── Values ───────────────────────────────────────────────────────────────────
/**
 * Returns every custom field definition for the port, each paired with the
 * stored value for the given entity (or null when the field is unset).
 * Definitions come back in display order (sortOrder, then createdAt).
 */
export async function getValues(entityId: string, portId: string) {
  const definitions = await db.query.customFieldDefinitions.findMany({
    where: eq(customFieldDefinitions.portId, portId),
    orderBy: (fields, { asc }) => [asc(fields.sortOrder), asc(fields.createdAt)],
  });
  const stored = await db.query.customFieldValues.findMany({
    where: eq(customFieldValues.entityId, entityId),
  });
  // Index values by field id for O(1) pairing below.
  const byFieldId = new Map(stored.map((row) => [row.fieldId, row]));
  return definitions.map((definition) => ({
    definition,
    value: byFieldId.get(definition.id) ?? null,
  }));
}
/**
 * Validates and upserts a batch of custom field values for one entity.
 *
 * All values are validated first (against the port's field definitions) and
 * every failure is collected, so the caller sees the full error list in a
 * single ValidationError. Only when validation passes are values written,
 * upserted on the (fieldId, entityId) unique pair.
 *
 * @returns the upserted rows, in the same order as the input
 * @throws ValidationError listing each invalid/unknown field
 */
export async function setValues(
  entityId: string,
  portId: string,
  userId: string,
  values: Array<{ fieldId: string; value: unknown }>,
  meta: AuditMeta,
) {
  if (values.length === 0) return [];
  const fieldIds = values.map((entry) => entry.fieldId);
  // Load the port's definitions so each incoming value can be validated.
  const definitions = await db.query.customFieldDefinitions.findMany({
    where: eq(customFieldDefinitions.portId, portId),
  });
  const definitionById = new Map(definitions.map((d) => [d.id, d]));
  // Collect all validation failures before rejecting.
  const errors: Array<{ field: string; message: string }> = [];
  for (const entry of values) {
    const definition = definitionById.get(entry.fieldId);
    if (!definition) {
      errors.push({ field: entry.fieldId, message: 'Custom field not found for this port' });
      continue;
    }
    const message = validateCustomFieldValue(definition, entry.value);
    if (message) {
      errors.push({ field: definition.fieldName, message });
    }
  }
  if (errors.length > 0) {
    throw new ValidationError('Custom field validation failed', errors);
  }
  // Upsert each value; Promise.all preserves input order in the results.
  const results = await Promise.all(
    values.map(async (entry) => {
      const rows = await db
        .insert(customFieldValues)
        .values({
          fieldId: entry.fieldId,
          entityId,
          value: entry.value as Record<string, unknown>,
          updatedAt: new Date(),
        })
        .onConflictDoUpdate({
          target: [customFieldValues.fieldId, customFieldValues.entityId],
          set: {
            value: entry.value as Record<string, unknown>,
            updatedAt: new Date(),
          },
        })
        .returning();
      return rows[0];
    }),
  );
  // Audit is fire-and-forget; the write itself is not gated on it.
  void createAuditLog({
    userId,
    portId,
    action: 'update',
    entityType: 'custom_field_values',
    entityId,
    metadata: { fieldIds, updatedCount: results.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return results;
}

View File

@@ -0,0 +1,189 @@
import { and, count, desc, eq, inArray, isNull, sql, sum } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { systemSettings, auditLogs } from '@/lib/db/schema/system';
import { PIPELINE_STAGES } from '@/lib/constants';
// ─── Default pipeline weights ────────────────────────────────────────────────
// Probability-of-close weight per pipeline stage, used by getRevenueForecast
// when no per-port 'pipeline_weights' override exists in systemSettings.
const DEFAULT_PIPELINE_WEIGHTS: Record<string, number> = {
  open: 0.05,
  details_sent: 0.10,
  in_communication: 0.20,
  visited: 0.35,
  signed_eoi_nda: 0.50,
  deposit_10pct: 0.70,
  contract: 0.90,
  completed: 1.00,
};
// ─── KPIs ─────────────────────────────────────────────────────────────────────
/**
 * Headline dashboard KPIs for a port:
 * - totalClients / activeInterests: non-archived row counts
 * - pipelineValueUsd: sum of berth prices linked from non-archived interests
 * - occupancyRate: (sold + under_offer) / total berths, as a percentage
 *
 * The four metrics are independent, so their queries run in parallel.
 */
export async function getKpis(portId: string) {
  const [totalClientsRows, activeInterestsRows, pipelineRows, allBerthsRows] =
    await Promise.all([
      db
        .select({ value: count() })
        .from(clients)
        .where(and(eq(clients.portId, portId), isNull(clients.archivedAt))),
      db
        .select({ value: count() })
        .from(interests)
        .where(and(eq(interests.portId, portId), isNull(interests.archivedAt))),
      // Pipeline value: berth prices reachable from non-archived interests.
      // The inner join on interests.berthId already excludes NULL berthIds,
      // so no extra IS NOT NULL predicate is needed.
      db
        .select({ price: berths.price })
        .from(interests)
        .innerJoin(berths, eq(interests.berthId, berths.id))
        .where(and(eq(interests.portId, portId), isNull(interests.archivedAt))),
      db
        .select({ status: berths.status })
        .from(berths)
        .where(eq(berths.portId, portId)),
    ]);
  // price may arrive as a string (numeric column) — parse before summing.
  const pipelineValueUsd = pipelineRows.reduce((acc, row) => {
    return acc + (row.price ? parseFloat(String(row.price)) : 0);
  }, 0);
  // Occupancy rate: (sold + under_offer) / total * 100
  const totalBerths = allBerthsRows.length;
  const occupiedBerths = allBerthsRows.filter(
    (b) => b.status === 'sold' || b.status === 'under_offer',
  ).length;
  const occupancyRate = totalBerths > 0 ? (occupiedBerths / totalBerths) * 100 : 0;
  return {
    totalClients: totalClientsRows[0]?.value ?? 0,
    activeInterests: activeInterestsRows[0]?.value ?? 0,
    pipelineValueUsd,
    occupancyRate,
  };
}
// ─── Pipeline Counts ──────────────────────────────────────────────────────────
/**
 * Counts non-archived interests per pipeline stage for a port.
 * Every stage in PIPELINE_STAGES appears in the result, zero-filled when
 * no interest is currently in that stage.
 */
export async function getPipelineCounts(portId: string) {
  const grouped = await db
    .select({
      stage: interests.pipelineStage,
      count: sql<number>`count(*)::int`,
    })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  const countsByStage: Record<string, number> = {};
  for (const row of grouped) {
    countsByStage[String(row.stage)] = row.count;
  }
  return PIPELINE_STAGES.map((stage) => ({
    stage,
    count: countsByStage[stage] ?? 0,
  }));
}
// ─── Revenue Forecast ─────────────────────────────────────────────────────────
/**
 * Weighted revenue forecast for a port.
 *
 * Each non-archived interest with a linked berth contributes
 * `berth.price * weight(pipelineStage)`. Weights come from the per-port
 * 'pipeline_weights' row in systemSettings when present and well-formed;
 * otherwise DEFAULT_PIPELINE_WEIGHTS is used (`weightsSource` reports which).
 */
export async function getRevenueForecast(portId: string) {
  let weights: Record<string, number> = DEFAULT_PIPELINE_WEIGHTS;
  let weightsSource: 'db' | 'default' = 'default';
  const settingRow = await db.query.systemSettings.findFirst({
    where: and(
      eq(systemSettings.key, 'pipeline_weights'),
      eq(systemSettings.portId, portId),
    ),
  });
  // Accept the stored override only if it is a non-empty object whose values
  // are all numbers; malformed jsonb falls back to defaults instead of
  // poisoning the forecast with NaN. (The previous try/catch here was dead
  // code: a type assertion never throws.)
  if (settingRow?.value && typeof settingRow.value === 'object') {
    const candidate = settingRow.value as Record<string, unknown>;
    const entries = Object.entries(candidate);
    if (entries.length > 0 && entries.every(([, w]) => typeof w === 'number')) {
      weights = candidate as Record<string, number>;
      weightsSource = 'db';
    }
  }
  // All non-archived interests joined to their berth. The inner join on
  // interests.berthId already excludes rows without a berth, so the previous
  // explicit IS NOT NULL predicate was redundant.
  const interestRows = await db
    .select({
      id: interests.id,
      pipelineStage: interests.pipelineStage,
      berthPrice: berths.price,
    })
    .from(interests)
    .innerJoin(berths, eq(interests.berthId, berths.id))
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
      ),
    );
  // Aggregate count + weighted value per stage.
  const stageMap: Record<string, { count: number; weightedValue: number }> = {};
  for (const row of interestRows) {
    const stage = row.pipelineStage ?? 'open';
    // price may arrive as a string (numeric column) — parse before use.
    const price = row.berthPrice ? parseFloat(String(row.berthPrice)) : 0;
    const weight = weights[stage] ?? 0;
    if (!stageMap[stage]) {
      stageMap[stage] = { count: 0, weightedValue: 0 };
    }
    stageMap[stage]!.count += 1;
    stageMap[stage]!.weightedValue += price * weight;
  }
  // Emit every known stage, zero-filled when absent.
  const stageBreakdown = PIPELINE_STAGES.map((stage) => ({
    stage,
    count: stageMap[stage]?.count ?? 0,
    weightedValue: stageMap[stage]?.weightedValue ?? 0,
  }));
  const totalWeightedValue = stageBreakdown.reduce(
    (acc, s) => acc + s.weightedValue,
    0,
  );
  return {
    totalWeightedValue,
    stageBreakdown,
    weightsSource,
  };
}
// ─── Recent Activity ──────────────────────────────────────────────────────────
/**
 * Returns the most recent audit-log entries for a port, newest first.
 *
 * @param limit maximum number of rows to return (default 20)
 */
export async function getRecentActivity(portId: string, limit = 20) {
  return db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      userId: auditLogs.userId,
      metadata: auditLogs.metadata,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(eq(auditLogs.portId, portId))
    .orderBy(desc(auditLogs.createdAt))
    .limit(limit);
}

View File

@@ -0,0 +1,88 @@
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
const BASE_URL = env.DOCUMENSO_API_URL;
const API_KEY = env.DOCUMENSO_API_KEY;
/**
 * Thin authenticated fetch against the Documenso REST API.
 * Callers may override/extend headers via options; Authorization and
 * Content-Type defaults are applied first so overrides win.
 *
 * @throws Error with the HTTP status on any non-2xx response (body is logged)
 */
async function documensoFetch(path: string, options?: RequestInit): Promise<unknown> {
  const response = await fetch(`${BASE_URL}${path}`, {
    ...options,
    headers: {
      Authorization: `Bearer ${API_KEY}`,
      'Content-Type': 'application/json',
      ...options?.headers,
    },
  });
  if (response.ok) {
    return response.json();
  }
  const errorBody = await response.text();
  logger.error({ path, status: response.status, err: errorBody }, 'Documenso API error');
  throw new Error(`Documenso API error: ${response.status}`);
}
/** Recipient descriptor sent to Documenso when creating a document. */
export interface DocumensoRecipient {
  name: string;
  email: string;
  role: string;
  // Position in the sequential signing order (lower signs first).
  signingOrder: number;
}
/** Shape of a Documenso document as returned by its REST API. */
export interface DocumensoDocument {
  id: string;
  status: string;
  recipients: Array<{
    id: string;
    name: string;
    email: string;
    role: string;
    signingOrder: number;
    status: string;
    signingUrl?: string;
    embeddedUrl?: string;
  }>;
}
/**
 * Creates a draft document in Documenso from a base64-encoded PDF and the
 * given recipient list. Does not send it — see sendDocument.
 */
export async function createDocument(
  title: string,
  pdfBase64: string,
  recipients: DocumensoRecipient[],
): Promise<DocumensoDocument> {
  const payload = { title, document: pdfBase64, recipients };
  const result = await documensoFetch('/api/v1/documents', {
    method: 'POST',
    body: JSON.stringify(payload),
  });
  return result as DocumensoDocument;
}
/** Dispatches a previously created Documenso document to its recipients. */
export async function sendDocument(docId: string): Promise<DocumensoDocument> {
  const result = await documensoFetch(`/api/v1/documents/${docId}/send`, {
    method: 'POST',
  });
  return result as DocumensoDocument;
}
/** Fetches the current state of a Documenso document by id. */
export async function getDocument(docId: string): Promise<DocumensoDocument> {
  const result = await documensoFetch(`/api/v1/documents/${docId}`);
  return result as DocumensoDocument;
}
/** Triggers a Documenso reminder email for one recipient of a document. */
export async function sendReminder(docId: string, signerId: string): Promise<void> {
  const path = `/api/v1/documents/${docId}/recipients/${signerId}/remind`;
  await documensoFetch(path, { method: 'POST' });
}
/**
 * Downloads the signed PDF for a document as a Buffer.
 * Uses a raw fetch (not documensoFetch) because the response is binary,
 * not JSON.
 *
 * @throws Error with the HTTP status on any non-2xx response (body is logged)
 */
export async function downloadSignedPdf(docId: string): Promise<Buffer> {
  const response = await fetch(`${BASE_URL}/api/v1/documents/${docId}/download`, {
    headers: { Authorization: `Bearer ${API_KEY}` },
  });
  if (!response.ok) {
    const errorBody = await response.text();
    logger.error({ docId, status: response.status, err: errorBody }, 'Documenso download error');
    throw new Error(`Documenso download error: ${response.status}`);
  }
  return Buffer.from(await response.arrayBuffer());
}

View File

@@ -0,0 +1,15 @@
import { createHmac } from 'crypto';
import { timingSafeEqual } from 'crypto';
/**
 * Verifies a Documenso webhook signature (hex-encoded HMAC-SHA256 of the
 * raw payload) in constant time.
 *
 * Fix vs. the previous version: the hex signature is decoded to raw bytes
 * before comparison, so an uppercase hex signature is accepted (the old
 * utf8 string comparison was case-sensitive), and length mismatches are
 * rejected explicitly instead of via a caught timingSafeEqual throw.
 */
export function verifyDocumensoSignature(
  payload: string,
  signature: string,
  secret: string,
): boolean {
  // Expected digest as raw bytes (32 bytes for SHA-256).
  const expected = createHmac('sha256', secret).update(payload).digest();
  // Reject anything that is not pure hex — Buffer.from(.., 'hex') would
  // silently truncate invalid input.
  if (!/^[0-9a-fA-F]+$/.test(signature)) return false;
  const presented = Buffer.from(signature, 'hex');
  // timingSafeEqual requires equal lengths; a mismatch is invalid anyway.
  if (presented.length !== expected.length) return false;
  return timingSafeEqual(expected, presented);
}

View File

@@ -0,0 +1,123 @@
import { and, eq, inArray } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documents, documentSigners, documentEvents } from '@/lib/db/schema/documents';
import { interests } from '@/lib/db/schema/interests';
import { ports } from '@/lib/db/schema/ports';
import { sendReminder as documensoRemind } from '@/lib/services/documenso-client';
import { logger } from '@/lib/logger';
// BR-023: Reminders only during 9-16 in port timezone, with 24h cooldown
/**
 * Returns the current hour (0-23) in the given IANA timezone.
 *
 * Fix: uses `hourCycle: 'h23'` explicitly. With only `hour12: false`,
 * some ICU versions resolve 'en-US' to the h24 cycle and format midnight
 * as "24", which would corrupt range checks such as `hour < 9`.
 */
function getCurrentHourInTimezone(timezone: string): number {
  const formatter = new Intl.DateTimeFormat('en-US', {
    timeZone: timezone,
    hour: 'numeric',
    hourCycle: 'h23',
  });
  return parseInt(formatter.format(new Date()), 10);
}
/**
 * Sends at most one signing reminder for a document, enforcing BR-023.
 *
 * The gates, in order (any failure returns false without side effects):
 *  1. Document exists in this port, has an interest and a Documenso id.
 *  2. Document is still awaiting signatures ('sent'/'partially_signed').
 *  3. The linked interest has reminders enabled.
 *  4. Local time in the port's timezone is within the reminder window.
 *     NOTE(review): as coded (`>= 16`) the window is 09:00–15:59 — confirm
 *     whether 16:00 itself should be included per BR-023's "9-16".
 *  5. No 'reminder_sent' event was recorded in the last 24 hours.
 *  6. There is a pending signer (lowest signingOrder wins).
 *
 * On success a reminder is sent via Documenso and a 'reminder_sent' event
 * is recorded (which restarts the 24h cooldown).
 *
 * @returns true only when a reminder was actually sent and recorded
 */
export async function sendReminderIfAllowed(
  documentId: string,
  portId: string,
): Promise<boolean> {
  const doc = await db.query.documents.findFirst({
    where: and(eq(documents.id, documentId), eq(documents.portId, portId)),
  });
  if (!doc || !doc.interestId || !doc.documensoId) return false;
  if (!['sent', 'partially_signed'].includes(doc.status)) return false;
  // Check interest.reminderEnabled
  const interest = await db.query.interests.findFirst({
    where: eq(interests.id, doc.interestId),
  });
  if (!interest?.reminderEnabled) return false;
  // Check port timezone
  const port = await db.query.ports.findFirst({
    where: eq(ports.id, portId),
  });
  // Ports without a configured timezone are treated as UTC.
  const timezone = port?.timezone ?? 'UTC';
  const currentHour = getCurrentHourInTimezone(timezone);
  if (currentHour < 9 || currentHour >= 16) return false;
  // Check 24h cooldown — last reminder_sent event for this document
  const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
  const lastReminder = await db.query.documentEvents.findFirst({
    where: and(
      eq(documentEvents.documentId, documentId),
      eq(documentEvents.eventType, 'reminder_sent'),
    ),
    orderBy: (de, { desc }) => [desc(de.createdAt)],
  });
  if (lastReminder && lastReminder.createdAt > twentyFourHoursAgo) {
    return false;
  }
  // Find current pending signer (lowest signingOrder with status='pending')
  const pendingSigner = await db.query.documentSigners.findFirst({
    where: and(
      eq(documentSigners.documentId, documentId),
      eq(documentSigners.status, 'pending'),
    ),
    orderBy: (ds, { asc }) => [asc(ds.signingOrder)],
  });
  if (!pendingSigner) return false;
  // Send reminder via Documenso; a failed API call is logged and reported
  // as "not sent" rather than thrown, so the caller's loop can continue.
  try {
    await documensoRemind(doc.documensoId, pendingSigner.id);
  } catch (err) {
    logger.error({ err, documentId, signerId: pendingSigner.id }, 'Failed to send Documenso reminder');
    return false;
  }
  // Record event — this is what the 24h cooldown above is measured against.
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'reminder_sent',
    signerId: pendingSigner.id,
    eventData: { signerEmail: pendingSigner.signerEmail, signerRole: pendingSigner.signerRole },
  });
  return true;
}
/**
 * Scans a port for documents still awaiting signatures whose interest has
 * reminders enabled, and attempts a reminder for each. Per-document errors
 * are logged and swallowed so one failure cannot stall the rest of the queue.
 */
export async function processReminderQueue(portId: string): Promise<void> {
  // Only interests that opted into reminders are eligible.
  const optedIn = await db.query.interests.findMany({
    where: and(
      eq(interests.portId, portId),
      eq(interests.reminderEnabled, true),
    ),
  });
  if (optedIn.length === 0) return;
  const interestIds = optedIn.map((interest) => interest.id);
  // Documents for those interests that are still awaiting signatures.
  const awaitingDocs = await db.query.documents.findMany({
    where: and(
      eq(documents.portId, portId),
      inArray(documents.status, ['sent', 'partially_signed']),
      inArray(documents.interestId, interestIds),
    ),
  });
  for (const doc of awaitingDocs) {
    try {
      // sendReminderIfAllowed applies all BR-023 gates itself.
      await sendReminderIfAllowed(doc.id, portId);
    } catch (err) {
      logger.error({ err, documentId: doc.id, portId }, 'Reminder processing failed for document');
    }
  }
}

View File

@@ -0,0 +1,421 @@
/**
* Admin Document Template Service — TipTap JSON-based templates
*
* This service manages templates whose content is stored as TipTap JSON
* (serialised to the `bodyHtml` text column). Version history is maintained
* via audit_log entries (action='update', entityType='document_template',
* metadata.version + metadata.content).
*
* Template type values: eoi | contract | nda | reservation_agreement | letter | other
* These are stored in the `templateType` column.
*/
import { and, eq, desc } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documentTemplates } from '@/lib/db/schema/documents';
import { auditLogs } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ValidationError } from '@/lib/errors';
import { validateTipTapDocument } from '@/lib/pdf/tiptap-to-pdfme';
import type {
CreateAdminTemplateInput,
UpdateAdminTemplateInput,
ListAdminTemplatesInput,
} from '@/lib/validators/document-templates';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request metadata attached to audit-log entries written by this service. */
export interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
/**
 * A version entry reconstructed from audit_log records.
 */
export interface TemplateVersion {
  version: number;
  // TipTap JSON snapshot of the template body at this version.
  content: Record<string, unknown>;
  changedBy: string | null;
  changedAt: Date;
  // Id of the audit_log row this version was reconstructed from.
  auditLogId: string;
}
/**
* Helper: extract the numeric version stored in a templateType-encoded field.
* We use a convention: version is stored in the `mergeFields` jsonb array
* as `["__version__:N"]` to avoid adding a new column.
*/
/**
 * Decodes the template version from the mergeFields sentinel
 * `'__version__:N'`. Returns 1 when the sentinel is absent or unparseable
 * (legacy records).
 */
function getVersionFromRecord(
  record: typeof documentTemplates.$inferSelect,
): number {
  const fields = record.mergeFields as unknown;
  if (!Array.isArray(fields)) return 1;
  const marker = (fields as string[]).find((entry) =>
    entry.startsWith('__version__:'),
  );
  if (marker === undefined) return 1;
  const parsed = parseInt(marker.split(':')[1] ?? '1', 10);
  return Number.isNaN(parsed) ? 1 : parsed;
}
/**
 * Encodes a template version as the sentinel merge-fields array
 * `['__version__:N']` — the inverse of the decoding in getVersionFromRecord.
 */
function buildMergeFieldsWithVersion(
  version: number,
): string[] {
  const sentinel = '__version__:' + String(version);
  return [sentinel];
}
/**
* Parse TipTap JSON from bodyHtml field. Returns the parsed object, or null
* if bodyHtml is plain HTML (legacy records).
*/
/**
 * Parses TipTap JSON out of the bodyHtml column. Returns null when the
 * column holds plain HTML (legacy records) or any JSON value that is not
 * an object carrying a 'type' discriminator.
 */
function parseTipTapContent(
  bodyHtml: string,
): Record<string, unknown> | null {
  let candidate: unknown;
  try {
    candidate = JSON.parse(bodyHtml);
  } catch {
    // Not JSON at all — legacy plain-HTML body.
    return null;
  }
  if (candidate === null || typeof candidate !== 'object') return null;
  // A TipTap document object always has a 'type' key.
  if (!('type' in (candidate as Record<string, unknown>))) return null;
  return candidate as Record<string, unknown>;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists a port's templates (optionally filtered by type / isActive),
 * ordered by name, each decorated with its decoded version number and
 * parsed TipTap content (null for legacy HTML bodies).
 */
export async function listAdminTemplates(
  portId: string,
  query: ListAdminTemplatesInput,
) {
  const { type, isActive } = query;
  // Port scoping is mandatory; type and isActive are optional filters.
  const conditions = [eq(documentTemplates.portId, portId)];
  if (type) conditions.push(eq(documentTemplates.templateType, type));
  if (isActive !== undefined) conditions.push(eq(documentTemplates.isActive, isActive));
  const rows = await db
    .select()
    .from(documentTemplates)
    .where(and(...conditions))
    .orderBy(documentTemplates.name);
  return rows.map((row) => {
    return {
      ...row,
      version: getVersionFromRecord(row),
      content: parseTipTapContent(row.bodyHtml),
    };
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Fetches one template scoped to a port, decorated with its decoded version
 * and parsed TipTap content.
 *
 * @throws NotFoundError when missing or owned by another port
 */
export async function getAdminTemplate(
  portId: string,
  templateId: string,
) {
  const record = await db.query.documentTemplates.findFirst({
    where: and(
      eq(documentTemplates.portId, portId),
      eq(documentTemplates.id, templateId),
    ),
  });
  if (!record) {
    throw new NotFoundError('Document template');
  }
  return {
    ...record,
    version: getVersionFromRecord(record),
    content: parseTipTapContent(record.bodyHtml),
  };
}
// ─── Validate TipTap Content ─────────────────────────────────────────────────
/**
 * Validates TipTap content against the set of node types the PDF renderer
 * supports; throws ValidationError listing any offenders.
 */
function assertValidContent(
  content: Record<string, unknown>,
): void {
  const offending = validateTipTapDocument(
    content as unknown as Parameters<typeof validateTipTapDocument>[0],
  );
  if (offending.length === 0) return;
  throw new ValidationError(
    `Template content contains unsupported node types: ${offending.join(', ')}. ` +
      'Supported: paragraph, heading (h1-h3), bulletList, orderedList, listItem, ' +
      'table, tableRow, tableCell, tableHeader, image, hardBreak, text.',
  );
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a template at version 1 from validated TipTap content.
 * The content is serialised into the bodyHtml text column and the version
 * is encoded into mergeFields (see buildMergeFieldsWithVersion).
 *
 * @throws ValidationError when the content has unsupported node types
 */
export async function createAdminTemplate(
  portId: string,
  userId: string,
  data: CreateAdminTemplateInput,
  meta: AuditMeta,
) {
  // Reject unsupported TipTap nodes before persisting anything.
  assertValidContent(data.content);
  const inserted = await db
    .insert(documentTemplates)
    .values({
      portId,
      name: data.name,
      templateType: data.type,
      bodyHtml: JSON.stringify(data.content),
      mergeFields: buildMergeFieldsWithVersion(1),
      isActive: true,
      createdBy: userId,
    })
    .returning();
  const template = inserted[0]!;
  // Audit is fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document_template',
    entityId: template.id,
    newValue: { name: template.name, type: data.type, version: 1 },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return {
    ...template,
    version: 1,
    content: data.content,
  };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a template's name / content / active flag. When `content` changes,
 * the previous content is first snapshotted into the audit log
 * (metadata.versionSnapshot + metadata.content) and the version encoded in
 * mergeFields is bumped by one; name/isActive-only edits keep the version.
 *
 * NOTE(review): the `userId` parameter is unused here — the audit entry uses
 * meta.userId. Kept for signature compatibility; confirm which is intended.
 *
 * @throws NotFoundError when the template does not exist in this port
 * @throws ValidationError when the new TipTap content has unsupported nodes
 */
export async function updateAdminTemplate(
  portId: string,
  templateId: string,
  userId: string,
  data: UpdateAdminTemplateInput,
  meta: AuditMeta,
) {
  const existing = await getAdminTemplate(portId, templateId);
  if (data.content !== undefined) {
    assertValidContent(data.content);
  }
  const currentVersion = existing.version;
  // Only content edits create a new version.
  const newVersion = data.content !== undefined ? currentVersion + 1 : currentVersion;
  // Before updating content, save old content to audit log for versioning
  // (fire-and-forget: the snapshot write is not awaited).
  if (data.content !== undefined) {
    void createAuditLog({
      userId: meta.userId,
      portId,
      action: 'update',
      entityType: 'document_template',
      entityId: templateId,
      oldValue: { version: currentVersion, name: existing.name },
      newValue: { version: newVersion, name: data.name ?? existing.name },
      metadata: {
        versionSnapshot: currentVersion,
        content: existing.content ?? {},
      },
      ipAddress: meta.ipAddress,
      userAgent: meta.userAgent,
    });
  }
  // Build a partial update; untouched columns are left as-is.
  const updateValues: Partial<typeof documentTemplates.$inferInsert> = {
    updatedAt: new Date(),
  };
  if (data.name !== undefined) {
    updateValues.name = data.name;
  }
  if (data.content !== undefined) {
    // Serialise TipTap JSON into bodyHtml; encode the new version into
    // mergeFields (see buildMergeFieldsWithVersion).
    updateValues.bodyHtml = JSON.stringify(data.content);
    updateValues.mergeFields = buildMergeFieldsWithVersion(newVersion);
  }
  if (data.isActive !== undefined) {
    updateValues.isActive = data.isActive;
  }
  const [updated] = await db
    .update(documentTemplates)
    .set(updateValues)
    .where(
      and(
        eq(documentTemplates.id, templateId),
        eq(documentTemplates.portId, portId),
      ),
    )
    .returning();
  return {
    ...updated!,
    version: newVersion,
    content:
      data.content !== undefined
        ? data.content
        : existing.content,
  };
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Deletes a port-scoped template and records the removal in the audit log.
 * Note: the `userId` parameter is not read here; the audit entry uses
 * meta.userId (mirroring the original implementation).
 *
 * @throws NotFoundError when missing or owned by another port
 */
export async function deleteAdminTemplate(
  portId: string,
  templateId: string,
  userId: string,
  meta: AuditMeta,
) {
  const existing = await getAdminTemplate(portId, templateId);
  await db
    .delete(documentTemplates)
    .where(
      and(
        eq(documentTemplates.portId, portId),
        eq(documentTemplates.id, templateId),
      ),
    );
  // Audit is fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'document_template',
    entityId: templateId,
    oldValue: { name: existing.name, type: existing.templateType, version: existing.version },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Version History ──────────────────────────────────────────────────────────
/**
* Retrieves version history for a template by querying audit_logs.
* Each 'update' audit log entry with entityType='document_template' and
* metadata.versionSnapshot contains a saved version.
*/
export async function getAdminTemplateVersions(
  portId: string,
  templateId: string,
): Promise<TemplateVersion[]> {
  // Verify template exists and belongs to port (throws NotFoundError).
  await getAdminTemplate(portId, templateId);
  const logs = await db
    .select()
    .from(auditLogs)
    .where(
      and(
        eq(auditLogs.entityType, 'document_template'),
        eq(auditLogs.entityId, templateId),
        eq(auditLogs.action, 'update'),
        eq(auditLogs.portId, portId),
      ),
    )
    .orderBy(desc(auditLogs.createdAt));
  // Only update entries carrying a version snapshot qualify as versions.
  const versions: TemplateVersion[] = [];
  for (const log of logs) {
    const meta = log.metadata as Record<string, unknown> | null;
    if (meta === null || typeof meta !== 'object') continue;
    if (!('versionSnapshot' in meta) || !('content' in meta)) continue;
    versions.push({
      version: meta.versionSnapshot as number,
      content: meta.content as Record<string, unknown>,
      changedBy: log.userId,
      changedAt: log.createdAt,
      auditLogId: log.id,
    });
  }
  return versions;
}
// ─── Rollback ─────────────────────────────────────────────────────────────────
/**
* Restores a template to a previous version found in audit_logs.
* Creates a new version number (current + 1) with the restored content.
*/
export async function rollbackAdminTemplate(
  portId: string,
  templateId: string,
  version: number,
  userId: string,
  meta: AuditMeta,
) {
  // Throws NotFoundError when the template is missing or not in this port.
  const existing = await getAdminTemplate(portId, templateId);
  const versions = await getAdminTemplateVersions(portId, templateId);
  const targetVersion = versions.find((v) => v.version === version);
  if (!targetVersion) {
    throw new NotFoundError(`Template version ${version}`);
  }
  // History is append-only: a rollback is recorded as a brand-new version.
  const newVersion = existing.version + 1;
  // Save current state to audit log before rollback
  // (fire-and-forget; metadata carries the snapshot getAdminTemplateVersions reads).
  // NOTE(review): the `userId` parameter is unused — the audit entry uses
  // meta.userId; confirm which is intended.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document_template',
    entityId: templateId,
    oldValue: { version: existing.version, name: existing.name },
    newValue: { version: newVersion, name: existing.name, rolledBackTo: version },
    metadata: {
      versionSnapshot: existing.version,
      content: existing.content ?? {},
      rolledBackTo: version,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  // Restore the target content and encode the new version into mergeFields.
  const [updated] = await db
    .update(documentTemplates)
    .set({
      bodyHtml: JSON.stringify(targetVersion.content),
      mergeFields: buildMergeFieldsWithVersion(newVersion),
      updatedAt: new Date(),
    })
    .where(
      and(
        eq(documentTemplates.id, templateId),
        eq(documentTemplates.portId, portId),
      ),
    )
    .returning();
  return {
    ...updated!,
    version: newVersion,
    content: targetVersion.content,
    rolledBackFrom: existing.version,
    rolledBackTo: version,
  };
}

View File

@@ -0,0 +1,617 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documentTemplates, documents, files } from '@/lib/db/schema/documents';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError, ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, buildStoragePath } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { createDocument as documensoCreate, sendDocument as documensoSend } from '@/lib/services/documenso-client';
import { sendEmail } from '@/lib/email';
import type {
CreateTemplateInput,
UpdateTemplateInput,
ListTemplatesInput,
GenerateInput,
GenerateAndSendInput,
GenerateAndSignInput,
} from '@/lib/validators/document-templates';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request metadata attached to audit-log entries written by this service. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Merge Field Definitions ──────────────────────────────────────────────────
/**
 * Catalogue of merge-field tokens available in template bodies, grouped by
 * source entity. Per BR-140 (see resolveTemplate), tokens with
 * `required: true` must resolve to a non-empty value; optional tokens
 * resolve to '' when absent.
 */
const MERGE_FIELDS: Record<string, Array<{ token: string; label: string; required: boolean }>> = {
  // Client + yacht particulars.
  client: [
    { token: '{{client.fullName}}', label: 'Client Full Name', required: true },
    { token: '{{client.companyName}}', label: 'Company Name', required: false },
    { token: '{{client.email}}', label: 'Primary Email', required: false },
    { token: '{{client.phone}}', label: 'Primary Phone', required: false },
    { token: '{{client.nationality}}', label: 'Nationality', required: false },
    { token: '{{client.yachtName}}', label: 'Yacht Name', required: false },
    { token: '{{client.yachtLengthFt}}', label: 'Yacht Length (ft)', required: false },
    { token: '{{client.yachtLengthM}}', label: 'Yacht Length (m)', required: false },
    { token: '{{client.yachtWidthFt}}', label: 'Yacht Beam (ft)', required: false },
    { token: '{{client.yachtDraftFt}}', label: 'Yacht Draft (ft)', required: false },
    { token: '{{client.source}}', label: 'Lead Source', required: false },
  ],
  // Pipeline / EOI state of the interest.
  interest: [
    { token: '{{interest.stage}}', label: 'Pipeline Stage', required: false },
    { token: '{{interest.leadCategory}}', label: 'Lead Category', required: false },
    { token: '{{interest.berthNumber}}', label: 'Berth Number', required: false },
    { token: '{{interest.eoiStatus}}', label: 'EOI Status', required: false },
    { token: '{{interest.dateFirstContact}}', label: 'Date First Contact', required: false },
    { token: '{{interest.dateEoiSigned}}', label: 'Date EOI Signed', required: false },
    { token: '{{interest.dateContractSigned}}', label: 'Date Contract Signed', required: false },
    { token: '{{interest.notes}}', label: 'Interest Notes', required: false },
  ],
  // Berth particulars and commercial terms.
  berth: [
    { token: '{{berth.mooringNumber}}', label: 'Mooring Number', required: true },
    { token: '{{berth.area}}', label: 'Area', required: false },
    { token: '{{berth.status}}', label: 'Berth Status', required: false },
    { token: '{{berth.price}}', label: 'Price', required: false },
    { token: '{{berth.priceCurrency}}', label: 'Price Currency', required: false },
    { token: '{{berth.lengthFt}}', label: 'Length (ft)', required: false },
    { token: '{{berth.widthFt}}', label: 'Beam (ft)', required: false },
    { token: '{{berth.tenureType}}', label: 'Tenure Type', required: false },
    { token: '{{berth.tenureYears}}', label: 'Tenure Years', required: false },
  ],
  port: [
    { token: '{{port.name}}', label: 'Port Name', required: false },
    { token: '{{port.defaultCurrency}}', label: 'Default Currency', required: false },
  ],
  date: [
    { token: '{{date.today}}', label: "Today's Date", required: false },
    { token: '{{date.year}}', label: 'Current Year', required: false },
  ],
};
/** Returns the static catalogue of merge-field tokens, grouped by entity. */
export function getMergeFields(): typeof MERGE_FIELDS {
  return MERGE_FIELDS;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated, searchable template listing for a port.
 * Supports optional templateType / isActive filters; sortable by name,
 * templateType, createdAt, or updatedAt (the default for anything else).
 */
export async function listTemplates(portId: string, query: ListTemplatesInput) {
  const { page, limit, sort, order, search, templateType, isActive } = query;
  const filters = [];
  if (templateType) {
    filters.push(eq(documentTemplates.templateType, templateType));
  }
  if (isActive !== undefined) {
    filters.push(eq(documentTemplates.isActive, isActive));
  }
  // Whitelisted sort columns; unrecognised keys fall back to updatedAt.
  const sortColumns = {
    name: documentTemplates.name,
    templateType: documentTemplates.templateType,
    createdAt: documentTemplates.createdAt,
  };
  const sortColumn =
    sortColumns[sort as keyof typeof sortColumns] ?? documentTemplates.updatedAt;
  return buildListQuery({
    table: documentTemplates,
    portIdColumn: documentTemplates.portId,
    portId,
    idColumn: documentTemplates.id,
    updatedAtColumn: documentTemplates.updatedAt,
    searchColumns: [documentTemplates.name],
    searchTerm: search,
    filters,
    sort: { column: sortColumn, direction: order },
    page,
    pageSize: limit,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
export async function getTemplateById(id: string, portId: string) {
const template = await db.query.documentTemplates.findFirst({
where: eq(documentTemplates.id, id),
});
if (!template || template.portId !== portId) {
throw new NotFoundError('Document template');
}
return template;
}
// ─── Create ───────────────────────────────────────────────────────────────────
/**
 * Creates a document template for a port, audits it, and emits
 * `documentTemplate:created` to the port room so clients can refresh.
 */
export async function createTemplate(portId: string, data: CreateTemplateInput, meta: AuditMeta) {
  const inserted = await db
    .insert(documentTemplates)
    .values({
      portId,
      name: data.name,
      description: data.description ?? null,
      templateType: data.templateType,
      bodyHtml: data.bodyHtml,
      mergeFields: data.mergeFields ?? [],
      isActive: data.isActive ?? true,
      createdBy: meta.userId,
    })
    .returning();
  const template = inserted[0]!;
  // Audit is fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'documentTemplate',
    entityId: template.id,
    newValue: { name: template.name, templateType: template.templateType },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'documentTemplate:created', { templateId: template.id });
  return template;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a port-scoped template, audits the change, and emits
 * `documentTemplate:updated` to the port room.
 *
 * NOTE(review): the audit entry records `diffEntity(existing, data).diff`
 * as oldValue and the raw patch as newValue — verify against diffEntity's
 * contract that `diff` really holds the pre-update values.
 *
 * @throws NotFoundError when the template does not exist in this port
 */
export async function updateTemplate(
  id: string,
  portId: string,
  data: UpdateTemplateInput,
  meta: AuditMeta,
) {
  const existing = await getTemplateById(id, portId);
  const { diff } = diffEntity(
    existing as Record<string, unknown>,
    data as Record<string, unknown>,
  );
  const [updated] = await db
    .update(documentTemplates)
    .set({ ...data, updatedAt: new Date() })
    .where(and(eq(documentTemplates.id, id), eq(documentTemplates.portId, portId)))
    .returning();
  // Audit is fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'documentTemplate',
    entityId: id,
    oldValue: diff as Record<string, unknown>,
    newValue: data as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'documentTemplate:updated', { templateId: id });
  return updated!;
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Deletes a port-scoped template, audits the removal, and emits
 * `documentTemplate:deleted` to the port room.
 *
 * @throws NotFoundError when missing or owned by another port
 */
export async function deleteTemplate(id: string, portId: string, meta: AuditMeta) {
  const existing = await getTemplateById(id, portId);
  await db
    .delete(documentTemplates)
    .where(and(eq(documentTemplates.id, id), eq(documentTemplates.portId, portId)));
  // Audit is fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'documentTemplate',
    entityId: id,
    oldValue: { name: existing.name, templateType: existing.templateType },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'documentTemplate:deleted', { templateId: id });
}
// ─── Resolve Template ─────────────────────────────────────────────────────────
/**
 * Interpolates all {{entity.field}} tokens in the template body HTML.
 *
 * Token values come from date helpers plus the port, client, interest and
 * berth rows referenced by `context`. Entities absent from the context (or
 * belonging to a different port) contribute no tokens, and their
 * placeholders are left verbatim in the output.
 *
 * BR-140: Required merge fields with no value throw ValidationError.
 * NOTE(review): only tokens that are present-but-empty are flagged; a
 * required token that was never populated (its entity missing from context)
 * passes silently — confirm this matches BR-140's intent.
 *
 * @param templateId Template to resolve, scoped to `context.portId`.
 * @param context    Entity ids used to populate merge fields.
 * @returns The template bodyHtml with all known tokens substituted.
 * @throws ValidationError when a required merge field resolves to ''.
 */
export async function resolveTemplate(
  templateId: string,
  context: {
    clientId?: string;
    interestId?: string;
    berthId?: string;
    portId: string;
  },
): Promise<string> {
  const template = await getTemplateById(templateId, context.portId);
  // Build token→value map from context
  const tokenMap: Record<string, string> = {};
  // Date tokens
  const now = new Date();
  tokenMap['{{date.today}}'] = now.toLocaleDateString('en-GB');
  tokenMap['{{date.year}}'] = String(now.getFullYear());
  // Port tokens
  const port = await db.query.ports.findFirst({ where: eq(ports.id, context.portId) });
  if (port) {
    tokenMap['{{port.name}}'] = port.name;
    tokenMap['{{port.defaultCurrency}}'] = port.defaultCurrency;
  }
  // Client tokens — only populated when the client belongs to this port.
  if (context.clientId) {
    const client = await db.query.clients.findFirst({
      where: eq(clients.id, context.clientId),
    });
    if (client && client.portId === context.portId) {
      // Primary contacts first, newest first, so .find() picks the best match.
      const contactList = await db.query.clientContacts.findMany({
        where: eq(clientContacts.clientId, context.clientId),
        orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
      });
      const emailContact = contactList.find((c) => c.channel === 'email');
      const phoneContact = contactList.find((c) => c.channel === 'phone' || c.channel === 'whatsapp');
      tokenMap['{{client.fullName}}'] = client.fullName ?? '';
      tokenMap['{{client.companyName}}'] = client.companyName ?? '';
      tokenMap['{{client.email}}'] = emailContact?.value ?? '';
      tokenMap['{{client.phone}}'] = phoneContact?.value ?? '';
      tokenMap['{{client.nationality}}'] = client.nationality ?? '';
      tokenMap['{{client.yachtName}}'] = client.yachtName ?? '';
      tokenMap['{{client.yachtLengthFt}}'] = client.yachtLengthFt ? String(client.yachtLengthFt) : '';
      tokenMap['{{client.yachtLengthM}}'] = client.yachtLengthM ? String(client.yachtLengthM) : '';
      tokenMap['{{client.yachtWidthFt}}'] = client.yachtWidthFt ? String(client.yachtWidthFt) : '';
      tokenMap['{{client.yachtDraftFt}}'] = client.yachtDraftFt ? String(client.yachtDraftFt) : '';
      tokenMap['{{client.source}}'] = client.source ?? '';
    }
  }
  // Interest tokens — only populated when the interest belongs to this port.
  if (context.interestId) {
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, context.interestId),
    });
    if (interest && interest.portId === context.portId) {
      tokenMap['{{interest.stage}}'] = interest.pipelineStage ?? '';
      tokenMap['{{interest.leadCategory}}'] = interest.leadCategory ?? '';
      tokenMap['{{interest.eoiStatus}}'] = interest.eoiStatus ?? '';
      tokenMap['{{interest.dateFirstContact}}'] = interest.dateFirstContact
        ? new Date(interest.dateFirstContact).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.dateEoiSigned}}'] = interest.dateEoiSigned
        ? new Date(interest.dateEoiSigned).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.dateContractSigned}}'] = interest.dateContractSigned
        ? new Date(interest.dateContractSigned).toLocaleDateString('en-GB')
        : '';
      tokenMap['{{interest.notes}}'] = interest.notes ?? '';
      // Berth number from interest if berthId not separately provided
      if (interest.berthId && !context.berthId) {
        const interestBerth = await db.query.berths.findFirst({
          where: eq(berths.id, interest.berthId),
        });
        tokenMap['{{interest.berthNumber}}'] = interestBerth?.mooringNumber ?? '';
        tokenMap['{{berth.mooringNumber}}'] = interestBerth?.mooringNumber ?? '';
      } else {
        // When context.berthId is supplied, the berth section below overwrites
        // this placeholder with the real mooring number (berth tokens have not
        // been populated yet at this point, so the lookup resolves to '').
        tokenMap['{{interest.berthNumber}}'] = context.berthId
          ? tokenMap['{{berth.mooringNumber}}'] ?? ''
          : '';
      }
    }
  }
  // Berth tokens — only populated when the berth belongs to this port.
  if (context.berthId) {
    const berth = await db.query.berths.findFirst({
      where: eq(berths.id, context.berthId),
    });
    if (berth && berth.portId === context.portId) {
      tokenMap['{{berth.mooringNumber}}'] = berth.mooringNumber;
      tokenMap['{{berth.area}}'] = berth.area ?? '';
      tokenMap['{{berth.status}}'] = berth.status;
      tokenMap['{{berth.price}}'] = berth.price ? String(berth.price) : '';
      tokenMap['{{berth.priceCurrency}}'] = berth.priceCurrency;
      tokenMap['{{berth.lengthFt}}'] = berth.lengthFt ? String(berth.lengthFt) : '';
      tokenMap['{{berth.widthFt}}'] = berth.widthFt ? String(berth.widthFt) : '';
      tokenMap['{{berth.tenureType}}'] = berth.tenureType;
      tokenMap['{{berth.tenureYears}}'] = berth.tenureYears ? String(berth.tenureYears) : '';
      tokenMap['{{interest.berthNumber}}'] = berth.mooringNumber;
    }
  }
  // BR-140: Check required merge fields have values
  const missing: string[] = [];
  for (const [_category, fields] of Object.entries(MERGE_FIELDS)) {
    for (const field of fields) {
      if (field.required) {
        const value = tokenMap[field.token];
        if (value !== undefined && value.trim() === '') {
          missing.push(field.label);
        }
      }
    }
  }
  if (missing.length > 0) {
    throw new ValidationError(
      `Missing required merge field values: ${missing.join(', ')}`,
    );
  }
  // Interpolate all tokens. split/join replaces the previous RegExp approach,
  // which only escaped braces: the unescaped '.' inside each token matched
  // any character, and '$' sequences in substituted values were interpreted
  // as replacement patterns by String.prototype.replace.
  let resolved = template.bodyHtml;
  for (const [token, value] of Object.entries(tokenMap)) {
    resolved = resolved.split(token).join(value);
  }
  return resolved;
}
// ─── Generate From Template ───────────────────────────────────────────────────
/**
 * BR-142: Resolve template → HTML → PDF. Store in MinIO + create file/document records.
 *
 * Pipeline: resolve merge fields (may throw ValidationError via
 * resolveTemplate), strip HTML to plain text, render a three-field pdfme
 * page, upload the PDF, then insert a `files` row and a draft `documents`
 * row referencing it. Emits `document:created` and writes an audit entry.
 *
 * @param templateId Template to render (scoped to portId).
 * @param portId     Tenant scope for the template, storage path and records.
 * @param context    Entity ids used to resolve merge fields; clientId and
 *                   interestId are also copied onto the created records.
 * @param meta       Actor/request info for the audit log.
 * @returns The inserted document and file rows.
 */
export async function generateFromTemplate(
  templateId: string,
  portId: string,
  context: GenerateInput,
  meta: AuditMeta,
): Promise<{ document: any; file: any }> {
  const template = await getTemplateById(templateId, portId);
  const resolvedHtml = await resolveTemplate(templateId, { ...context, portId });
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Wrap HTML in a minimal full-page document for pdfme text block
  const wrappedContent = resolvedHtml
    .replace(/<[^>]+>/g, ' ') // strip HTML tags for plain-text PDF rendering
    .replace(/\s+/g, ' ')
    .trim();
  // Use a simple single-field pdfme template for the HTML body
  // (positions/sizes are in pdfme's page units; 'body' gets the bulk of
  // an A4-ish page between the header and the footer timestamp).
  const pdfTemplate = {
    basePdf: 'BLANK_PDF' as any,
    schemas: [
      [
        {
          name: 'portName',
          type: 'text' as const,
          position: { x: 20, y: 15 },
          width: 170,
          height: 10,
          fontSize: 14,
        },
        {
          name: 'body',
          type: 'text' as const,
          position: { x: 20, y: 30 },
          width: 170,
          height: 230,
          fontSize: 9,
        },
        {
          name: 'generatedAt',
          type: 'text' as const,
          position: { x: 20, y: 275 },
          width: 170,
          height: 6,
          fontSize: 7,
        },
      ],
    ],
  };
  // NOTE(review): port name and template name are concatenated with no
  // separator here ("Port NimaraEOI…") — confirm a separator was intended.
  const pdfBytes = await generatePdf(pdfTemplate, [
    {
      portName: `${port?.name ?? 'Port Nimara'}${template.name}`,
      body: wrappedContent,
      generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
    },
  ]);
  // Store in MinIO; fileId is minted client-side so the storage path and
  // the eventual DB row can share an identifier.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(
    port?.slug ?? portId,
    'document-templates',
    templateId,
    fileId,
    'pdf',
  );
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    Buffer.from(pdfBytes),
    pdfBytes.byteLength,
    { 'Content-Type': 'application/pdf' },
  );
  // Create file record
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: context.clientId ?? null,
      filename: `${template.name.toLowerCase().replace(/\s+/g, '-')}.pdf`,
      originalName: `${template.name}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBytes.byteLength),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'correspondence',
      uploadedBy: meta.userId,
    })
    .returning();
  // Create document record (draft; signing flows update it later)
  const [documentRecord] = await db
    .insert(documents)
    .values({
      portId,
      clientId: context.clientId ?? null,
      interestId: context.interestId ?? null,
      documentType: template.templateType,
      title: template.name,
      status: 'draft',
      fileId: fileRecord!.id,
      isManualUpload: false,
      createdBy: meta.userId,
    })
    .returning();
  // Fire-and-forget audit entry; failure must not fail generation.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: documentRecord!.id,
    newValue: {
      templateId,
      templateName: template.name,
      clientId: context.clientId,
      interestId: context.interestId,
      berthId: context.berthId,
    },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:created', { documentId: documentRecord!.id });
  return { document: documentRecord!, file: fileRecord! };
}
// ─── Generate and Send ────────────────────────────────────────────────────────
/**
 * Generates a PDF document from a template and emails the resolved HTML to
 * `recipientEmail`. Document/file creation is authoritative; the email is
 * best-effort (failures are logged, not thrown).
 *
 * NOTE(review): resolveTemplate runs twice (once inside
 * generateFromTemplate, once here for the email body) — redundant but
 * necessary while generateFromTemplate does not return the resolved HTML.
 *
 * @returns The document and file rows created by generateFromTemplate.
 */
export async function generateAndSend(
  templateId: string,
  portId: string,
  context: GenerateInput,
  recipientEmail: string,
  meta: AuditMeta,
) {
  const { document, file } = await generateFromTemplate(templateId, portId, context, meta);
  const template = await getTemplateById(templateId, portId);
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Send email with PDF as attachment (base64 encoded body)
  try {
    const resolvedHtml = await resolveTemplate(templateId, { ...context, portId });
    // NOTE(review): the from-address is derived from SMTP_HOST (e.g.
    // noreply@smtp.example.com) — confirm this is the intended sender domain.
    await sendEmail(
      recipientEmail,
      template.name,
      `<p>Please find the attached document: <strong>${template.name}</strong></p><hr/>${resolvedHtml}`,
      `${port?.name ?? 'Port Nimara'} <noreply@${env.SMTP_HOST}>`,
    );
  } catch (err) {
    logger.error({ err, templateId, recipientEmail }, 'Failed to send template email');
    // Don't throw — document was created successfully; email failure is non-fatal
  }
  // Audit the send attempt against the template (the generated document
  // already has its own 'create' audit entry from generateFromTemplate).
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'documentTemplate',
    entityId: templateId,
    metadata: { action: 'generate_and_send', recipientEmail },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return { document, file };
}
// ─── Generate and Sign ────────────────────────────────────────────────────────
/**
 * Generates a PDF from a template and immediately sends it for e-signature
 * via Documenso.
 *
 * Flow: generate + store PDF → stream it back from MinIO → create a
 * Documenso document with the given signers → dispatch for signing →
 * persist the Documenso id and 'sent' status on our document row.
 *
 * @param signers Ordered signer list forwarded to Documenso as-is.
 * @returns The (locally patched) document row and its file row.
 */
export async function generateAndSign(
  templateId: string,
  portId: string,
  context: GenerateInput,
  signers: GenerateAndSignInput['signers'],
  meta: AuditMeta,
) {
  const { document: documentRecord, file } = await generateFromTemplate(
    templateId,
    portId,
    context,
    meta,
  );
  const template = await getTemplateById(templateId, portId);
  // Fetch PDF bytes from MinIO to send to Documenso
  const pdfStream = await minioClient.getObject(env.MINIO_BUCKET, file.storagePath);
  const chunks: Buffer[] = [];
  for await (const chunk of pdfStream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as ArrayBuffer));
  }
  const pdfBase64 = Buffer.concat(chunks).toString('base64');
  // Create Documenso document
  const documensoDoc = await documensoCreate(
    template.name,
    pdfBase64,
    signers.map((s) => ({
      name: s.name,
      email: s.email,
      role: s.role,
      signingOrder: s.signingOrder,
    })),
  );
  // Send document for signing
  await documensoSend(documensoDoc.id);
  // Update our document record with Documenso ID and status
  await db
    .update(documents)
    .set({
      documensoId: documensoDoc.id,
      status: 'sent',
      updatedAt: new Date(),
    })
    .where(eq(documents.id, documentRecord.id));
  // Fire-and-forget audit entry; failures here must not fail the flow.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: documentRecord.id,
    newValue: { status: 'sent', documensoId: documensoDoc.id },
    metadata: { action: 'generate_and_sign', signerCount: signers.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:updated', { documentId: documentRecord.id, changedFields: ['status', 'documensoId'] });
  // Return the record patched in-memory to match what was just persisted.
  return { document: { ...documentRecord, documensoId: documensoDoc.id, status: 'sent' }, file };
}

View File

@@ -0,0 +1,754 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { documents, documentSigners, documentEvents, files } from '@/lib/db/schema/documents';
import { interests } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { NotFoundError, ValidationError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, getPresignedUrl, buildStoragePath } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { eoiTemplate, buildEoiInputs } from '@/lib/pdf/templates/eoi-template';
import { evaluateRule } from '@/lib/services/berth-rules-engine';
import {
createDocument as documensoCreate,
sendDocument as documensoSend,
getDocument as documensoGet,
sendReminder as documensoRemind,
downloadSignedPdf,
} from '@/lib/services/documenso-client';
import type {
CreateDocumentInput,
UpdateDocumentInput,
ListDocumentsInput,
GenerateEoiInput,
} from '@/lib/validators/documents';
// ─── Types ────────────────────────────────────────────────────────────────────
// Actor and request context attached to every audit-log entry written by
// this service.
interface AuditMeta {
  userId: string;    // acting user's id
  portId: string;    // tenant (port) scope of the action
  ipAddress: string; // request origin IP, recorded for the audit trail
  userAgent: string; // request user-agent string, recorded for the audit trail
}
// ─── List ─────────────────────────────────────────────────────────────────────
export async function listDocuments(portId: string, query: ListDocumentsInput) {
  const { page, limit, sort, order, search, interestId, clientId, documentType, status } = query;

  // Optional equality filters — each is included only when supplied.
  const filters = [
    ...(interestId ? [eq(documents.interestId, interestId)] : []),
    ...(clientId ? [eq(documents.clientId, clientId)] : []),
    ...(documentType ? [eq(documents.documentType, documentType)] : []),
    ...(status ? [eq(documents.status, status)] : []),
  ];

  // Map the requested sort key to a column; anything unrecognised falls
  // back to createdAt.
  const sortableColumns = {
    title: documents.title,
    status: documents.status,
    documentType: documents.documentType,
  } as const;
  const sortColumn =
    sort && sort in sortableColumns
      ? sortableColumns[sort as keyof typeof sortableColumns]
      : documents.createdAt;

  return buildListQuery({
    table: documents,
    portIdColumn: documents.portId,
    portId,
    idColumn: documents.id,
    updatedAtColumn: documents.updatedAt,
    searchColumns: [documents.title],
    searchTerm: search,
    filters,
    sort: sort ? { column: sortColumn, direction: order } : undefined,
    page,
    pageSize: limit,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
export async function getDocumentById(id: string, portId: string) {
  // Port scoping lives in the WHERE clause, so a document that belongs to a
  // different port behaves exactly like a missing one.
  const scoped = and(eq(documents.id, id), eq(documents.portId, portId));
  const doc = await db.query.documents.findFirst({
    where: scoped,
    with: { signers: true },
  });
  if (doc) return doc;
  throw new NotFoundError('Document');
}
// ─── Create ───────────────────────────────────────────────────────────────────
export async function createDocument(
  portId: string,
  data: CreateDocumentInput,
  meta: AuditMeta,
) {
  // New documents always start as drafts; signing flows advance the status.
  const rows = await db
    .insert(documents)
    .values({
      portId,
      interestId: data.interestId ?? null,
      clientId: data.clientId ?? null,
      documentType: data.documentType,
      title: data.title,
      notes: data.notes ?? null,
      status: 'draft',
      createdBy: meta.userId,
    })
    .returning();
  const created = rows[0]!;

  // Fire-and-forget audit entry; failures must not fail the create.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: created.id,
    newValue: { documentType: created.documentType, title: created.title },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:created', { documentId: created.id });
  return created;
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Updates a document's mutable fields (title, notes, status).
 *
 * Only whitelisted fields from the input are applied; the audit log records
 * the full before/after snapshots.
 *
 * @throws NotFoundError (via getDocumentById) if the document is missing or
 *         belongs to another port.
 */
export async function updateDocument(
  id: string,
  portId: string,
  data: UpdateDocumentInput,
  meta: AuditMeta,
) {
  const existing = await getDocumentById(id, portId);
  // Whitelist updatable columns; ignore anything else on the input.
  const updates: Partial<typeof documents.$inferInsert> = {};
  if (data.title !== undefined) updates.title = data.title;
  if (data.notes !== undefined) updates.notes = data.notes;
  if (data.status !== undefined) updates.status = data.status;
  updates.updatedAt = new Date();
  const [updated] = await db
    .update(documents)
    .set(updates)
    .where(and(eq(documents.id, id), eq(documents.portId, portId)))
    .returning();
  // (fix) Removed a dead `diffEntity(existing, updated)` call whose result
  // was never used — the audit entry below stores the full snapshots.
  // NOTE(review): updateTemplate in the templates service passes a diff as
  // oldValue instead; confirm which audit shape is canonical.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:updated', { documentId: id });
  return updated!;
}
// ─── Delete ───────────────────────────────────────────────────────────────────
export async function deleteDocument(id: string, portId: string, meta: AuditMeta) {
  // Resolve first: throws NotFoundError for missing/foreign-port documents
  // and provides the snapshot recorded in the audit entry.
  const doomed = await getDocumentById(id, portId);

  // Documents mid-signing must be cancelled through the signing flow, not
  // deleted out from under Documenso.
  const inSigning = doomed.status === 'sent' || doomed.status === 'partially_signed';
  if (inSigning) {
    throw new ConflictError('Cannot delete a document that is currently in signing process');
  }

  const scoped = and(eq(documents.id, id), eq(documents.portId, portId));
  await db.delete(documents).where(scoped);

  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'document',
    entityId: id,
    oldValue: { title: doomed.title, status: doomed.status },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:deleted', { documentId: id });
}
// ─── Generate EOI (BR-020) ────────────────────────────────────────────────────
/**
 * BR-020: Generates an Expression of Interest PDF for an interest.
 *
 * Validates prerequisites (client full name, email contact, yacht
 * dimensions, linked berth) and collects ALL missing ones into a single
 * ValidationError. On success: renders the EOI pdfme template, stores the
 * PDF in MinIO, and inserts `files` + draft `documents` rows.
 *
 * @throws NotFoundError   when interest/client/berth/port cannot be resolved.
 * @throws ValidationError listing every missing prerequisite field.
 * @returns The inserted draft document row.
 */
export async function generateEoi(interestId: string, portId: string, meta: AuditMeta) {
  // Fetch interest + related data
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) throw new NotFoundError('Interest');
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, interest.clientId),
    with: { contacts: true },
  });
  if (!client) throw new NotFoundError('Client');
  // BR-020: Check prerequisites — accumulate all failures before throwing
  // so the caller can surface them together.
  const missing: Array<{ field: string; message: string }> = [];
  if (!client.fullName) missing.push({ field: 'client.fullName', message: 'Client must have a full name' });
  const emailContact = (client.contacts as Array<{ channel: string; value: string }> | undefined)?.find(
    (c) => c.channel === 'email',
  );
  if (!emailContact?.value) missing.push({ field: 'client.email', message: 'Client must have an email contact' });
  if (!client.yachtLengthFt && !client.yachtLengthM) {
    missing.push({ field: 'client.yachtDimensions', message: 'Client must have yacht dimensions' });
  }
  if (!interest.berthId) missing.push({ field: 'interest.berthId', message: 'Interest must have a berth linked' });
  if (missing.length > 0) {
    throw new ValidationError('Missing prerequisites for EOI generation', missing);
  }
  // berthId is guaranteed non-null here by the prerequisite check above.
  const [berth, port] = await Promise.all([
    db.query.berths.findFirst({ where: eq(berths.id, interest.berthId!) }),
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
  ]);
  if (!berth) throw new NotFoundError('Berth');
  if (!port) throw new NotFoundError('Port');
  // Generate PDF
  const inputs = buildEoiInputs(
    interest as unknown as Record<string, unknown>,
    { ...client, contacts: client.contacts } as unknown as Record<string, unknown>,
    berth as unknown as Record<string, unknown>,
    port as unknown as Record<string, unknown>,
  );
  const pdfBytes = await generatePdf(eoiTemplate, [inputs]);
  const pdfBuffer = Buffer.from(pdfBytes);
  // Store in MinIO
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port.slug, 'eoi', interestId, fileId, 'pdf');
  await minioClient.putObject(env.MINIO_BUCKET, storagePath, pdfBuffer, pdfBuffer.length, {
    'Content-Type': 'application/pdf',
  });
  // Create files record
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: client.id,
      filename: `eoi-${interestId}.pdf`,
      originalName: `eoi-${interestId}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBuffer.length),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'eoi',
      uploadedBy: meta.userId,
    })
    .returning();
  // Create document record (draft; sendForSigning advances it)
  const [doc] = await db
    .insert(documents)
    .values({
      portId,
      interestId,
      clientId: client.id,
      documentType: 'eoi',
      title: `EOI ${client.fullName} / ${berth.mooringNumber}`,
      status: 'draft',
      fileId: fileRecord!.id,
      createdBy: meta.userId,
    })
    .returning();
  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'document',
    entityId: doc!.id,
    newValue: { documentType: 'eoi', interestId },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:created', { documentId: doc!.id, type: 'eoi' });
  return doc!;
}
// ─── Send for Signing (BR-021) ────────────────────────────────────────────────
/**
 * BR-021: Sends a draft document out for e-signature via Documenso with the
 * standard three-party signing chain: client (1) → developer (2) →
 * sales/approver (3).
 *
 * Creates local signer rows, streams the PDF from MinIO to Documenso,
 * dispatches it, copies signing URLs back onto the signer rows, marks the
 * document 'sent', and (for linked interests) stamps EOI-sent state and
 * fires the berth rules engine.
 *
 * NOTE(review): developer/sales addresses are synthesized from the port
 * slug (`developer@<slug>.com`) — confirm these domains are real mailboxes.
 *
 * @throws ValidationError when the document has no file/client/email.
 * @throws ConflictError   when the document is not in 'draft' status.
 * @returns The refreshed document row (with signers).
 */
export async function sendForSigning(documentId: string, portId: string, meta: AuditMeta) {
  const doc = await getDocumentById(documentId, portId);
  if (!doc.fileId) throw new ValidationError('Document has no associated file');
  if (doc.status !== 'draft') throw new ConflictError('Document is not in draft status');
  // Fetch interest + client to build signers
  const interest = doc.interestId
    ? await db.query.interests.findFirst({ where: eq(interests.id, doc.interestId) })
    : null;
  const client = doc.clientId
    ? await db.query.clients.findFirst({
        where: eq(clients.id, doc.clientId),
        with: { contacts: true },
      })
    : null;
  if (!client) throw new ValidationError('Document has no associated client');
  const emailContact = (client.contacts as Array<{ channel: string; value: string }> | undefined)?.find(
    (c) => c.channel === 'email',
  );
  if (!emailContact?.value) throw new ValidationError('Client has no email contact');
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  if (!port) throw new NotFoundError('Port');
  // BR-021: Create 3 signers — client (1), developer (2), sales/approver (3)
  const signerRecords = await db
    .insert(documentSigners)
    .values([
      {
        documentId,
        signerName: client.fullName,
        signerEmail: emailContact.value,
        signerRole: 'client',
        signingOrder: 1,
        status: 'pending',
      },
      {
        documentId,
        signerName: port.name,
        signerEmail: `developer@${port.slug}.com`,
        signerRole: 'developer',
        signingOrder: 2,
        status: 'pending',
      },
      {
        documentId,
        signerName: `${port.name} Sales`,
        signerEmail: `sales@${port.slug}.com`,
        signerRole: 'approver',
        signingOrder: 3,
        status: 'pending',
      },
    ])
    .returning();
  // Get file from MinIO and base64 encode
  const fileRecord = await db.query.files.findFirst({ where: eq(files.id, doc.fileId) });
  if (!fileRecord) throw new NotFoundError('File');
  const fileStream = await minioClient.getObject(env.MINIO_BUCKET, fileRecord.storagePath);
  const chunks: Buffer[] = [];
  for await (const chunk of fileStream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  const pdfBuffer = Buffer.concat(chunks);
  const pdfBase64 = pdfBuffer.toString('base64');
  // Create document in Documenso + send. The recipient list must mirror the
  // local signer rows above so the email-matching loop below can pair them.
  const documensoDoc = await documensoCreate(doc.title, pdfBase64, [
    { name: client.fullName, email: emailContact.value, role: 'SIGNER', signingOrder: 1 },
    { name: port.name, email: `developer@${port.slug}.com`, role: 'SIGNER', signingOrder: 2 },
    { name: `${port.name} Sales`, email: `sales@${port.slug}.com`, role: 'SIGNER', signingOrder: 3 },
  ]);
  await documensoSend(documensoDoc.id);
  // Update signer records with signing URLs from Documenso response,
  // matched by email address.
  for (const docSigner of documensoDoc.recipients) {
    const localSigner = signerRecords.find((s) => s.signerEmail === docSigner.email);
    if (localSigner) {
      await db
        .update(documentSigners)
        .set({
          signingUrl: docSigner.signingUrl ?? null,
          embeddedUrl: docSigner.embeddedUrl ?? null,
        })
        .where(eq(documentSigners.id, localSigner.id));
    }
  }
  // Update document status
  await db
    .update(documents)
    .set({ status: 'sent', documensoId: documensoDoc.id, updatedAt: new Date() })
    .where(eq(documents.id, documentId));
  // Update interest if linked
  if (interest) {
    await db
      .update(interests)
      .set({
        documensoId: documensoDoc.id,
        dateEoiSent: new Date(),
        eoiStatus: 'waiting_for_signatures',
        updatedAt: new Date(),
      })
      .where(eq(interests.id, interest.id));
    // Trigger berth rules (fire-and-forget)
    void evaluateRule('eoi_sent', interest.id, portId, meta);
  }
  // Create document event
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'sent',
    eventData: { documensoId: documensoDoc.id },
  });
  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: documentId,
    newValue: { status: 'sent', documensoId: documensoDoc.id },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:sent', { documentId, type: doc.documentType, signerCount: 3, documensoId: documensoDoc.id });
  return await getDocumentById(documentId, portId);
}
// ─── Upload Signed Manually (BR-013) ─────────────────────────────────────────
/**
 * BR-013: Attaches a manually-obtained signed PDF to a document, bypassing
 * the Documenso e-signature flow.
 *
 * Stores the uploaded file in MinIO, records it in `files`, marks the
 * document 'completed' with `isManualUpload = true`, updates any linked EOI
 * interest to 'signed', logs an event, and notifies the uploader.
 *
 * NOTE(review): no status guard here — an already-completed or in-signing
 * document can be overwritten; confirm this is intended.
 *
 * @param fileData In-memory upload (buffer + original name/mime/size).
 * @returns The refreshed document row.
 */
export async function uploadSignedManually(
  documentId: string,
  portId: string,
  fileData: { buffer: Buffer; originalName: string; mimeType: string; size: number },
  meta: AuditMeta,
) {
  const doc = await getDocumentById(documentId, portId);
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  if (!port) throw new NotFoundError('Port');
  // Store the signed file
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port.slug, 'eoi-signed', documentId, fileId, 'pdf');
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    fileData.buffer,
    fileData.size,
    { 'Content-Type': fileData.mimeType },
  );
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      clientId: doc.clientId ?? null,
      filename: fileData.originalName,
      originalName: fileData.originalName,
      mimeType: fileData.mimeType,
      sizeBytes: String(fileData.size),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'eoi',
      uploadedBy: meta.userId,
    })
    .returning();
  // Update document — signedFileId is separate from the original fileId.
  await db
    .update(documents)
    .set({
      signedFileId: fileRecord!.id,
      status: 'completed',
      isManualUpload: true,
      updatedAt: new Date(),
    })
    .where(eq(documents.id, documentId));
  // Update interest if linked and type is eoi. The interest update always
  // runs for linked EOIs; only the rules-engine trigger is gated on the
  // interest actually existing.
  if (doc.interestId && doc.documentType === 'eoi') {
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, doc.interestId),
    });
    await db
      .update(interests)
      .set({ eoiStatus: 'signed', dateEoiSigned: new Date(), updatedAt: new Date() })
      .where(eq(interests.id, doc.interestId));
    if (interest) {
      void evaluateRule('eoi_signed', doc.interestId, portId, meta);
    }
  }
  await db.insert(documentEvents).values({
    documentId,
    eventType: 'completed',
    eventData: { isManualUpload: true, fileId: fileRecord!.id },
  });
  // Fire-and-forget audit entry.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'document',
    entityId: documentId,
    newValue: { status: 'completed', isManualUpload: true },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'document:completed', { documentId });
  // Notify creator about manual completion (dynamic import avoids a static
  // dependency cycle with the notifications service).
  void import('@/lib/services/notifications.service').then(({ createNotification }) =>
    createNotification({
      portId,
      userId: meta.userId,
      type: 'document_signed',
      title: 'Document marked as signed',
      description: `"${doc.title}" has been manually uploaded as signed`,
      link: `/documents/${documentId}`,
      entityType: 'document',
      entityId: documentId,
      dedupeKey: `document:${documentId}:completed`,
    }),
  );
  return await getDocumentById(documentId, portId);
}
// ─── List Signers ─────────────────────────────────────────────────────────────
export async function listDocumentSigners(documentId: string, portId: string) {
  // Access check only: throws NotFoundError for missing/foreign-port docs.
  await getDocumentById(documentId, portId);
  const signers = await db.query.documentSigners.findMany({
    where: eq(documentSigners.documentId, documentId),
    orderBy: (table, { asc }) => [asc(table.signingOrder)],
  });
  return signers;
}
// ─── List Events ──────────────────────────────────────────────────────────────
export async function listDocumentEvents(documentId: string, portId: string) {
  // Access check only: throws NotFoundError for missing/foreign-port docs.
  await getDocumentById(documentId, portId);
  const events = await db.query.documentEvents.findMany({
    where: eq(documentEvents.documentId, documentId),
    orderBy: (table, { desc }) => [desc(table.createdAt)],
  });
  return events;
}
// ─── Webhook Handlers ─────────────────────────────────────────────────────────
/**
 * Documenso webhook: one recipient has signed.
 *
 * Looks the document up by its *Documenso* id, marks the matching signer
 * row 'signed', bumps EOI documents from 'sent' to 'partially_signed',
 * records a 'signed' event, and emits a socket update. Unknown Documenso
 * ids are logged and ignored (webhooks may race record creation).
 */
export async function handleRecipientSigned(eventData: {
  documentId: string;
  recipientEmail: string;
  signatureHash?: string;
}) {
  const doc = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!doc) {
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }
  // Update signer status — matched by (document, email) pair.
  const [signer] = await db
    .update(documentSigners)
    .set({ status: 'signed', signedAt: new Date() })
    .where(
      and(
        eq(documentSigners.documentId, doc.id),
        eq(documentSigners.signerEmail, eventData.recipientEmail),
      ),
    )
    .returning();
  // Update document to partially_signed if eoi type
  if (doc.documentType === 'eoi' && doc.status === 'sent') {
    await db
      .update(documents)
      .set({ status: 'partially_signed', updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  }
  // signer may be undefined if no local row matched the webhook email.
  await db.insert(documentEvents).values({
    documentId: doc.id,
    eventType: 'signed',
    signerId: signer?.id ?? null,
    signatureHash: eventData.signatureHash ?? null,
    eventData: { recipientEmail: eventData.recipientEmail },
  });
  emitToRoom(`port:${doc.portId}`, 'document:signer:signed', {
    documentId: doc.id,
    signerEmail: eventData.recipientEmail,
  });
}
/**
 * Documenso webhook: all recipients have signed.
 *
 * BR-022: downloads the countersigned PDF, stores it in MinIO and records
 * it in `files`; if the download/store fails the document is still marked
 * 'completed' (without a signed file). Linked EOI interests are stamped
 * 'signed' and the berth rules engine is triggered as the system actor.
 * Finishes with an event row, a socket emit, and a creator notification.
 */
export async function handleDocumentCompleted(eventData: {
  documentId: string;
}) {
  const doc = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!doc) {
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }
  // BR-022: Download signed PDF and store in MinIO
  const port = await db.query.ports.findFirst({ where: eq(ports.id, doc.portId) });
  if (!port) {
    logger.error({ portId: doc.portId }, 'Port not found during document completion');
    return;
  }
  try {
    const signedPdfBuffer = await downloadSignedPdf(eventData.documentId);
    const fileId = crypto.randomUUID();
    const storagePath = buildStoragePath(port.slug, 'eoi-signed', doc.id, fileId, 'pdf');
    await minioClient.putObject(
      env.MINIO_BUCKET,
      storagePath,
      signedPdfBuffer,
      signedPdfBuffer.length,
      { 'Content-Type': 'application/pdf' },
    );
    const [fileRecord] = await db
      .insert(files)
      .values({
        portId: doc.portId,
        clientId: doc.clientId ?? null,
        filename: `signed-${doc.id}.pdf`,
        originalName: `signed-${doc.id}.pdf`,
        mimeType: 'application/pdf',
        sizeBytes: String(signedPdfBuffer.length),
        storagePath,
        storageBucket: env.MINIO_BUCKET,
        category: 'eoi',
        uploadedBy: 'system',
      })
      .returning();
    await db
      .update(documents)
      .set({ status: 'completed', signedFileId: fileRecord!.id, updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  } catch (err) {
    // Best-effort: completion still proceeds without the signed file.
    logger.error({ err, documentId: doc.id }, 'Failed to download/store signed PDF');
    await db
      .update(documents)
      .set({ status: 'completed', updatedAt: new Date() })
      .where(eq(documents.id, doc.id));
  }
  // Update interest if eoi type. The interest update runs for any linked
  // EOI; only the rules-engine trigger is gated on the row existing.
  if (doc.interestId && doc.documentType === 'eoi') {
    const interest = await db.query.interests.findFirst({
      where: eq(interests.id, doc.interestId),
    });
    await db
      .update(interests)
      .set({ eoiStatus: 'signed', dateEoiSigned: new Date(), updatedAt: new Date() })
      .where(eq(interests.id, doc.interestId));
    if (interest) {
      // Webhook context: attribute the rules run to the system actor.
      void evaluateRule('eoi_signed', doc.interestId, doc.portId, {
        userId: 'system',
        portId: doc.portId,
        ipAddress: '0.0.0.0',
        userAgent: 'webhook',
      });
    }
  }
  await db.insert(documentEvents).values({
    documentId: doc.id,
    eventType: 'completed',
    eventData: { documensoId: eventData.documentId },
  });
  emitToRoom(`port:${doc.portId}`, 'document:completed', { documentId: doc.id });
  // Notify the document creator about completion (dynamic import avoids a
  // static cycle with the notifications service).
  if (doc.createdBy && doc.createdBy !== 'system') {
    void import('@/lib/services/notifications.service').then(({ createNotification }) =>
      createNotification({
        portId: doc.portId,
        userId: doc.createdBy!,
        type: 'document_signed',
        title: 'Document fully signed',
        description: `"${doc.title}" has been signed by all parties`,
        link: `/documents/${doc.id}`,
        entityType: 'document',
        entityId: doc.id,
        dedupeKey: `document:${doc.id}:completed`,
      }),
    );
  }
}
/**
 * Documenso webhook: the signing window lapsed before completion.
 * Marks the document (and any linked EOI interest) 'expired', records an
 * event, and broadcasts to the port room. Unknown ids are logged and ignored.
 */
export async function handleDocumentExpired(eventData: {
  documentId: string;
}) {
  // eventData.documentId is the *Documenso* id, not our primary key.
  const doc = await db.query.documents.findFirst({
    where: eq(documents.documensoId, eventData.documentId),
  });
  if (!doc) {
    logger.warn({ documensoId: eventData.documentId }, 'Document not found for webhook');
    return;
  }

  await db
    .update(documents)
    .set({ status: 'expired', updatedAt: new Date() })
    .where(eq(documents.id, doc.id));

  const linkedEoi = Boolean(doc.interestId) && doc.documentType === 'eoi';
  if (linkedEoi) {
    await db
      .update(interests)
      .set({ eoiStatus: 'expired', updatedAt: new Date() })
      .where(eq(interests.id, doc.interestId!));
  }

  await db.insert(documentEvents).values({
    documentId: doc.id,
    eventType: 'expired',
    eventData: { documensoId: eventData.documentId },
  });
  emitToRoom(`port:${doc.portId}`, 'document:expired', { documentId: doc.id });
}

View File

@@ -0,0 +1,173 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts } from '@/lib/db/schema/email';
import { encrypt, decrypt } from '@/lib/utils/encryption';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ForbiddenError } from '@/lib/errors';
import type { ConnectAccountInput, ToggleAccountInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Actor/request context recorded alongside audit log entries. */
interface AuditMeta {
  userId: string; // acting user's id
  portId: string; // tenant (port) scope of the action
  ipAddress: string; // request origin IP, kept for the audit trail
  userAgent: string; // request user-agent string, kept for the audit trail
}
/** An email account row with the encrypted credentials column removed. */
type AccountWithoutCredentials = Omit<typeof emailAccounts.$inferSelect, 'credentialsEnc'>;
// ─── Helpers ──────────────────────────────────────────────────────────────────
/**
 * Drop the encrypted credentials blob from an account row so the remainder
 * can be returned to API callers safely.
 */
function stripCredentials(
  account: typeof emailAccounts.$inferSelect,
): AccountWithoutCredentials {
  const { credentialsEnc, ...withoutSecrets } = account;
  void credentialsEnc; // intentionally discarded — secrets never leave this module
  return withoutSecrets;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * List the email accounts a user has connected within a port.
 * Encrypted credentials are stripped from every row before returning.
 */
export async function listAccounts(
  userId: string,
  portId: string,
): Promise<AccountWithoutCredentials[]> {
  const rows = await db
    .select()
    .from(emailAccounts)
    .where(and(eq(emailAccounts.portId, portId), eq(emailAccounts.userId, userId)));
  return rows.map((row) => stripCredentials(row));
}
// ─── Connect ──────────────────────────────────────────────────────────────────
/**
 * Connect a new SMTP/IMAP account for a user. The username/password pair is
 * encrypted before persistence and never returned; the audit write is
 * fire-and-forget.
 *
 * @returns the new account row without its credentials column
 */
export async function connectAccount(
  userId: string,
  portId: string,
  data: ConnectAccountInput,
  audit: AuditMeta,
): Promise<AccountWithoutCredentials> {
  // Encrypt the secret material before it ever touches the database.
  const credentialsEnc = encrypt(
    JSON.stringify({ username: data.username, password: data.password }),
  );
  const [created] = await db
    .insert(emailAccounts)
    .values({
      userId,
      portId,
      provider: data.provider,
      emailAddress: data.emailAddress,
      smtpHost: data.smtpHost,
      smtpPort: data.smtpPort,
      imapHost: data.imapHost,
      imapPort: data.imapPort,
      credentialsEnc,
      isActive: true,
    })
    .returning();
  if (!created) throw new Error('Failed to insert email account');
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'create',
    entityType: 'email_account',
    entityId: created.id,
    metadata: { emailAddress: data.emailAddress, provider: data.provider },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
  return stripCredentials(created);
}
// ─── Toggle ───────────────────────────────────────────────────────────────────
/**
 * Enable or disable an email account. Only the owning user may toggle it.
 *
 * @throws NotFoundError  when no account has this id
 * @throws ForbiddenError when the caller does not own the account
 */
export async function toggleAccount(
  accountId: string,
  userId: string,
  data: ToggleAccountInput,
): Promise<AccountWithoutCredentials> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) throw new NotFoundError('Email account');
  if (account.userId !== userId) {
    throw new ForbiddenError('You do not own this email account');
  }
  const [updated] = await db
    .update(emailAccounts)
    .set({ isActive: data.isActive, updatedAt: new Date() })
    .where(eq(emailAccounts.id, accountId))
    .returning();
  if (!updated) throw new Error('Failed to update email account');
  return stripCredentials(updated);
}
// ─── Disconnect ───────────────────────────────────────────────────────────────
/**
 * Permanently delete an email account. Only the owning user may disconnect;
 * the deletion is audited (fire-and-forget).
 *
 * @throws NotFoundError  when no account has this id
 * @throws ForbiddenError when the caller does not own the account
 */
export async function disconnectAccount(
  accountId: string,
  userId: string,
  audit: AuditMeta,
): Promise<void> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) throw new NotFoundError('Email account');
  if (account.userId !== userId) {
    throw new ForbiddenError('You do not own this email account');
  }
  await db.delete(emailAccounts).where(eq(emailAccounts.id, accountId));
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'delete',
    entityType: 'email_account',
    entityId: accountId,
    metadata: { emailAddress: account.emailAddress },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
}
// ─── Get Decrypted Credentials (INTERNAL ONLY) ────────────────────────────────
/**
 * Decrypt and return an account's SMTP/IMAP credentials.
 * INTERNAL ONLY — callers must never log or return these values.
 *
 * @throws NotFoundError when no account has this id
 */
export async function getDecryptedCredentials(
  accountId: string,
): Promise<{ username: string; password: string }> {
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) throw new NotFoundError('Email account');
  const parsed = JSON.parse(decrypt(account.credentialsEnc)) as {
    username: string;
    password: string;
  };
  return { username: parsed.username, password: parsed.password };
}

View File

@@ -0,0 +1,176 @@
import nodemailer from 'nodemailer';
import { and, eq, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts, emailMessages, emailThreads } from '@/lib/db/schema/email';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ForbiddenError } from '@/lib/errors';
import { getDecryptedCredentials } from '@/lib/services/email-accounts.service';
import type { ComposeEmailInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Actor/request context recorded alongside audit log entries. */
interface AuditMeta {
  userId: string; // acting user's id
  portId: string; // tenant (port) scope of the action
  ipAddress: string; // request origin IP, kept for the audit trail
  userAgent: string; // request user-agent string, kept for the audit trail
}
// ─── Send Email ───────────────────────────────────────────────────────────────
/**
 * Send an email from one of the user's connected accounts and persist the
 * outbound message into a thread.
 *
 * Order matters: the target thread (when replying into one) is validated
 * BEFORE anything is sent. Previously the thread lookup ran after
 * `transporter.sendMail()`, so a stale/foreign `threadId` threw
 * NotFoundError after the email had already been delivered — and the sent
 * message was never persisted. New threads are still created only after a
 * successful send so a transport failure cannot leave an empty thread.
 *
 * @throws NotFoundError  when the account or referenced thread cannot be found
 * @throws ForbiddenError when the account belongs to a different port
 */
export async function sendEmail(
  userId: string,
  portId: string,
  data: ComposeEmailInput,
  audit: AuditMeta,
) {
  // Verify the account belongs to the user
  const account = await db.query.emailAccounts.findFirst({
    where: and(
      eq(emailAccounts.id, data.accountId),
      eq(emailAccounts.userId, userId),
    ),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  if (account.portId !== portId) {
    throw new ForbiddenError('Email account does not belong to this port');
  }
  // Validate the target thread up-front (see doc comment above).
  let existingThreadId: string | null = null;
  if (data.threadId) {
    const existingThread = await db.query.emailThreads.findFirst({
      where: and(
        eq(emailThreads.id, data.threadId),
        eq(emailThreads.portId, portId),
      ),
    });
    if (!existingThread) {
      throw new NotFoundError('Email thread');
    }
    existingThreadId = existingThread.id;
  }
  // Decrypt credentials (INTERNAL — never logged or returned)
  const creds = await getDecryptedCredentials(data.accountId);
  // Build user-specific SMTP transporter (implicit TLS on 465, STARTTLS otherwise)
  const transporter = nodemailer.createTransport({
    host: account.smtpHost,
    port: account.smtpPort,
    secure: account.smtpPort === 465,
    auth: { user: creds.username, pass: creds.password },
  });
  // Resolve threading headers if replying
  let inReplyTo: string | undefined;
  let references: string | undefined;
  if (data.inReplyToMessageId) {
    inReplyTo = data.inReplyToMessageId;
    // Gather the full References chain from the (already validated) thread
    if (existingThreadId) {
      const existingMessages = await db
        .select({ messageIdHeader: emailMessages.messageIdHeader })
        .from(emailMessages)
        .where(eq(emailMessages.threadId, existingThreadId))
        .orderBy(emailMessages.sentAt);
      const refIds = existingMessages
        .map((m) => m.messageIdHeader)
        .filter((id): id is string => Boolean(id));
      if (refIds.length > 0) {
        references = refIds.join(' ');
      }
    }
  }
  // Send via the user's SMTP transporter
  const info = await transporter.sendMail({
    from: account.emailAddress,
    to: data.to.join(', '),
    cc: data.cc?.join(', '),
    subject: data.subject,
    html: data.bodyHtml,
    inReplyTo,
    references,
  });
  const sentMessageId: string =
    typeof info.messageId === 'string' ? info.messageId : String(info.messageId ?? '');
  // Resolve or create the thread. New threads are created only after a
  // successful send so a transport failure cannot leave an empty thread.
  let threadId: string;
  if (existingThreadId) {
    threadId = existingThreadId;
  } else {
    const newThreadRows = await db
      .insert(emailThreads)
      .values({
        portId,
        subject: data.subject,
        lastMessageAt: new Date(),
        messageCount: 0,
      })
      .returning();
    const newThread = newThreadRows[0];
    if (!newThread) throw new Error('Failed to create email thread');
    threadId = newThread.id;
  }
  const now = new Date();
  // Persist the outbound message
  const messageRows = await db
    .insert(emailMessages)
    .values({
      threadId,
      messageIdHeader: sentMessageId || null,
      fromAddress: account.emailAddress,
      toAddresses: data.to,
      ccAddresses: data.cc ?? null,
      subject: data.subject,
      bodyHtml: data.bodyHtml,
      direction: 'outbound',
      sentAt: now,
    })
    .returning();
  const message = messageRows[0];
  if (!message) throw new Error('Failed to persist outbound email message');
  // Update thread metadata; the counter increments in SQL to stay race-safe.
  await db
    .update(emailThreads)
    .set({
      lastMessageAt: now,
      messageCount: sql`${emailThreads.messageCount} + 1`,
      updatedAt: now,
    })
    .where(eq(emailThreads.id, threadId));
  void createAuditLog({
    userId: audit.userId,
    portId: audit.portId,
    action: 'create',
    entityType: 'email_message',
    entityId: message.id,
    metadata: {
      threadId,
      to: data.to,
      subject: data.subject,
      accountId: data.accountId,
    },
    ipAddress: audit.ipAddress,
    userAgent: audit.userAgent,
  });
  return { message, threadId };
}

View File

@@ -0,0 +1,73 @@
import { getQueue } from '@/lib/queue';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Parameters for requesting an AI-generated email draft (entity ids only — no PII). */
export interface DraftRequest {
  interestId: string; // interest the draft relates to
  clientId: string; // client the email is addressed to
  portId: string; // tenant (port) scope
  context: 'follow_up' | 'introduction' | 'stage_update' | 'general'; // draft intent
  additionalInstructions?: string; // optional free-form extra instructions
}
/** A completed draft as read back from the queue job's return value. */
export interface DraftResult {
  subject: string;
  body: string;
  generatedAt: Date; // parsed from the job's ISO timestamp string
}
// ─── Request draft (enqueues job) ─────────────────────────────────────────────
/**
 * Request an AI-generated email draft.
 * Enqueues a job on the 'ai' queue and returns its id for polling via
 * getEmailDraftResult(). The job payload contains ONLY entity ids and
 * context parameters — no PII.
 *
 * @throws Error when the queue does not assign the job an id
 */
export async function requestEmailDraft(
  userId: string,
  request: DraftRequest,
): Promise<{ jobId: string }> {
  const aiQueue = getQueue('ai');
  const job = await aiQueue.add('generate-email-draft', {
    // No PII — only IDs and context parameters
    interestId: request.interestId,
    clientId: request.clientId,
    portId: request.portId,
    context: request.context,
    additionalInstructions: request.additionalInstructions,
    requestedBy: userId,
  });
  // BullMQ types Job.id as optional; fail loudly instead of asserting with `!`.
  if (!job.id) {
    throw new Error('AI queue did not assign a job id');
  }
  return { jobId: job.id };
}
// ─── Poll for result ──────────────────────────────────────────────────────────
/**
 * Fetch the result of an email draft generation job.
 * Returns null while the job is missing, still running, or has no payload.
 */
export async function getEmailDraftResult(jobId: string): Promise<DraftResult | null> {
  const job = await getQueue('ai').getJob(jobId);
  if (!job) return null;
  if ((await job.getState()) !== 'completed') return null;
  const payload = job.returnvalue as
    | { subject: string; body: string; generatedAt: string }
    | undefined
    | null;
  if (!payload) return null;
  const { subject, body, generatedAt } = payload;
  return { subject, body, generatedAt: new Date(generatedAt) };
}

View File

@@ -0,0 +1,354 @@
import { and, desc, eq, ilike, or, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { emailAccounts, emailMessages, emailThreads } from '@/lib/db/schema/email';
import { clientContacts, clients } from '@/lib/db/schema/clients';
import { NotFoundError } from '@/lib/errors';
import { getDecryptedCredentials } from '@/lib/services/email-accounts.service';
import { logger } from '@/lib/logger';
import type { ListThreadsInput } from '@/lib/validators/email';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Normalised shape of a parsed inbound email, as produced by syncInbox. */
interface ParsedEmail {
  messageId: string; // RFC 5322 Message-ID header ('' when absent)
  from: string; // raw From header text, e.g. `"Name" <a@b.c>`
  to: string[]; // recipient addresses
  cc?: string[]; // cc addresses, when present
  subject: string;
  text?: string; // plain-text body, if present
  html?: string; // HTML body, if present
  date: Date; // message date (falls back to receipt time upstream)
  inReplyTo?: string; // In-Reply-To header, used for threading
  references?: string[]; // References header chain, used for threading
}
// ─── List Threads ─────────────────────────────────────────────────────────────
/**
 * Paginated list of email threads in a port, newest activity first, joined
 * to the linked client's name. Optionally filtered to a single client.
 */
export async function listThreads(portId: string, query: ListThreadsInput) {
  const { page, limit, clientId } = query;
  const offset = (page - 1) * limit;
  const predicates = [eq(emailThreads.portId, portId)];
  if (clientId) predicates.push(eq(emailThreads.clientId, clientId));
  const where = and(...predicates);
  // Fetch the requested page and the total count in parallel.
  const [pageRows, totals] = await Promise.all([
    db
      .select({ thread: emailThreads, clientName: clients.fullName })
      .from(emailThreads)
      .leftJoin(clients, eq(emailThreads.clientId, clients.id))
      .where(where)
      .orderBy(desc(emailThreads.lastMessageAt))
      .limit(limit)
      .offset(offset),
    db.select({ count: sql<string>`count(*)` }).from(emailThreads).where(where),
  ]);
  // count(*) arrives as a string from Postgres — parse it.
  const total = parseInt(totals[0]?.count ?? '0', 10);
  return {
    data: pageRows.map(({ thread, clientName }) => ({
      ...thread,
      clientName: clientName ?? null,
    })),
    total,
  };
}
// ─── Get Thread ───────────────────────────────────────────────────────────────
/**
 * Load a single thread with its messages in chronological order.
 * A thread belonging to another port is reported as "not found" rather than
 * "forbidden", so cross-port probing cannot confirm a thread's existence.
 *
 * @throws NotFoundError when missing or outside the caller's port
 */
export async function getThread(threadId: string, portId: string) {
  const thread = await db.query.emailThreads.findFirst({
    where: eq(emailThreads.id, threadId),
  });
  if (!thread || thread.portId !== portId) {
    throw new NotFoundError('Email thread');
  }
  const messages = await db
    .select()
    .from(emailMessages)
    .where(eq(emailMessages.threadId, threadId))
    .orderBy(emailMessages.sentAt);
  return { ...thread, messages };
}
// ─── Ingest Message ───────────────────────────────────────────────────────────
/** Step 1: match the In-Reply-To / References message-ids against stored
 *  messages; accept only threads belonging to this port. */
async function resolveThreadByHeaders(
  portId: string,
  parsedEmail: ParsedEmail,
): Promise<string | null> {
  const referencedIds = [
    ...(parsedEmail.inReplyTo ? [parsedEmail.inReplyTo] : []),
    ...(parsedEmail.references ?? []),
  ];
  if (referencedIds.length === 0) return null;
  const existingMessage = await db.query.emailMessages.findFirst({
    where: or(...referencedIds.map((id) => eq(emailMessages.messageIdHeader, id))),
  });
  if (!existingMessage) return null;
  // Verify the matched thread belongs to this port before accepting it.
  const thread = await db.query.emailThreads.findFirst({
    where: and(
      eq(emailThreads.id, existingMessage.threadId),
      eq(emailThreads.portId, portId),
    ),
  });
  return thread ? thread.id : null;
}
/** Step 2: match the sender address against known client email contacts.
 *  Reuses the client's most recent thread, or creates a fresh thread for
 *  that client when they have none. */
async function resolveThreadByContact(
  portId: string,
  parsedEmail: ParsedEmail,
): Promise<string | null> {
  // Extract the bare address from e.g. `"Name" <a@b.c>` and normalise case.
  const fromAddress = parsedEmail.from.replace(/.*<(.+)>/, '$1').trim().toLowerCase();
  const contactRows = await db
    .select({
      clientId: clientContacts.clientId,
      clientPortId: clients.portId,
    })
    .from(clientContacts)
    .innerJoin(clients, eq(clientContacts.clientId, clients.id))
    .where(
      and(
        eq(clientContacts.channel, 'email'),
        eq(sql`lower(${clientContacts.value})`, fromAddress),
        eq(clients.portId, portId),
      ),
    )
    .limit(1);
  const contactRow = contactRows[0];
  if (!contactRow) return null;
  const clientId = contactRow.clientId;
  // Reuse the client's most recent thread if one exists.
  const existingThread = await db.query.emailThreads.findFirst({
    where: and(eq(emailThreads.portId, portId), eq(emailThreads.clientId, clientId)),
    orderBy: [desc(emailThreads.lastMessageAt)],
  });
  if (existingThread) return existingThread.id;
  const newThreadRows = await db
    .insert(emailThreads)
    .values({
      portId,
      clientId,
      subject: parsedEmail.subject,
      lastMessageAt: parsedEmail.date,
      messageCount: 0,
    })
    .returning();
  const newThread = newThreadRows[0];
  if (!newThread) throw new Error('Failed to create email thread');
  return newThread.id;
}
/** Step 3: fuzzy match on the subject with Re:/Fwd:/Fw: prefixes stripped. */
async function resolveThreadBySubject(
  portId: string,
  parsedEmail: ParsedEmail,
): Promise<string | null> {
  const normalizedSubject = parsedEmail.subject
    .replace(/^(re|fwd|fw):\s*/i, '')
    .trim();
  if (!normalizedSubject) return null;
  // NOTE(review): `%`/`_` inside the subject act as ILIKE wildcards here and
  // may over-match; consider escaping if this proves problematic.
  const matchingThread = await db.query.emailThreads.findFirst({
    where: and(
      eq(emailThreads.portId, portId),
      ilike(emailThreads.subject, `%${normalizedSubject}%`),
    ),
    orderBy: [desc(emailThreads.lastMessageAt)],
  });
  return matchingThread ? matchingThread.id : null;
}
/**
 * Ingest an inbound email into a thread, resolving the thread by three
 * strategies in decreasing order of confidence:
 *   1. RFC 5322 Message-ID chain (In-Reply-To / References)
 *   2. sender address matched to a client contact
 *   3. normalised-subject fuzzy match
 * A brand-new thread is created when none match. The message is then
 * persisted and the thread's lastMessageAt/messageCount updated (the counter
 * increments in SQL to stay race-safe).
 */
export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
  let threadId =
    (await resolveThreadByHeaders(portId, parsedEmail)) ??
    (await resolveThreadByContact(portId, parsedEmail)) ??
    (await resolveThreadBySubject(portId, parsedEmail));
  // No thread found — create a new one
  if (!threadId) {
    const newThreadRows = await db
      .insert(emailThreads)
      .values({
        portId,
        subject: parsedEmail.subject,
        lastMessageAt: parsedEmail.date,
        messageCount: 0,
      })
      .returning();
    const newThread = newThreadRows[0];
    if (!newThread) throw new Error('Failed to create email thread');
    threadId = newThread.id;
  }
  // Insert the message
  const messageRows = await db
    .insert(emailMessages)
    .values({
      threadId,
      messageIdHeader: parsedEmail.messageId || null,
      fromAddress: parsedEmail.from,
      toAddresses: parsedEmail.to,
      ccAddresses: parsedEmail.cc ?? null,
      subject: parsedEmail.subject,
      bodyText: parsedEmail.text ?? null,
      bodyHtml: parsedEmail.html ?? null,
      direction: 'inbound',
      sentAt: parsedEmail.date,
    })
    .returning();
  const message = messageRows[0];
  if (!message) throw new Error('Failed to insert email message');
  // Update thread's lastMessageAt and messageCount
  await db
    .update(emailThreads)
    .set({
      lastMessageAt: parsedEmail.date,
      messageCount: sql`${emailThreads.messageCount} + 1`,
      updatedAt: new Date(),
    })
    .where(eq(emailThreads.id, threadId));
  return { message, threadId };
}
// ─── Sync Inbox ───────────────────────────────────────────────────────────────
/**
 * Pull new messages from an account's IMAP INBOX and ingest them into
 * threads. Fetches everything since the last successful sync (or the last 30
 * days on first run). Individual parse/ingest failures are logged and
 * skipped so one bad message cannot abort the whole sync.
 *
 * Fix: the `finally` block previously called `client.logout()` unguarded —
 * logout can itself reject (e.g. when `connect()` failed and no session
 * exists), masking the original error. It is now wrapped in try/catch.
 *
 * @throws NotFoundError when the account does not exist
 */
export async function syncInbox(accountId: string): Promise<void> {
  // Dynamic imports to avoid loading heavy IMAP/mail modules at module initialisation
  const imapflowModule = await import('imapflow');
  const ImapFlow = imapflowModule.ImapFlow;
  const mailparserModule = await import('mailparser');
  const simpleParser = mailparserModule.simpleParser;
  const account = await db.query.emailAccounts.findFirst({
    where: eq(emailAccounts.id, accountId),
  });
  if (!account) {
    throw new NotFoundError('Email account');
  }
  const creds = await getDecryptedCredentials(accountId);
  // Determine the since date: last sync or 30 days ago
  const since = account.lastSyncAt ?? new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  const client = new ImapFlow({
    host: account.imapHost,
    port: account.imapPort,
    secure: account.imapPort === 993, // implicit TLS on the standard IMAPS port
    auth: {
      user: creds.username,
      pass: creds.password,
    },
    logger: false,
  });
  try {
    await client.connect();
    const mailbox = await client.mailboxOpen('INBOX');
    logger.info({ accountId, exists: mailbox.exists }, 'IMAP INBOX opened');
    // Search for messages since the last sync date
    // client.search() returns false | number[] — false means nothing found
    const searchResult = await client.search({ since });
    const uids: number[] = searchResult === false ? [] : searchResult;
    if (uids.length === 0) {
      logger.info({ accountId }, 'No new messages to sync');
      return;
    }
    for await (const message of client.fetch(uids, { source: true })) {
      try {
        if (!message.source) continue;
        const parsed = await simpleParser(message.source);
        // Normalise messageId — mailparser returns string | string[] | undefined
        const rawMsgId = parsed.messageId;
        const messageId =
          rawMsgId == null
            ? ''
            : Array.isArray(rawMsgId)
              ? (rawMsgId[0] ?? '')
              : rawMsgId;
        const from = parsed.from?.text ?? '';
        // Normalise to/cc — mailparser AddressObject can be an array
        const resolveAddresses = (
          field: typeof parsed.to,
        ): string[] => {
          if (!field) return [];
          const arr = Array.isArray(field) ? field : [field];
          return arr.flatMap((a) =>
            (a.value ?? []).map((x: { address?: string }) => x.address ?? ''),
          );
        };
        const to = resolveAddresses(parsed.to);
        const cc = parsed.cc ? resolveAddresses(parsed.cc) : undefined;
        // References may come back as a single whitespace-joined string
        const rawRefs = parsed.references;
        const references: string[] =
          rawRefs == null
            ? []
            : typeof rawRefs === 'string'
              ? rawRefs.split(/\s+/).filter(Boolean)
              : rawRefs;
        await ingestMessage(account.portId, {
          messageId,
          from,
          to,
          cc,
          subject: parsed.subject ?? '(no subject)',
          text: parsed.text ?? undefined,
          html: typeof parsed.html === 'string' ? parsed.html : undefined,
          date: parsed.date ?? new Date(),
          inReplyTo: parsed.inReplyTo ?? undefined,
          references,
        });
      } catch (err) {
        // Skip the offending message; the rest of the batch still syncs.
        logger.error({ err, accountId, uid: message.uid }, 'Failed to ingest email message');
      }
    }
    // Update lastSyncAt on the account
    await db
      .update(emailAccounts)
      .set({ lastSyncAt: new Date(), updatedAt: new Date() })
      .where(eq(emailAccounts.id, accountId));
    logger.info({ accountId, messageCount: uids.length }, 'IMAP sync complete');
  } finally {
    // logout() can reject when the session never established or already
    // dropped — never let that mask the original error from the try block.
    try {
      await client.logout();
    } catch (logoutErr) {
      logger.warn({ logoutErr, accountId }, 'IMAP logout failed after sync');
    }
  }
}

View File

@@ -0,0 +1,211 @@
import { and, eq, gte, ilike, isNull, lte, or, type SQL } from 'drizzle-orm';
import { db } from '@/lib/db';
import { expenses } from '@/lib/db/schema/financial';
import { generatePdf } from '@/lib/pdf/generate';
import { getRate } from '@/lib/services/currency';
import { logger } from '@/lib/logger';
import type { ListExpensesInput } from '@/lib/validators/expenses';
async function fetchAllExpenses(portId: string, query: ListExpensesInput) {
const conditions: ReturnType<typeof eq>[] = [eq(expenses.portId, portId) as any];
if (!query.includeArchived) {
conditions.push(isNull(expenses.archivedAt) as any);
}
if (query.category) {
conditions.push(eq(expenses.category, query.category) as any);
}
if (query.paymentStatus) {
conditions.push(eq(expenses.paymentStatus, query.paymentStatus) as any);
}
if (query.currency) {
conditions.push(eq(expenses.currency, query.currency) as any);
}
if (query.payer) {
conditions.push(eq(expenses.payer, query.payer) as any);
}
if (query.dateFrom) {
conditions.push(gte(expenses.expenseDate, new Date(query.dateFrom)) as any);
}
if (query.dateTo) {
conditions.push(lte(expenses.expenseDate, new Date(query.dateTo)) as any);
}
if (query.search) {
conditions.push(
or(
ilike(expenses.establishmentName, `%${query.search}%`),
ilike(expenses.description, `%${query.search}%`),
) as any,
);
}
return db
.select()
.from(expenses)
.where(and(...(conditions as any[])));
}
/** Quote a single CSV cell, doubling embedded quotes per RFC 4180. */
function csvCell(value: unknown): string {
  return `"${String(value ?? '').replace(/"/g, '""')}"`;
}
/**
 * Export the filtered expense list as a CSV string.
 *
 * Fix: previously only `description` had embedded quotes escaped — an
 * establishment name, category, or payment field containing `"` produced a
 * corrupt row. Every cell now goes through csvCell().
 */
export async function exportCsv(portId: string, query: ListExpensesInput): Promise<string> {
  const rows = await fetchAllExpenses(portId, query);
  const headers = [
    'Date',
    'Establishment',
    'Category',
    'Amount',
    'Currency',
    'Amount USD',
    'Payment Status',
    'Payment Method',
    'Description',
  ];
  const csvRows = rows.map((r) => {
    // Date-only ISO representation (YYYY-MM-DD), blank when unset.
    const date = r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '';
    return [
      date,
      r.establishmentName ?? '',
      r.category ?? '',
      r.amount,
      r.currency,
      r.amountUsd ?? 'N/A',
      r.paymentStatus ?? '',
      r.paymentMethod ?? '',
      r.description ?? '',
    ]
      .map(csvCell)
      .join(',');
  });
  return [headers.join(','), ...csvRows].join('\n');
}
/**
 * Render the filtered expense list as a minimal single-page A4 PDF: a title
 * field plus one pipe-delimited line per expense.
 */
export async function exportPdf(portId: string, query: ListExpensesInput): Promise<Uint8Array> {
  const rows = await fetchAllExpenses(portId, query);
  // Small factory for the full-width text fields on the A4 (210×297mm) page.
  const textField = (name: string, y: number, height: number, fontSize: number) => ({
    name,
    type: 'text',
    position: { x: 10, y },
    width: 190,
    height,
    fontSize,
    fontColor: '#000000',
  });
  const template = {
    basePdf: { width: 210, height: 297, padding: [10, 10, 10, 10] },
    schemas: [[textField('title', 10, 10, 14), textField('content', 25, 260, 8)]],
  };
  const toLine = (r: (typeof rows)[number]): string => {
    const date = r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '';
    return `${date} | ${r.establishmentName ?? '-'} | ${r.category ?? '-'} | ${r.amount} ${r.currency} | ${r.paymentStatus ?? '-'}`;
  };
  return generatePdf(template as any, [
    { title: 'Expense Report', content: rows.map(toLine).join('\n') },
  ]);
}
/**
 * BR-043 parent-company export: all expenses converted to EUR plus a 5%
 * management fee, rendered as a single-page PDF.
 *
 * NOTE(review): rows without a stored `amountUsd` fall back to the raw
 * `amount`, which is in the expense's ORIGINAL currency — multiplying that by
 * the USD→EUR rate mixes currencies for non-USD expenses. Confirm whether
 * such rows should be skipped or converted from their own currency instead.
 */
export async function exportParentCompany(
  portId: string,
  query: ListExpensesInput,
): Promise<Uint8Array> {
  // BR-043: Convert all amounts to EUR, add 5% management fee
  const rows = await fetchAllExpenses(portId, query);
  const eurRate = await getRate('USD', 'EUR');
  if (!eurRate) {
    logger.warn('EUR rate unavailable for parent company export, using 1:1 fallback');
  }
  // 1:1 fallback keeps the export functional when the FX service is down.
  const rate = eurRate ?? 1;
  const convertedRows = rows.map((r) => {
    const amountUsd = r.amountUsd ? Number(r.amountUsd) : Number(r.amount);
    // Round each line to cents before summing.
    const amountEur = Number((amountUsd * rate).toFixed(2));
    return {
      date: r.expenseDate ? new Date(r.expenseDate).toISOString().split('T')[0] : '',
      establishment: r.establishmentName ?? '-',
      category: r.category ?? '-',
      amountEur,
    };
  });
  // Totals: subtotal of line items, 5% management fee, grand total.
  const subtotal = convertedRows.reduce((sum, r) => sum + r.amountEur, 0);
  const fee = Number((subtotal * 0.05).toFixed(2));
  const total = Number((subtotal + fee).toFixed(2));
  // Single A4 page: title, line-item body, and a summary box near the bottom.
  const template = {
    basePdf: { width: 210, height: 297, padding: [10, 10, 10, 10] },
    schemas: [
      [
        {
          name: 'title',
          type: 'text',
          position: { x: 10, y: 10 },
          width: 190,
          height: 10,
          fontSize: 14,
          fontColor: '#000000',
        },
        {
          name: 'content',
          type: 'text',
          position: { x: 10, y: 25 },
          width: 190,
          height: 230,
          fontSize: 8,
          fontColor: '#000000',
        },
        {
          name: 'summary',
          type: 'text',
          position: { x: 10, y: 260 },
          width: 190,
          height: 30,
          fontSize: 10,
          fontColor: '#000000',
        },
      ],
    ],
  };
  const lines = convertedRows.map(
    (r) => `${r.date} | ${r.establishment} | ${r.category} | EUR ${r.amountEur.toFixed(2)}`,
  );
  const summary = [
    `Subtotal: EUR ${subtotal.toFixed(2)}`,
    `Management Fee (5%): EUR ${fee.toFixed(2)}`,
    `Total: EUR ${total.toFixed(2)}`,
  ].join('\n');
  const inputs = [
    {
      title: 'Parent Company Expense Report (EUR)',
      content: lines.join('\n'),
      summary,
    },
  ];
  return generatePdf(template as any, inputs);
}

View File

@@ -0,0 +1,307 @@
import { eq, and, gte, lte, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { expenses, invoices, invoiceExpenses } from '@/lib/db/schema/financial';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore } from '@/lib/db/utils';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { convert } from '@/lib/services/currency';
import { logger } from '@/lib/logger';
import type { CreateExpenseInput, UpdateExpenseInput, ListExpensesInput } from '@/lib/validators/expenses';
// Re-export of the validator's list-query type for consumers of this service.
export type { ListExpensesInput };
// AuditMeta type expected by service functions
/** Actor/request context recorded with every audit log entry. */
export interface ServiceAuditMeta {
  userId: string; // acting user's id
  portId: string; // tenant (port) scope of the action
  ipAddress: string; // request origin IP
  userAgent: string; // request user-agent string
}
/**
 * Paginated expense list for a port, delegating pagination, search, archive
 * filtering, and sorting to the shared buildListQuery helper.
 */
export async function listExpenses(portId: string, query: ListExpensesInput) {
  // Translate the optional query params into drizzle predicates.
  const filters = [];
  if (query.category) filters.push(eq(expenses.category, query.category));
  if (query.paymentStatus) filters.push(eq(expenses.paymentStatus, query.paymentStatus));
  if (query.currency) filters.push(eq(expenses.currency, query.currency));
  if (query.payer) filters.push(eq(expenses.payer, query.payer));
  if (query.dateFrom) filters.push(gte(expenses.expenseDate, new Date(query.dateFrom)));
  if (query.dateTo) filters.push(lte(expenses.expenseDate, new Date(query.dateTo)));
  // Sort column is resolved dynamically from the validated sort key.
  const sort = query.sort
    ? { column: expenses[query.sort as keyof typeof expenses] as any, direction: query.order }
    : undefined;
  return buildListQuery({
    table: expenses,
    portIdColumn: expenses.portId,
    portId,
    idColumn: expenses.id,
    updatedAtColumn: expenses.updatedAt,
    filters,
    page: query.page,
    pageSize: query.limit,
    searchColumns: [expenses.establishmentName, expenses.description],
    searchTerm: query.search,
    includeArchived: query.includeArchived,
    archivedAtColumn: expenses.archivedAt,
    sort,
  });
}
export async function getExpenseById(id: string, portId: string) {
const expense = await db.query.expenses.findFirst({
where: and(eq(expenses.id, id), eq(expenses.portId, portId)),
});
if (!expense) throw new NotFoundError('Expense');
return expense;
}
/**
 * Create an expense, normalising the amount to USD when an exchange rate is
 * available (BR-040: a missing rate is tolerated — the row is stored without
 * a conversion and a warning is logged). Audits and broadcasts the creation.
 */
export async function createExpense(
  portId: string,
  data: CreateExpenseInput,
  meta: ServiceAuditMeta,
) {
  let amountUsd: string | null = null;
  let exchangeRate: string | null = null;
  if (data.currency === 'USD') {
    amountUsd = String(data.amount);
    exchangeRate = '1';
  } else {
    const conversion = await convert(data.amount, data.currency, 'USD');
    if (conversion) {
      amountUsd = String(conversion.result);
      exchangeRate = String(conversion.rate);
    } else {
      // BR-040: if rate unavailable, save without conversion + log warning
      logger.warn({ currency: data.currency }, 'Currency rate unavailable, saving expense without USD conversion');
    }
  }
  const inserted = await db
    .insert(expenses)
    .values({
      portId,
      establishmentName: data.establishmentName,
      amount: String(data.amount),
      currency: data.currency,
      amountUsd,
      exchangeRate,
      paymentMethod: data.paymentMethod,
      category: data.category,
      payer: data.payer,
      expenseDate: data.expenseDate,
      description: data.description,
      receiptFileIds: data.receiptFileIds ?? [],
      paymentStatus: data.paymentStatus,
      paymentDate: data.paymentDate ?? null,
      paymentReference: data.paymentReference ?? null,
      paymentNotes: data.paymentNotes ?? null,
      createdBy: meta.userId,
    })
    .returning();
  const expense = inserted[0];
  if (!expense) throw new Error('Insert failed');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'expense',
    entityId: expense.id,
    newValue: expense as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:created', {
    expenseId: expense.id,
    amount: Number(expense.amount),
    currency: expense.currency,
    category: expense.category ?? '',
  });
  return expense;
}
/**
 * Update an expense, re-deriving the USD conversion when the amount or
 * currency changed; the field-level diff is audited and broadcast.
 *
 * @throws NotFoundError when the expense does not exist in this port
 */
export async function updateExpense(
  id: string,
  portId: string,
  data: UpdateExpenseInput,
  meta: ServiceAuditMeta,
) {
  const existing = await getExpenseById(id, portId);
  const updateData: Record<string, unknown> = { ...data, updatedAt: new Date() };
  // Re-convert to USD if amount or currency changed
  const newAmount = data.amount ?? Number(existing.amount);
  const newCurrency = data.currency ?? existing.currency;
  if (data.amount !== undefined || data.currency !== undefined) {
    if (newCurrency !== 'USD') {
      const conversion = await convert(newAmount, newCurrency, 'USD');
      if (conversion) {
        updateData.amountUsd = String(conversion.result);
        updateData.exchangeRate = String(conversion.rate);
      } else {
        // No rate available — clear the stale conversion rather than keep a
        // USD figure that no longer matches the new amount/currency.
        logger.warn({ currency: newCurrency }, 'Currency rate unavailable during update, clearing USD conversion');
        updateData.amountUsd = null;
        updateData.exchangeRate = null;
      }
    } else {
      updateData.amountUsd = String(newAmount);
      updateData.exchangeRate = '1';
    }
  }
  // The amount column stores strings; stringify AFTER the spread above so the
  // diff below compares the final persisted value.
  if (data.amount !== undefined) updateData.amount = String(data.amount);
  const { diff } = diffEntity(existing as unknown as Record<string, unknown>, updateData);
  const [updated] = await db
    .update(expenses)
    .set(updateData as any)
    .where(and(eq(expenses.id, id), eq(expenses.portId, portId)))
    .returning();
  if (!updated) throw new NotFoundError('Expense');
  // Fire-and-forget audit with the old/new snapshots plus the computed diff.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'expense',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    metadata: { diff },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:updated', {
    expenseId: id,
    changedFields: Object.keys(diff),
  });
  return updated;
}
/**
 * Soft-delete an expense. BR-045: refused when the expense is attached to an
 * invoice that has progressed past draft.
 *
 * @throws NotFoundError when the expense does not exist in this port
 * @throws ConflictError when linked to a non-draft invoice
 */
export async function archiveExpense(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const existing = await getExpenseById(id, portId);
  // BR-045: block archiving while a non-draft invoice references this expense.
  const blockingLinks = await db
    .select({ invoiceId: invoiceExpenses.invoiceId })
    .from(invoiceExpenses)
    .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
    .where(
      and(
        eq(invoiceExpenses.expenseId, id),
        sql`${invoices.status} != 'draft'`,
      ),
    )
    .limit(1);
  if (blockingLinks.length > 0) {
    throw new ConflictError('Cannot archive expense linked to a non-draft invoice');
  }
  await softDelete(expenses, expenses.id, id);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'archive',
    entityType: 'expense',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:archived', { expenseId: id });
}
/**
 * Undo a soft-delete on an expense, then audit and broadcast the change.
 *
 * @throws NotFoundError when the expense does not exist in this port
 */
export async function restoreExpense(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  // Existence/port check first — throws NotFoundError on a bad id.
  await getExpenseById(id, portId);
  await restore(expenses, expenses.id, id);
  // Re-read so the audit snapshot reflects the cleared archivedAt.
  const restored = await getExpenseById(id, portId);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'restore',
    entityType: 'expense',
    entityId: id,
    newValue: restored as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'expense:updated', {
    expenseId: id,
    changedFields: ['archivedAt'],
  });
  return restored;
}
/**
 * Append uploaded receipt file ids to an expense's receipt list. The concat
 * happens server-side (array_cat in SQL) so concurrent appends cannot
 * clobber each other. Audited fire-and-forget.
 *
 * @throws NotFoundError when the expense does not exist in this port
 */
export async function addReceiptFiles(
  id: string,
  portId: string,
  fileIds: string[],
  meta: ServiceAuditMeta,
) {
  await getExpenseById(id, portId); // throws on bad id / wrong port
  const updatedRows = await db
    .update(expenses)
    .set({
      receiptFileIds: sql`array_cat(receipt_file_ids, ${fileIds}::text[])`,
      updatedAt: new Date(),
    } as any)
    .where(and(eq(expenses.id, id), eq(expenses.portId, portId)))
    .returning();
  const updated = updatedRows[0];
  if (!updated) throw new NotFoundError('Expense');
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'expense',
    entityId: id,
    metadata: { addedFileIds: fileIds },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return updated;
}

270
src/lib/services/files.ts Normal file
View File

@@ -0,0 +1,270 @@
import { and, arrayContains, eq, or } from 'drizzle-orm';
import { db } from '@/lib/db';
import { files, documents } from '@/lib/db/schema/documents';
import { expenses } from '@/lib/db/schema/financial';
import { berthMaintenanceLog } from '@/lib/db/schema/berths';
import { createAuditLog } from '@/lib/audit';
import { ConflictError, NotFoundError, ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { minioClient, getPresignedUrl } from '@/lib/minio';
import { buildListQuery } from '@/lib/db/query-builder';
import { env } from '@/lib/env';
import {
ALLOWED_MIME_TYPES,
MAX_FILE_SIZE,
PREVIEWABLE_MIMES,
} from '@/lib/constants/file-validation';
import { generateStorageKey, sanitizeFilename } from '@/lib/services/storage';
import type { UploadFileInput, UpdateFileInput, ListFilesInput } from '@/lib/validators/files';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request context recorded alongside every audit-log entry. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
/** Raw upload payload as received from the multipart request handler. */
interface UploadFileParams {
  buffer: Buffer;
  originalName: string;
  mimeType: string;
  size: number;
}
// ─── Upload ───────────────────────────────────────────────────────────────────
/**
 * Validates and stores an uploaded file: bytes go to MinIO, metadata goes
 * to the `files` table, then an audit entry and a socket event are
 * emitted.
 *
 * Fix: if the metadata insert failed after `putObject`, the object was
 * left orphaned in MinIO; the insert is now compensated by removing the
 * just-uploaded object before rethrowing.
 *
 * @throws ValidationError when the MIME type is not allowed or the file
 *   exceeds MAX_FILE_SIZE.
 * @returns the inserted `files` row.
 */
export async function uploadFile(
  portId: string,
  portSlug: string,
  file: UploadFileParams,
  data: UploadFileInput,
  meta: AuditMeta,
) {
  if (!ALLOWED_MIME_TYPES.has(file.mimeType)) {
    throw new ValidationError(`File type '${file.mimeType}' is not allowed`);
  }
  if (file.size > MAX_FILE_SIZE) {
    // NOTE(review): message hard-codes 50MB — keep in sync with MAX_FILE_SIZE.
    throw new ValidationError('File exceeds maximum size of 50MB');
  }

  // Files not attached to a specific entity live under a port-scoped
  // "general" prefix.
  const entity = data.entityType ?? 'general';
  const entityId = data.entityId ?? portId;
  const storagePath = generateStorageKey(portSlug, entity, entityId, file.mimeType);
  const sanitizedOriginal = sanitizeFilename(file.originalName);
  const sanitizedFilename = sanitizeFilename(data.filename);

  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    file.buffer,
    file.size,
    { 'Content-Type': file.mimeType },
  );

  let record;
  try {
    [record] = await db
      .insert(files)
      .values({
        portId,
        clientId: data.clientId ?? null,
        filename: sanitizedFilename,
        originalName: sanitizedOriginal,
        mimeType: file.mimeType,
        sizeBytes: String(file.size),
        storagePath,
        storageBucket: env.MINIO_BUCKET,
        category: data.category ?? null,
        uploadedBy: meta.userId,
      })
      .returning();
  } catch (err) {
    // Object store and DB are not transactional: if the metadata insert
    // fails, remove the just-uploaded object so it is not orphaned.
    await minioClient
      .removeObject(env.MINIO_BUCKET, storagePath)
      .catch(() => undefined);
    throw err;
  }
  if (!record) {
    await minioClient
      .removeObject(env.MINIO_BUCKET, storagePath)
      .catch(() => undefined);
    throw new Error('File metadata insert returned no row');
  }

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'file',
    entityId: record.id,
    newValue: { filename: record.filename, mimeType: file.mimeType, size: file.size },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'file:uploaded', {
    fileId: record.id,
    filename: record.filename,
  });
  return record;
}
// ─── Download / Preview URLs ──────────────────────────────────────────────────
/**
 * Resolves a presigned download URL for a file.
 * @returns the URL together with the display filename.
 */
export async function getDownloadUrl(id: string, portId: string) {
  const record = await getFileById(id, portId);
  return {
    url: await getPresignedUrl(record.storagePath),
    filename: record.filename,
  };
}
/**
 * Resolves a presigned URL for inline preview. Only MIME types listed in
 * PREVIEWABLE_MIMES may be previewed.
 * @throws ValidationError for missing or non-previewable MIME types.
 */
export async function getPreviewUrl(id: string, portId: string) {
  const record = await getFileById(id, portId);
  const previewable =
    record.mimeType != null && PREVIEWABLE_MIMES.has(record.mimeType);
  if (!previewable) {
    throw new ValidationError('This file type cannot be previewed');
  }
  return {
    url: await getPresignedUrl(record.storagePath),
    mimeType: record.mimeType,
  };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Renames and/or re-categorises a file's metadata (the stored object
 * itself is immutable).
 *
 * Fix: an empty payload previously reached drizzle's `.set({})`, which
 * rejects an empty update ("No values to set"); "nothing to change" is
 * now a no-op that returns the existing row without audit/socket noise.
 *
 * @throws NotFoundError when the file is missing for this port.
 */
export async function updateFile(
  id: string,
  portId: string,
  data: UpdateFileInput,
  meta: AuditMeta,
) {
  const existing = await getFileById(id, portId);

  const updates: { filename?: string; category?: string } = {};
  if (data.filename !== undefined) updates.filename = sanitizeFilename(data.filename);
  if (data.category !== undefined) updates.category = data.category;

  // Drizzle throws on an empty .set({}); treat it as a no-op instead.
  if (Object.keys(updates).length === 0) {
    return existing;
  }

  const [updated] = await db
    .update(files)
    .set(updates)
    .where(and(eq(files.id, id), eq(files.portId, portId)))
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'file',
    entityId: id,
    oldValue: { filename: existing.filename, category: existing.category },
    newValue: updates,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'file:updated', { fileId: id });
  return updated!;
}
// ─── Delete (BR-091) ──────────────────────────────────────────────────────────
/**
 * Hard-deletes a file: removes the object from MinIO and the metadata
 * row from the `files` table.
 *
 * BR-091: deletion is refused while any document (as primary or signed
 * file), expense receipt, or berth maintenance photo still references
 * the file id.
 *
 * NOTE(review): the object is removed from MinIO *before* the DB row; if
 * the DB delete then fails, the row points at a missing object — confirm
 * this ordering is intentional.
 *
 * @throws NotFoundError when the file is missing for this port.
 * @throws ConflictError when the file is still referenced.
 */
export async function deleteFile(id: string, portId: string, meta: AuditMeta) {
  const existing = await getFileById(id, portId);
  // BR-091: check references before deleting
  // The three reference checks are independent and each only needs to
  // know whether at least one referencing row exists (limit 1), so they
  // run in parallel.
  const [docRefs, expenseRefs, maintenanceRefs] = await Promise.all([
    db
      .select({ id: documents.id })
      .from(documents)
      .where(
        and(
          eq(documents.portId, portId),
          // A document may reference the file as the original upload or
          // as the signed copy.
          or(eq(documents.fileId, id), eq(documents.signedFileId, id)),
        ),
      )
      .limit(1),
    db
      .select({ id: expenses.id })
      .from(expenses)
      .where(
        and(
          eq(expenses.portId, portId),
          // receiptFileIds is an array column; arrayContains matches any
          // expense whose receipts include this file id.
          arrayContains(expenses.receiptFileIds, [id]),
        ),
      )
      .limit(1),
    db
      .select({ id: berthMaintenanceLog.id })
      .from(berthMaintenanceLog)
      .where(
        and(
          eq(berthMaintenanceLog.portId, portId),
          arrayContains(berthMaintenanceLog.photoFileIds, [id]),
        ),
      )
      .limit(1),
  ]);
  if (docRefs.length > 0 || expenseRefs.length > 0 || maintenanceRefs.length > 0) {
    throw new ConflictError(
      'File cannot be deleted because it is referenced by other records',
    );
  }
  // Delete from MinIO first, then DB
  await minioClient.removeObject(env.MINIO_BUCKET, existing.storagePath);
  await db.delete(files).where(and(eq(files.id, id), eq(files.portId, portId)));
  // Fire-and-forget audit entry; the delete is already committed.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'file',
    entityId: id,
    oldValue: { filename: existing.filename },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'file:deleted', { fileId: id });
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated file listing for a port with optional client/category filters
 * and filename search. Files carry no archive state, so no archived-at
 * filtering is applied.
 */
export async function listFiles(portId: string, query: ListFilesInput) {
  const { page, limit, sort, order, search, clientId, category } = query;

  const filters = [];
  if (clientId) filters.push(eq(files.clientId, clientId));
  if (category) filters.push(eq(files.category, category));

  // Only a small allowlist of columns is sortable; anything else falls
  // back to creation time.
  let sortColumn;
  switch (sort) {
    case 'filename':
      sortColumn = files.filename;
      break;
    case 'sizeBytes':
      sortColumn = files.sizeBytes;
      break;
    default:
      sortColumn = files.createdAt;
  }

  return buildListQuery({
    table: files,
    portIdColumn: files.portId,
    portId,
    idColumn: files.id,
    updatedAtColumn: files.createdAt, // no updatedAt on files
    searchColumns: [files.filename, files.originalName],
    searchTerm: search,
    filters,
    sort: sort ? { column: sortColumn, direction: order } : undefined,
    page,
    pageSize: limit,
    // no archivedAtColumn — files are immutable records
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
export async function getFileById(id: string, portId: string) {
const file = await db.query.files.findFirst({
where: eq(files.id, id),
});
if (!file || file.portId !== portId) {
throw new NotFoundError('File');
}
return file;
}

View File

@@ -0,0 +1,234 @@
import { and, count, eq, gte, isNull } from 'drizzle-orm';
import { db } from '@/lib/db';
import { redis } from '@/lib/redis';
import { interests, interestNotes } from '@/lib/db/schema/interests';
import { reminders } from '@/lib/db/schema/operations';
import { emailThreads } from '@/lib/db/schema/email';
import { logger } from '@/lib/logger';
// ─── Types ────────────────────────────────────────────────────────────────────
/**
 * Composite lead-quality score for an interest, normalised to 0-100,
 * plus the raw per-factor breakdown the total was derived from.
 */
export interface InterestScore {
  totalScore: number; // 0-100 (normalised)
  breakdown: {
    pipelineAge: number; // 0-100
    stageSpeed: number; // 0-100
    documentCompleteness: number; // 0-100
    engagement: number; // 0-100
    berthLinked: number; // 0 or 25
  };
  calculatedAt: Date;
}
// ─── Redis cache ──────────────────────────────────────────────────────────────
/** Redis key under which a single interest's score is cached. */
const SCORE_KEY = (interestId: string): string => `interest-score:${interestId}`;
/** Cache lifetime in seconds (1 hour). */
const SCORE_TTL = 3600;
// ─── Scoring helpers ──────────────────────────────────────────────────────────
/**
 * Scores how fresh an interest is — newer pipelines score higher.
 * Buckets by whole days since creation: ≤30→100, ≤60→80, ≤90→60,
 * ≤180→40, otherwise 20.
 */
function scorePipelineAge(createdAt: Date): number {
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const ageDays = Math.floor((Date.now() - createdAt.getTime()) / MS_PER_DAY);
  const buckets: Array<[number, number]> = [
    [30, 100],
    [60, 80],
    [90, 60],
    [180, 40],
  ];
  for (const [maxDays, score] of buckets) {
    if (ageDays <= maxDays) return score;
  }
  return 20;
}
/**
 * Scores progression velocity as average days spent per completed stage
 * transition since creation. Interests still at 'open' (or an unknown
 * stage) score 0. Bands: <7 days/stage→100, <14→75, <30→50, <60→25,
 * otherwise 10.
 */
function scoreStageSpeed(createdAt: Date, pipelineStage: string): number {
  // Approximate stage index based on known pipeline order
  const STAGE_ORDER: Record<string, number> = {
    open: 0,
    details_sent: 1,
    in_communication: 2,
    visited: 3,
    signed_eoi_nda: 4,
    deposit_10pct: 5,
    contract: 6,
    completed: 7,
  };
  const stageIndex = STAGE_ORDER[pipelineStage] ?? 0;
  // Still at open (or unrecognised stage) — no progression to measure.
  if (stageIndex === 0) return 0;

  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  // Clamp to at least one day so brand-new interests don't divide by ~0.
  const elapsedDays = Math.max(1, (Date.now() - createdAt.getTime()) / MS_PER_DAY);
  const avgDaysPerStage = elapsedDays / stageIndex;

  if (avgDaysPerStage < 7) return 100;
  if (avgDaysPerStage < 14) return 75;
  if (avgDaysPerStage < 30) return 50;
  if (avgDaysPerStage < 60) return 25;
  return 10;
}
/**
 * Scores milestone-document completion: EOI (+30), contract (+30),
 * deposit (+40). Each milestone counts when either its status or its
 * dated milestone field is present. Result is capped at 100.
 */
function scoreDocumentCompleteness(interest: {
  eoiStatus: string | null;
  contractStatus: string | null;
  depositStatus: string | null;
  dateEoiSigned: Date | null;
  dateContractSigned: Date | null;
  dateDepositReceived: Date | null;
}): number {
  const eoiDone =
    interest.eoiStatus === 'signed' || interest.dateEoiSigned != null;
  const contractDone =
    interest.contractStatus === 'signed' || interest.dateContractSigned != null;
  const depositDone =
    interest.depositStatus === 'received' ||
    interest.depositStatus === 'paid' ||
    interest.dateDepositReceived != null;

  const total =
    (eoiDone ? 30 : 0) + (contractDone ? 30 : 0) + (depositDone ? 40 : 0);
  return Math.min(total, 100);
}
// ─── Main scoring function ────────────────────────────────────────────────────
/**
 * Computes (or returns a cached) composite lead score for one interest.
 *
 * Factors: pipeline age (0-100), stage speed (0-100), document
 * completeness (0-100), 30-day engagement (0-100) and a flat
 * berth-linked bonus (0/25). The raw sum (max 425) is normalised to a
 * 0-100 total.
 *
 * Results are cached in Redis for SCORE_TTL seconds. Redis failures on
 * read or write are logged and treated as cache misses — scoring never
 * fails because the cache is down.
 *
 * @throws Error when the interest does not exist in this port.
 */
export async function calculateInterestScore(
  interestId: string,
  portId: string,
): Promise<InterestScore> {
  // Try cache first
  try {
    const cached = await redis.get(SCORE_KEY(interestId));
    if (cached) {
      // calculatedAt was serialised as an ISO string; revive it to a Date.
      const parsed = JSON.parse(cached) as InterestScore & { calculatedAt: string };
      return { ...parsed, calculatedAt: new Date(parsed.calculatedAt) };
    }
  } catch (err) {
    logger.warn({ err, interestId }, 'Redis cache read failed for interest score');
  }
  // Fetch interest
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) {
    throw new Error(`Interest not found: ${interestId}`);
  }
  // 1. Pipeline age
  const pipelineAge = scorePipelineAge(interest.createdAt);
  // 2. Stage speed
  const stageSpeed = scoreStageSpeed(interest.createdAt, interest.pipelineStage);
  // 3. Document completeness
  const documentCompleteness = scoreDocumentCompleteness(interest);
  // 4. Engagement — notes, emails, reminders in last 30 days
  const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
  // The three counts are independent, so they run in parallel.
  const [notesResult, remindersResult, emailResult] = await Promise.all([
    db
      .select({ value: count() })
      .from(interestNotes)
      .where(
        and(
          eq(interestNotes.interestId, interestId),
          gte(interestNotes.createdAt, thirtyDaysAgo),
        ),
      ),
    db
      .select({ value: count() })
      .from(reminders)
      .where(
        and(
          eq(reminders.interestId, interestId),
          // Only completed reminders count as engagement.
          eq(reminders.status, 'completed'),
          gte(reminders.completedAt, thirtyDaysAgo),
        ),
      ),
    db
      .select({ value: count() })
      .from(emailThreads)
      .where(
        and(
          // Email threads are linked per client, not per interest.
          eq(emailThreads.clientId, interest.clientId),
          eq(emailThreads.portId, portId),
          gte(emailThreads.lastMessageAt, thirtyDaysAgo),
        ),
      ),
  ]);
  const notesCount = notesResult[0]?.value ?? 0;
  const remindersCount = remindersResult[0]?.value ?? 0;
  const emailCount = emailResult[0]?.value ?? 0;
  // Per-channel caps: notes max 50, emails max 30, reminders max 20,
  // so engagement itself tops out at 100.
  const notesScore = Math.min(notesCount * 10, 50);
  const emailScore = Math.min(emailCount * 5, 30);
  const remindersScore = Math.min(remindersCount * 10, 20);
  const engagement = Math.min(notesScore + emailScore + remindersScore, 100);
  // 5. Berth linked
  const berthLinked = interest.berthId != null ? 25 : 0;
  // ── Normalise: max raw = 100+100+100+100+25 = 425 → /425 * 100 ──
  const RAW_MAX = 425;
  const rawTotal = pipelineAge + stageSpeed + documentCompleteness + engagement + berthLinked;
  const totalScore = Math.round((rawTotal / RAW_MAX) * 100);
  const result: InterestScore = {
    totalScore,
    breakdown: {
      pipelineAge,
      stageSpeed,
      documentCompleteness,
      engagement,
      berthLinked,
    },
    calculatedAt: new Date(),
  };
  // Write to cache (fire-and-forget)
  redis
    .setex(SCORE_KEY(interestId), SCORE_TTL, JSON.stringify(result))
    .catch((err) => logger.warn({ err, interestId }, 'Redis cache write failed for interest score'));
  return result;
}
// ─── Bulk scoring ─────────────────────────────────────────────────────────────
export async function calculateBulkScores(
portId: string,
): Promise<Array<{ interestId: string; score: InterestScore }>> {
const allInterests = await db
.select({ id: interests.id })
.from(interests)
.where(and(eq(interests.portId, portId), isNull(interests.archivedAt)));
const results = await Promise.allSettled(
allInterests.map(async (i) => {
const score = await calculateInterestScore(i.id, portId);
return { interestId: i.id, score };
}),
);
return results
.filter((r): r is PromiseFulfilledResult<{ interestId: string; score: InterestScore }> =>
r.status === 'fulfilled',
)
.map((r) => r.value);
}

View File

@@ -0,0 +1,591 @@
import { and, eq, inArray, isNull, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests, interestTags } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths } from '@/lib/db/schema/berths';
import { tags } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { buildListQuery } from '@/lib/db/query-builder';
import { diffEntity } from '@/lib/entity-diff';
import { softDelete, restore, withTransaction } from '@/lib/db/utils';
import type {
CreateInterestInput,
UpdateInterestInput,
ChangeStageInput,
ListInterestsInput,
} from '@/lib/validators/interests';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Request context recorded alongside every audit-log entry. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── BR-011: Auto-promote leadCategory ───────────────────────────────────────
/**
 * BR-011: promotes 'general_interest' (or a missing category) to
 * 'specific_qualified' when the client already has a yacht length on
 * file. Any other explicitly chosen category is returned unchanged.
 */
async function resolveLeadCategory(
  clientId: string,
  leadCategory: string | undefined | null,
): Promise<string | undefined> {
  // An explicit, non-default category wins without a DB lookup.
  if (leadCategory && leadCategory !== 'general_interest') {
    return leadCategory;
  }

  const client = await db.query.clients.findFirst({
    where: eq(clients.id, clientId),
  });
  const hasYachtLength = Boolean(client && (client.yachtLengthFt || client.yachtLengthM));
  if (hasYachtLength) {
    return 'specific_qualified';
  }

  return leadCategory ?? undefined;
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Paginated interest listing for a port with client/berth/stage/category
 * /EOI-status/tag filters. Each returned row is decorated with the
 * client's full name, the linked berth's mooring number, and its tags.
 *
 * @returns `{ data, total }` where `data` is the decorated page.
 */
export async function listInterests(portId: string, query: ListInterestsInput) {
  const {
    page,
    limit,
    sort,
    order,
    search,
    includeArchived,
    clientId,
    berthId,
    pipelineStage,
    leadCategory,
    eoiStatus,
    tagIds,
  } = query;
  const filters = [];
  if (clientId) {
    filters.push(eq(interests.clientId, clientId));
  }
  if (berthId) {
    filters.push(eq(interests.berthId, berthId));
  }
  // pipelineStage is a multi-select: match any of the given stages.
  if (pipelineStage && pipelineStage.length > 0) {
    filters.push(inArray(interests.pipelineStage, pipelineStage));
  }
  if (leadCategory) {
    filters.push(eq(interests.leadCategory, leadCategory));
  }
  if (eoiStatus) {
    filters.push(eq(interests.eoiStatus, eoiStatus));
  }
  // Tag filtering is resolved up front: find interest ids carrying any of
  // the requested tags, then constrain the main query to those ids.
  if (tagIds && tagIds.length > 0) {
    const interestsWithTags = await db
      .selectDistinct({ interestId: interestTags.interestId })
      .from(interestTags)
      .where(inArray(interestTags.tagId, tagIds));
    const matchingIds = interestsWithTags.map((r) => r.interestId);
    if (matchingIds.length > 0) {
      filters.push(inArray(interests.id, matchingIds));
    } else {
      // No interest carries any requested tag — short-circuit with an
      // empty page in the same { data, total } shape as the normal path.
      return { data: [], total: 0 };
    }
  }
  const sortColumn = (() => {
    switch (sort) {
      case 'pipelineStage': return interests.pipelineStage;
      case 'leadCategory': return interests.leadCategory;
      case 'createdAt': return interests.createdAt;
      default: return interests.updatedAt;
    }
  })();
  // NOTE(review): searchColumns is empty, so `search` is currently a
  // no-op in this listing — confirm whether interest search is
  // intentionally handled elsewhere.
  const result = await buildListQuery({
    table: interests,
    portIdColumn: interests.portId,
    portId,
    idColumn: interests.id,
    updatedAtColumn: interests.updatedAt,
    filters,
    sort: { column: sortColumn, direction: order },
    page,
    pageSize: limit,
    searchColumns: [],
    searchTerm: search,
    includeArchived,
    archivedAtColumn: interests.archivedAt,
  });
  // Join client names and berth mooring numbers
  const interestIds = (result.data as Array<{ id: string; clientId: string; berthId: string | null }>).map((i) => i.id);
  const clientIds = [...new Set((result.data as Array<{ clientId: string }>).map((i) => i.clientId))];
  const berthIds = [...new Set(
    (result.data as Array<{ berthId: string | null }>)
      .map((i) => i.berthId)
      .filter(Boolean) as string[]
  )];
  let clientsMap: Record<string, string> = {};
  let berthsMap: Record<string, string> = {};
  let tagsByInterestId: Record<string, Array<{ id: string; name: string; color: string }>> = {};
  if (clientIds.length > 0) {
    const clientRows = await db
      .select({ id: clients.id, fullName: clients.fullName })
      .from(clients)
      .where(inArray(clients.id, clientIds));
    clientsMap = Object.fromEntries(clientRows.map((c) => [c.id, c.fullName]));
  }
  if (berthIds.length > 0) {
    const berthRows = await db
      .select({ id: berths.id, mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(inArray(berths.id, berthIds));
    berthsMap = Object.fromEntries(berthRows.map((b) => [b.id, b.mooringNumber]));
  }
  // Fetch all tag links for the page in one query and bucket them per
  // interest id.
  if (interestIds.length > 0) {
    const tagRows = await db
      .select({
        interestId: interestTags.interestId,
        id: tags.id,
        name: tags.name,
        color: tags.color,
      })
      .from(interestTags)
      .innerJoin(tags, eq(interestTags.tagId, tags.id))
      .where(inArray(interestTags.interestId, interestIds));
    for (const row of tagRows) {
      if (!tagsByInterestId[row.interestId]) tagsByInterestId[row.interestId] = [];
      tagsByInterestId[row.interestId]!.push({ id: row.id, name: row.name, color: row.color });
    }
  }
  // Decorate each row with display fields for the list UI.
  const data = (result.data as Array<Record<string, unknown>>).map((i) => ({
    ...i,
    clientName: clientsMap[i.clientId as string] ?? null,
    berthMooringNumber: i.berthId ? (berthsMap[i.berthId as string] ?? null) : null,
    tags: tagsByInterestId[i.id as string] ?? [],
  }));
  return { data, total: result.total };
}
// ─── Get by ID ────────────────────────────────────────────────────────────────
/**
 * Loads a single interest decorated with the client's full name, the
 * linked berth's mooring number (if any), and its tags.
 * @throws NotFoundError when missing or belonging to another port.
 */
export async function getInterestById(id: string, portId: string) {
  const interest = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!interest || interest.portId !== portId) {
    throw new NotFoundError('Interest');
  }

  const clientRows = await db
    .select({ fullName: clients.fullName })
    .from(clients)
    .where(eq(clients.id, interest.clientId));
  const clientName = clientRows[0]?.fullName ?? null;

  let berthMooringNumber: string | null = null;
  if (interest.berthId) {
    const berthRows = await db
      .select({ mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(eq(berths.id, interest.berthId));
    berthMooringNumber = berthRows[0]?.mooringNumber ?? null;
  }

  const tagRows = await db
    .select({ id: tags.id, name: tags.name, color: tags.color })
    .from(interestTags)
    .innerJoin(tags, eq(interestTags.tagId, tags.id))
    .where(eq(interestTags.interestId, id));

  return {
    ...interest,
    clientName,
    berthMooringNumber,
    tags: tagRows,
  };
}
// ─── Create ───────────────────────────────────────────────────────────────────
export async function createInterest(
portId: string,
data: CreateInterestInput,
meta: AuditMeta,
) {
const { tagIds, ...interestData } = data;
// BR-011: auto-promote leadCategory
const resolvedLeadCategory = await resolveLeadCategory(
data.clientId,
data.leadCategory,
);
const result = await withTransaction(async (tx) => {
const [interest] = await tx
.insert(interests)
.values({
portId,
...interestData,
leadCategory: resolvedLeadCategory,
})
.returning();
if (tagIds && tagIds.length > 0) {
await tx.insert(interestTags).values(
tagIds.map((tagId) => ({ interestId: interest!.id, tagId })),
);
}
return interest!;
});
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'interest',
entityId: result.id,
newValue: { clientId: result.clientId, pipelineStage: result.pipelineStage },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:created', { interestId: result.id, clientId: result.clientId, berthId: result.berthId ?? null, source: result.source ?? '' });
void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
dispatchWebhookEvent(portId, 'interest:created', { interestId: result.id, clientId: result.clientId }),
);
return result;
}
// ─── Update ───────────────────────────────────────────────────────────────────
export async function updateInterest(
id: string,
portId: string,
data: UpdateInterestInput,
meta: AuditMeta,
) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
// BR-011: auto-promote leadCategory if provided
let resolvedLeadCategory = data.leadCategory;
if ('leadCategory' in data) {
resolvedLeadCategory = await resolveLeadCategory(
existing.clientId,
data.leadCategory,
) as typeof data.leadCategory;
}
const updateData = { ...data, leadCategory: resolvedLeadCategory };
const { diff } = diffEntity(
existing as Record<string, unknown>,
updateData as Record<string, unknown>,
);
const [updated] = await db
.update(interests)
.set({ ...updateData, updatedAt: new Date() })
.where(and(eq(interests.id, id), eq(interests.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'interest',
entityId: id,
oldValue: diff as Record<string, unknown>,
newValue: updateData as Record<string, unknown>,
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: Object.keys(diff) });
return updated!;
}
// ─── Change Stage ─────────────────────────────────────────────────────────────
/**
 * Moves an interest to a new pipeline stage.
 *
 * BR-133: reaching certain stages auto-populates the matching milestone
 * date (EOI signed / contract signed / deposit received).
 *
 * Fix: milestone dates were previously written in a *second* UPDATE
 * issued after `.returning()`, so the record handed back to the caller
 * was stale (missing the milestone fields) and two round-trips were
 * made. The milestones are now folded into the single stage UPDATE.
 *
 * @throws NotFoundError when missing or belonging to another port.
 */
export async function changeInterestStage(
  id: string,
  portId: string,
  data: ChangeStageInput,
  meta: AuditMeta,
) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  const oldStage = existing.pipelineStage;

  // BR-133: Auto-populate milestones based on stage
  const milestoneUpdates: Record<string, unknown> = {};
  if (data.pipelineStage === 'signed_eoi_nda') milestoneUpdates.dateEoiSigned = new Date();
  if (data.pipelineStage === 'contract') milestoneUpdates.dateContractSigned = new Date();
  if (data.pipelineStage === 'deposit_10pct') milestoneUpdates.dateDepositReceived = new Date();

  const [updated] = await db
    .update(interests)
    .set({
      pipelineStage: data.pipelineStage,
      ...milestoneUpdates,
      updatedAt: new Date(),
    })
    .where(and(eq(interests.id, id), eq(interests.portId, portId)))
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'interest',
    entityId: id,
    oldValue: { pipelineStage: oldStage },
    newValue: { pipelineStage: data.pipelineStage, reason: data.reason },
    metadata: { type: 'stage_change', reason: data.reason },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:stageChanged', {
    interestId: id,
    oldStage: oldStage ?? '',
    newStage: data.pipelineStage,
    clientName: '',
    berthNumber: '',
  });
  // Webhook dispatch is loaded lazily and not awaited.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'interest:stageChanged', {
      interestId: id,
      oldStage: oldStage ?? null,
      newStage: data.pipelineStage,
    }),
  );
  // Fire-and-forget notification to the acting user
  void import('@/lib/services/notifications.service').then(({ createNotification }) =>
    createNotification({
      portId,
      userId: meta.userId,
      type: 'interest_stage_changed',
      title: `Interest moved to ${data.pipelineStage}`,
      description: `Interest ${id} stage changed from ${oldStage ?? 'unknown'} to ${data.pipelineStage}`,
      link: `/interests/${id}`,
      entityType: 'interest',
      entityId: id,
      dedupeKey: `interest:${id}:stage:${data.pipelineStage}`,
      cooldownMs: 300_000,
    }),
  );
  return updated!;
}
// ─── Archive / Restore ────────────────────────────────────────────────────────
/**
 * Soft-deletes an interest. BR-014: refused while an EOI is awaiting
 * signatures or a contract is pending.
 * @throws NotFoundError / ConflictError accordingly.
 */
export async function archiveInterest(id: string, portId: string, meta: AuditMeta) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }

  // BR-014: Block archive if pending EOI/contract
  const hasPendingDocs =
    existing.eoiStatus === 'waiting_for_signatures' ||
    existing.contractStatus === 'pending';
  if (hasPendingDocs) {
    throw new ConflictError('Cannot archive interest with pending documents. Cancel documents first.');
  }

  await softDelete(interests, interests.id, id);

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'archive',
    entityType: 'interest',
    entityId: id,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:archived', { interestId: id });
}
/**
 * Clears the archived state of an interest and logs the restore.
 * @throws NotFoundError when missing or belonging to another port.
 */
export async function restoreInterest(id: string, portId: string, meta: AuditMeta) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }

  await restore(interests, interests.id, id);

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'restore',
    entityType: 'interest',
    entityId: id,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: [] });
}
// ─── Set Tags ─────────────────────────────────────────────────────────────────
export async function setInterestTags(
id: string,
portId: string,
tagIds: string[],
meta: AuditMeta,
) {
const existing = await db.query.interests.findFirst({
where: eq(interests.id, id),
});
if (!existing || existing.portId !== portId) {
throw new NotFoundError('Interest');
}
await db.delete(interestTags).where(eq(interestTags.interestId, id));
if (tagIds.length > 0) {
await db
.insert(interestTags)
.values(tagIds.map((tagId) => ({ interestId: id, tagId })));
}
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'interest',
entityId: id,
metadata: { type: 'tags_updated', tagIds },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'interest:updated', { interestId: id, changedFields: ['tags'] });
return { interestId: id, tagIds };
}
// ─── Link / Unlink Berth ──────────────────────────────────────────────────────
/**
 * Attaches a berth to an interest and broadcasts the link via socket and
 * webhook. @throws NotFoundError when missing or port mismatch.
 */
export async function linkBerth(
  id: string,
  portId: string,
  berthId: string,
  meta: AuditMeta,
) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }

  const rows = await db
    .update(interests)
    .set({ berthId, updatedAt: new Date() })
    .where(and(eq(interests.id, id), eq(interests.portId, portId)))
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'interest',
    entityId: id,
    oldValue: { berthId: existing.berthId },
    newValue: { berthId },
    metadata: { type: 'berth_linked' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'interest:berthLinked', { interestId: id, berthId });
  // Webhook dispatch is loaded lazily and not awaited.
  void import('@/lib/services/webhook-dispatch').then(({ dispatchWebhookEvent }) =>
    dispatchWebhookEvent(portId, 'interest:berthLinked', { interestId: id, berthId }),
  );
  return rows[0]!;
}
/**
 * Detaches the currently linked berth from an interest and emits the
 * corresponding socket event. @throws NotFoundError when missing.
 */
export async function unlinkBerth(id: string, portId: string, meta: AuditMeta) {
  const existing = await db.query.interests.findFirst({
    where: eq(interests.id, id),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Interest');
  }
  const oldBerthId = existing.berthId;

  const rows = await db
    .update(interests)
    .set({ berthId: null, updatedAt: new Date() })
    .where(and(eq(interests.id, id), eq(interests.portId, portId)))
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'interest',
    entityId: id,
    oldValue: { berthId: oldBerthId },
    newValue: { berthId: null },
    metadata: { type: 'berth_unlinked' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });

  emitToRoom(`port:${portId}`, 'interest:berthUnlinked', { interestId: id, berthId: oldBerthId ?? '' });
  return rows[0]!;
}
// ─── Stage Counts (for board) ────────────────────────────────────────────────
/**
 * Returns a map of pipelineStage → number of non-archived interests,
 * used for the pipeline board column headers.
 */
export async function getInterestStageCounts(portId: string) {
  const rows = await db
    .select({
      stage: interests.pipelineStage,
      count: sql<number>`count(*)::int`,
    })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);

  const counts: Record<string, number> = {};
  for (const { stage, count } of rows) {
    counts[stage] = count;
  }
  return counts;
}

View File

@@ -0,0 +1,657 @@
import { eq, and, desc, like, lt, sql, gte, lte, inArray, ne } from 'drizzle-orm';
import { db } from '@/lib/db';
import {
invoices,
invoiceLineItems,
invoiceExpenses,
expenses,
} from '@/lib/db/schema/financial';
import { files } from '@/lib/db/schema/documents';
import { ports } from '@/lib/db/schema/ports';
import { systemSettings } from '@/lib/db/schema/system';
import { buildListQuery } from '@/lib/db/query-builder';
import { createAuditLog } from '@/lib/audit';
import { diffEntity } from '@/lib/entity-diff';
import { withTransaction } from '@/lib/db/utils';
import { NotFoundError, ConflictError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import { logger } from '@/lib/logger';
import { generatePdf } from '@/lib/pdf/generate';
import { invoiceTemplate, buildInvoiceInputs } from '@/lib/pdf/templates/invoice-template';
import { minioClient, buildStoragePath } from '@/lib/minio';
import { getQueue } from '@/lib/queue';
import { env } from '@/lib/env';
import type {
CreateInvoiceInput,
UpdateInvoiceInput,
RecordPaymentInput,
ListInvoicesInput,
} from '@/lib/validators/invoices';
// AuditMeta type expected by service functions
/** Request context recorded alongside every audit-log entry. */
export interface ServiceAuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Auto-numbering (BR-041) ───────────────────────────────────────────────
/**
 * Generates the next sequential invoice number for a port, formatted
 * `INV-YYYYMM-SSS` (the sequence restarts each month, zero-padded to
 * three digits but allowed to grow beyond).
 *
 * Serialised with a transaction-scoped advisory lock
 * (`pg_advisory_xact_lock`, released at commit) so two concurrent
 * invoices in the same port cannot draw the same number.
 *
 * Fix: the previous implementation picked the "latest" number with a
 * lexicographic ORDER BY, which breaks once a month exceeds 999 invoices
 * ('INV-202501-999' sorts after 'INV-202501-1000'), producing duplicate
 * numbers. The sequence is now the numeric max over the month's numbers.
 */
async function generateInvoiceNumber(portId: string, tx: typeof db): Promise<string> {
  const lockKey = `invoice_${portId}`;
  await tx.execute(sql`SELECT pg_advisory_xact_lock(hashtext(${lockKey}))`);

  const now = new Date();
  const prefix = `INV-${now.getFullYear()}${String(now.getMonth() + 1).padStart(2, '0')}`;

  const rows = await tx
    .select({ invoiceNumber: invoices.invoiceNumber })
    .from(invoices)
    .where(
      and(eq(invoices.portId, portId), like(invoices.invoiceNumber, `${prefix}-%`)),
    );

  // Take the numeric maximum of the trailing sequence segments.
  let maxSeq = 0;
  for (const row of rows) {
    const parts = row.invoiceNumber.split('-');
    const seq = parseInt(parts[parts.length - 1] ?? '0', 10);
    if (Number.isFinite(seq) && seq > maxSeq) maxSeq = seq;
  }
  return `${prefix}-${String(maxSeq + 1).padStart(3, '0')}`;
}
// ─── List ─────────────────────────────────────────────────────────────────
/**
 * Paginated invoice listing with status / client-name / due-date-range
 * filters plus free-text search over client name and invoice number.
 */
export async function listInvoices(portId: string, query: ListInvoicesInput) {
  const filters = [];
  if (query.status) filters.push(eq(invoices.status, query.status));
  if (query.clientName) filters.push(like(invoices.clientName, `%${query.clientName}%`));
  if (query.dateFrom) filters.push(gte(invoices.dueDate, query.dateFrom));
  if (query.dateTo) filters.push(lte(invoices.dueDate, query.dateTo));

  // NOTE(review): the sort column is resolved dynamically off the table
  // object; assumes the validator restricts `sort` to real column names.
  const sortSpec = query.sort
    ? {
        column: invoices[query.sort as keyof typeof invoices] as any,
        direction: query.order,
      }
    : undefined;

  return buildListQuery({
    table: invoices,
    portIdColumn: invoices.portId,
    portId,
    idColumn: invoices.id,
    updatedAtColumn: invoices.updatedAt,
    filters,
    page: query.page,
    pageSize: query.limit,
    searchColumns: [invoices.clientName, invoices.invoiceNumber],
    searchTerm: query.search,
    includeArchived: query.includeArchived,
    archivedAtColumn: invoices.archivedAt,
    sort: sortSpec,
  });
}
// ─── Get by ID ────────────────────────────────────────────────────────────
/**
 * Fetches a single invoice scoped to a port, enriched with its ordered line
 * items and any expenses linked through the `invoiceExpenses` join table.
 *
 * @throws NotFoundError when the invoice does not exist in this port.
 */
export async function getInvoiceById(id: string, portId: string) {
  const invoice = await db.query.invoices.findFirst({
    where: and(eq(invoices.id, id), eq(invoices.portId, portId)),
  });
  if (!invoice) throw new NotFoundError('Invoice');
  // Line items and linked expenses are independent; fetch them in parallel.
  const [lineItems, linkedExpenses] = await Promise.all([
    db
      .select()
      .from(invoiceLineItems)
      .where(eq(invoiceLineItems.invoiceId, id))
      .orderBy(invoiceLineItems.sortOrder),
    db
      .select({ expense: expenses })
      .from(invoiceExpenses)
      .innerJoin(expenses, eq(expenses.id, invoiceExpenses.expenseId))
      .where(eq(invoiceExpenses.invoiceId, id)),
  ]);
  return {
    ...invoice,
    lineItems,
    linkedExpenses: linkedExpenses.map((r) => r.expense),
  };
}
// ─── Create (BR-041, BR-042, BR-045) ─────────────────────────────────────
/**
 * Creates a draft invoice with line items and optional expense links.
 *
 * - BR-041: the invoice number is generated sequentially per port/month
 *   under an advisory lock inside the same transaction.
 * - BR-042: a discount applies for 'net10' payment terms, read from the
 *   `invoice_net10_discount` system setting (default 2%).
 * - BR-045: an expense cannot be linked if it is already attached to any
 *   non-draft invoice.
 *
 * Audit log and socket emission happen after commit (fire-and-forget).
 */
export async function createInvoice(
  portId: string,
  data: CreateInvoiceInput,
  meta: ServiceAuditMeta,
) {
  const invoice = await withTransaction(async (tx) => {
    const invoiceNumber = await generateInvoiceNumber(portId, tx);
    // Calculate subtotal from line items
    // NOTE(review): monetary math is done in binary floats before being
    // stringified into numeric columns — confirm the rounding policy.
    const lineItemsData = data.lineItems ?? [];
    const subtotal = lineItemsData.reduce(
      (sum, li) => sum + li.quantity * li.unitPrice,
      0,
    );
    // BR-042: net10 discount — read from systemSettings
    let discountPct = 0;
    if (data.paymentTerms === 'net10') {
      const [setting] = await tx
        .select({ value: systemSettings.value })
        .from(systemSettings)
        .where(
          and(
            eq(systemSettings.key, 'invoice_net10_discount'),
            eq(systemSettings.portId, portId),
          ),
        )
        .limit(1);
      if (setting) {
        // `|| 2` also replaces a stored non-numeric (or zero) value with 2.
        discountPct = Number(setting.value) || 2;
      } else {
        discountPct = 2;
      }
    }
    const discountAmount = (subtotal * discountPct) / 100;
    const feeAmount = 0; // No fee by default
    const feePct = 0;
    const total = subtotal - discountAmount + feeAmount;
    // BR-045: Verify expenses aren't already linked to a non-draft invoice
    const expenseIds = data.expenseIds ?? [];
    if (expenseIds.length > 0) {
      const alreadyLinked = await tx
        .select({ expenseId: invoiceExpenses.expenseId })
        .from(invoiceExpenses)
        .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
        .where(
          and(
            inArray(invoiceExpenses.expenseId, expenseIds),
            sql`${invoices.status} != 'draft'`,
          ),
        )
        .limit(1);
      if (alreadyLinked.length > 0) {
        throw new ConflictError(
          'One or more expenses are already linked to a non-draft invoice',
        );
      }
    }
    // Numeric columns take strings; new invoices always start as an unpaid draft.
    const [newInvoice] = await tx
      .insert(invoices)
      .values({
        portId,
        invoiceNumber,
        clientName: data.clientName,
        billingEmail: data.billingEmail ?? null,
        billingAddress: data.billingAddress ?? null,
        dueDate: data.dueDate,
        paymentTerms: data.paymentTerms ?? 'net30',
        currency: data.currency ?? 'USD',
        subtotal: String(subtotal),
        discountPct: String(discountPct),
        discountAmount: String(discountAmount),
        feePct: String(feePct),
        feeAmount: String(feeAmount),
        total: String(total),
        status: 'draft',
        paymentStatus: 'unpaid',
        notes: data.notes ?? null,
        createdBy: meta.userId,
      })
      .returning();
    if (!newInvoice) throw new Error('Insert failed');
    // Insert line items (sortOrder preserves the caller-supplied order).
    if (lineItemsData.length > 0) {
      await tx.insert(invoiceLineItems).values(
        lineItemsData.map((li, idx) => ({
          invoiceId: newInvoice.id,
          description: li.description,
          quantity: String(li.quantity),
          unitPrice: String(li.unitPrice),
          total: String(li.quantity * li.unitPrice),
          sortOrder: idx,
        })),
      );
    }
    // Link expenses
    if (expenseIds.length > 0) {
      await tx.insert(invoiceExpenses).values(
        expenseIds.map((expenseId) => ({
          invoiceId: newInvoice.id,
          expenseId,
        })),
      );
    }
    return newInvoice;
  });
  // Fire-and-forget: audit failures must not fail the request.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'invoice',
    entityId: invoice.id,
    newValue: invoice as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:created', {
    invoiceId: invoice.id,
    invoiceNumber: invoice.invoiceNumber,
    total: Number(invoice.total),
    clientName: invoice.clientName,
  });
  return invoice;
}
// ─── Update (draft only) ──────────────────────────────────────────────────
/**
 * Updates a draft invoice (non-draft invoices are immutable).
 *
 * - When `lineItems` is provided, the existing set is replaced and totals
 *   are recalculated, re-applying the BR-042 net10 discount; the invoice's
 *   existing fee is preserved.
 * - When `expenseIds` is provided, links are replaced after the BR-045
 *   check (an expense may not already be linked to a different non-draft
 *   invoice).
 *
 * Emits an audit entry with a field diff and an `invoice:updated` event.
 */
export async function updateInvoice(
  id: string,
  portId: string,
  data: UpdateInvoiceInput,
  meta: ServiceAuditMeta,
) {
  const existing = await getInvoiceById(id, portId);
  if (existing.status !== 'draft') {
    throw new ConflictError('Only draft invoices can be updated');
  }
  const updated = await withTransaction(async (tx) => {
    const updateData: Record<string, unknown> = { updatedAt: new Date() };
    // PATCH semantics: only copy fields the caller explicitly supplied.
    if (data.clientName !== undefined) updateData.clientName = data.clientName;
    if (data.billingEmail !== undefined) updateData.billingEmail = data.billingEmail;
    if (data.billingAddress !== undefined) updateData.billingAddress = data.billingAddress;
    if (data.dueDate !== undefined) updateData.dueDate = data.dueDate;
    if (data.paymentTerms !== undefined) updateData.paymentTerms = data.paymentTerms;
    if (data.currency !== undefined) updateData.currency = data.currency;
    if (data.notes !== undefined) updateData.notes = data.notes;
    // Recalculate totals if line items changed
    if (data.lineItems !== undefined) {
      const lineItemsData = data.lineItems;
      const subtotal = lineItemsData.reduce(
        (sum, li) => sum + li.quantity * li.unitPrice,
        0,
      );
      // Discount depends on the effective payment terms after this update.
      const paymentTerms = data.paymentTerms ?? existing.paymentTerms;
      let discountPct = 0;
      if (paymentTerms === 'net10') {
        const [setting] = await tx
          .select({ value: systemSettings.value })
          .from(systemSettings)
          .where(
            and(
              eq(systemSettings.key, 'invoice_net10_discount'),
              eq(systemSettings.portId, portId),
            ),
          )
          .limit(1);
        discountPct = setting ? Number(setting.value) || 2 : 2;
      }
      const discountAmount = (subtotal * discountPct) / 100;
      // Carry the fee over from the existing invoice unchanged.
      const feeAmount = Number(existing.feeAmount) || 0;
      const feePct = Number(existing.feePct) || 0;
      const total = subtotal - discountAmount + feeAmount;
      updateData.subtotal = String(subtotal);
      updateData.discountPct = String(discountPct);
      updateData.discountAmount = String(discountAmount);
      updateData.feePct = String(feePct);
      updateData.feeAmount = String(feeAmount);
      updateData.total = String(total);
      // Replace line items (delete-then-insert inside the transaction).
      await tx.delete(invoiceLineItems).where(eq(invoiceLineItems.invoiceId, id));
      if (lineItemsData.length > 0) {
        await tx.insert(invoiceLineItems).values(
          lineItemsData.map((li, idx) => ({
            invoiceId: id,
            description: li.description,
            quantity: String(li.quantity),
            unitPrice: String(li.unitPrice),
            total: String(li.quantity * li.unitPrice),
            sortOrder: idx,
          })),
        );
      }
    }
    // Replace expense links if provided
    if (data.expenseIds !== undefined) {
      // BR-045 — this invoice's own links are excluded via ne(invoices.id, id).
      if (data.expenseIds.length > 0) {
        const alreadyLinked = await tx
          .select({ expenseId: invoiceExpenses.expenseId })
          .from(invoiceExpenses)
          .innerJoin(invoices, eq(invoices.id, invoiceExpenses.invoiceId))
          .where(
            and(
              inArray(invoiceExpenses.expenseId, data.expenseIds),
              sql`${invoices.status} != 'draft'`,
              ne(invoices.id, id),
            ),
          )
          .limit(1);
        if (alreadyLinked.length > 0) {
          throw new ConflictError(
            'One or more expenses are already linked to a non-draft invoice',
          );
        }
      }
      await tx.delete(invoiceExpenses).where(eq(invoiceExpenses.invoiceId, id));
      if (data.expenseIds.length > 0) {
        await tx.insert(invoiceExpenses).values(
          data.expenseIds.map((expenseId) => ({ invoiceId: id, expenseId })),
        );
      }
    }
    const [result] = await tx
      .update(invoices)
      .set(updateData as any)
      .where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
      .returning();
    if (!result) throw new NotFoundError('Invoice');
    return result;
  });
  // Compute a field-level diff for the audit trail and socket payload.
  const { diff } = diffEntity(
    existing as unknown as Record<string, unknown>,
    updated as unknown as Record<string, unknown>,
  );
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    oldValue: existing as unknown as Record<string, unknown>,
    newValue: updated as unknown as Record<string, unknown>,
    metadata: { diff },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:updated', {
    invoiceId: id,
    changedFields: Object.keys(diff),
  });
  return updated;
}
// ─── Delete (draft only) ──────────────────────────────────────────────────
/**
 * Permanently removes a draft invoice together with its line items and
 * expense links. Non-draft invoices are immutable and cannot be deleted.
 */
export async function deleteInvoice(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const invoice = await getInvoiceById(id, portId);
  if (invoice.status !== 'draft') {
    throw new ConflictError('Only draft invoices can be deleted');
  }
  await withTransaction(async (tx) => {
    // Child rows first, then the invoice row itself.
    await tx.delete(invoiceExpenses).where(eq(invoiceExpenses.invoiceId, id));
    await tx.delete(invoiceLineItems).where(eq(invoiceLineItems.invoiceId, id));
    await tx
      .delete(invoices)
      .where(and(eq(invoices.id, id), eq(invoices.portId, portId)));
  });
  // Fire-and-forget audit trail; the deleted row is kept as oldValue.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'invoice',
    entityId: id,
    oldValue: invoice as unknown as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'invoice:updated', {
    invoiceId: id,
    changedFields: ['status'],
  });
}
// ─── Generate PDF ─────────────────────────────────────────────────────────
/**
 * Renders the invoice as a PDF, uploads it to MinIO, records a `files` row,
 * and points `invoices.pdfFileId` at that row. Returns the new file record.
 *
 * NOTE(review): the MinIO upload, the `files` insert, and the invoice update
 * are not transactional — a failure mid-way can leave an orphan object or a
 * file row without a back-reference. Confirm whether cleanup exists elsewhere.
 */
export async function generateInvoicePdf(
  id: string,
  portId: string,
  meta: ServiceAuditMeta,
) {
  const invoice = await getInvoiceById(id, portId);
  const [port] = await db
    .select({ id: ports.id, name: ports.name, slug: ports.slug })
    .from(ports)
    .where(eq(ports.id, portId))
    .limit(1);
  const inputs = buildInvoiceInputs(invoice, invoice.lineItems, port);
  const pdfBytes = await generatePdf(invoiceTemplate, [inputs]);
  // Storage path is namespaced by port slug, falling back to the raw port id.
  const fileId = crypto.randomUUID();
  const storagePath = buildStoragePath(port?.slug ?? portId, 'invoices', id, fileId, 'pdf');
  await minioClient.putObject(
    env.MINIO_BUCKET,
    storagePath,
    Buffer.from(pdfBytes),
    pdfBytes.length,
    { 'Content-Type': 'application/pdf' },
  );
  const [fileRecord] = await db
    .insert(files)
    .values({
      portId,
      filename: `invoice-${invoice.invoiceNumber}.pdf`,
      originalName: `invoice-${invoice.invoiceNumber}.pdf`,
      mimeType: 'application/pdf',
      sizeBytes: String(pdfBytes.length),
      storagePath,
      storageBucket: env.MINIO_BUCKET,
      category: 'invoice',
      uploadedBy: meta.userId,
    })
    .returning();
  if (!fileRecord) throw new Error('File record insert failed');
  // Link the generated PDF back to the invoice.
  await db
    .update(invoices)
    .set({ pdfFileId: fileRecord.id, updatedAt: new Date() })
    .where(and(eq(invoices.id, id), eq(invoices.portId, portId)));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'invoice',
    entityId: id,
    metadata: { action: 'pdf_generated', fileId: fileRecord.id },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return fileRecord;
}
// ─── Send invoice ─────────────────────────────────────────────────────────
export async function sendInvoice(
id: string,
portId: string,
meta: ServiceAuditMeta,
) {
const invoice = await getInvoiceById(id, portId);
// Generate PDF if not exists
let pdfFileId = invoice.pdfFileId;
if (!pdfFileId) {
const fileRecord = await generateInvoicePdf(id, portId, meta);
pdfFileId = fileRecord.id;
}
// Queue email job
await getQueue('email').add('send-invoice', { invoiceId: id, portId });
// Update status to 'sent'
const [updated] = await db
.update(invoices)
.set({ status: 'sent', updatedAt: new Date() })
.where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'invoice',
entityId: id,
oldValue: { status: invoice.status },
newValue: { status: 'sent' },
metadata: { action: 'invoice_sent' },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'invoice:sent', {
invoiceId: id,
invoiceNumber: invoice.invoiceNumber,
recipientEmail: invoice.billingEmail ?? '',
});
return updated;
}
// ─── Record payment ───────────────────────────────────────────────────────
export async function recordPayment(
id: string,
portId: string,
data: RecordPaymentInput,
meta: ServiceAuditMeta,
) {
const existing = await getInvoiceById(id, portId);
const [updated] = await db
.update(invoices)
.set({
paymentStatus: 'paid',
paymentDate: data.paymentDate,
paymentMethod: data.paymentMethod ?? null,
paymentReference: data.paymentReference ?? null,
status: 'paid',
updatedAt: new Date(),
})
.where(and(eq(invoices.id, id), eq(invoices.portId, portId)))
.returning();
if (!updated) throw new NotFoundError('Invoice');
void createAuditLog({
userId: meta.userId,
portId,
action: 'update',
entityType: 'invoice',
entityId: id,
oldValue: { status: existing.status, paymentStatus: existing.paymentStatus },
newValue: { status: 'paid', paymentStatus: 'paid', paymentDate: data.paymentDate },
metadata: { action: 'payment_recorded' },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
emitToRoom(`port:${portId}`, 'invoice:paid', {
invoiceId: id,
invoiceNumber: existing.invoiceNumber,
amount: Number(existing.total),
});
return updated;
}
// ─── Detect overdue (BR-044) ──────────────────────────────────────────────
export async function detectOverdue(portId: string) {
const today = new Date().toISOString().split('T')[0]!;
const overdueInvoices = await db
.select({ id: invoices.id, invoiceNumber: invoices.invoiceNumber, dueDate: invoices.dueDate })
.from(invoices)
.where(
and(
eq(invoices.portId, portId),
eq(invoices.status, 'sent'),
lt(invoices.dueDate, today),
),
);
if (overdueInvoices.length === 0) return;
for (const inv of overdueInvoices) {
await db
.update(invoices)
.set({ status: 'overdue', updatedAt: new Date() })
.where(eq(invoices.id, inv.id));
const daysPastDue = Math.max(1, Math.ceil(
(Date.now() - new Date(inv.dueDate).getTime()) / (1000 * 60 * 60 * 24),
));
emitToRoom(`port:${portId}`, 'invoice:overdue', {
invoiceId: inv.id,
invoiceNumber: inv.invoiceNumber,
daysPastDue,
});
await getQueue('notifications').add('invoice-overdue-notify', {
invoiceId: inv.id,
portId,
});
logger.info(
{ invoiceId: inv.id, invoiceNumber: inv.invoiceNumber, portId },
'Invoice marked overdue',
);
}
}

View File

@@ -0,0 +1,281 @@
import { eq, and, desc } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clientNotes, clients } from '@/lib/db/schema/clients';
import { interestNotes, interests } from '@/lib/db/schema/interests';
import { userProfiles } from '@/lib/db/schema/users';
import { NotFoundError, ValidationError } from '@/lib/errors';
import type { CreateNoteInput, UpdateNoteInput } from '@/lib/validators/notes';
const EDIT_WINDOW_MS = 15 * 60 * 1000; // 15 minutes
// Notes hang off two parent entities; the literal values mirror the API
// path segments (`/clients/:id/notes`, `/interests/:id/notes`).
type EntityType = 'clients' | 'interests';
// ─── Helpers ─────────────────────────────────────────────────────────────────
// Resolve the notes table for an entity type.
// NOTE(review): not referenced anywhere in this module — confirm whether it
// is used by callers elsewhere or is leftover from an abandoned refactor.
function getTable(entityType: EntityType) {
  return entityType === 'clients' ? clientNotes : interestNotes;
}
// Resolve the FK column linking a note to its parent entity.
// NOTE(review): also unreferenced in this module — see getTable.
function getEntityIdField(entityType: EntityType) {
  return entityType === 'clients' ? clientNotes.clientId : interestNotes.interestId;
}
async function verifyParentBelongsToPort(
entityType: EntityType,
entityId: string,
portId: string,
): Promise<void> {
if (entityType === 'clients') {
const client = await db
.select({ id: clients.id })
.from(clients)
.where(and(eq(clients.id, entityId), eq(clients.portId, portId)))
.limit(1);
if (!client.length) throw new NotFoundError('Client');
} else {
const interest = await db
.select({ id: interests.id })
.from(interests)
.where(and(eq(interests.id, entityId), eq(interests.portId, portId)))
.limit(1);
if (!interest.length) throw new NotFoundError('Interest');
}
}
// ─── Service ─────────────────────────────────────────────────────────────────
/**
 * Lists all notes on a client or interest, newest first, with the author's
 * display name joined in. Verifies the parent belongs to the port first.
 */
export async function listForEntity(
  portId: string,
  entityType: EntityType,
  entityId: string,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'clients') {
    const rows = await db
      .select({
        id: clientNotes.id,
        clientId: clientNotes.clientId,
        authorId: clientNotes.authorId,
        content: clientNotes.content,
        mentions: clientNotes.mentions,
        isLocked: clientNotes.isLocked,
        createdAt: clientNotes.createdAt,
        updatedAt: clientNotes.updatedAt,
        authorName: userProfiles.displayName,
      })
      // Left join so notes survive a missing/deleted author profile.
      .leftJoin(userProfiles, eq(userProfiles.userId, clientNotes.authorId))
      .from(clientNotes)
      .where(eq(clientNotes.clientId, entityId))
      .orderBy(desc(clientNotes.createdAt));
    return rows;
  } else {
    const rows = await db
      .select({
        id: interestNotes.id,
        interestId: interestNotes.interestId,
        authorId: interestNotes.authorId,
        content: interestNotes.content,
        mentions: interestNotes.mentions,
        isLocked: interestNotes.isLocked,
        createdAt: interestNotes.createdAt,
        updatedAt: interestNotes.updatedAt,
        authorName: userProfiles.displayName,
      })
      .from(interestNotes)
      .leftJoin(userProfiles, eq(userProfiles.userId, interestNotes.authorId))
      .where(eq(interestNotes.interestId, entityId))
      .orderBy(desc(interestNotes.createdAt));
    return rows;
  }
}
/** Look up a user's display name; null when no profile row exists. */
async function fetchAuthorName(userId: string): Promise<string | null> {
  const profile = await db
    .select({ displayName: userProfiles.displayName })
    .from(userProfiles)
    .where(eq(userProfiles.userId, userId))
    .limit(1);
  return profile[0]?.displayName ?? null;
}

/**
 * Fire-and-forget in-app notifications for users @-mentioned in a note.
 * The notifications service is imported lazily to avoid a module cycle.
 */
function dispatchMentionNotifications(opts: {
  portId: string;
  noteId: string;
  mentions: string[];
  authorName: string | null;
  link: string;
  entityType: 'client' | 'interest';
  entityId: string;
}): void {
  for (const mentionedUserId of opts.mentions) {
    void import('@/lib/services/notifications.service').then(({ createNotification }) =>
      createNotification({
        portId: opts.portId,
        userId: mentionedUserId,
        type: 'mention',
        title: 'You were mentioned in a note',
        description: `${opts.authorName ?? 'Someone'} mentioned you in a note`,
        link: opts.link,
        entityType: opts.entityType,
        entityId: opts.entityId,
        dedupeKey: `note:${opts.noteId}:mention:${mentionedUserId}`,
      }),
    );
  }
}

/**
 * Creates a note on a client or interest, returning the row augmented with
 * the author's display name. Mention notifications fire asynchronously.
 */
export async function create(
  portId: string,
  entityType: EntityType,
  entityId: string,
  authorId: string,
  data: CreateNoteInput,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'clients') {
    const [note] = await db
      .insert(clientNotes)
      .values({ clientId: entityId, authorId, content: data.content })
      .returning();
    if (!note) throw new Error('Insert failed');
    const authorName = await fetchAuthorName(authorId);
    // NOTE(review): mentions are read from the inserted row, but the insert
    // never writes them — they only fire if the DB populates the column.
    if (note.mentions && note.mentions.length > 0) {
      dispatchMentionNotifications({
        portId,
        noteId: note.id,
        mentions: note.mentions,
        authorName,
        link: `/clients/${entityId}`,
        entityType: 'client',
        entityId,
      });
    }
    return { ...note, authorName };
  }
  const [note] = await db
    .insert(interestNotes)
    .values({ interestId: entityId, authorId, content: data.content })
    .returning();
  if (!note) throw new Error('Insert failed');
  const authorName = await fetchAuthorName(authorId);
  if (note.mentions && note.mentions.length > 0) {
    dispatchMentionNotifications({
      portId,
      noteId: note.id,
      mentions: note.mentions,
      authorName,
      link: `/interests/${entityId}`,
      entityType: 'interest',
      entityId,
    });
  }
  return { ...note, authorName };
}
/**
 * Edits a note's content, enforcing the 15-minute edit window measured from
 * creation time. Returns the updated row with the author's display name.
 *
 * @throws NotFoundError when the note (or its parent) is missing.
 * @throws ValidationError when the edit window has expired.
 */
export async function update(
  portId: string,
  entityType: EntityType,
  entityId: string,
  noteId: string,
  data: UpdateNoteInput,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'clients') {
    const [existing] = await db
      .select()
      .from(clientNotes)
      .where(and(eq(clientNotes.id, noteId), eq(clientNotes.clientId, entityId)))
      .limit(1);
    if (!existing) throw new NotFoundError('Note');
    // Window is measured from creation, not from the last edit.
    if (Date.now() - new Date(existing.createdAt).getTime() > EDIT_WINDOW_MS) {
      throw new ValidationError('Note edit window has expired (15 minutes)');
    }
    const [updated] = await db
      .update(clientNotes)
      .set({ content: data.content, updatedAt: new Date() })
      .where(eq(clientNotes.id, noteId))
      .returning();
    if (!updated) throw new NotFoundError('Note');
    const profile = await db
      .select({ displayName: userProfiles.displayName })
      .from(userProfiles)
      .where(eq(userProfiles.userId, updated.authorId))
      .limit(1);
    return { ...updated, authorName: profile[0]?.displayName ?? null };
  } else {
    const [existing] = await db
      .select()
      .from(interestNotes)
      .where(and(eq(interestNotes.id, noteId), eq(interestNotes.interestId, entityId)))
      .limit(1);
    if (!existing) throw new NotFoundError('Note');
    if (Date.now() - new Date(existing.createdAt).getTime() > EDIT_WINDOW_MS) {
      throw new ValidationError('Note edit window has expired (15 minutes)');
    }
    const [updated] = await db
      .update(interestNotes)
      .set({ content: data.content, updatedAt: new Date() })
      .where(eq(interestNotes.id, noteId))
      .returning();
    if (!updated) throw new NotFoundError('Note');
    const profile = await db
      .select({ displayName: userProfiles.displayName })
      .from(userProfiles)
      .where(eq(userProfiles.userId, updated.authorId))
      .limit(1);
    return { ...updated, authorName: profile[0]?.displayName ?? null };
  }
}
/**
 * Deletes a note, subject to the same 15-minute window that guards edits.
 * Returns the deleted row so callers can audit it.
 */
export async function deleteNote(
  portId: string,
  entityType: EntityType,
  entityId: string,
  noteId: string,
) {
  await verifyParentBelongsToPort(entityType, entityId, portId);
  if (entityType === 'interests') {
    const [note] = await db
      .select()
      .from(interestNotes)
      .where(and(eq(interestNotes.id, noteId), eq(interestNotes.interestId, entityId)))
      .limit(1);
    if (!note) throw new NotFoundError('Note');
    const ageMs = Date.now() - new Date(note.createdAt).getTime();
    if (ageMs > EDIT_WINDOW_MS) {
      throw new ValidationError('Note edit window has expired (15 minutes)');
    }
    await db.delete(interestNotes).where(eq(interestNotes.id, noteId));
    return note;
  }
  const [note] = await db
    .select()
    .from(clientNotes)
    .where(and(eq(clientNotes.id, noteId), eq(clientNotes.clientId, entityId)))
    .limit(1);
  if (!note) throw new NotFoundError('Note');
  const ageMs = Date.now() - new Date(note.createdAt).getTime();
  if (ageMs > EDIT_WINDOW_MS) {
    throw new ValidationError('Note edit window has expired (15 minutes)');
  }
  await db.delete(clientNotes).where(eq(clientNotes.id, noteId));
  return note;
}

View File

@@ -0,0 +1,296 @@
import { and, count, eq, gt, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { notifications } from '@/lib/db/schema/operations';
import { userNotificationPreferences } from '@/lib/db/schema/system';
import { userProfiles } from '@/lib/db/schema/users';
import { emitToRoom } from '@/lib/socket/server';
import { getQueue } from '@/lib/queue';
import { NotFoundError } from '@/lib/errors';
import type { ListNotificationsInput, UpdatePreferencesInput } from '@/lib/validators/notifications';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Input for `createNotification`. */
export interface CreateNotificationParams {
  portId: string; // tenant/port scope
  userId: string; // recipient
  type: string; // notification type key; also selects the user's preference row
  title: string;
  description?: string;
  link?: string; // in-app destination for the notification
  entityType?: string; // optional entity reference for deep-linking
  entityId?: string;
  dedupeKey?: string; // when set, suppresses duplicates within cooldownMs
  cooldownMs?: number; // dedupe window; defaults to 300_000 (5 min) in createNotification
}
// ─── Helpers ─────────────────────────────────────────────────────────────────
/** Count a user's unread notifications within a port. */
async function getUnreadCountValue(userId: string, portId: string): Promise<number> {
  const unreadForUser = and(
    eq(notifications.userId, userId),
    eq(notifications.portId, portId),
    eq(notifications.isRead, false),
  );
  const rows = await db.select({ count: count() }).from(notifications).where(unreadForUser);
  return rows[0]?.count ?? 0;
}
// ─── createNotification ───────────────────────────────────────────────────────
/**
 * Creates an in-app notification and fans it out over sockets and email.
 *
 * Pipeline:
 *   1. Dedupe — suppress if the same `dedupeKey` fired within `cooldownMs`.
 *   2. Preferences — respect the user's per-type in-app opt-out
 *      ('system_alert' bypasses this check and is always delivered in-app).
 *   3. Insert the row; emit `notification:new` plus the new unread count.
 *   4. Enqueue an email job when the user's email preference is enabled.
 *
 * @returns the inserted row, or null when suppressed by dedupe/preferences.
 */
export async function createNotification(
  params: CreateNotificationParams,
): Promise<typeof notifications.$inferSelect | null> {
  const {
    portId,
    userId,
    type,
    title,
    description,
    link,
    entityType,
    entityId,
    dedupeKey,
    cooldownMs = 300_000, // 5 minutes
  } = params;
  // 1. Cooldown / deduplication check
  if (dedupeKey) {
    const cutoff = new Date(Date.now() - cooldownMs);
    const [existing] = await db
      .select({ id: notifications.id })
      .from(notifications)
      .where(
        and(
          eq(notifications.userId, userId),
          eq(notifications.type, type),
          gt(notifications.createdAt, cutoff),
          sql`${notifications.metadata}->>'dedupeKey' = ${dedupeKey}`,
        ),
      )
      .limit(1);
    if (existing) {
      return null; // suppressed by cooldown
    }
  }
  // 2. Load the user's per-type preference once; it drives both the in-app
  //    suppression check and the email decision below (it was previously
  //    fetched twice with identical criteria).
  const [pref] = await db
    .select({
      inApp: userNotificationPreferences.inApp,
      email: userNotificationPreferences.email,
    })
    .from(userNotificationPreferences)
    .where(
      and(
        eq(userNotificationPreferences.userId, userId),
        eq(userNotificationPreferences.portId, portId),
        eq(userNotificationPreferences.notificationType, type),
      ),
    )
    .limit(1);
  // Preference check — skipped for 'system_alert', which is always delivered.
  if (type !== 'system_alert' && pref && pref.inApp === false) {
    // In-app disabled: skip the insert entirely. The email path needs a
    // notification id, so email is skipped too, regardless of pref.email.
    return null;
  }
  // 3. Insert notification
  const [notif] = await db
    .insert(notifications)
    .values({
      portId,
      userId,
      type,
      title,
      description: description ?? null,
      link: link ?? null,
      entityType: entityType ?? null,
      entityId: entityId ?? null,
      isRead: false,
      emailSent: false,
      metadata: dedupeKey ? { dedupeKey } : {},
    })
    .returning();
  if (!notif) return null;
  // 4. Emit socket events
  emitToRoom(`user:${userId}`, 'notification:new', {
    notificationId: notif.id,
    type: notif.type,
    title: notif.title,
    description: notif.description ?? '',
    link: notif.link ?? '',
  });
  const unreadCount = await getUnreadCountValue(userId, portId);
  emitToRoom(`user:${userId}`, 'notification:unreadCount', { count: unreadCount });
  // 5. Email is opt-in: without an explicit preference row no email is sent.
  //    (The previous inline comment claimed the opposite default; this
  //    matches the actual behavior of `pref ? pref.email : false`.)
  const shouldEmail = pref ? pref.email : false;
  if (shouldEmail) {
    const queue = getQueue('notifications');
    await queue.add('send-notification-email', { notificationId: notif.id });
  }
  return notif;
}
// ─── listNotifications ────────────────────────────────────────────────────────
/**
 * Pages through a user's notifications for a port, newest first.
 * `unreadOnly` restricts both the rows and the reported total to unread.
 */
export async function listNotifications(
  userId: string,
  portId: string,
  query: ListNotificationsInput,
): Promise<{ data: (typeof notifications.$inferSelect)[]; total: number }> {
  const { page, limit, unreadOnly } = query;
  const conditions = [
    eq(notifications.userId, userId),
    eq(notifications.portId, portId),
    ...(unreadOnly ? [eq(notifications.isRead, false)] : []),
  ];
  const where = and(...conditions);
  const countQuery = db.select({ count: count() }).from(notifications).where(where);
  const pageQuery = db
    .select()
    .from(notifications)
    .where(where)
    .orderBy(sql`${notifications.createdAt} DESC`)
    .limit(limit)
    .offset((page - 1) * limit);
  // Count and page are independent — run them concurrently.
  const [totalRow, rows] = await Promise.all([countQuery, pageQuery]);
  return { data: rows, total: totalRow[0]?.count ?? 0 };
}
// ─── markRead ─────────────────────────────────────────────────────────────────
/**
 * Marks one notification as read for its owner, then pushes the refreshed
 * unread count to that user's socket room.
 *
 * @throws NotFoundError when the notification is missing or owned by
 *   someone else (ownership mismatch is deliberately indistinguishable).
 */
export async function markRead(notificationId: string, userId: string): Promise<void> {
  const [found] = await db
    .select({ id: notifications.id, portId: notifications.portId, userId: notifications.userId })
    .from(notifications)
    .where(eq(notifications.id, notificationId))
    .limit(1);
  if (!found || found.userId !== userId) {
    throw new NotFoundError('Notification');
  }
  await db
    .update(notifications)
    .set({ isRead: true })
    .where(and(eq(notifications.id, notificationId), eq(notifications.userId, userId)));
  const remaining = await getUnreadCountValue(userId, found.portId);
  emitToRoom(`user:${userId}`, 'notification:unreadCount', { count: remaining });
}
// ─── markAllRead ──────────────────────────────────────────────────────────────
/** Marks every unread notification for the user/port as read and resets the badge. */
export async function markAllRead(userId: string, portId: string): Promise<void> {
  const unreadForUser = and(
    eq(notifications.userId, userId),
    eq(notifications.portId, portId),
    eq(notifications.isRead, false),
  );
  await db.update(notifications).set({ isRead: true }).where(unreadForUser);
  // Everything is read now, so the unread badge is unconditionally zero.
  emitToRoom(`user:${userId}`, 'notification:unreadCount', { count: 0 });
}
// ─── getUnreadCount ───────────────────────────────────────────────────────────
/** API-shaped wrapper around the unread counter helper. */
export async function getUnreadCount(
  userId: string,
  portId: string,
): Promise<{ count: number }> {
  return { count: await getUnreadCountValue(userId, portId) };
}
// ─── getPreferences ───────────────────────────────────────────────────────────
/** Returns all stored per-type notification preferences for a user in a port. */
export async function getPreferences(userId: string, portId: string) {
  const scope = and(
    eq(userNotificationPreferences.userId, userId),
    eq(userNotificationPreferences.portId, portId),
  );
  return db.select().from(userNotificationPreferences).where(scope);
}
// ─── updatePreferences ────────────────────────────────────────────────────────
/**
 * Upserts a batch of per-type notification preferences, then returns the
 * full refreshed preference list. Rows are written sequentially so repeated
 * types in the payload cannot race each other.
 */
export async function updatePreferences(
  userId: string,
  portId: string,
  data: UpdatePreferencesInput,
) {
  const conflictTarget = [
    userNotificationPreferences.userId,
    userNotificationPreferences.portId,
    userNotificationPreferences.notificationType,
  ];
  for (const { notificationType, inApp, email } of data.preferences) {
    await db
      .insert(userNotificationPreferences)
      .values({ userId, portId, notificationType, inApp, email })
      .onConflictDoUpdate({
        target: conflictTarget,
        set: { inApp, email },
      });
  }
  return getPreferences(userId, portId);
}

View File

@@ -0,0 +1,389 @@
import { and, count, eq, inArray, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { documents, documentSigners, files } from '@/lib/db/schema/documents';
import { invoices } from '@/lib/db/schema/financial';
import { berths } from '@/lib/db/schema/berths';
import { ports } from '@/lib/db/schema/ports';
import { createPortalToken } from '@/lib/portal/auth';
import { sendEmail } from '@/lib/email';
import { getPresignedUrl } from '@/lib/minio';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
// ─── Magic Link ────────────────────────────────────────────────────────────────
/**
 * Requests a magic link for portal access.
 * Always returns success — never reveals whether an email exists in the system.
 *
 * Matches the address (case-insensitively via lowercasing) against client
 * email contacts, signs a portal JWT, and emails a branded sign-in link.
 * NOTE(review): the email copy promises a 24-hour expiry; the actual TTL is
 * set inside `createPortalToken` — confirm the two agree.
 */
export async function requestMagicLink(email: string): Promise<void> {
  const normalizedEmail = email.toLowerCase().trim();
  // Find client contact with matching email
  const contact = await db.query.clientContacts.findFirst({
    where: and(
      eq(clientContacts.channel, 'email'),
      eq(clientContacts.value, normalizedEmail),
    ),
    with: {
      client: true,
    },
  });
  if (!contact || !contact.client) {
    // Don't reveal that the email doesn't exist — silently return
    logger.debug({ email: normalizedEmail }, 'Portal magic link: no matching client contact');
    return;
  }
  const client = contact.client;
  // Build the JWT
  const token = await createPortalToken({
    clientId: client.id,
    portId: client.portId,
    email: normalizedEmail,
  });
  const magicLinkUrl = `${env.APP_URL}/verify?token=${encodeURIComponent(token)}`;
  // Fetch port name for the email
  const port = await db.query.ports.findFirst({
    where: eq(ports.id, client.portId),
  });
  const portName = port?.name ?? 'Port Nimara';
  const clientName = client.fullName;
  // Inline-styled HTML so it renders consistently in email clients.
  const html = `
    <!DOCTYPE html>
    <html>
    <head>
      <meta charset="utf-8">
      <meta name="viewport" content="width=device-width, initial-scale=1.0">
    </head>
    <body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: #f5f5f5; padding: 40px 0; margin: 0;">
      <div style="max-width: 480px; margin: 0 auto; background: #ffffff; border-radius: 8px; overflow: hidden; box-shadow: 0 2px 8px rgba(0,0,0,0.08);">
        <div style="background: #1e2844; padding: 32px 40px; text-align: center;">
          <h1 style="color: #ffffff; margin: 0; font-size: 22px; font-weight: 600;">${portName}</h1>
          <p style="color: #9ca3af; margin: 6px 0 0; font-size: 14px;">Client Portal</p>
        </div>
        <div style="padding: 40px;">
          <p style="color: #374151; font-size: 16px; margin: 0 0 8px;">Hello, ${clientName}</p>
          <p style="color: #6b7280; font-size: 15px; margin: 0 0 32px; line-height: 1.6;">
            You requested access to your client portal. Click the button below to sign in. This link expires in 24 hours.
          </p>
          <div style="text-align: center; margin: 0 0 32px;">
            <a href="${magicLinkUrl}" style="display: inline-block; background: #1e2844; color: #ffffff; text-decoration: none; padding: 14px 32px; border-radius: 6px; font-size: 15px; font-weight: 500;">
              Access My Portal
            </a>
          </div>
          <p style="color: #9ca3af; font-size: 13px; margin: 0; line-height: 1.6;">
            If you didn't request this, you can safely ignore this email. If you're having trouble with the button above, copy and paste this URL into your browser:
            <br><br>
            <span style="color: #6b7280; word-break: break-all;">${magicLinkUrl}</span>
          </p>
        </div>
        <div style="background: #f9fafb; padding: 20px 40px; text-align: center; border-top: 1px solid #e5e7eb;">
          <p style="color: #9ca3af; font-size: 12px; margin: 0;">&copy; ${new Date().getFullYear()} ${portName}. All rights reserved.</p>
        </div>
      </div>
    </body>
    </html>
  `;
  await sendEmail(
    normalizedEmail,
    `Your ${portName} portal access link`,
    html,
  );
  logger.info({ clientId: client.id, portId: client.portId }, 'Portal magic link sent');
}
// ─── Dashboard ────────────────────────────────────────────────────────────────

/**
 * Payload for the client-portal dashboard returned by
 * {@link getPortalDashboard}: the client's identity, port branding, and
 * headline counts for the three portal sections.
 */
export interface PortalDashboard {
  client: {
    id: string;
    fullName: string;
    companyName: string | null;
    yachtName: string | null;
  };
  port: {
    name: string;
    logoUrl: string | null;
  };
  counts: {
    interests: number;
    documents: number;
    // Derived by billing-email matching, not a foreign key — see getPortalDashboard.
    invoices: number;
  };
}
/**
 * Assembles the client-portal dashboard for one client within one port.
 *
 * The client (with contacts), port, interest count, and document count are
 * fetched in parallel. Invoices carry no client foreign key in this module,
 * so the invoice count is derived by matching the client's email contacts
 * against `invoices.billingEmail`, case-insensitively.
 *
 * @returns Dashboard payload, or `null` when either the client (scoped to
 *   the port) or the port itself cannot be found.
 */
export async function getPortalDashboard(
  clientId: string,
  portId: string,
): Promise<PortalDashboard | null> {
  const [client, port, interestCount, documentCount] = await Promise.all([
    db.query.clients.findFirst({
      where: and(eq(clients.id, clientId), eq(clients.portId, portId)),
      with: { contacts: true },
    }),
    db.query.ports.findFirst({
      where: eq(ports.id, portId),
    }),
    db
      .select({ value: count() })
      .from(interests)
      .where(and(eq(interests.clientId, clientId), eq(interests.portId, portId))),
    db
      .select({ value: count() })
      .from(documents)
      .where(and(eq(documents.clientId, clientId), eq(documents.portId, portId))),
  ]);
  if (!client || !port) return null;
  // Count invoices matched by client's billing email addresses
  const emailContacts = (client.contacts ?? [])
    .filter((c) => c.channel === 'email')
    .map((c) => c.value.toLowerCase());
  let invoiceCount = 0;
  if (emailContacts.length > 0) {
    // NOTE(review): loads the billing email of every invoice in the port and
    // filters in memory; fine for small ports, but a SQL-side match would
    // scale better — confirm expected invoice volumes.
    const allPortInvoices = await db
      .select({ billingEmail: invoices.billingEmail })
      .from(invoices)
      .where(eq(invoices.portId, portId));
    invoiceCount = allPortInvoices.filter(
      (inv) => inv.billingEmail && emailContacts.includes(inv.billingEmail.toLowerCase()),
    ).length;
  }
  return {
    client: {
      id: client.id,
      fullName: client.fullName,
      companyName: client.companyName ?? null,
      yachtName: client.yachtName ?? null,
    },
    port: {
      name: port.name,
      logoUrl: port.logoUrl ?? null,
    },
    counts: {
      interests: interestCount[0]?.value ?? 0,
      documents: documentCount[0]?.value ?? 0,
      invoices: invoiceCount,
    },
  };
}
// ─── Interests ────────────────────────────────────────────────────────────────

/**
 * One pipeline interest as shown in the client portal, with the linked
 * berth's mooring number and area denormalized for display.
 */
export interface PortalInterest {
  id: string;
  pipelineStage: string;
  leadCategory: string | null;
  // Resolved from the linked berth; null when the interest has no berth.
  berthMooringNumber: string | null;
  berthArea: string | null;
  eoiStatus: string | null;
  contractStatus: string | null;
  dateFirstContact: Date | null;
  createdAt: Date;
}
/**
 * Lists a client's interests for the portal, oldest first, resolving the
 * linked berth's mooring number and area for interests that have a berth.
 */
export async function getClientInterests(
  clientId: string,
  portId: string,
): Promise<PortalInterest[]> {
  const rows = await db
    .select({
      id: interests.id,
      pipelineStage: interests.pipelineStage,
      leadCategory: interests.leadCategory,
      berthId: interests.berthId,
      eoiStatus: interests.eoiStatus,
      contractStatus: interests.contractStatus,
      dateFirstContact: interests.dateFirstContact,
      createdAt: interests.createdAt,
    })
    .from(interests)
    .where(
      and(
        eq(interests.clientId, clientId),
        eq(interests.portId, portId),
      ),
    )
    .orderBy(interests.createdAt);
  // Fetch berth details for interests that have a berth
  const berthIds = rows.flatMap((r) => (r.berthId ? [r.berthId] : []));
  const berthMap = new Map<string, { mooringNumber: string; area: string | null }>();
  if (berthIds.length > 0) {
    // NOTE(review): this loads every berth in the port (filter is portId
    // only) and relies on the Map lookups below to narrow to the referenced
    // ids; an `inArray(berths.id, berthIds)` filter would avoid the
    // over-fetch — confirm `inArray` is imported at the top of this file.
    const berthRows = await db
      .select({ id: berths.id, mooringNumber: berths.mooringNumber, area: berths.area })
      .from(berths)
      .where(eq(berths.portId, portId));
    for (const b of berthRows) {
      berthMap.set(b.id, { mooringNumber: b.mooringNumber, area: b.area });
    }
  }
  return rows.map((r) => ({
    id: r.id,
    pipelineStage: r.pipelineStage,
    leadCategory: r.leadCategory,
    berthMooringNumber: r.berthId ? (berthMap.get(r.berthId)?.mooringNumber ?? null) : null,
    berthArea: r.berthId ? (berthMap.get(r.berthId)?.area ?? null) : null,
    eoiStatus: r.eoiStatus,
    contractStatus: r.contractStatus,
    dateFirstContact: r.dateFirstContact,
    createdAt: r.createdAt,
  }));
}
// ─── Documents ────────────────────────────────────────────────────────────────

/**
 * One document as shown in the client portal, including per-signer progress
 * and whether a countersigned file is available for download.
 */
export interface PortalDocument {
  id: string;
  documentType: string;
  title: string;
  status: string;
  isManualUpload: boolean;
  // True when signedFileId is set on the document row.
  hasSignedFile: boolean;
  signers: Array<{
    signerName: string;
    signerEmail: string;
    signerRole: string;
    status: string;
  }>;
  createdAt: Date;
}
/**
 * Lists a client's documents (scoped to the port), newest first, with each
 * signer's status and a flag for whether a countersigned file exists.
 */
export async function getClientDocuments(
  clientId: string,
  portId: string,
): Promise<PortalDocument[]> {
  const docs = await db.query.documents.findMany({
    where: and(
      eq(documents.clientId, clientId),
      eq(documents.portId, portId),
    ),
    with: { signers: true },
    orderBy: (t, { desc }) => [desc(t.createdAt)],
  });

  // Project each DB row onto the portal-facing shape.
  const toPortalDocument = (d: (typeof docs)[number]): PortalDocument => ({
    id: d.id,
    documentType: d.documentType,
    title: d.title,
    status: d.status,
    isManualUpload: d.isManualUpload,
    hasSignedFile: d.signedFileId != null,
    signers: (d.signers ?? []).map((signer) => ({
      signerName: signer.signerName,
      signerEmail: signer.signerEmail,
      signerRole: signer.signerRole,
      status: signer.status,
    })),
    createdAt: d.createdAt,
  });

  return docs.map(toPortalDocument);
}
// ─── Invoices ─────────────────────────────────────────────────────────────────

/**
 * One invoice as shown in the client portal. `total` is a decimal string;
 * `dueDate` and `paymentDate` are date strings as stored on the invoice row.
 */
export interface PortalInvoice {
  id: string;
  invoiceNumber: string;
  status: string;
  currency: string;
  total: string;
  dueDate: string;
  paymentStatus: string | null;
  paymentDate: string | null;
  createdAt: Date;
}
/**
 * Lists invoices visible to a portal client, oldest first.
 *
 * Invoices carry no client foreign key here, so rows are matched by
 * comparing `billingEmail` (case-insensitively) against the client's email
 * contacts. Returns an empty list when the client is missing or has no
 * email contacts.
 */
export async function getClientInvoices(
  clientId: string,
  portId: string,
): Promise<PortalInvoice[]> {
  // Load the client with contacts; billing emails drive the matching below.
  const client = await db.query.clients.findFirst({
    where: and(eq(clients.id, clientId), eq(clients.portId, portId)),
    with: {
      contacts: true,
    },
  });
  if (!client) return [];

  // Lower-cased set of the client's email addresses.
  const billingEmails = new Set(
    (client.contacts ?? [])
      .filter((contact) => contact.channel === 'email')
      .map((contact) => contact.value.toLowerCase()),
  );
  if (billingEmails.size === 0) return [];

  // Pull the port's invoices and keep only those billed to one of the
  // client's addresses.
  const portInvoices = await db
    .select()
    .from(invoices)
    .where(eq(invoices.portId, portId))
    .orderBy(invoices.createdAt);

  return portInvoices
    .filter((inv) => inv.billingEmail && billingEmails.has(inv.billingEmail.toLowerCase()))
    .map((inv) => ({
      id: inv.id,
      invoiceNumber: inv.invoiceNumber,
      status: inv.status,
      currency: inv.currency,
      total: inv.total,
      dueDate: inv.dueDate,
      paymentStatus: inv.paymentStatus ?? null,
      paymentDate: inv.paymentDate ?? null,
      createdAt: inv.createdAt,
    }));
}
// ─── Document Download ────────────────────────────────────────────────────────

/**
 * Resolves a presigned download URL for one of the client's documents,
 * preferring the countersigned file over the original upload.
 *
 * @returns The presigned URL, or `null` when the document (scoped to the
 *   client and port), its file reference, or the file record is missing.
 */
export async function getDocumentDownloadUrl(
  clientId: string,
  documentId: string,
  portId: string,
): Promise<string | null> {
  const doc = await db.query.documents.findFirst({
    where: and(
      eq(documents.id, documentId),
      eq(documents.clientId, clientId),
      eq(documents.portId, portId),
    ),
  });
  if (!doc) return null;

  // Signed file wins; fall back to the original upload.
  const targetFileId = doc.signedFileId ?? doc.fileId;
  if (!targetFileId) return null;

  const fileRow = await db.query.files.findFirst({
    where: eq(files.id, targetFileId),
  });
  return fileRow ? getPresignedUrl(fileRow.storagePath) : null;
}

View File

@@ -0,0 +1,55 @@
import OpenAI from 'openai';
import { logger } from '@/lib/logger';
// Shared OpenAI client; picks up OPENAI_API_KEY from the environment.
const openai = new OpenAI(); // uses OPENAI_API_KEY from env

/**
 * Structured data extracted from a receipt image.
 * All fields are nullable / empty when extraction fails;
 * `confidence` is the model's self-reported 0–1 score.
 */
interface ScanResult {
  establishment: string | null;
  // ISO date string as requested in the prompt.
  date: string | null;
  amount: number | null;
  // 3-letter currency code as requested in the prompt.
  currency: string | null;
  lineItems: Array<{ description: string; amount: number }>;
  confidence: number;
}
/**
 * Extracts structured receipt data from an image using GPT-4o vision.
 *
 * @param imageBuffer Raw image bytes.
 * @param mimeType    Image MIME type (e.g. 'image/jpeg'), used in the data URL.
 * @returns Validated receipt fields. On any failure — API error, malformed
 *   JSON, or a response with the wrong shape — an empty result with
 *   confidence 0 is returned instead of throwing.
 */
export async function scanReceipt(
  imageBuffer: Buffer,
  mimeType: string,
): Promise<ScanResult> {
  const empty: ScanResult = {
    establishment: null,
    date: null,
    amount: null,
    currency: null,
    lineItems: [],
    confidence: 0,
  };
  try {
    const base64 = imageBuffer.toString('base64');
    const response = await openai.chat.completions.create({
      model: 'gpt-4o',
      messages: [
        {
          role: 'user',
          content: [
            {
              type: 'text',
              text: 'Extract receipt data as JSON: { establishment, date (ISO), amount (number), currency (3-letter code), lineItems: [{ description, amount }], confidence (0-1) }. Return ONLY valid JSON.',
            },
            {
              type: 'image_url',
              image_url: { url: `data:${mimeType};base64,${base64}` },
            },
          ],
        },
      ],
      max_tokens: 1000,
    });
    const content = response.choices[0]?.message?.content ?? '{}';
    // The model sometimes wraps its answer in a ```json fence; strip it.
    const cleaned = content.replace(/```json\n?|\n?```/g, '').trim();
    const parsed: unknown = JSON.parse(cleaned);
    if (typeof parsed !== 'object' || parsed === null) return empty;
    const raw = parsed as Record<string, unknown>;
    // Validate and normalize field-by-field rather than trusting the model's
    // output shape with a blind cast — malformed fields degrade to null/[]/0.
    return {
      establishment: typeof raw.establishment === 'string' ? raw.establishment : null,
      date: typeof raw.date === 'string' ? raw.date : null,
      amount:
        typeof raw.amount === 'number' && Number.isFinite(raw.amount) ? raw.amount : null,
      currency: typeof raw.currency === 'string' ? raw.currency : null,
      lineItems: Array.isArray(raw.lineItems)
        ? raw.lineItems.flatMap((item) => {
            if (item === null || typeof item !== 'object') return [];
            const { description, amount } = item as {
              description?: unknown;
              amount?: unknown;
            };
            return typeof description === 'string' && typeof amount === 'number'
              ? [{ description, amount }]
              : [];
          })
        : [],
      confidence:
        typeof raw.confidence === 'number' && raw.confidence >= 0 && raw.confidence <= 1
          ? raw.confidence
          : 0,
    };
  } catch (err) {
    logger.error({ err }, 'Receipt scan failed');
    return empty;
  }
}

View File

@@ -0,0 +1,217 @@
import { and, eq, isNull } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { clients } from '@/lib/db/schema/clients';
import { berths, berthRecommendations } from '@/lib/db/schema/berths';
import { NotFoundError } from '@/lib/errors';
import { createAuditLog } from '@/lib/audit';
/** Request context recorded alongside every audit-log entry written here. */
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Score a single berth ─────────────────────────────────────────────────────

/**
 * Scores how well a berth fits a yacht's dimensions (0–100).
 *
 * Each dimension present on BOTH the yacht and the berth contributes one
 * sub-score; the final score is their rounded average. A berth smaller than
 * the yacht in any compared dimension scores 0 for that dimension; oversize
 * berths are penalized progressively beyond a tolerance (20% for length,
 * 30% for beam). With no comparable dimensions at all, a neutral 50 is
 * returned with a `no_dimensions` reason.
 *
 * The parameter type is the structural subset of a berth row actually read
 * here (dimensions are decimal strings), so full `berths` rows remain
 * assignable while callers with partial data can score too.
 *
 * @returns Overall score plus a per-dimension breakdown in `reasons`.
 */
function scoreBerth(
  berth: { lengthFt: string | null; widthFt: string | null; draftFt: string | null },
  yachtLengthFt: number | null,
  yachtWidthFt: number | null,
  yachtDraftFt: number | null,
): { score: number; reasons: Record<string, number> } {
  const reasons: Record<string, number> = {};
  const weights: number[] = [];
  if (yachtLengthFt && berth.lengthFt) {
    const berthLen = parseFloat(berth.lengthFt);
    if (berthLen >= yachtLengthFt) {
      // Prefer berths that are not too oversized (within 20% extra is ideal).
      const score =
        berthLen <= yachtLengthFt * 1.2
          ? 100
          : Math.max(50, 100 - (berthLen / yachtLengthFt - 1.2) * 100);
      reasons['length_fit'] = Math.round(score);
      weights.push(score);
    } else {
      // Berth too small for the yacht's length.
      reasons['length_fit'] = 0;
      weights.push(0);
    }
  }
  if (yachtWidthFt && berth.widthFt) {
    const berthWidth = parseFloat(berth.widthFt);
    if (berthWidth >= yachtWidthFt) {
      // Up to 30% extra beam is ideal; wider berths decay toward 40.
      const score =
        berthWidth <= yachtWidthFt * 1.3
          ? 100
          : Math.max(40, 100 - (berthWidth / yachtWidthFt - 1.3) * 80);
      reasons['beam_fit'] = Math.round(score);
      weights.push(score);
    } else {
      reasons['beam_fit'] = 0;
      weights.push(0);
    }
  }
  if (yachtDraftFt && berth.draftFt) {
    const berthDraft = parseFloat(berth.draftFt);
    if (berthDraft >= yachtDraftFt) {
      // Draft is pass/fail: any sufficient depth scores full marks.
      const score = 100;
      reasons['draft_fit'] = score;
      weights.push(score);
    } else {
      reasons['draft_fit'] = 0;
      weights.push(0);
    }
  }
  if (weights.length === 0) {
    return { score: 50, reasons: { no_dimensions: 50 } };
  }
  const score = Math.round(weights.reduce((a, b) => a + b, 0) / weights.length);
  return { score, reasons };
}
// ─── Generate Recommendations ─────────────────────────────────────────────────

/**
 * Regenerates AI berth recommendations for an interest.
 *
 * Scores every 'available' berth in the port against the client's yacht
 * dimensions, keeps the 10 best matches, replaces the interest's previous
 * 'ai'-sourced recommendations (manual ones are left untouched), records an
 * audit-log entry (fire-and-forget), and returns the refreshed list.
 *
 * NOTE(review): the delete and insert are separate statements, not a
 * transaction — a crash between them leaves the interest with no AI
 * recommendations; confirm that is acceptable.
 *
 * @throws NotFoundError when the interest does not exist in the given port.
 */
export async function generateRecommendations(
  interestId: string,
  portId: string,
  meta: AuditMeta,
) {
  const interest = await db.query.interests.findFirst({
    where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
  });
  if (!interest) throw new NotFoundError('Interest');
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, interest.clientId),
  });
  // Yacht dimensions are decimal strings on the client row; missing → null.
  const yachtLengthFt = client?.yachtLengthFt ? parseFloat(client.yachtLengthFt) : null;
  const yachtWidthFt = client?.yachtWidthFt ? parseFloat(client.yachtWidthFt) : null;
  const yachtDraftFt = client?.yachtDraftFt ? parseFloat(client.yachtDraftFt) : null;
  // Get all available berths for the port
  const availableBerths = await db
    .select()
    .from(berths)
    .where(and(eq(berths.portId, portId), eq(berths.status, 'available')));
  // Score each berth
  const scored = availableBerths.map((berth) => {
    const { score, reasons } = scoreBerth(berth, yachtLengthFt, yachtWidthFt, yachtDraftFt);
    return { berth, score, reasons };
  });
  // Sort by score and take top 10
  scored.sort((a, b) => b.score - a.score);
  const top10 = scored.slice(0, 10);
  // Delete existing AI recommendations for this interest
  await db
    .delete(berthRecommendations)
    .where(
      and(
        eq(berthRecommendations.interestId, interestId),
        eq(berthRecommendations.source, 'ai'),
      ),
    );
  // Insert new recommendations
  if (top10.length > 0) {
    await db.insert(berthRecommendations).values(
      top10.map(({ berth, score, reasons }) => ({
        interestId,
        berthId: berth.id,
        matchScore: String(score),
        matchReasons: reasons,
        source: 'ai' as const,
        createdBy: meta.userId,
      })),
    );
  }
  // Audit entry is intentionally fire-and-forget.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'berth_recommendation',
    entityId: interestId,
    metadata: { type: 'ai_generated', count: top10.length },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return listRecommendations(interestId, portId);
}
// ─── List Recommendations ─────────────────────────────────────────────────────

/**
 * Lists all recommendations (AI and manual) for an interest, joined with the
 * recommended berth's key attributes, highest match score first.
 *
 * NOTE(review): `portId` is currently unused — scoping relies on interestId
 * alone; confirm whether a port filter should be applied here.
 * NOTE(review): ordering uses the raw `matchScore` column; scores are
 * inserted via `String(score)` above, so if the column is text the DB sort
 * is lexicographic rather than numeric — verify the column type in the
 * schema.
 */
export async function listRecommendations(interestId: string, portId: string) {
  const rows = await db
    .select({
      id: berthRecommendations.id,
      interestId: berthRecommendations.interestId,
      berthId: berthRecommendations.berthId,
      matchScore: berthRecommendations.matchScore,
      matchReasons: berthRecommendations.matchReasons,
      source: berthRecommendations.source,
      createdBy: berthRecommendations.createdBy,
      createdAt: berthRecommendations.createdAt,
      mooringNumber: berths.mooringNumber,
      area: berths.area,
      status: berths.status,
      lengthFt: berths.lengthFt,
      widthFt: berths.widthFt,
      draftFt: berths.draftFt,
    })
    .from(berthRecommendations)
    .innerJoin(berths, eq(berthRecommendations.berthId, berths.id))
    .where(eq(berthRecommendations.interestId, interestId))
    .orderBy(berthRecommendations.matchScore);
  return rows.reverse(); // highest score first
}
// ─── Add Manual Recommendation ────────────────────────────────────────────────
export async function addManualRecommendation(
interestId: string,
portId: string,
berthId: string,
meta: AuditMeta,
) {
const interest = await db.query.interests.findFirst({
where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
});
if (!interest) throw new NotFoundError('Interest');
const berth = await db.query.berths.findFirst({
where: and(eq(berths.id, berthId), eq(berths.portId, portId)),
});
if (!berth) throw new NotFoundError('Berth');
const [rec] = await db
.insert(berthRecommendations)
.values({
interestId,
berthId,
source: 'manual',
createdBy: meta.userId,
})
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'berth_recommendation',
entityId: rec!.id,
metadata: { type: 'manual', interestId, berthId },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
return rec!;
}

View File

@@ -0,0 +1,189 @@
import { and, desc, eq, inArray } from 'drizzle-orm';
import { db } from '@/lib/db';
import { clients, clientContacts } from '@/lib/db/schema/clients';
import { interests } from '@/lib/db/schema/interests';
import { berths, berthWaitingList, berthMaintenanceLog } from '@/lib/db/schema/berths';
import { auditLogs } from '@/lib/db/schema/system';
import { ports } from '@/lib/db/schema/ports';
import { NotFoundError } from '@/lib/errors';
import { generatePdf } from '@/lib/pdf/generate';
import {
clientSummaryTemplate,
buildClientSummaryInputs,
} from '@/lib/pdf/templates/client-summary-template';
import {
berthSpecTemplate,
buildBerthSpecInputs,
} from '@/lib/pdf/templates/berth-spec-template';
import {
interestSummaryTemplate,
buildInterestSummaryInputs,
} from '@/lib/pdf/templates/interest-summary-template';
// ─── Export Client PDF ────────────────────────────────────────────────────────

/**
 * Renders a client summary PDF: profile, contacts (primary first, then
 * newest), the 20 most recently updated interests in this port (with berth
 * mooring numbers resolved), and the 20 most recent client-scoped audit-log
 * entries.
 *
 * @throws NotFoundError when the client does not exist or belongs to a
 *   different port.
 */
export async function exportClientPdf(clientId: string, portId: string): Promise<Uint8Array> {
  const client = await db.query.clients.findFirst({
    where: eq(clients.id, clientId),
  });
  if (!client || client.portId !== portId) {
    throw new NotFoundError('Client');
  }
  const [contactList, port] = await Promise.all([
    db.query.clientContacts.findMany({
      where: eq(clientContacts.clientId, clientId),
      orderBy: (t, { desc }) => [desc(t.isPrimary), desc(t.createdAt)],
    }),
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
  ]);
  // Fetch last 20 interests for this client in this port
  const interestList = await db
    .select()
    .from(interests)
    .where(and(eq(interests.clientId, clientId), eq(interests.portId, portId)))
    .orderBy(desc(interests.updatedAt))
    .limit(20);
  // Fetch last 20 audit logs for this client
  const activity = await db
    .select()
    .from(auditLogs)
    .where(
      and(
        eq(auditLogs.portId, portId),
        eq(auditLogs.entityType, 'client'),
        eq(auditLogs.entityId, clientId),
      ),
    )
    .orderBy(desc(auditLogs.createdAt))
    .limit(20);
  // Enrich interests with berth mooring numbers
  const berthIds = interestList
    .map((i) => i.berthId)
    .filter(Boolean) as string[];
  let berthsMap: Record<string, string> = {};
  if (berthIds.length > 0) {
    const berthRows = await db
      .select({ id: berths.id, mooringNumber: berths.mooringNumber })
      .from(berths)
      .where(inArray(berths.id, berthIds));
    berthsMap = Object.fromEntries(berthRows.map((b) => [b.id, b.mooringNumber]));
  }
  const enrichedInterests = interestList.map((i) => ({
    ...i,
    berthMooringNumber: i.berthId ? (berthsMap[i.berthId] ?? null) : null,
  }));
  const inputs = buildClientSummaryInputs(client, contactList, enrichedInterests, activity, port);
  return generatePdf(clientSummaryTemplate, [inputs]);
}
// ─── Export Berth PDF ─────────────────────────────────────────────────────────

/**
 * Renders a berth specification PDF: the berth itself, its waiting list
 * (ordered by position, with client names resolved), the 20 most recent
 * maintenance-log entries, and up to 20 most recently updated interests
 * linked to the berth.
 *
 * @throws NotFoundError when the berth does not exist or belongs to a
 *   different port.
 */
export async function exportBerthPdf(berthId: string, portId: string): Promise<Uint8Array> {
  const berth = await db.query.berths.findFirst({
    where: eq(berths.id, berthId),
  });
  if (!berth || berth.portId !== portId) {
    throw new NotFoundError('Berth');
  }
  const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
  // Waiting list with client names
  const waitingListRows = await db
    .select({
      id: berthWaitingList.id,
      position: berthWaitingList.position,
      priority: berthWaitingList.priority,
      notes: berthWaitingList.notes,
      clientId: berthWaitingList.clientId,
    })
    .from(berthWaitingList)
    .where(eq(berthWaitingList.berthId, berthId))
    .orderBy(berthWaitingList.position);
  const clientIds = waitingListRows.map((w) => w.clientId);
  let clientsMap: Record<string, string> = {};
  if (clientIds.length > 0) {
    const clientRows = await db
      .select({ id: clients.id, fullName: clients.fullName })
      .from(clients)
      .where(inArray(clients.id, clientIds));
    clientsMap = Object.fromEntries(clientRows.map((c) => [c.id, c.fullName]));
  }
  const enrichedWaitingList = waitingListRows.map((w) => ({
    ...w,
    // Fallback label when the referenced client row is missing.
    clientName: clientsMap[w.clientId] ?? 'Unknown',
  }));
  // Maintenance log (last 20)
  const maintenance = await db
    .select()
    .from(berthMaintenanceLog)
    .where(eq(berthMaintenanceLog.berthId, berthId))
    .orderBy(desc(berthMaintenanceLog.performedDate))
    .limit(20);
  // Linked interests
  const linkedInterests = await db
    .select()
    .from(interests)
    .where(and(eq(interests.berthId, berthId), eq(interests.portId, portId)))
    .orderBy(desc(interests.updatedAt))
    .limit(20);
  const inputs = buildBerthSpecInputs(berth, enrichedWaitingList, maintenance, linkedInterests, port);
  return generatePdf(berthSpecTemplate, [inputs]);
}
// ─── Export Interest PDF ──────────────────────────────────────────────────────
export async function exportInterestPdf(interestId: string, portId: string): Promise<Uint8Array> {
const interest = await db.query.interests.findFirst({
where: eq(interests.id, interestId),
});
if (!interest || interest.portId !== portId) {
throw new NotFoundError('Interest');
}
const [client, port] = await Promise.all([
db.query.clients.findFirst({ where: eq(clients.id, interest.clientId) }),
db.query.ports.findFirst({ where: eq(ports.id, portId) }),
]);
let berth = null;
if (interest.berthId) {
berth = await db.query.berths.findFirst({ where: eq(berths.id, interest.berthId) });
}
// Audit timeline (last 20 events for this interest)
const timeline = await db
.select()
.from(auditLogs)
.where(
and(
eq(auditLogs.portId, portId),
eq(auditLogs.entityType, 'interest'),
eq(auditLogs.entityId, interestId),
),
)
.orderBy(desc(auditLogs.createdAt))
.limit(20);
const inputs = buildInterestSummaryInputs(interest, client, berth, timeline, port);
return generatePdf(interestSummaryTemplate, [inputs]);
}

View File

@@ -0,0 +1,218 @@
import { and, count, eq, gte, isNull, lte, sql, sum } from 'drizzle-orm';
import { db } from '@/lib/db';
import { interests } from '@/lib/db/schema/interests';
import { berths } from '@/lib/db/schema/berths';
import { auditLogs } from '@/lib/db/schema/system';
// ─── Types ────────────────────────────────────────────────────────────────────

/**
 * Data backing the pipeline report: interest counts per pipeline stage plus
 * the top-priced open interests (price from the linked berth).
 */
export interface PipelineData {
  stageCounts: Record<string, number>;
  topInterests: Array<{
    id: string;
    clientId: string;
    pipelineStage: string;
    // Decimal string, or null when the interest has no priced berth.
    berthPrice: string | null;
  }>;
  // ISO timestamp of when the snapshot was taken.
  generatedAt: string;
}

/**
 * Data backing the revenue report: berth-price sums per pipeline stage
 * (decimal strings, '0' when empty) and the 'completed'-stage total.
 */
export interface RevenueData {
  stageRevenue: Record<string, string>;
  totalCompleted: string;
  generatedAt: string;
}

/**
 * Data backing the activity report: recent audit-log rows plus a tally
 * keyed by "action:entityType".
 */
export interface ActivityData {
  logs: Array<{
    id: string;
    action: string;
    entityType: string;
    entityId: string | null;
    userId: string | null;
    createdAt: Date;
  }>;
  summary: Record<string, number>;
  generatedAt: string;
}

/**
 * Data backing the occupancy report: berth counts per status and the
 * derived occupancy percentage.
 */
export interface OccupancyData {
  statusCounts: Record<string, number>;
  // Percentage (0–100), rounded to one decimal place.
  occupancyRate: number;
  totalBerths: number;
  generatedAt: string;
}
// ─── Pipeline ─────────────────────────────────────────────────────────────────

/**
 * Builds the pipeline report: interest counts per pipeline stage and the
 * ten highest-priced interests (price from the linked berth), both
 * restricted to non-archived interests in the port.
 */
export async function fetchPipelineData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<PipelineData> {
  // Count interests per pipeline stage (non-archived).
  // BUG FIX: the aggregate needs GROUP BY — selecting `pipelineStage`
  // alongside count() without it is rejected by Postgres (compare the
  // grouped queries in fetchRevenueData / fetchOccupancyData below).
  const stageCounts = await db
    .select({
      stage: interests.pipelineStage,
      count: count(),
    })
    .from(interests)
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);
  const stageCountMap: Record<string, number> = {};
  for (const row of stageCounts) {
    stageCountMap[row.stage] = row.count;
  }
  // Top 10 interests by berth price (via join); unpriced interests sort last.
  const topInterestsRows = await db
    .select({
      id: interests.id,
      clientId: interests.clientId,
      pipelineStage: interests.pipelineStage,
      berthPrice: berths.price,
    })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .orderBy(sql`${berths.price} DESC NULLS LAST`)
    .limit(10);
  return {
    stageCounts: stageCountMap,
    topInterests: topInterestsRows.map((r) => ({
      id: r.id,
      clientId: r.clientId,
      pipelineStage: r.pipelineStage,
      berthPrice: r.berthPrice ? String(r.berthPrice) : null,
    })),
    generatedAt: new Date().toISOString(),
  };
}
// ─── Revenue ──────────────────────────────────────────────────────────────────

/**
 * Builds the revenue report: berth-price sums grouped by pipeline stage for
 * non-archived interests, plus the total for the 'completed' stage. Sums are
 * decimal strings ('0' when a stage has no priced berths).
 */
export async function fetchRevenueData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<RevenueData> {
  // Per-stage sums of linked berth prices.
  const perStage = await db
    .select({
      stage: interests.pipelineStage,
      revenue: sum(berths.price),
    })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(and(eq(interests.portId, portId), isNull(interests.archivedAt)))
    .groupBy(interests.pipelineStage);

  const stageRevenueMap = Object.fromEntries(
    perStage.map((row) => [row.stage, row.revenue ? String(row.revenue) : '0']),
  );

  // Grand total for interests that reached the 'completed' stage.
  const [completedRow] = await db
    .select({ total: sum(berths.price) })
    .from(interests)
    .leftJoin(berths, eq(interests.berthId, berths.id))
    .where(
      and(
        eq(interests.portId, portId),
        eq(interests.pipelineStage, 'completed'),
        isNull(interests.archivedAt),
      ),
    );

  return {
    stageRevenue: stageRevenueMap,
    totalCompleted: completedRow?.total ? String(completedRow.total) : '0',
    generatedAt: new Date().toISOString(),
  };
}
// ─── Activity ─────────────────────────────────────────────────────────────────

/**
 * Builds the activity report: up to 200 most recent audit-log rows within
 * the requested window (default: last 30 days), plus a tally of those rows
 * keyed by "action:entityType".
 *
 * @param params Optional `dateFrom` / `dateTo` date strings bounding the window.
 */
export async function fetchActivityData(
  portId: string,
  params: Record<string, unknown>,
): Promise<ActivityData> {
  const dateFrom = params.dateFrom as string | undefined;
  const dateTo = params.dateTo as string | undefined;

  // Default window start: 30 days before now.
  const defaultStart = new Date();
  defaultStart.setDate(defaultStart.getDate() - 30);
  const windowStart = dateFrom ? new Date(dateFrom) : defaultStart;

  const filters = [
    eq(auditLogs.portId, portId),
    gte(auditLogs.createdAt, windowStart),
  ];
  if (dateTo) {
    filters.push(lte(auditLogs.createdAt, new Date(dateTo)));
  }

  const logs = await db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      userId: auditLogs.userId,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(and(...filters))
    .orderBy(sql`${auditLogs.createdAt} DESC`)
    .limit(200);

  // Tally the fetched rows by "action:entityType".
  const summary = logs.reduce<Record<string, number>>((acc, row) => {
    const key = `${row.action}:${row.entityType}`;
    acc[key] = (acc[key] ?? 0) + 1;
    return acc;
  }, {});

  return {
    logs,
    summary,
    generatedAt: new Date().toISOString(),
  };
}
// ─── Occupancy ────────────────────────────────────────────────────────────────

/**
 * Builds the occupancy report: berth counts grouped by status, the total
 * berth count, and the occupancy rate — the percentage of berths that are
 * under offer or sold, rounded to one decimal place.
 */
export async function fetchOccupancyData(
  portId: string,
  _params: Record<string, unknown>,
): Promise<OccupancyData> {
  const grouped = await db
    .select({
      status: berths.status,
      count: count(),
    })
    .from(berths)
    .where(eq(berths.portId, portId))
    .groupBy(berths.status);

  const statusCountMap: Record<string, number> = {};
  for (const { status, count: n } of grouped) {
    statusCountMap[status] = n;
  }
  const totalBerths = grouped.reduce((acc, row) => acc + row.count, 0);

  // "Occupied" = under offer or sold.
  const occupied =
    (statusCountMap['under_offer'] ?? 0) + (statusCountMap['sold'] ?? 0);
  const rate = totalBerths > 0 ? (occupied / totalBerths) * 100 : 0;

  return {
    statusCounts: statusCountMap,
    occupancyRate: Math.round(rate * 10) / 10,
    totalBerths,
    generatedAt: new Date().toISOString(),
  };
}

View File

@@ -0,0 +1,301 @@
import { and, desc, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { generatedReports } from '@/lib/db/schema/operations';
import { notifications } from '@/lib/db/schema/operations';
import { files } from '@/lib/db/schema/documents';
import { ports } from '@/lib/db/schema/ports';
import { generatePdf } from '@/lib/pdf/generate';
import { minioClient, getPresignedUrl, buildStoragePath } from '@/lib/minio/index';
import { emitToRoom } from '@/lib/socket/server';
import { getQueue } from '@/lib/queue';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { NotFoundError } from '@/lib/errors';
import {
fetchPipelineData,
fetchRevenueData,
fetchActivityData,
fetchOccupancyData,
} from '@/lib/services/report-generators';
import {
pipelineReportTemplate,
buildPipelineInputs,
} from '@/lib/pdf/templates/reports/pipeline-report';
import {
revenueReportTemplate,
buildRevenueInputs,
} from '@/lib/pdf/templates/reports/revenue-report';
import {
activityReportTemplate,
buildActivityInputs,
} from '@/lib/pdf/templates/reports/activity-report';
import {
occupancyReportTemplate,
buildOccupancyInputs,
} from '@/lib/pdf/templates/reports/occupancy-report';
import type { RequestReportInput, ListReportsInput } from '@/lib/validators/reports';
// ─── Report Type Map ──────────────────────────────────────────────────────────

/**
 * Registry wiring each report type to its data fetcher, PDF template, and
 * input builder. `generateReport` dispatches through this map; add new
 * report types here.
 */
const REPORT_TYPE_MAP = {
  pipeline: {
    fetchData: fetchPipelineData,
    template: pipelineReportTemplate,
    buildInputs: buildPipelineInputs,
  },
  revenue: {
    fetchData: fetchRevenueData,
    template: revenueReportTemplate,
    buildInputs: buildRevenueInputs,
  },
  activity: {
    fetchData: fetchActivityData,
    template: activityReportTemplate,
    buildInputs: buildActivityInputs,
  },
  occupancy: {
    fetchData: fetchOccupancyData,
    template: occupancyReportTemplate,
    buildInputs: buildOccupancyInputs,
  },
} as const;

/** Union of supported report-type keys ('pipeline' | 'revenue' | 'activity' | 'occupancy'). */
type ReportType = keyof typeof REPORT_TYPE_MAP;
// ─── requestReport ────────────────────────────────────────────────────────────

/**
 * Creates a 'queued' report record, enqueues the generation job on the
 * 'reports' queue, and emits a `report:queued` event to the requesting
 * user's socket room.
 *
 * @returns The freshly inserted generatedReports row.
 * @throws Error when the insert returns no row.
 */
export async function requestReport(
  portId: string,
  userId: string,
  data: RequestReportInput,
) {
  const [report] = await db
    .insert(generatedReports)
    .values({
      portId,
      reportType: data.reportType,
      name: data.name,
      status: 'queued',
      parameters: data.parameters ?? {},
      requestedBy: userId,
    })
    .returning();
  if (!report) {
    throw new Error('Failed to create report record');
  }
  // The worker picks this up via generateReport(reportJobId).
  await getQueue('reports').add('generate-report', { reportJobId: report.id });
  emitToRoom(`user:${userId}`, 'report:queued', {
    reportId: report.id,
    reportType: report.reportType,
    name: report.name,
  });
  return report;
}
// ─── listReports ──────────────────────────────────────────────────────────────

/**
 * Paginated listing of generated reports for a port, newest first,
 * optionally filtered by status.
 *
 * @returns The requested page plus the total count matching the filter.
 */
export async function listReports(portId: string, query: ListReportsInput) {
  const filters = [eq(generatedReports.portId, portId)];
  if (query.status) {
    filters.push(eq(generatedReports.status, query.status));
  }
  const whereClause = and(...filters);
  const skip = (query.page - 1) * query.limit;

  // Page rows and total count run concurrently.
  const [pageRows, totalCount] = await Promise.all([
    db
      .select()
      .from(generatedReports)
      .where(whereClause)
      .orderBy(desc(generatedReports.createdAt))
      .limit(query.limit)
      .offset(skip),
    db.$count(generatedReports, whereClause),
  ]);

  return {
    data: pageRows,
    total: Number(totalCount),
  };
}
// ─── getReport ────────────────────────────────────────────────────────────────

/**
 * Fetches one generated report scoped to a port.
 * @throws NotFoundError when no matching row exists.
 */
export async function getReport(reportId: string, portId: string) {
  const found = await db.query.generatedReports.findFirst({
    where: and(eq(generatedReports.id, reportId), eq(generatedReports.portId, portId)),
  });
  if (found) return found;
  throw new NotFoundError('Report');
}
// ─── getDownloadUrl ───────────────────────────────────────────────────────────
export async function getDownloadUrl(reportId: string, portId: string) {
const report = await db.query.generatedReports.findFirst({
where: and(eq(generatedReports.id, reportId), eq(generatedReports.portId, portId)),
});
if (!report) {
throw new NotFoundError('Report');
}
if (report.status !== 'ready' || !report.fileId) {
throw new Error('Report is not ready for download');
}
const file = await db.query.files.findFirst({
where: eq(files.id, report.fileId),
});
if (!file) {
throw new NotFoundError('File');
}
const url = await getPresignedUrl(file.storagePath);
return { url };
}
// ─── generateReport ───────────────────────────────────────────────────────────

/**
 * Queue-worker entry point: renders a queued report to PDF and stores it.
 *
 * Flow: mark the job 'processing' → dispatch through REPORT_TYPE_MAP to
 * fetch data and build PDF inputs → render with generatePdf → upload to
 * MinIO → insert a `files` row → mark the job 'ready' → emit a
 * `report:ready` socket event and create an in-app notification.
 *
 * On any failure the job is marked 'failed' with the error message, a
 * `report:failed` event is emitted, and the error is rethrown so the queue
 * can record the failure.
 *
 * @param reportJobId Id of the `generatedReports` row to process.
 * @throws Error when the job row is missing, the report type is unknown, or
 *   any generation/upload step fails.
 */
export async function generateReport(reportJobId: string): Promise<void> {
  // 1. Fetch the generatedReports record
  const report = await db.query.generatedReports.findFirst({
    where: eq(generatedReports.id, reportJobId),
  });
  if (!report) {
    throw new Error(`Report job not found: ${reportJobId}`);
  }
  const { portId, reportType, name, parameters, requestedBy } = report;
  try {
    // 2. Update status = 'processing', startedAt = now
    await db
      .update(generatedReports)
      .set({ status: 'processing', startedAt: new Date() })
      .where(eq(generatedReports.id, reportJobId));
    // 3. Look up REPORT_TYPE_MAP[reportType]
    const typeKey = reportType as ReportType;
    const config = REPORT_TYPE_MAP[typeKey];
    if (!config) {
      throw new Error(`Unknown report type: ${reportType}`);
    }
    const params = (parameters ?? {}) as Record<string, unknown>;
    // 4. Fetch data
    const data = await config.fetchData(portId, params);
    // 5. Get port info for name in PDF
    const port = await db.query.ports.findFirst({
      where: eq(ports.id, portId),
    });
    const portName = port?.name ?? 'Port Nimara';
    const portSlug = port?.slug ?? 'port';
    // 6. Build inputs (pass portName)
    // NOTE(review): this cast erases the per-type link between fetchData's
    // result and buildInputs' expected input; it is safe only while the map
    // keeps matching pairs — consider a generic dispatch helper instead.
    const inputs = (config.buildInputs as (data: any, portName: string) => Record<string, string>[])(data, portName);
    // 7. Generate PDF
    const pdfBytes = await generatePdf(config.template, inputs);
    // 8. Build storage path
    const fileId = crypto.randomUUID();
    const storagePath = buildStoragePath(portSlug, 'reports', reportJobId, fileId, 'pdf');
    // 9. Upload PDF to MinIO
    const buffer = Buffer.from(pdfBytes);
    await minioClient.putObject(
      env.MINIO_BUCKET,
      storagePath,
      buffer,
      buffer.length,
      { 'Content-Type': 'application/pdf', 'report-type': reportType },
    );
    // 10. Insert into files table (filename is slugified from the report name)
    const [fileRecord] = await db
      .insert(files)
      .values({
        id: fileId,
        portId,
        filename: `${name.replace(/[^a-z0-9]/gi, '_').toLowerCase()}_${Date.now()}.pdf`,
        originalName: `${name}.pdf`,
        mimeType: 'application/pdf',
        sizeBytes: String(buffer.length),
        storagePath,
        storageBucket: env.MINIO_BUCKET,
        category: 'misc',
        uploadedBy: requestedBy,
      })
      .returning();
    if (!fileRecord) {
      throw new Error('Failed to insert file record');
    }
    // 11. Update generatedReports: status='ready', fileId, completedAt
    await db
      .update(generatedReports)
      .set({
        status: 'ready',
        fileId: fileRecord.id,
        completedAt: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(generatedReports.id, reportJobId));
    // 12. Emit report:ready socket event
    emitToRoom(`user:${requestedBy}`, 'report:ready', {
      reportId: reportJobId,
      name,
    });
    // 13. Create notification for requestedBy user
    await db.insert(notifications).values({
      portId,
      userId: requestedBy,
      type: 'system_alert',
      title: 'Report Ready',
      description: `Your report "${name}" is ready to download.`,
      entityType: 'report',
      entityId: reportJobId,
    });
    logger.info({ reportJobId, reportType }, 'Report generated successfully');
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : 'Unknown error';
    logger.error({ reportJobId, err }, 'Report generation failed');
    // Persist the failure so the UI can surface it.
    await db
      .update(generatedReports)
      .set({
        status: 'failed',
        errorMessage,
        updatedAt: new Date(),
      })
      .where(eq(generatedReports.id, reportJobId));
    emitToRoom(`user:${requestedBy}`, 'report:failed', {
      reportId: reportJobId,
      name,
      error: errorMessage,
    });
    // Rethrow so the queue worker records the failed job.
    throw err;
  }
}

View File

@@ -0,0 +1,173 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { savedViews } from '@/lib/db/schema';
import { NotFoundError } from '@/lib/errors';
import type { CreateSavedViewInput, UpdateSavedViewInput } from '@/lib/validators/saved-views';
/**
 * Clears the current default view for a (port, user, entityType) triple so a
 * newly promoted default never coexists with an old one.
 */
async function clearDefaultView(portId: string, userId: string, entityType: string): Promise<void> {
  await db
    .update(savedViews)
    .set({ isDefault: false })
    .where(
      and(
        eq(savedViews.portId, portId),
        eq(savedViews.userId, userId),
        eq(savedViews.entityType, entityType),
        eq(savedViews.isDefault, true),
      ),
    );
}

/** Ownership filter shared by all per-view operations: id + port + owner. */
function ownedViewFilter(portId: string, userId: string, viewId: string) {
  return and(
    eq(savedViews.id, viewId),
    eq(savedViews.portId, portId),
    eq(savedViews.userId, userId),
  );
}

/**
 * CRUD service for per-user saved list views (filters, sort and column
 * configuration). Every operation is scoped to (portId, userId) so a user can
 * only ever read or mutate their own views.
 */
export const savedViewsService = {
  /** Lists the user's views in a port, optionally restricted to one entity type. */
  async list(portId: string, userId: string, entityType?: string) {
    const scope = [eq(savedViews.portId, portId), eq(savedViews.userId, userId)];
    if (entityType) {
      scope.push(eq(savedViews.entityType, entityType));
    }
    return db.select().from(savedViews).where(and(...scope));
  },

  /** Creates a view; when flagged default, demotes the previous default first. */
  async create(portId: string, userId: string, data: CreateSavedViewInput) {
    if (data.isDefault) {
      await clearDefaultView(portId, userId, data.entityType);
    }
    const [created] = await db
      .insert(savedViews)
      .values({
        portId,
        userId,
        entityType: data.entityType,
        name: data.name,
        filters: data.filters ?? {},
        sortConfig: data.sortConfig ?? null,
        columnConfig: data.columnConfig ?? null,
        isShared: data.isShared ?? false,
        isDefault: data.isDefault ?? false,
      })
      .returning();
    return created;
  },

  /**
   * Partially updates an owned view. When the view becomes the default, the
   * previous default for its (possibly new) entity type is demoted first.
   * @throws NotFoundError when the view does not exist or is not owned by the user.
   */
  async update(portId: string, userId: string, viewId: string, data: UpdateSavedViewInput) {
    const existing = await db.query.savedViews.findFirst({
      where: ownedViewFilter(portId, userId, viewId),
    });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    if (data.isDefault) {
      // The default must be unique per entity type — use whichever type the
      // view will have after this update.
      await clearDefaultView(portId, userId, data.entityType ?? existing.entityType);
    }
    const [updated] = await db
      .update(savedViews)
      .set({
        ...(data.name !== undefined ? { name: data.name } : {}),
        ...(data.entityType !== undefined ? { entityType: data.entityType } : {}),
        ...(data.filters !== undefined ? { filters: data.filters } : {}),
        ...(data.sortConfig !== undefined ? { sortConfig: data.sortConfig } : {}),
        ...(data.columnConfig !== undefined ? { columnConfig: data.columnConfig } : {}),
        ...(data.isShared !== undefined ? { isShared: data.isShared } : {}),
        ...(data.isDefault !== undefined ? { isDefault: data.isDefault } : {}),
        updatedAt: new Date(),
      })
      .where(ownedViewFilter(portId, userId, viewId))
      .returning();
    return updated;
  },

  /**
   * Deletes an owned view.
   * @throws NotFoundError when the view does not exist or is not owned by the user.
   */
  async delete(portId: string, userId: string, viewId: string) {
    const filter = ownedViewFilter(portId, userId, viewId);
    const existing = await db.query.savedViews.findFirst({ where: filter });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    await db.delete(savedViews).where(filter);
  },

  /**
   * Promotes one owned view to be the default for its entity type,
   * demoting any previous default first.
   * @throws NotFoundError when the view does not match (id, port, user, entityType).
   */
  async setDefault(portId: string, userId: string, entityType: string, viewId: string) {
    const existing = await db.query.savedViews.findFirst({
      where: and(
        ownedViewFilter(portId, userId, viewId),
        eq(savedViews.entityType, entityType),
      ),
    });
    if (!existing) {
      throw new NotFoundError('Saved view');
    }
    await clearDefaultView(portId, userId, entityType);
    const [updated] = await db
      .update(savedViews)
      .set({ isDefault: true, updatedAt: new Date() })
      .where(ownedViewFilter(portId, userId, viewId))
      .returning();
    return updated;
  },
};

View File

@@ -0,0 +1,139 @@
import { sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { redis } from '@/lib/redis';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Client hit returned by global search. */
interface ClientResult {
  id: string;
  fullName: string;
  companyName: string | null;
}
/** Interest hit, denormalized with the client's name and linked berth number. */
interface InterestResult {
  id: string;
  clientName: string;
  berthMooringNumber: string | null;
  pipelineStage: string;
}
/** Berth hit matched by mooring-number similarity. */
interface BerthResult {
  id: string;
  mooringNumber: string;
  area: string | null;
  status: string;
}
/** Aggregate result set for a single global-search request (max 10 per list). */
interface SearchResults {
  clients: ClientResult[];
  interests: InterestResult[];
  berths: BerthResult[];
}
// ─── Search ───────────────────────────────────────────────────────────────────
/**
 * Global search across clients, berths and interests, scoped to one port.
 * The three queries run in parallel; each list is capped at 10 rows and
 * archived clients/interests are excluded.
 *
 * NOTE(review): the berth query relies on the `%` operator and `similarity()`,
 * which require the pg_trgm extension — confirm it is enabled in all
 * environments, otherwise this query errors at runtime.
 */
export async function search(portId: string, query: string): Promise<SearchResults> {
  const [clientRows, berthRows, interestRows] = await Promise.all([
    // Clients: full-text search ('simple' config — no language stemming) over
    // full name + company name, ranked by ts_rank so better matches come first.
    db.execute<{ id: string; full_name: string; company_name: string | null }>(sql`
      SELECT id, full_name, company_name
      FROM clients
      WHERE port_id = ${portId}
      AND archived_at IS NULL
      AND to_tsvector('simple', coalesce(full_name, '') || ' ' || coalesce(company_name, ''))
      @@ plainto_tsquery('simple', ${query})
      ORDER BY ts_rank(
      to_tsvector('simple', coalesce(full_name, '') || ' ' || coalesce(company_name, '')),
      plainto_tsquery('simple', ${query})
      ) DESC
      LIMIT 10
    `),
    // Berths: trigram similarity on mooring_number (fuzzy match tolerant of typos)
    db.execute<{ id: string; mooring_number: string; area: string | null; status: string }>(sql`
      SELECT id, mooring_number, area, status
      FROM berths
      WHERE port_id = ${portId}
      AND mooring_number % ${query}
      ORDER BY similarity(mooring_number, ${query}) DESC
      LIMIT 10
    `),
    // Interests: JOIN to clients (required) and berths (optional), plain ILIKE
    // substring match on either the client's name or the berth mooring number.
    db.execute<{
      id: string;
      full_name: string;
      mooring_number: string | null;
      pipeline_stage: string;
    }>(sql`
      SELECT
      i.id,
      c.full_name,
      b.mooring_number,
      i.pipeline_stage
      FROM interests i
      JOIN clients c ON i.client_id = c.id
      LEFT JOIN berths b ON i.berth_id = b.id
      WHERE i.port_id = ${portId}
      AND i.archived_at IS NULL
      AND (
      c.full_name ILIKE ${'%' + query + '%'}
      OR b.mooring_number ILIKE ${'%' + query + '%'}
      )
      LIMIT 10
    `),
  ]);
  // Map snake_case DB columns to the camelCase API shape.
  return {
    clients: Array.from(clientRows).map((r) => ({
      id: r.id,
      fullName: r.full_name,
      companyName: r.company_name ?? null,
    })),
    berths: Array.from(berthRows).map((r) => ({
      id: r.id,
      mooringNumber: r.mooring_number,
      area: r.area ?? null,
      status: r.status,
    })),
    interests: Array.from(interestRows).map((r) => ({
      id: r.id,
      clientName: r.full_name,
      berthMooringNumber: r.mooring_number ?? null,
      pipelineStage: r.pipeline_stage,
    })),
  };
}
// ─── Recent Searches ──────────────────────────────────────────────────────────
// Recent searches live in a per-user, per-port Redis sorted set
// (score = insertion time in epoch ms).
const RECENT_SEARCH_TTL = 2592000; // 30 days in seconds
const RECENT_SEARCH_MAX = 10;

/** Redis key for a user's recent-search sorted set, scoped to one port. */
function recentSearchKey(userId: string, portId: string): string {
  return ['recent-search', userId, portId].join(':');
}
/**
 * Fire-and-forget — saves a search term to the user's recent searches sorted
 * set, trims it to the newest RECENT_SEARCH_MAX entries and refreshes the TTL.
 * Any Redis failure is swallowed because recent searches are non-critical.
 */
export function saveRecentSearch(userId: string, portId: string, searchTerm: string): void {
  const key = recentSearchKey(userId, portId);
  void (async () => {
    // Score by insertion time so zrevrange returns newest first.
    await redis.zadd(key, Date.now(), searchTerm);
    // Drop everything but the newest RECENT_SEARCH_MAX members.
    await redis.zremrangebyrank(key, 0, -(RECENT_SEARCH_MAX + 1));
    await redis.expire(key, RECENT_SEARCH_TTL);
  })().catch(() => {
    // Intentionally swallowed — recent searches are non-critical
  });
}
/**
 * Returns the user's most recent searches, newest first (highest score first),
 * capped at the retention limit.
 */
export async function getRecentSearches(userId: string, portId: string): Promise<string[]> {
  const key = recentSearchKey(userId, portId);
  return redis.zrevrange(key, 0, RECENT_SEARCH_MAX - 1);
}

View File

@@ -0,0 +1,23 @@
import { buildStoragePath } from '@/lib/minio';
import { MIME_TO_EXT } from '@/lib/constants/file-validation';
/**
 * Builds an object-storage path for a new upload: a fresh UUID as the file id
 * plus an extension derived from the MIME type ('bin' when unmapped).
 */
export function generateStorageKey(
  portSlug: string,
  entity: string,
  entityId: string,
  mimeType: string,
): string {
  const extension = MIME_TO_EXT[mimeType] ?? 'bin';
  return buildStoragePath(portSlug, entity, entityId, crypto.randomUUID(), extension);
}
/**
 * Normalizes a user-supplied filename so it is safe to store and display:
 * - strips path separators and drive colons (`/`, `\`, `:`)
 * - strips characters invalid in Windows filenames (`<>"|?*`) — the original
 *   only handled POSIX path characters
 * - strips NUL, all other C0 control characters and DEL in one pass
 * - trims surrounding whitespace and caps the length at 255 characters
 *
 * Note: the 255 cap counts UTF-16 code units, so multi-byte names may still
 * exceed 255 bytes on disk; acceptable here because storage keys are UUIDs.
 */
export function sanitizeFilename(name: string): string {
  return name
    .replace(/[/\\:<>"|?*]/g, '') // path separators + Windows-reserved punctuation
    // eslint-disable-next-line no-control-regex
    .replace(/[\x00-\x1f\x7f]/g, '') // NUL, C0 controls, DEL
    .trim()
    .slice(0, 255);
}

View File

@@ -0,0 +1,377 @@
import { db } from '@/lib/db';
import { auditLogs } from '@/lib/db/schema';
import { redis } from '@/lib/redis';
import { minioClient } from '@/lib/minio/index';
import { getQueue, QUEUE_CONFIGS, type QueueName } from '@/lib/queue';
import { createAuditLog } from '@/lib/audit';
import { env } from '@/lib/env';
import { sql, desc, or, eq } from 'drizzle-orm';
import { logger } from '@/lib/logger';
// ─── Types ────────────────────────────────────────────────────────────────────
/** Health probe result for a single external dependency. */
export interface ServiceStatus {
  name: string;
  status: 'healthy' | 'degraded' | 'down';
  responseTimeMs: number;
  // Human-readable failure detail; set only when the probe was not healthy.
  details?: string;
}
/** Aggregated health report — the worst individual service status wins. */
export interface HealthStatus {
  overall: 'healthy' | 'degraded' | 'down';
  services: ServiceStatus[];
  checkedAt: Date;
}
/** Per-queue job counts for the admin queue dashboard. */
export interface QueueStatus {
  name: string;
  waiting: number;
  active: number;
  completed: number;
  failed: number;
  delayed: number;
}
/** Lightweight projection of a BullMQ job for admin listing. */
export interface QueueJobSummary {
  id: string;
  name: string;
  // Payload preview — may be truncated or replaced by a placeholder.
  data: unknown;
  status: string;
  // BullMQ epoch-ms timestamps; undefined when the job never reached that stage.
  timestamp: number | undefined;
  processedOn: number | undefined;
  finishedOn: number | undefined;
  failedReason: string | undefined;
}
/** One page of queue jobs plus paging metadata. */
export interface PaginatedQueueJobs {
  jobs: QueueJobSummary[];
  total: number;
  page: number;
  limit: number;
}
/** Count of currently connected Socket.io clients. */
export interface ConnectionStatus {
  totalConnections: number;
}
/** Normalized error item for the admin "recent errors" feed. */
export interface RecentError {
  id: string;
  // Origin of the error: a permission-denied audit entry or a failed queue job.
  source: 'audit' | 'queue';
  message: string;
  timestamp: Date;
  metadata?: Record<string, unknown>;
}
// ─── Timeout helper ───────────────────────────────────────────────────────────
/**
 * Races `promise` against a timeout of `ms` milliseconds; rejects with
 * `Timed out after …ms` when the deadline passes first.
 *
 * Fix: the original never cleared its setTimeout, so every call leaked a live
 * timer (keeping the Node event loop alive) for the full `ms` even after the
 * raced promise had settled. The timer is now cleared as soon as the race
 * settles on either side.
 */
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`Timed out after ${ms}ms`)), ms);
  });
  return Promise.race([promise, timeout]).finally(() => {
    if (timer !== undefined) clearTimeout(timer);
  });
}
// ─── healthCheck ──────────────────────────────────────────────────────────────
/**
 * Probes all external dependencies in parallel and rolls the individual
 * results into one overall status (down > degraded > healthy).
 */
export async function healthCheck(): Promise<HealthStatus> {
  const probes: Array<Promise<ServiceStatus>> = [
    checkPostgres(),
    checkRedis(),
    checkMinio(),
    checkDocumenso(),
  ];
  const settled = await Promise.allSettled(probes);
  const services = settled.map((outcome): ServiceStatus =>
    outcome.status === 'fulfilled'
      ? outcome.value
      : {
          // Defensive: each checker catches internally, so this branch should
          // be unreachable in practice.
          name: 'unknown',
          status: 'down',
          responseTimeMs: 0,
          details: String(outcome.reason),
        },
  );
  let overall: HealthStatus['overall'] = 'healthy';
  if (services.some((s) => s.status === 'degraded')) overall = 'degraded';
  if (services.some((s) => s.status === 'down')) overall = 'down';
  return { overall, services, checkedAt: new Date() };
}
/** Pings PostgreSQL with a trivial query; 'down' on error or 5s timeout. */
async function checkPostgres(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    await withTimeout(db.execute(sql`SELECT 1`), 5000);
    return { name: 'PostgreSQL', status: 'healthy', responseTimeMs: Date.now() - startedAt };
  } catch (err) {
    const details = err instanceof Error ? err.message : 'Unknown error';
    return {
      name: 'PostgreSQL',
      status: 'down',
      responseTimeMs: Date.now() - startedAt,
      details,
    };
  }
}
/** Pings Redis; any reply other than the literal 'PONG' counts as degraded. */
async function checkRedis(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    const reply = await withTimeout(redis.ping(), 5000);
    return {
      name: 'Redis',
      status: reply === 'PONG' ? 'healthy' : 'degraded',
      responseTimeMs: Date.now() - startedAt,
    };
  } catch (err) {
    return {
      name: 'Redis',
      status: 'down',
      responseTimeMs: Date.now() - startedAt,
      details: err instanceof Error ? err.message : 'Unknown error',
    };
  }
}
/** Verifies MinIO reachability by checking the primary bucket's existence. */
async function checkMinio(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  try {
    await withTimeout(minioClient.bucketExists(env.MINIO_BUCKET), 5000);
    return { name: 'MinIO', status: 'healthy', responseTimeMs: Date.now() - startedAt };
  } catch (err) {
    return {
      name: 'MinIO',
      status: 'down',
      responseTimeMs: Date.now() - startedAt,
      details: err instanceof Error ? err.message : 'Unknown error',
    };
  }
}
/**
 * Probes the Documenso e-signature service's health endpoint. The request is
 * capped at 5s via an AbortController; a non-2xx response is 'degraded', an
 * error or abort is 'down'.
 *
 * Fix: the original cleared the abort timer twice — once directly after the
 * fetch and again in `finally`. The redundant call is removed; `finally`
 * alone covers both the success and the throw path.
 */
async function checkDocumenso(): Promise<ServiceStatus> {
  const startedAt = Date.now();
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 5000);
  try {
    const res = await fetch(`${env.DOCUMENSO_API_URL}/api/v1/health`, {
      method: 'GET',
      signal: controller.signal,
    });
    return {
      name: 'Documenso',
      status: res.ok ? 'healthy' : 'degraded',
      responseTimeMs: Date.now() - startedAt,
    };
  } catch (err) {
    return {
      name: 'Documenso',
      status: 'down',
      responseTimeMs: Date.now() - startedAt,
      details: err instanceof Error ? err.message : 'Unreachable',
    };
  } finally {
    clearTimeout(timer);
  }
}
// ─── getQueueDashboard ────────────────────────────────────────────────────────
/**
 * Collects waiting/active/completed/failed/delayed counts for every
 * configured queue. A queue whose counts cannot be read is reported with
 * all-zero counts (and a warning log) rather than failing the dashboard.
 */
export async function getQueueDashboard(): Promise<QueueStatus[]> {
  const names = Object.keys(QUEUE_CONFIGS) as QueueName[];
  const settled = await Promise.allSettled(
    names.map(async (queueName): Promise<QueueStatus> => {
      const counts = await getQueue(queueName).getJobCounts(
        'waiting',
        'active',
        'completed',
        'failed',
        'delayed',
      );
      return {
        name: queueName,
        waiting: counts.waiting ?? 0,
        active: counts.active ?? 0,
        completed: counts.completed ?? 0,
        failed: counts.failed ?? 0,
        delayed: counts.delayed ?? 0,
      };
    }),
  );
  return settled.map((outcome, idx) => {
    if (outcome.status === 'fulfilled') return outcome.value;
    const queueName = names[idx] ?? 'unknown';
    logger.warn({ queue: queueName, err: outcome.reason }, 'Failed to get queue counts');
    return { name: queueName, waiting: 0, active: 0, completed: 0, failed: 0, delayed: 0 };
  });
}
// ─── getQueueJobs ─────────────────────────────────────────────────────────────
type JobStatus = 'waiting' | 'active' | 'completed' | 'failed' | 'delayed';

/** Upper bound (chars) on the serialized job-data preview sent to the admin UI. */
const JOB_DATA_PREVIEW_LIMIT = 500;

/**
 * Builds a safe preview of a job payload. Small payloads are returned as-is;
 * large payloads come back as a truncated JSON *string*.
 *
 * Fix: the original appended '...(truncated)' to a 500-char JSON slice and
 * then ran it through JSON.parse — a sliced JSON document is almost never
 * valid, so every payload over 500 chars threw and collapsed to
 * '[unparseable]'. We now return the truncated string without re-parsing.
 */
function previewJobData(data: unknown): unknown {
  try {
    const serialized = JSON.stringify(data);
    if (serialized === undefined || serialized.length <= JOB_DATA_PREVIEW_LIMIT) {
      return data;
    }
    return serialized.slice(0, JOB_DATA_PREVIEW_LIMIT) + '...(truncated)';
  } catch {
    // JSON.stringify threw (e.g. circular reference).
    return '[unparseable]';
  }
}

/**
 * Returns one page of jobs in the given state for a queue, with payloads
 * truncated to keep the admin response small.
 *
 * @param status BullMQ job state to list (defaults to 'failed').
 * @param page   1-based page number.
 * @param limit  Page size.
 */
export async function getQueueJobs(
  queueName: QueueName,
  status: JobStatus = 'failed',
  page = 1,
  limit = 20,
): Promise<PaginatedQueueJobs> {
  const queue = getQueue(queueName);
  const start = (page - 1) * limit;
  const jobs = await queue.getJobs([status], start, start + limit - 1);
  const counts = await queue.getJobCounts(status);
  const summaries = jobs.map((job): QueueJobSummary => ({
    id: job.id ?? '',
    name: job.name,
    data: previewJobData(job.data),
    status,
    timestamp: job.timestamp,
    processedOn: job.processedOn ?? undefined,
    finishedOn: job.finishedOn ?? undefined,
    failedReason: job.failedReason ?? undefined,
  }));
  return { jobs: summaries, total: counts[status] ?? 0, page, limit };
}
// ─── retryJob ─────────────────────────────────────────────────────────────────
/**
 * Re-enqueues a job (typically a failed one) and records an audit entry.
 * @throws Error when the job no longer exists in the queue.
 */
export async function retryJob(
  queueName: QueueName,
  jobId: string,
  userId: string,
): Promise<void> {
  const job = await getQueue(queueName).getJob(jobId);
  if (!job) {
    throw new Error(`Job ${jobId} not found in queue ${queueName}`);
  }
  await job.retry();
  // Audit is fire-and-forget — admin actions must not fail on logging errors.
  void createAuditLog({
    userId,
    portId: null,
    action: 'update',
    entityType: 'queue_job',
    entityId: jobId,
    metadata: { queueName, jobName: job.name, action: 'retry' },
    ipAddress: 'system',
    userAgent: 'system',
  });
}
// ─── deleteJob ────────────────────────────────────────────────────────────────
/**
 * Permanently removes a job from its queue and records an audit entry.
 * @throws Error when the job no longer exists in the queue.
 */
export async function deleteJob(
  queueName: QueueName,
  jobId: string,
  userId: string,
): Promise<void> {
  const job = await getQueue(queueName).getJob(jobId);
  if (!job) {
    throw new Error(`Job ${jobId} not found in queue ${queueName}`);
  }
  await job.remove();
  // Audit is fire-and-forget — admin actions must not fail on logging errors.
  void createAuditLog({
    userId,
    portId: null,
    action: 'delete',
    entityType: 'queue_job',
    entityId: jobId,
    metadata: { queueName, jobName: job.name, action: 'delete' },
    ipAddress: 'system',
    userAgent: 'system',
  });
}
// ─── getActiveConnections ─────────────────────────────────────────────────────
/**
 * Counts currently connected Socket.io clients. The socket server module is
 * imported lazily so this file can load in contexts where it is not
 * initialized; any failure is reported as zero connections.
 */
export async function getActiveConnections(): Promise<ConnectionStatus> {
  try {
    const socketServer = await import('@/lib/socket/server');
    const connected = await socketServer.getIO().fetchSockets();
    return { totalConnections: connected.length };
  } catch {
    return { totalConnections: 0 };
  }
}
// ─── getRecentErrors ──────────────────────────────────────────────────────────
/**
 * Merges the two error feeds shown on the admin dashboard —
 * permission-denied audit entries and failed queue jobs (up to 5 sampled per
 * queue) — sorted newest first and capped at `limit`.
 */
export async function getRecentErrors(limit = 20): Promise<RecentError[]> {
  const deniedRows = await db
    .select({
      id: auditLogs.id,
      action: auditLogs.action,
      entityType: auditLogs.entityType,
      entityId: auditLogs.entityId,
      metadata: auditLogs.metadata,
      createdAt: auditLogs.createdAt,
    })
    .from(auditLogs)
    .where(eq(auditLogs.action, 'permission_denied'))
    .orderBy(desc(auditLogs.createdAt))
    .limit(limit);

  const fromAudit = deniedRows.map((row): RecentError => ({
    id: row.id,
    source: 'audit',
    message: `Permission denied on ${row.entityType}`,
    timestamp: row.createdAt,
    metadata: (row.metadata as Record<string, unknown>) ?? {},
  }));

  // Sample the most recent failed jobs from each queue; a queue that cannot
  // be read simply contributes nothing.
  const names = Object.keys(QUEUE_CONFIGS) as QueueName[];
  const perQueue = await Promise.allSettled(
    names.map(async (queueName) => {
      const failed = await getQueue(queueName).getJobs(['failed'], 0, 4);
      return failed.map((job): RecentError => ({
        id: `${queueName}:${job.id ?? ''}`,
        source: 'queue',
        message: `Queue job failed: ${job.name} in ${queueName}`,
        timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
        metadata: { queueName, failedReason: job.failedReason },
      }));
    }),
  );
  const fromQueues: RecentError[] = [];
  for (const outcome of perQueue) {
    if (outcome.status === 'fulfilled') fromQueues.push(...outcome.value);
  }

  // Newest first across both sources, capped to the requested limit.
  return [...fromAudit, ...fromQueues]
    .sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime())
    .slice(0, limit);
}

View File

@@ -0,0 +1,139 @@
import { and, eq } from 'drizzle-orm';
import { db } from '@/lib/db';
import { tags } from '@/lib/db/schema';
import { createAuditLog } from '@/lib/audit';
import { ConflictError, NotFoundError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
import type { CreateTagInput, UpdateTagInput } from '@/lib/validators/tags';
// Request-scoped context recorded alongside every tag mutation in the audit log.
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
/** Returns every tag in a port, ordered alphabetically by name. */
export async function listTags(portId: string) {
  return db.select().from(tags).where(eq(tags.portId, portId)).orderBy(tags.name);
}
/**
 * Creates a tag, enforcing per-port name uniqueness, then writes an audit
 * entry and broadcasts a port-wide socket alert.
 * @throws ConflictError when a tag with the same name already exists.
 *
 * NOTE(review): uniqueness is check-then-insert, so a concurrent create could
 * still race past the guard — a DB unique index would close the gap; confirm
 * one exists on (port_id, name).
 */
export async function createTag(
  portId: string,
  data: CreateTagInput,
  meta: AuditMeta,
) {
  const duplicate = await db.query.tags.findFirst({
    where: and(eq(tags.portId, portId), eq(tags.name, data.name)),
  });
  if (duplicate) {
    throw new ConflictError(`A tag named "${data.name}" already exists in this port`);
  }
  const inserted = await db
    .insert(tags)
    .values({ portId, name: data.name, color: data.color })
    .returning();
  const created = inserted[0]!;
  // Audit is fire-and-forget so the request never fails on logging errors.
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'create',
    entityType: 'tag',
    entityId: created.id,
    newValue: { name: created.name, color: created.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:created',
    message: `Tag "${created.name}" created`,
    severity: 'info',
  });
  return created;
}
/**
 * Updates a tag's name and/or color, re-checking name uniqueness when the
 * name changes. Audits old vs. new values and broadcasts a port alert.
 * @throws NotFoundError when the tag does not exist in this port.
 * @throws ConflictError when the new name collides with another tag.
 */
export async function updateTag(
  id: string,
  portId: string,
  data: UpdateTagInput,
  meta: AuditMeta,
) {
  const scoped = and(eq(tags.id, id), eq(tags.portId, portId));
  const current = await db.query.tags.findFirst({ where: scoped });
  if (!current) throw new NotFoundError('Tag');

  // Only re-check uniqueness when the name actually changes.
  if (data.name && data.name !== current.name) {
    const clash = await db.query.tags.findFirst({
      where: and(eq(tags.portId, portId), eq(tags.name, data.name)),
    });
    if (clash) {
      throw new ConflictError(`A tag named "${data.name}" already exists in this port`);
    }
  }

  const [next] = await db
    .update(tags)
    .set({
      ...(data.name ? { name: data.name } : {}),
      ...(data.color ? { color: data.color } : {}),
    })
    .where(scoped)
    .returning();

  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'tag',
    entityId: id,
    oldValue: { name: current.name, color: current.color },
    newValue: { name: next!.name, color: next!.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:updated',
    message: `Tag "${next!.name}" updated`,
    severity: 'info',
  });
  return next!;
}
/**
 * Deletes a tag after verifying it belongs to the port, records an audit
 * entry with the removed values, and broadcasts a port alert.
 * @throws NotFoundError when the tag does not exist in this port.
 */
export async function deleteTag(
  id: string,
  portId: string,
  meta: AuditMeta,
) {
  const scoped = and(eq(tags.id, id), eq(tags.portId, portId));
  const existing = await db.query.tags.findFirst({ where: scoped });
  if (!existing) throw new NotFoundError('Tag');
  await db.delete(tags).where(scoped);
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'tag',
    entityId: id,
    oldValue: { name: existing.name, color: existing.color },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  emitToRoom(`port:${portId}`, 'system:alert', {
    alertType: 'tag:deleted',
    message: `Tag "${existing.name}" deleted`,
    severity: 'info',
  });
}

View File

@@ -0,0 +1,74 @@
import { and, eq, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { webhooks, webhookDeliveries } from '@/lib/db/schema/system';
import { getQueue } from '@/lib/queue';
import { logger } from '@/lib/logger';
import { INTERNAL_TO_WEBHOOK_MAP } from '@/lib/services/webhook-event-map';
/**
 * Translates an internal socket event to the outbound webhook event name,
 * queries all active webhooks for the given port that are subscribed to that
 * event, and enqueues a BullMQ delivery job for each one.
 *
 * This function is fire-and-forget — callers should use `void dispatchWebhookEvent(...)`.
 */
export async function dispatchWebhookEvent(
  portId: string,
  internalEvent: string,
  payload: Record<string, unknown>,
): Promise<void> {
  const webhookEvent = INTERNAL_TO_WEBHOOK_MAP[internalEvent];
  // Internal events without an outbound mapping are intentionally ignored.
  if (!webhookEvent) return;
  try {
    const subscribers = await db
      .select({ id: webhooks.id })
      .from(webhooks)
      .where(
        and(
          eq(webhooks.portId, portId),
          eq(webhooks.isActive, true),
          // Postgres array-contains: is the webhook subscribed to this event?
          sql`${webhooks.events} @> ARRAY[${webhookEvent}]::text[]`,
        ),
      );
    if (subscribers.length === 0) return;

    const deliveryQueue = getQueue('webhooks');
    for (const { id: webhookId } of subscribers) {
      // Persist the delivery as pending before handing it to the worker so
      // the attempt is visible in history even before processing starts.
      const [created] = await db
        .insert(webhookDeliveries)
        .values({
          webhookId,
          eventType: webhookEvent,
          payload,
          status: 'pending',
        })
        .returning({ id: webhookDeliveries.id });
      await deliveryQueue.add('deliver', {
        webhookId,
        portId,
        event: webhookEvent,
        deliveryId: created!.id,
        payload,
      });
    }
  } catch (err) {
    // Never block callers — log and swallow
    logger.error(
      { portId, internalEvent, webhookEvent, err },
      'Failed to dispatch webhook event',
    );
  }
}

View File

@@ -0,0 +1,53 @@
// ─── Webhook Event Map ────────────────────────────────────────────────────────
// Defines the canonical set of outbound webhook event names and provides a
// translation map from internal camelCase socket events to dot-style webhook
// event names.
/**
 * Canonical outbound webhook event names. The dot-separated
 * `entity.action` style is the public contract receivers subscribe to.
 */
export const WEBHOOK_EVENTS = [
  // Clients
  'client.created',
  'client.updated',
  'client.archived',
  'client.merged',
  // Interests / pipeline
  'interest.created',
  'interest.stage_changed',
  'interest.berth_linked',
  // Berths
  'berth.status_changed',
  'berth.updated',
  // Documents / e-signature
  'document.sent',
  'document.signed',
  'document.completed',
  'document.expired',
  // Finance
  'expense.created',
  'expense.updated',
  'invoice.created',
  'invoice.sent',
  'invoice.paid',
  'invoice.overdue',
  // Portal registrations
  'registration.new',
] as const;
/** Union of all valid outbound webhook event names. */
export type WebhookEvent = (typeof WEBHOOK_EVENTS)[number];
/**
 * Maps internal socket event names (colon/camelCase) to outbound webhook
 * event names (dot/snake_case). Internal events with no entry here are not
 * exposed to webhook subscribers.
 */
export const INTERNAL_TO_WEBHOOK_MAP: Record<string, WebhookEvent> = {
  'client:created': 'client.created',
  'client:updated': 'client.updated',
  'client:archived': 'client.archived',
  'client:merged': 'client.merged',
  'interest:created': 'interest.created',
  'interest:stageChanged': 'interest.stage_changed',
  'interest:berthLinked': 'interest.berth_linked',
  'berth:statusChanged': 'berth.status_changed',
  'berth:updated': 'berth.updated',
  'document:sent': 'document.sent',
  'document:signed': 'document.signed',
  'document:completed': 'document.completed',
  'document:expired': 'document.expired',
  'expense:created': 'expense.created',
  'expense:updated': 'expense.updated',
  'invoice:created': 'invoice.created',
  'invoice:sent': 'invoice.sent',
  'invoice:paid': 'invoice.paid',
  'invoice:overdue': 'invoice.overdue',
  'registration:new': 'registration.new',
};

View File

@@ -0,0 +1,331 @@
import { randomBytes } from 'node:crypto';
import { and, desc, eq, count } from 'drizzle-orm';
import { db } from '@/lib/db';
import { webhooks, webhookDeliveries } from '@/lib/db/schema/system';
import { createAuditLog } from '@/lib/audit';
import { encrypt, decrypt } from '@/lib/utils/encryption';
import { NotFoundError } from '@/lib/errors';
import { getQueue } from '@/lib/queue';
import type {
CreateWebhookInput,
UpdateWebhookInput,
ListDeliveriesInput,
} from '@/lib/validators/webhooks';
import type { WebhookEvent } from '@/lib/services/webhook-event-map';
// ─── Types ────────────────────────────────────────────────────────────────────
// Request-scoped context recorded alongside every webhook mutation in the audit log.
interface AuditMeta {
  userId: string;
  portId: string;
  ipAddress: string;
  userAgent: string;
}
// ─── Helpers ─────────────────────────────────────────────────────────────────
/** Generates a 32-byte hex secret for signing webhook payloads. */
function generateSecret(): string {
  // 32 random bytes → 64 lowercase hex characters.
  const raw = randomBytes(32);
  return raw.toString('hex');
}
/**
 * Returns a masked representation of the plaintext secret, showing only the
 * first 5 and last 3 characters (`wh_ab...xyz`). Secrets shorter than 10
 * characters are fully masked.
 */
function maskSecret(plaintext: string): string {
  if (plaintext.length >= 10) {
    const head = plaintext.slice(0, 5);
    const tail = plaintext.slice(-3);
    return `${head}...${tail}`;
  }
  return '***';
}
// ─── Create ───────────────────────────────────────────────────────────────────
export async function createWebhook(
portId: string,
userId: string,
data: CreateWebhookInput,
meta: AuditMeta,
) {
const plaintextSecret = generateSecret();
const encryptedSecret = encrypt(plaintextSecret);
const [webhook] = await db
.insert(webhooks)
.values({
portId,
name: data.name,
url: data.url,
secret: encryptedSecret,
events: data.events,
isActive: data.isActive,
createdBy: userId,
})
.returning();
void createAuditLog({
userId: meta.userId,
portId,
action: 'create',
entityType: 'webhook',
entityId: webhook!.id,
newValue: { name: data.name, url: data.url, events: data.events },
ipAddress: meta.ipAddress,
userAgent: meta.userAgent,
});
// Return with plaintext secret — shown ONCE only on creation
return {
...webhook!,
secret: plaintextSecret,
secretMasked: maskSecret(plaintextSecret),
};
}
// ─── List ─────────────────────────────────────────────────────────────────────
/**
 * Lists a port's webhooks, newest first. Secrets are never returned in
 * plaintext — each row carries only a masked preview.
 */
export async function listWebhooks(portId: string) {
  const rows = await db
    .select()
    .from(webhooks)
    .where(eq(webhooks.portId, portId))
    .orderBy(desc(webhooks.createdAt));
  return rows.map((row) => {
    // Decrypt only to derive the masked preview; the plaintext never leaves here.
    let secretMasked = '***';
    if (row.secret) {
      try {
        secretMasked = maskSecret(decrypt(row.secret));
      } catch {
        // Undecryptable ciphertext — fall back to fully masked.
        secretMasked = '***';
      }
    }
    return { ...row, secret: undefined, secretMasked };
  });
}
// ─── Get Single ───────────────────────────────────────────────────────────────
/**
 * Fetches a single webhook after verifying port ownership. The stored secret
 * is replaced with a masked preview.
 * @throws NotFoundError when missing or owned by another port.
 */
export async function getWebhook(portId: string, webhookId: string) {
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  let secretMasked = '***';
  if (webhook.secret) {
    try {
      secretMasked = maskSecret(decrypt(webhook.secret));
    } catch {
      // Undecryptable ciphertext — keep fully masked.
    }
  }
  return { ...webhook, secret: undefined, secretMasked };
}
// ─── Update ───────────────────────────────────────────────────────────────────
/**
 * Applies a partial update to a webhook after verifying port ownership,
 * auditing old vs. new values. The stored secret is never returned.
 * @throws NotFoundError when missing or owned by another port.
 */
export async function updateWebhook(
  portId: string,
  webhookId: string,
  data: UpdateWebhookInput,
  meta: AuditMeta,
) {
  const current = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!current || current.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  const [next] = await db
    .update(webhooks)
    .set({ ...data, updatedAt: new Date() })
    .where(and(eq(webhooks.id, webhookId), eq(webhooks.portId, portId)))
    .returning();
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'webhook',
    entityId: webhookId,
    oldValue: {
      name: current.name,
      url: current.url,
      events: current.events,
      isActive: current.isActive,
    },
    newValue: data as Record<string, unknown>,
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  return { ...next!, secret: undefined };
}
// ─── Delete ───────────────────────────────────────────────────────────────────
/**
 * Removes a webhook after verifying port ownership and records an audit
 * entry. Delivery history goes with it via the database CASCADE.
 * @throws NotFoundError when missing or owned by another port.
 */
export async function deleteWebhook(
  portId: string,
  webhookId: string,
  meta: AuditMeta,
) {
  const existing = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  // CASCADE deletes webhook_deliveries
  await db
    .delete(webhooks)
    .where(and(eq(webhooks.id, webhookId), eq(webhooks.portId, portId)));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'delete',
    entityType: 'webhook',
    entityId: webhookId,
    oldValue: { name: existing.name, url: existing.url },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
}
// ─── Regenerate Secret ────────────────────────────────────────────────────────
/**
 * Rotates a webhook's signing secret. Only the encrypted form is persisted;
 * the new plaintext is returned exactly once for the caller to display.
 * @throws NotFoundError when missing or owned by another port.
 */
export async function regenerateSecret(
  portId: string,
  webhookId: string,
  meta: AuditMeta,
) {
  const existing = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!existing || existing.portId !== portId) {
    throw new NotFoundError('Webhook');
  }
  const freshSecret = generateSecret();
  await db
    .update(webhooks)
    .set({ secret: encrypt(freshSecret), updatedAt: new Date() })
    .where(and(eq(webhooks.id, webhookId), eq(webhooks.portId, portId)));
  void createAuditLog({
    userId: meta.userId,
    portId,
    action: 'update',
    entityType: 'webhook',
    entityId: webhookId,
    metadata: { type: 'secret_regenerated' },
    ipAddress: meta.ipAddress,
    userAgent: meta.userAgent,
  });
  // Return new plaintext secret — shown ONCE
  return {
    webhookId,
    secret: freshSecret,
    secretMasked: maskSecret(freshSecret),
  };
}
// ─── List Deliveries ─────────────────────────────────────────────────────────
/**
 * Lists delivery attempts for a webhook, newest first, paginated.
 *
 * @param portId - Port the caller is scoped to; the webhook must belong to it.
 * @param webhookId - Webhook whose deliveries are listed.
 * @param query - Page number, page size, and optional status filter.
 * @returns The page of delivery rows plus the total matching count.
 * @throws NotFoundError when the webhook does not exist in this port.
 */
export async function listDeliveries(
  portId: string,
  webhookId: string,
  query: ListDeliveriesInput,
) {
  // Verify webhook belongs to port before exposing its deliveries.
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }

  const { page, limit, status } = query;
  const offset = (page - 1) * limit;

  const filters = [eq(webhookDeliveries.webhookId, webhookId)];
  if (status) {
    filters.push(eq(webhookDeliveries.status, status));
  }

  // The count and page queries are independent reads — run them in
  // parallel instead of awaiting them sequentially.
  const [[countRow], data] = await Promise.all([
    db
      .select({ total: count() })
      .from(webhookDeliveries)
      .where(and(...filters)),
    db
      .select()
      .from(webhookDeliveries)
      .where(and(...filters))
      .orderBy(desc(webhookDeliveries.createdAt))
      .limit(limit)
      .offset(offset),
  ]);

  return { data, total: countRow?.total ?? 0 };
}
// ─── Send Test Webhook ────────────────────────────────────────────────────────
/**
 * Queues a test delivery for a webhook so operators can verify their
 * endpoint configuration end-to-end.
 *
 * Creates a `pending` delivery row with a synthetic payload, then enqueues
 * a BullMQ job that performs the actual HTTP call.
 *
 * @param portId - Port the caller is scoped to; the webhook must belong to it.
 * @param webhookId - Target webhook.
 * @param eventType - Event name to simulate in the test payload.
 * @returns The created delivery id and a 'queued' status marker.
 * @throws NotFoundError when the webhook does not exist in this port.
 * @throws Error when the insert unexpectedly returns no row.
 */
export async function sendTestWebhook(
  portId: string,
  webhookId: string,
  eventType: WebhookEvent,
) {
  const webhook = await db.query.webhooks.findFirst({
    where: eq(webhooks.id, webhookId),
  });
  if (!webhook || webhook.portId !== portId) {
    throw new NotFoundError('Webhook');
  }

  // Create a pending delivery record
  const [delivery] = await db
    .insert(webhookDeliveries)
    .values({
      webhookId,
      eventType,
      payload: {
        test: true,
        event: eventType,
        port_id: portId,
        data: { message: 'This is a test webhook delivery' },
      },
      status: 'pending',
    })
    .returning();

  // Guard instead of repeated non-null assertions: if `.returning()`
  // yields no row, fail loudly rather than dereferencing undefined.
  if (!delivery) {
    throw new Error('Failed to create webhook delivery record');
  }

  // Enqueue the job — the worker reads the payload from the job data.
  const queue = getQueue('webhooks');
  await queue.add('deliver', {
    webhookId,
    portId,
    event: eventType,
    deliveryId: delivery.id,
    payload: delivery.payload,
  });

  return { deliveryId: delivery.id, status: 'queued' };
}

89
src/lib/socket/events.ts Normal file
View File

@@ -0,0 +1,89 @@
/**
 * Server → Client socket events, keyed by `domain:action` names.
 *
 * Each property is the callback signature the client registers for that
 * event; payloads are inline object literals so the event map is the
 * single source of truth for both emitter and listener typing.
 *
 * NOTE(review): several payload fields are typed `unknown` (e.g. waiting
 * list entries, maintenance log entries) — presumably the full row shape;
 * confirm against the emitting service before tightening.
 */
export interface ServerToClientEvents {
  // Berth events
  'berth:statusChanged': (payload: { berthId: string; oldStatus?: string; newStatus: string; triggeredBy: string; trigger?: string }) => void;
  'berth:updated': (payload: { berthId: string; changedFields: string[] }) => void;
  'berth:waitingListChanged': (payload: { berthId: string; action: string; entry: unknown }) => void;
  'berth:maintenanceAdded': (payload: { berthId: string; logEntry: unknown }) => void;
  // Client events
  'client:created': (payload: { clientId: string; clientName: string; source: string }) => void;
  'client:updated': (payload: { clientId: string; changedFields: string[] }) => void;
  'client:archived': (payload: { clientId: string }) => void;
  'client:restored': (payload: { clientId: string }) => void;
  'client:merged': (payload: { survivingId: string; mergedId: string }) => void;
  'client:noteAdded': (payload: { clientId: string; noteId: string; authorName: string; preview: string }) => void;
  'client:duplicateDetected': (payload: { clientAId: string; clientBId: string; score: number; reason: string }) => void;
  // Interest events
  'interest:created': (payload: { interestId: string; clientId: string; berthId: string | null; source: string }) => void;
  'interest:updated': (payload: { interestId: string; changedFields: string[] }) => void;
  'interest:stageChanged': (payload: { interestId: string; oldStage: string; newStage: string; clientName: string; berthNumber: string }) => void;
  'interest:berthLinked': (payload: { interestId: string; berthId: string }) => void;
  'interest:berthUnlinked': (payload: { interestId: string; berthId: string }) => void;
  'interest:archived': (payload: { interestId: string }) => void;
  'interest:noteAdded': (payload: { interestId: string; noteId: string; authorName: string; preview: string }) => void;
  'interest:recommendationsGenerated': (payload: { interestId: string; count: number; topBerthId: string }) => void;
  'interest:recommendationAdded': (payload: { interestId: string; berthId: string; source: string; matchScore: number }) => void;
  'interest:leadCategoryChanged': (payload: { interestId: string; oldCategory: string; newCategory: string; auto: boolean }) => void;
  // Document events
  'document:created': (payload: { documentId: string; type?: string; interestId?: string }) => void;
  'document:updated': (payload: { documentId: string; changedFields?: string[] }) => void;
  'document:deleted': (payload: { documentId: string }) => void;
  'document:sent': (payload: { documentId: string; type?: string; signerCount?: number; documensoId?: string }) => void;
  'document:signed': (payload: { documentId: string; signerName: string; signerRole: string; remainingSigners: number }) => void;
  'document:signer:signed': (payload: { documentId: string; signerName?: string; signerEmail?: string; signerRole?: string; order?: number }) => void;
  'document:completed': (payload: { documentId: string; type?: string; interestId?: string; clientName?: string }) => void;
  'document:expired': (payload: { documentId: string }) => void;
  'document:reminderSent': (payload: { documentId: string; recipientEmail: string }) => void;
  // Document template events
  'documentTemplate:created': (payload: { templateId: string; name?: string; type?: string }) => void;
  'documentTemplate:updated': (payload: { templateId: string; changedFields?: string[] }) => void;
  'documentTemplate:deleted': (payload: { templateId: string }) => void;
  // Financial events
  'expense:created': (payload: { expenseId: string; amount: number; currency: string; category: string }) => void;
  'expense:updated': (payload: { expenseId: string; changedFields: string[] }) => void;
  'expense:archived': (payload: { expenseId: string }) => void;
  'invoice:created': (payload: { invoiceId: string; invoiceNumber: string; total: number; clientName: string }) => void;
  'invoice:updated': (payload: { invoiceId: string; changedFields: string[] }) => void;
  'invoice:sent': (payload: { invoiceId: string; invoiceNumber: string; recipientEmail: string }) => void;
  'invoice:paid': (payload: { invoiceId: string; invoiceNumber: string; amount: number }) => void;
  'invoice:overdue': (payload: { invoiceId: string; invoiceNumber: string; daysPastDue: number }) => void;
  // Reminder & Calendar events
  'reminder:created': (payload: { reminderId: string; title: string; assignedTo: string; dueAt: string }) => void;
  'reminder:updated': (payload: { reminderId: string; changedFields: string[] }) => void;
  'reminder:completed': (payload: { reminderId: string; title: string; completedBy: string }) => void;
  'reminder:overdue': (payload: { reminderId: string; title: string; dueAt: string }) => void;
  'reminder:snoozed': (payload: { reminderId: string; snoozedUntil: string }) => void;
  'calendar:synced': (payload: { eventCount: number; lastSyncAt: string }) => void;
  'calendar:disconnected': (payload: { reason: string }) => void;
  // Notification events
  'notification:new': (payload: { notificationId: string; type: string; title: string; description: string; link: string }) => void;
  'notification:unreadCount': (payload: { count: number }) => void;
  // Report events
  'report:queued': (payload: { reportId: string; reportType: string; name: string }) => void;
  'report:ready': (payload: { reportId: string; name: string }) => void;
  'report:failed': (payload: { reportId: string; name: string; error: string }) => void;
  // System events
  'system:alert': (payload: { alertType: string; message: string; severity: string }) => void;
  'system:jobFailed': (payload: { queueName: string; jobId: string; error: string }) => void;
  'registration:new': (payload: { clientId: string; interestId: string; clientName: string; berthNumber: string }) => void;
  // File events
  'file:uploaded': (payload: { fileId: string; filename: string; clientId?: string; category?: string }) => void;
  'file:updated': (payload: { fileId: string; changedFields?: string[] }) => void;
  'file:deleted': (payload: { fileId: string; filename?: string }) => void;
}
/**
 * Client → Server events. Deliberately minimal — all mutations go through
 * the REST API; sockets only carry entity-room subscription requests so
 * the server can scope pushes to records a user is actively viewing.
 */
export interface ClientToServerEvents {
  'join:entity': (payload: { type: 'berth' | 'client' | 'interest'; id: string }) => void;
  'leave:entity': (payload: { type: 'berth' | 'client' | 'interest'; id: string }) => void;
}

103
src/lib/socket/server.ts Normal file
View File

@@ -0,0 +1,103 @@
import { Server } from 'socket.io';
import { createAdapter } from '@socket.io/redis-adapter';
import type { Server as HTTPServer } from 'node:http';
import { redis } from '@/lib/redis';
import { auth } from '@/lib/auth';
import { logger } from '@/lib/logger';
import type { ServerToClientEvents, ClientToServerEvents } from './events';
let io: Server<ClientToServerEvents, ServerToClientEvents> | null = null;
/**
 * Initializes the singleton Socket.io server on top of the app's HTTP server.
 *
 * Sets up: a Redis pub/sub adapter (so emits fan out across app instances),
 * CORS restricted to APP_URL, connection-state recovery, a Better-Auth
 * cookie-based handshake middleware with a per-user connection cap, and
 * per-socket room management plus an idle-disconnect timer.
 *
 * @param httpServer - The Node HTTP server to attach to.
 * @returns The initialized (and module-cached) Socket.io server.
 */
export function initSocketServer(httpServer: HTTPServer): Server<ClientToServerEvents, ServerToClientEvents> {
  // Dedicated Redis connections for the adapter's publish/subscribe channels.
  const pubClient = redis.duplicate();
  const subClient = redis.duplicate();

  io = new Server<ClientToServerEvents, ServerToClientEvents>(httpServer, {
    path: '/socket.io/',
    adapter: createAdapter(pubClient, subClient),
    cors: {
      // NOTE(review): if APP_URL is unset this is `undefined` — confirm the
      // env var is guaranteed at startup.
      origin: process.env.APP_URL,
      credentials: true,
    },
    // Let briefly-disconnected clients recover missed packets (up to 2 min).
    connectionStateRecovery: { maxDisconnectionDuration: 2 * 60 * 1000 },
    maxHttpBufferSize: 1e6, // 1MB message limit
  });

  // Auth middleware — validate session cookie via Better Auth
  io.use(async (socket, next) => {
    try {
      const cookie = socket.handshake.headers.cookie;
      if (!cookie) return next(new Error('Authentication required'));
      // Parse session from cookie
      const session = await auth.api.getSession({
        headers: new Headers({ cookie }),
      });
      if (!session?.user) return next(new Error('Invalid session'));
      // Enforce max 10 connections per user. With the Redis adapter,
      // fetchSockets queries all instances, so the cap is cluster-wide.
      const userSockets = await io!.in(`user:${session.user.id}`).fetchSockets();
      if (userSockets.length >= 10) {
        return next(new Error('Maximum connections reached'));
      }
      // portId is taken from the client-supplied handshake auth payload and
      // is not validated against the session here — presumably authorization
      // happens at the REST layer; TODO(review): confirm.
      socket.data = {
        userId: session.user.id,
        portId: socket.handshake.auth.portId as string | undefined,
      };
      next();
    } catch {
      // Any failure (cookie parse, session lookup) is reported uniformly.
      next(new Error('Authentication failed'));
    }
  });

  // Connection handler
  io.on('connection', (socket) => {
    const { userId, portId } = socket.data as { userId: string; portId: string | undefined };
    logger.debug({ userId, portId }, 'Socket connected');
    // Auto-join personal and port rooms so targeted emits reach this socket.
    socket.join(`user:${userId}`);
    if (portId) socket.join(`port:${portId}`);
    // Entity-level room management — clients subscribe to records they view.
    socket.on('join:entity', ({ type, id }) => {
      socket.join(`${type}:${id}`);
    });
    socket.on('leave:entity', ({ type, id }) => {
      socket.leave(`${type}:${id}`);
    });
    // Idle timeout (30 seconds — for development only, would be longer in prod).
    // NOTE(review): onAny only fires for client→server traffic, so clients
    // that merely listen are disconnected after 30s even while receiving
    // server pushes — confirm this is intended.
    let idleTimer = setTimeout(() => socket.disconnect(), 30_000);
    socket.onAny(() => {
      clearTimeout(idleTimer);
      idleTimer = setTimeout(() => socket.disconnect(), 30_000);
    });
    socket.on('disconnect', () => {
      clearTimeout(idleTimer);
      logger.debug({ userId }, 'Socket disconnected');
    });
  });

  return io;
}
/**
 * Returns the singleton Socket.io server.
 * @throws Error when initSocketServer has not been called yet.
 */
export function getIO(): Server<ClientToServerEvents, ServerToClientEvents> {
  if (io === null) {
    throw new Error('Socket.io not initialized');
  }
  return io;
}
/**
 * Emits a typed event to every socket in `room`. Silently does nothing if
 * the server has not been initialized, so service-layer callers (used
 * after mutations) never need to guard.
 */
export function emitToRoom<E extends keyof ServerToClientEvents>(
  room: string,
  event: E,
  ...args: Parameters<ServerToClientEvents[E]>
): void {
  io?.to(room).emit(event, ...args);
}

10
src/lib/utils.ts Normal file
View File

@@ -0,0 +1,10 @@
import { type ClassValue, clsx } from 'clsx';
import { twMerge } from 'tailwind-merge';
/**
 * Builds a className string from conditional class inputs.
 * clsx flattens/filters the inputs; tailwind-merge then resolves
 * conflicting Tailwind utilities (later classes win).
 */
export function cn(...inputs: ClassValue[]): string {
  return twMerge(clsx(...inputs));
}

View File

@@ -0,0 +1,54 @@
import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto';
const ALGORITHM = 'aes-256-gcm';
const IV_LENGTH = 12; // 96-bit IV — the recommended size for GCM
const TAG_LENGTH = 16; // GCM authentication tag is always 128 bits

// Exactly 64 hex characters → a 32-byte AES-256 key.
const KEY_PATTERN = /^[0-9a-fA-F]{64}$/;

/**
 * Reads and decodes the AES-256 key from EMAIL_CREDENTIAL_KEY.
 * @throws Error when the variable is missing or not 64 hex characters.
 */
function getKey(): Buffer {
  const hex = process.env.EMAIL_CREDENTIAL_KEY;
  // Validate content, not just length: Buffer.from(..., 'hex') silently
  // truncates at the first non-hex character, which would otherwise yield
  // a short, invalid key and a confusing cipher error later.
  if (!hex || !KEY_PATTERN.test(hex)) {
    throw new Error('EMAIL_CREDENTIAL_KEY must be a 64-character hex string');
  }
  return Buffer.from(hex, 'hex');
}

/**
 * Encrypts plaintext using AES-256-GCM.
 * Returns a JSON string containing hex-encoded iv, tag, and data.
 */
export function encrypt(plaintext: string): string {
  const key = getKey();
  const iv = randomBytes(IV_LENGTH); // fresh random IV per message
  const cipher = createCipheriv(ALGORITHM, key, iv);
  let encrypted = cipher.update(plaintext, 'utf8', 'hex');
  encrypted += cipher.final('hex');
  const tag = cipher.getAuthTag();
  return JSON.stringify({
    iv: iv.toString('hex'),
    tag: tag.toString('hex'),
    data: encrypted,
  });
}

/**
 * Decrypts a stored encrypted value (JSON string with iv, tag, data).
 * Returns the original plaintext.
 * @throws Error on a malformed envelope; the cipher throws on a failed
 *         authentication check (tampered or wrong-key data).
 */
export function decrypt(stored: string): string {
  const key = getKey();
  const { iv, tag, data } = JSON.parse(stored) as {
    iv?: unknown;
    tag?: unknown;
    data?: unknown;
  };
  // Validate the envelope before handing it to the cipher so corrupt
  // records fail with a clear message instead of a crypto internal one.
  if (typeof iv !== 'string' || typeof tag !== 'string' || typeof data !== 'string') {
    throw new Error('Invalid encrypted payload: expected { iv, tag, data } hex strings');
  }
  const tagBuf = Buffer.from(tag, 'hex');
  if (tagBuf.length !== TAG_LENGTH) {
    throw new Error('Invalid encrypted payload: bad auth tag length');
  }
  const decipher = createDecipheriv(ALGORITHM, key, Buffer.from(iv, 'hex'));
  decipher.setAuthTag(tagBuf);
  let decrypted = decipher.update(data, 'hex', 'utf8');
  decrypted += decipher.final('utf8');
  return decrypted;
}

Some files were not shown because too many files have changed in this diff Show More