chore(style): codebase em-dash sweep + minor layout polish
Some checks failed
Build & Push Docker Images / lint (push) Failing after 1m18s
Build & Push Docker Images / build-and-push (push) Has been skipped

Replaces every em-dash and en-dash with regular ASCII hyphens
across comments, JSX strings, and dev-facing logs. This is mostly
cosmetic, but it stops the inconsistent mix that crept in over the
last few months (some files used em-dashes in comments, others
didn't, and some used both).

Bundles two small dashboard-layout tweaks that touch a couple of
already-modified files:
- (dashboard)/layout.tsx main padding goes from p-6 to pt-3 px-6
  pb-6 so page content sits closer to the topbar.
- Sidebar now receives the ports list it needs for the footer
  port switcher.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-04 22:57:01 +02:00
parent d62822c284
commit 8699f81879
225 changed files with 844 additions and 845 deletions

View File

@@ -20,7 +20,7 @@ import { logger } from '@/lib/logger';
export type BudgetPeriod = 'day' | 'week' | 'month';
export interface AiBudget {
/** When false, the budget is disabled no caps enforced. */
/** When false, the budget is disabled - no caps enforced. */
enabled: boolean;
softCapTokens: number;
hardCapTokens: number;
@@ -137,7 +137,7 @@ export async function checkBudget(args: {
const { portId, estimatedTokens } = args;
const budget = await readBudget(portId);
if (!budget.enabled) {
// Budget is off usage still gets logged, but no caps enforced.
// Budget is off - usage still gets logged, but no caps enforced.
return { ok: true, remaining: Number.POSITIVE_INFINITY, usedTokens: 0, softCap: false };
}
const used = await currentPeriodTokens(portId);
@@ -177,7 +177,7 @@ interface RecordUsageInput {
requestId?: string | null;
}
/** Insert a ledger row. Never throws logged failures degrade silently. */
/** Insert a ledger row. Never throws - logged failures degrade silently. */
export async function recordAiUsage(input: RecordUsageInput): Promise<void> {
try {
const total = (input.inputTokens || 0) + (input.outputTokens || 0);
@@ -193,13 +193,13 @@ export async function recordAiUsage(input: RecordUsageInput): Promise<void> {
requestId: input.requestId ?? null,
});
} catch (err) {
// Don't fail the user-facing call because the ledger write hiccuped
// Don't fail the user-facing call because the ledger write hiccuped -
// we'd rather silently lose a row than blow up an OCR scan.
logger.error({ err, feature: input.feature }, 'recordAiUsage failed');
}
}
/** Per-feature breakdown for the current period feeds the admin dashboard. */
/** Per-feature breakdown for the current period - feeds the admin dashboard. */
export async function periodBreakdown(
portId: string,
): Promise<Array<{ feature: string; tokens: number; calls: number }>> {

View File

@@ -1,5 +1,5 @@
/**
* Alert engine runs every rule against every port. Called by the
* Alert engine - runs every rule against every port. Called by the
* BullMQ recurring job 'alerts-evaluate' every 5 minutes; exposed as a
* function so integration tests can drive it without a worker.
*/

View File

@@ -139,7 +139,7 @@ async function documentSignerOverdue(portId: string): Promise<AlertCandidate[]>
ruleId: 'document.signer_overdue',
severity: 'warning',
title: `Signer overdue: ${r.signerName}`,
body: `${r.docType.toUpperCase()} "${r.title}" pending >14 days.`,
body: `${r.docType.toUpperCase()} "${r.title}" - pending >14 days.`,
link: `/[port]/documents/${r.docId}`,
entityType: 'document',
entityId: r.docId,
@@ -202,7 +202,7 @@ async function expenseDuplicate(portId: string): Promise<AlertCandidate[]> {
ruleId: 'expense.duplicate',
severity: 'info',
title: `Possible duplicate expense`,
body: `${r.vendor ?? 'Unknown vendor'} ${r.amount}.`,
body: `${r.vendor ?? 'Unknown vendor'} - ${r.amount}.`,
link: `/[port]/expenses/${r.id}`,
entityType: 'expense',
entityId: r.id,
@@ -236,7 +236,7 @@ async function expenseUnscanned(portId: string): Promise<AlertCandidate[]> {
ruleId: 'expense.unscanned',
severity: 'info',
title: `Receipt not scanned`,
body: `${r.vendor ?? 'Unknown vendor'} uploaded over an hour ago.`,
body: `${r.vendor ?? 'Unknown vendor'} - uploaded over an hour ago.`,
link: `/[port]/expenses/${r.id}`,
entityType: 'expense',
entityId: r.id,
@@ -271,7 +271,7 @@ async function interestHighValueSilent(portId: string): Promise<AlertCandidate[]
ruleId: 'interest.high_value_silent',
severity: 'critical',
title: `Hot lead silent: ${r.clientName}`,
body: `No contact for 7+ days high-value at risk.`,
body: `No contact for 7+ days - high-value at risk.`,
link: `/[port]/interests/${r.id}`,
entityType: 'interest',
entityId: r.id,
@@ -303,7 +303,7 @@ async function eoiUnsignedLong(portId: string): Promise<AlertCandidate[]> {
ruleId: 'eoi.unsigned_long',
severity: 'warning',
title: `EOI unsigned >21 days`,
body: `"${r.title}" sent over 3 weeks ago.`,
body: `"${r.title}" - sent over 3 weeks ago.`,
link: `/[port]/documents/${r.id}`,
entityType: 'document',
entityId: r.id,

View File

@@ -1,5 +1,5 @@
/**
* Phase B alert framework service layer.
* Phase B alert framework - service layer.
*
* This is the skeleton: types, function shapes, and behaviour stubs. The
* actual rule evaluators live in `alert-rules.ts` (PR2). The cron
@@ -46,7 +46,7 @@ export async function reconcileAlertsForPort(
ruleId: AlertRuleId,
candidates: AlertCandidate[],
): Promise<void> {
// Insert new / leave existing only one open row per fingerprint
// Insert new / leave existing - only one open row per fingerprint
// thanks to the partial unique index. Track newly inserted rows so we
// can emit `alert:created` to the port room.
for (const c of candidates) {

View File

@@ -5,7 +5,7 @@
* every 15 minutes per port × per metric.
*/
import { and, eq, gte, isNull, sql } from 'drizzle-orm';
import { and, between, eq, isNull, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { analyticsSnapshots } from '@/lib/db/schema/insights';
@@ -13,8 +13,19 @@ import { interests } from '@/lib/db/schema/interests';
import { invoices } from '@/lib/db/schema/financial';
import { berthReservations } from '@/lib/db/schema/reservations';
import { PIPELINE_STAGES } from '@/lib/constants';
import {
ALL_RANGES,
isCustomRange,
rangeToBounds,
type CustomDateRange,
type DateRange,
type PresetDateRange,
} from '@/lib/analytics/range';
export type DateRange = '7d' | '30d' | '90d' | 'today';
// Re-export the shared types for callers that already import from this
// module - keeps the existing public API intact.
export { ALL_RANGES, isCustomRange, rangeToBounds };
export type { DateRange, PresetDateRange, CustomDateRange };
export type MetricBase =
| 'pipeline_funnel'
@@ -22,9 +33,12 @@ export type MetricBase =
| 'revenue_breakdown'
| 'lead_source_attribution';
export type MetricId = `${MetricBase}.${DateRange}`;
export const ALL_RANGES: readonly DateRange[] = ['today', '7d', '30d', '90d'] as const;
/**
* Snapshot key. Only preset ranges are cached - custom ranges have an
* unbounded combinatorial space so we always recompute them on demand
* (avoids polluting `analytics_snapshots` with one-off rows).
*/
export type MetricId = `${MetricBase}.${PresetDateRange}`;
export const ALL_METRICS: readonly MetricBase[] = [
'pipeline_funnel',
'occupancy_timeline',
@@ -91,34 +105,9 @@ export async function writeSnapshot(
});
}
// ─── Range helpers ────────────────────────────────────────────────────────────
function rangeToCutoff(range: DateRange): Date {
const now = Date.now();
switch (range) {
case 'today':
return new Date(now - 1 * 86_400_000);
case '7d':
return new Date(now - 7 * 86_400_000);
case '30d':
return new Date(now - 30 * 86_400_000);
case '90d':
return new Date(now - 90 * 86_400_000);
}
}
function rangeToDays(range: DateRange): number {
switch (range) {
case 'today':
return 1;
case '7d':
return 7;
case '30d':
return 30;
case '90d':
return 90;
}
}
// Range helpers (rangeToBounds, rangeToDays, rangeSpanDays) moved to
// @/lib/analytics/range - that file is client-safe (no DB imports) so it
// can be used from React components AND this server module.
// ─── Computations ─────────────────────────────────────────────────────────────
@@ -126,9 +115,9 @@ export async function computePipelineFunnel(
portId: string,
range: DateRange,
): Promise<PipelineFunnelData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
// Stage counts EXCLUDE lost/cancelled outcomes those never become
// Stage counts EXCLUDE lost/cancelled outcomes - those never become
// conversions, so polluting the funnel with them gives meaningless math.
// Lost is reported separately in the `lost` block.
const stageRows = await db
@@ -138,7 +127,7 @@ export async function computePipelineFunnel(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
sql`(${interests.outcome} IS NULL OR ${interests.outcome} = 'won')`,
),
)
@@ -161,7 +150,7 @@ export async function computePipelineFunnel(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
sql`${interests.outcome} IS NOT NULL AND ${interests.outcome} != 'won'`,
),
)
@@ -182,8 +171,9 @@ export async function computeOccupancyTimeline(
portId: string,
range: DateRange,
): Promise<OccupancyTimelineData> {
const days = rangeToDays(range);
// Total berths per port (current count — assumes no churn).
const { from, to } = rangeToBounds(range);
const days = Math.max(1, Math.ceil((to.getTime() - from.getTime()) / 86_400_000));
// Total berths per port (current count - assumes no churn).
const totalRow = await db.execute<{ total: number }>(
sql`SELECT count(*)::int AS total FROM berths WHERE port_id = ${portId}`,
);
@@ -191,10 +181,12 @@ export async function computeOccupancyTimeline(
// For each day in range, count berths that have an active reservation
// covering that day. A reservation is "covering" if start_date <= day
// AND (end_date IS NULL OR end_date >= day).
// AND (end_date IS NULL OR end_date >= day). Walk forward from `from`
// so custom ranges produce the right calendar days, not just N
// most-recent days from "now".
const points: OccupancyTimelineData['points'] = [];
for (let i = days - 1; i >= 0; i--) {
const day = new Date(Date.now() - i * 86_400_000);
for (let i = 0; i < days; i++) {
const day = new Date(from.getTime() + i * 86_400_000);
const dayStr = day.toISOString().slice(0, 10);
const occRow = await db
.select({ occupied: sql<number>`count(distinct ${berthReservations.berthId})::int` })
@@ -218,7 +210,7 @@ export async function computeRevenueBreakdown(
portId: string,
range: DateRange,
): Promise<RevenueBreakdownData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
const rows = await db
.select({
status: invoices.status,
@@ -230,7 +222,7 @@ export async function computeRevenueBreakdown(
and(
eq(invoices.portId, portId),
isNull(invoices.archivedAt),
gte(invoices.createdAt, cutoff),
between(invoices.createdAt, from, to),
),
)
.groupBy(invoices.status, invoices.currency);
@@ -248,7 +240,7 @@ export async function computeLeadSourceAttribution(
portId: string,
range: DateRange,
): Promise<LeadSourceAttributionData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
const rows = await db
.select({ source: interests.source, count: sql<number>`count(*)::int` })
.from(interests)
@@ -256,7 +248,7 @@ export async function computeLeadSourceAttribution(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
),
)
.groupBy(interests.source);
@@ -272,11 +264,15 @@ export async function computeLeadSourceAttribution(
}
// ─── Public read API (cache → compute → write back) ──────────────────────────
//
// Custom ranges always recompute (cache key would be unbounded). Preset
// ranges go cache → compute → write-back as before.
export async function getPipelineFunnel(
portId: string,
range: DateRange,
): Promise<PipelineFunnelData> {
if (isCustomRange(range)) return computePipelineFunnel(portId, range);
const metricId = `pipeline_funnel.${range}` as const;
const cached = await readSnapshot<PipelineFunnelData>(portId, metricId);
if (cached) return cached;
@@ -289,6 +285,7 @@ export async function getOccupancyTimeline(
portId: string,
range: DateRange,
): Promise<OccupancyTimelineData> {
if (isCustomRange(range)) return computeOccupancyTimeline(portId, range);
const metricId = `occupancy_timeline.${range}` as const;
const cached = await readSnapshot<OccupancyTimelineData>(portId, metricId);
if (cached) return cached;
@@ -301,6 +298,7 @@ export async function getRevenueBreakdown(
portId: string,
range: DateRange,
): Promise<RevenueBreakdownData> {
if (isCustomRange(range)) return computeRevenueBreakdown(portId, range);
const metricId = `revenue_breakdown.${range}` as const;
const cached = await readSnapshot<RevenueBreakdownData>(portId, metricId);
if (cached) return cached;
@@ -313,6 +311,7 @@ export async function getLeadSourceAttribution(
portId: string,
range: DateRange,
): Promise<LeadSourceAttributionData> {
if (isCustomRange(range)) return computeLeadSourceAttribution(portId, range);
const metricId = `lead_source_attribution.${range}` as const;
const cached = await readSnapshot<LeadSourceAttributionData>(portId, metricId);
if (cached) return cached;

View File

@@ -1,5 +1,5 @@
/**
* Audit log search PR1 skeleton. PR10 fills in the cursor pagination
* Audit log search - PR1 skeleton. PR10 fills in the cursor pagination
* and per-port + super-admin scoping; v1 already has the GIN index on
* `audit_logs.search_text`.
*/
@@ -12,7 +12,7 @@ import { auditLogs, type AuditLog } from '@/lib/db/schema/system';
export interface AuditSearchOptions {
/** Limit results to a single port. Omit for super-admin all-ports view. */
portId?: string;
/** Free-text query runs against the GIN-indexed search_text column. */
/** Free-text query - runs against the GIN-indexed search_text column. */
q?: string;
/** Filter by actor (user id). */
userId?: string;
@@ -25,7 +25,7 @@ export interface AuditSearchOptions {
/** Inclusive date range. */
from?: Date;
to?: Date;
/** Pagination cursor on (createdAt, id). */
/** Pagination - cursor on (createdAt, id). */
cursor?: { createdAt: Date; id: string };
limit?: number;
}

View File

@@ -166,7 +166,7 @@ export async function updateBerth(
if (!changed) return existing;
// Drizzle numeric columns expect string | null coerce numbers to strings
// Drizzle numeric columns expect string | null - coerce numbers to strings
const n = (v: number | undefined) => (v !== undefined ? String(v) : undefined);
const [updated] = await db
@@ -404,7 +404,7 @@ export async function updateWaitingList(
// Validate every supplied clientId belongs to portId. Without this
// check, a port-A admin could insert port-B clientIds into the
// waiting list corrupting reportable data and creating a join
// waiting list - corrupting reportable data and creating a join
// surface that hydrates foreign-tenant client rows.
if (data.entries.length > 0) {
const clientIds = [...new Set(data.entries.map((e) => e.clientId))];

View File

@@ -1,9 +1,9 @@
/**
* Client merge service atomically combines two client records.
* Client merge service - atomically combines two client records.
*
* Used by:
* - /admin/duplicates review queue (when an admin confirms a merge)
* - the at-create suggestion path ("use existing client") though
* - the at-create suggestion path ("use existing client") - though
* that path uses the lighter `attachInterestToClient` and never
* actually merges two pre-existing clients
* - the migration script's `--apply` (eventually)
@@ -37,7 +37,7 @@ import { auditLogs } from '@/lib/db/schema/system';
// ─── Public API ─────────────────────────────────────────────────────────────
export interface MergeFieldChoices {
/** Per-field overrides `winner` keeps the surviving client's value;
/** Per-field overrides - `winner` keeps the surviving client's value;
* `loser` copies the loser's value over. Fields not listed default
* to `winner` (no change). */
fullName?: 'winner' | 'loser';
@@ -217,7 +217,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
for (const c of loserContacts) {
const key = `${c.channel}::${c.value.toLowerCase()}`;
if (winnerContactKeys.has(key)) {
// Winner already has this contact drop loser's row (cascade
// Winner already has this contact - drop loser's row (cascade
// will clean up when loser is archived). But we keep snapshot
// so undo restores it.
continue;
@@ -264,7 +264,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
await tx.delete(clientTags).where(eq(clientTags.clientId, opts.loserId));
// Relationships: rewrite each FK side to point at the winner. Keep
// both sides regardless even if A and B both end up as the same
// both sides regardless - even if A and B both end up as the same
// person, the row is preserved for audit; the UI hides self-loops.
const movedRelationships =
(
@@ -305,7 +305,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
.where(
and(
eq(clientMergeCandidates.portId, winnerRow.portId),
// pair stored in canonical order match either direction
// pair stored in canonical order - match either direction
sql`(
(${clientMergeCandidates.clientAId} = ${opts.winnerId}
AND ${clientMergeCandidates.clientBId} = ${opts.loserId})

View File

@@ -54,7 +54,7 @@ export async function listClients(portId: string, query: ListClientsInput) {
if (matchingIds.length > 0) {
filters.push(inArray(clients.id, matchingIds));
} else {
// No clients match these tags return empty
// No clients match these tags - return empty
return { data: [], total: 0 };
}
}
@@ -252,7 +252,7 @@ export async function getClientById(id: string, portId: string) {
const portalEnabled = await isPortalEnabledForPort(portId);
// Counts surfaced for tab badges (Interests + Notes Yachts/Companies/etc
// Counts surfaced for tab badges (Interests + Notes - Yachts/Companies/etc
// get their counts from the corresponding row arrays we already fetched).
const [interestCountRow] = await db
.select({ count: count() })
@@ -577,7 +577,7 @@ export async function addClientAddress(
// The unique partial index requires us to demote any existing primary
// before inserting a new one. We grab a row lock on the client to
// serialize concurrent primary-toggle requests against the same client
// serialize concurrent primary-toggle requests against the same client -
// without this, two simultaneous "isPrimary=true" inserts can both
// observe "no existing primary" and one trips the unique index with a
// 5xx instead of being safely ordered.
@@ -642,7 +642,7 @@ export async function updateClientAddress(
if (!existing) throw new NotFoundError('Address');
const updated = await withTransaction(async (tx) => {
// Lock the client row to serialize primary-toggle changes see addClientAddress.
// Lock the client row to serialize primary-toggle changes - see addClientAddress.
await tx.select({ id: clients.id }).from(clients).where(eq(clients.id, clientId)).for('update');
if (data.isPrimary === true && !existing.isPrimary) {

View File

@@ -33,7 +33,7 @@ function isUniqueViolation(err: unknown): boolean {
* Loads a membership row and verifies the joined company belongs to `portId`.
* Throws NotFoundError('Membership') if the row is missing or cross-tenant.
*
* Uses a JOIN to companies (memberships have no portId column they inherit
* Uses a JOIN to companies (memberships have no portId column - they inherit
* tenancy via the parent company).
*/
async function loadMembershipScoped(
@@ -210,7 +210,7 @@ export async function setPrimary(
portId: string,
meta: AuditMeta,
): Promise<CompanyMembership> {
// Tenant-scoped load (outside tx is fine we re-read inside).
// Tenant-scoped load (outside tx is fine - we re-read inside).
const existing = await loadMembershipScoped(membershipId, portId);
return await withTransaction(async (tx) => {

View File

@@ -21,7 +21,7 @@ export async function createCrmInvite(args: {
isSuperAdmin?: boolean;
/**
* Caller identity. Required when minting a super-admin invitation so the
* service can fail closed if the caller isn't already a super-admin
* service can fail closed if the caller isn't already a super-admin -
* defense-in-depth for the route's authorization gate.
*/
invitedBy?: { userId: string; isSuperAdmin: boolean };
@@ -33,7 +33,7 @@ export async function createCrmInvite(args: {
throw new ValidationError('Only super admins can mint super-admin invitations');
}
// Reject if there's already a better-auth user with this email they
// Reject if there's already a better-auth user with this email - they
// should reset their password instead.
const sql = postgres(env.DATABASE_URL);
try {
@@ -171,7 +171,7 @@ export async function revokeCrmInvite(inviteId: string, meta: AuditMeta): Promis
where: eq(crmUserInvites.id, inviteId),
});
if (!invite) throw new NotFoundError('Invite');
if (invite.usedAt) throw new ConflictError('Invite already accepted cannot revoke');
if (invite.usedAt) throw new ConflictError('Invite already accepted - cannot revoke');
// Force expiration; tokenHash stays in place so any in-flight click fails
// the `expiresAt > now` check at consume time.
@@ -200,7 +200,7 @@ export async function resendCrmInvite(
where: eq(crmUserInvites.id, inviteId),
});
if (!invite) throw new NotFoundError('Invite');
if (invite.usedAt) throw new ConflictError('Invite already accepted nothing to resend');
if (invite.usedAt) throw new ConflictError('Invite already accepted - nothing to resend');
// Mint a fresh token + push expiry forward so the resent link is the only
// working one. The old token hash is overwritten so prior emails become

View File

@@ -89,7 +89,7 @@ export async function createDefinition(
.returning();
const created = rows[0];
if (!created) throw new Error('Insert failed no row returned');
if (!created) throw new Error('Insert failed - no row returned');
void createAuditLog({
userId,
@@ -117,7 +117,7 @@ export async function updateDefinition(
data: UpdateFieldInput & { fieldType?: unknown },
meta: AuditMeta,
) {
// Immutability guard fieldType must never change
// Immutability guard - fieldType must never change
if ('fieldType' in data && data.fieldType !== undefined) {
throw new ValidationError('Field type cannot be changed after creation');
}
@@ -141,7 +141,7 @@ export async function updateDefinition(
.returning();
const updated = updateRows[0];
if (!updated) throw new Error('Update failed no row returned');
if (!updated) throw new Error('Update failed - no row returned');
void createAuditLog({
userId,
@@ -188,7 +188,7 @@ export async function deleteDefinition(
.where(eq(customFieldValues.fieldId, fieldId));
const valueCount = countResult[0]?.count ?? 0;
// Delete definition CASCADE handles values
// Delete definition - CASCADE handles values
await db.delete(customFieldDefinitions).where(eq(customFieldDefinitions.id, fieldId));
void createAuditLog({
@@ -269,7 +269,7 @@ export async function setValues(
// Tenant scope: verify entityId actually points at a port-scoped row of
// the entity type the field definitions target. Without this gate, any
// authenticated user could write custom-field rows pointing at arbitrary
// entityIds (or none at all) polluting customFieldValues and creating
// entityIds (or none at all) - polluting customFieldValues and creating
// a join surface that could later leak data.
const entityTypes = new Set(
values

View File

@@ -110,7 +110,7 @@ export async function getRevenueForecast(portId: string) {
}
}
// Forecast excludes lost/cancelled only currently-active or won-out
// Forecast excludes lost/cancelled - only currently-active or won-out
// interests should affect the weighted pipeline value.
const interestRows = await db
.select({

View File

@@ -120,7 +120,7 @@ function applyPayloadRedirect(payload: Record<string, unknown>): Record<string,
email: env.EMAIL_REDIRECT_TO,
}));
}
// v1.13 formValues shape keys vary per template; key by anything that
// v1.13 formValues shape - keys vary per template; key by anything that
// looks like an email field. The conservative approach: only touch keys
// that already hold a string and end with `Email` / `email`.
if (out.formValues && typeof out.formValues === 'object') {
@@ -193,7 +193,7 @@ export async function sendDocument(docId: string, portId?: string): Promise<Docu
if (env.EMAIL_REDIRECT_TO) {
logger.warn(
{ docId, portId, redirect: env.EMAIL_REDIRECT_TO },
'sendDocument SKIPPED EMAIL_REDIRECT_TO is set, outbound comms paused',
'sendDocument SKIPPED - EMAIL_REDIRECT_TO is set, outbound comms paused',
);
// Return the existing doc shape so downstream code doesn't see an
// unexpected null. The document remains in DRAFT/PENDING from
@@ -215,7 +215,7 @@ export async function getDocument(docId: string, portId?: string): Promise<Docum
/**
* Email a signing reminder to one recipient. Skipped entirely when
* EMAIL_REDIRECT_TO is set the recipient's stored email may still be
* EMAIL_REDIRECT_TO is set - the recipient's stored email may still be
* a real client address from before the redirect was enabled.
*/
export async function sendReminder(
@@ -226,7 +226,7 @@ export async function sendReminder(
if (env.EMAIL_REDIRECT_TO) {
logger.warn(
{ docId, signerId, portId, redirect: env.EMAIL_REDIRECT_TO },
'sendReminder SKIPPED EMAIL_REDIRECT_TO is set, outbound comms paused',
'sendReminder SKIPPED - EMAIL_REDIRECT_TO is set, outbound comms paused',
);
return;
}
@@ -282,12 +282,12 @@ export async function checkDocumensoHealth(
//
// Callers always work in PERCENT (0-100). For v1 the abstraction multiplies by
// the page dimensions returned by Documenso (cached per docId for the lifetime
// of the process fields for a given doc usually go in a single batch).
// of the process - fields for a given doc usually go in a single batch).
export type DocumensoFieldType = 'SIGNATURE' | 'INITIALS' | 'DATE' | 'TEXT' | 'EMAIL';
export interface DocumensoFieldPlacement {
/** Documenso recipient id; v1 expects number, v2 string coerced internally. */
/** Documenso recipient id; v1 expects number, v2 string - coerced internally. */
recipientId: number | string;
type: DocumensoFieldType;
pageNumber: number;
@@ -296,7 +296,7 @@ export interface DocumensoFieldPlacement {
pageY: number;
pageWidth: number;
pageHeight: number;
/** Optional v2 fieldMeta passed through verbatim, ignored on v1. */
/** Optional v2 fieldMeta - passed through verbatim, ignored on v1. */
fieldMeta?: Record<string, unknown>;
}
@@ -309,7 +309,7 @@ const DEFAULT_PAGE_DIMENSIONS: DocumensoPageDimensions = { width: 595, height: 8
const pageDimensionCache = new Map<string, DocumensoPageDimensions>();
/** Test seam clears the page-dimension memoization. */
/** Test seam - clears the page-dimension memoization. */
export function __resetDocumensoCachesForTests(): void {
pageDimensionCache.clear();
}
@@ -353,7 +353,7 @@ export async function placeFields(
...(f.fieldMeta ? { fieldMeta: f.fieldMeta } : {}),
}));
// Note: v2 endpoint shape (envelopeId/recipientId types) must be
// confirmed against a live Documenso 2.x instance see PR11 realapi
// confirmed against a live Documenso 2.x instance - see PR11 realapi
// suite. Spec risk register flags this drift as the top v2 risk.
const res = await fetch(`${baseUrl}/api/v2/envelope/field/create-many`, {
method: 'POST',
@@ -401,7 +401,7 @@ export async function placeFields(
/**
* Auto-position one SIGNATURE field per recipient at the last-page footer,
* staggered horizontally so multiple signers don't overlap. Used by the
* upload-path wizard admins can refine in Documenso afterwards.
* upload-path wizard - admins can refine in Documenso afterwards.
*
* Layout (percent of page):
* y = 88 (footer band)
@@ -445,7 +445,7 @@ export function computeDefaultSignatureLayout(
* v1: DELETE /api/v1/documents/{id}
* v2: DELETE /api/v2/envelope/{id}
*
* Idempotent on 404 (already gone) logs and resolves.
* Idempotent on 404 (already gone) - logs and resolves.
*/
export async function voidDocument(docId: string, portId?: string): Promise<void> {
const { baseUrl, apiKey, apiVersion } = await resolveCreds(portId);

View File

@@ -37,7 +37,7 @@ export interface DocumensoTemplatePayload {
export interface DocumensoPayloadOptions {
/** `interestId` used to build `externalId` and Documenso referencing. */
interestId: string;
/** Documenso recipient IDs come from env vars. */
/** Documenso recipient IDs - come from env vars. */
clientRecipientId: number;
developerRecipientId: number;
approvalRecipientId: number;

View File

@@ -58,7 +58,7 @@ export interface SendReminderOptions {
/** true = cron auto-fire, enforces 9-16 window + cadence cooldown.
* false (default) = manual UI action, bypasses both. */
auto?: boolean;
/** Optional target a specific pending signer (parallel mode), or
/** Optional - target a specific pending signer (parallel mode), or
* bypass the lowest-pending default in sequential mode (must still be the
* next pending signer in that case). */
signerId?: string;
@@ -183,7 +183,7 @@ export async function sendReminderIfAllowed(
/**
* Cron entry point. Selects in-flight documents whose effective cadence
* (override or template) is set, then attempts auto-fire on each.
* `interests.reminderEnabled` is no longer part of the gating per-doc
* `interests.reminderEnabled` is no longer part of the gating - per-doc
* `remindersDisabled` is the kill switch instead.
*/
export async function processReminderQueue(portId: string): Promise<void> {

View File

@@ -1,5 +1,5 @@
/**
* Admin Document Template Service TipTap JSON-based templates
* Admin Document Template Service - TipTap JSON-based templates
*
* This service manages templates whose content is stored as TipTap JSON
* (serialised to the `bodyHtml` text column). Version history is maintained

View File

@@ -237,7 +237,7 @@ export async function resolveTemplate(
tokenMap['{{client.phone}}'] = eoi.client.primaryPhone ?? '';
tokenMap['{{client.nationality}}'] = eoi.client.nationality ?? '';
// Yacht tokens `eoi.yacht` is null when no yacht is linked
// Yacht tokens - `eoi.yacht` is null when no yacht is linked
// (Section 3 of the EOI is optional). Tokens render as empty strings
// in that case so the template still produces output.
tokenMap['{{yacht.name}}'] = eoi.yacht?.name ?? '';
@@ -252,7 +252,7 @@ export async function resolveTemplate(
tokenMap['{{yacht.widthM}}'] = eoi.yacht?.widthM ?? '';
tokenMap['{{yacht.draftM}}'] = eoi.yacht?.draftM ?? '';
// EoiContext doesn't expose the yacht.registration column look it up
// EoiContext doesn't expose the yacht.registration column - look it up
// separately (cheap, indexed fetch) so the token resolves when present.
try {
const interestRow = await db.query.interests.findFirst({
@@ -283,7 +283,7 @@ export async function resolveTemplate(
tokenMap['{{owner.name}}'] = eoi.owner.name;
tokenMap['{{owner.legalName}}'] = eoi.owner.legalName ?? '';
// Berth tokens also optional. Render empty when no berth is linked.
// Berth tokens - also optional. Render empty when no berth is linked.
tokenMap['{{berth.mooringNumber}}'] = eoi.berth?.mooringNumber ?? '';
tokenMap['{{berth.area}}'] = eoi.berth?.area ?? '';
tokenMap['{{berth.lengthFt}}'] = eoi.berth?.lengthFt ?? '';
@@ -301,9 +301,9 @@ export async function resolveTemplate(
tokenMap['{{interest.notes}}'] = eoi.interest.notes ?? '';
} catch (err) {
// buildEoiContext throws ValidationError when the EOI's required client
// fields (name/email/address Section 2) are missing. For non-EOI
// fields (name/email/address - Section 2) are missing. For non-EOI
// templates (correspondence, welcome letters, etc.) those gates don't
// apply fall through to the legacy resolution path below. Re-throw
// apply - fall through to the legacy resolution path below. Re-throw
// anything else.
if (
!(err instanceof ValidationError) ||
@@ -323,7 +323,7 @@ export async function resolveTemplate(
where: eq(clients.id, context.clientId),
});
if (client && client.portId === context.portId) {
// Always resolve source from the DB — EoiContext doesn't carry it.
// Always resolve source from the DB - EoiContext doesn't carry it.
if (tokenMap['{{client.source}}'] === undefined) {
tokenMap['{{client.source}}'] = client.source ?? '';
}
@@ -349,7 +349,7 @@ export async function resolveTemplate(
}
}
// Interest tokens (legacy path — fills in fields EoiContext doesn't expose,
// Interest tokens (legacy path - fills in fields EoiContext doesn't expose,
// like eoiStatus / dateEoiSigned / dateContractSigned, or populates the
// whole interest.* block when EOI resolution was skipped).
if (context.interestId) {
@@ -365,7 +365,7 @@ export async function resolveTemplate(
: '';
tokenMap['{{interest.notes}}'] = interest.notes ?? '';
}
// These are never populated by EoiContext — always fill them in.
// These are never populated by EoiContext - always fill them in.
tokenMap['{{interest.eoiStatus}}'] = interest.eoiStatus ?? '';
tokenMap['{{interest.dateEoiSigned}}'] = interest.dateEoiSigned
? new Date(interest.dateEoiSigned).toLocaleDateString('en-GB')
@@ -395,7 +395,7 @@ export async function resolveTemplate(
}
}
// Berth tokens (legacy path — when a berthId is passed directly and EOI
// Berth tokens (legacy path - when a berthId is passed directly and EOI
// resolution didn't already populate the berth block).
if (context.berthId && !eoiContextLoaded) {
const berth = await db.query.berths.findFirst({
@@ -507,7 +507,7 @@ export async function generateFromTemplate(
const pdfBytes = await generatePdf(pdfTemplate, [
{
portName: `${port?.name ?? 'Port Nimara'} — ${template.name}`,
portName: `${port?.name ?? 'Port Nimara'} - ${template.name}`,
body: wrappedContent,
generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
},
@@ -610,7 +610,7 @@ export async function generateAndSend(
);
} catch (err) {
logger.error({ err, templateId, recipientEmail }, 'Failed to send template email');
// Don't throw — document was created successfully; email failure is non-fatal
// Don't throw - document was created successfully; email failure is non-fatal
}
void createAuditLog({
@@ -631,7 +631,7 @@ export async function generateAndSend(
/**
* BR-142: For EOI templates, the in-app pathway uses the same source PDF as
* the Documenso template — filled via pdf-lib with values from EoiContext.
* the Documenso template - filled via pdf-lib with values from EoiContext.
* Same field names, same legal document; the only difference is who renders
* it. The form is left interactive so a recipient can adjust before signing.
*/
@@ -893,7 +893,7 @@ async function generateAndSignViaDocumensoTemplate(
payload as unknown as Record<string, unknown>,
);
// Record a documents row referencing the Documenso document. No local file —
// Record a documents row referencing the Documenso document. No local file -
// Documenso owns the PDF and delivers signed copies via webhook (handled elsewhere).
const [documentRecord] = await db
.insert(documents)

View File

@@ -118,7 +118,7 @@ function buildHubTabFilters(
case 'expired':
// Either explicitly expired, or in-flight past their expiry date.
// (Documents schema doesn't yet have an `expires_at` column, so for
// now this is just status='expired' — extend when expiry lands.)
// now this is just status='expired' - extend when expiry lands.)
filters.push(eq(documents.status, 'expired'));
break;
}
@@ -126,7 +126,7 @@ function buildHubTabFilters(
}
export interface ListDocumentsExtra {
/** Email of the calling user — used by hub tab filtering for "awaiting me". */
/** Email of the calling user - used by hub tab filtering for "awaiting me". */
currentUserEmail?: string;
}
@@ -514,7 +514,7 @@ export async function sendForSigning(documentId: string, portId: string, meta: A
// never match real port users and cause silent no-ops in handleRecipientSigned.
const eoiSigners = await getPortEoiSigners(portId);
// BR-021: Create 3 signers — client (1), developer (2), sales/approver (3)
// BR-021: Create 3 signers - client (1), developer (2), sales/approver (3)
const signerRecords = await db
.insert(documentSigners)
.values([
@@ -812,7 +812,7 @@ export async function handleRecipientSigned(eventData: {
documentId: doc.id,
recipientEmail: eventData.recipientEmail,
},
'handleRecipientSigned: no matching signer row for recipient email — ' +
'handleRecipientSigned: no matching signer row for recipient email - ' +
'check eoi_signers system setting for this port',
);
}
@@ -929,7 +929,7 @@ export async function handleDocumentCompleted(eventData: { documentId: string })
// Guard against double-fire: DOCUMENT_COMPLETED may arrive multiple times
// (webhook retries) or follow a DOCUMENT_SIGNED that already advanced the
// stage. advanceStageIfBehind handles the pipeline guard internally, but
// evaluateRule has no idempotency — skip it if the interest is already at
// evaluateRule has no idempotency - skip it if the interest is already at
// eoi_signed or beyond to prevent duplicate berth-rule side effects.
const currentStageIdx = PIPELINE_STAGES.indexOf(
interest.pipelineStage as (typeof PIPELINE_STAGES)[number],
@@ -1198,7 +1198,7 @@ export async function cancelDocument(
// CRM is the system of record for cancellation status. A transient
// Documenso failure shouldn't block the user from marking the doc cancelled
// here — voidDocument already treats 404 as success, and the periodic
// here - voidDocument already treats 404 as success, and the periodic
// webhook receiver will reconcile if the remote void eventually lands.
if (existing.documensoId) {
try {
@@ -1284,7 +1284,7 @@ export async function composeSignedDocEmail(
return {
to: dedupedRecipients,
cc: [],
subject: `Signed ${doc.documentType.replace(/_/g, ' ')} — ${doc.title}`,
subject: `Signed ${doc.documentType.replace(/_/g, ' ')} - ${doc.title}`,
body: '',
attachments: [{ fileId: doc.signedFileId }],
defaultSenderType: 'system',
@@ -1359,9 +1359,9 @@ export async function removeDocumentWatcher(
* Create-document wizard entry point (PR6).
*
* Dispatches across pathways:
* - 'documenso-template' — Documenso renders + signs from its own template
* - 'inapp' — render PDF locally from a CRM template, upload to Documenso
* - 'upload' — admin-supplied PDF, upload to Documenso (auto-place signature
* - 'documenso-template' - Documenso renders + signs from its own template
* - 'inapp' - render PDF locally from a CRM template, upload to Documenso
* - 'upload' - admin-supplied PDF, upload to Documenso (auto-place signature
* fields if `autoPlaceFields`)
*
* Persists the document, applies reminder overrides, attaches watchers, and

View File

@@ -72,7 +72,7 @@ export async function sendEmail(
throw new ForbiddenError('Email account does not belong to this port');
}
// Decrypt credentials (INTERNAL — never logged or returned)
// Decrypt credentials (INTERNAL - never logged or returned)
const creds = await getDecryptedCredentials(data.accountId);
// Build user-specific SMTP transporter
@@ -133,7 +133,7 @@ export async function sendEmail(
// to that address and the subject is prefixed so the operator can see
// who would have received the message. This service builds its OWN
// transporter (per-account SMTP) so it doesn't go through sendEmail's
// redirect — we apply the same logic here.
// redirect - we apply the same logic here.
const requestedTo = data.to.join(', ');
const requestedCc = data.cc?.join(', ');
const effectiveTo = env.EMAIL_REDIRECT_TO ?? requestedTo;

View File

@@ -29,7 +29,7 @@ export interface DraftResult {
* Request an AI-generated email draft.
*
* Generates an opaque random jobId rather than relying on BullMQ's default
* sequential ids — the jobId is the access token for polling, so it must
* sequential ids - the jobId is the access token for polling, so it must
* not be enumerable. The job payload also captures the requesting user
* + port so the poll endpoint can refuse cross-tenant / cross-user reads.
*
@@ -59,7 +59,7 @@ export async function requestEmailDraft(
await aiQueue.add(
'generate-email-draft',
{
// No PII — only IDs and context parameters
// No PII - only IDs and context parameters
interestId: request.interestId,
clientId: request.clientId,
portId: request.portId,

View File

@@ -91,7 +91,7 @@ export async function getThread(threadId: string, portId: string) {
export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
let threadId: string | null = null;
// Step 1: Message-ID chain — check inReplyTo and references headers
// Step 1: Message-ID chain - check inReplyTo and references headers
const referencedIds = [
...(parsedEmail.inReplyTo ? [parsedEmail.inReplyTo] : []),
...(parsedEmail.references ?? []),
@@ -104,10 +104,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
if (existingMessage) {
// Verify thread belongs to this port
const thread = await db.query.emailThreads.findFirst({
where: and(
eq(emailThreads.id, existingMessage.threadId),
eq(emailThreads.portId, portId),
),
where: and(eq(emailThreads.id, existingMessage.threadId), eq(emailThreads.portId, portId)),
});
if (thread) {
threadId = thread.id;
@@ -117,7 +114,10 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
// Step 2: Email address match against client contacts
if (!threadId) {
const fromAddress = parsedEmail.from.replace(/.*<(.+)>/, '$1').trim().toLowerCase();
const fromAddress = parsedEmail.from
.replace(/.*<(.+)>/, '$1')
.trim()
.toLowerCase();
const contactRows = await db
.select({
@@ -168,9 +168,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
// Step 3: Subject + sender fuzzy match
if (!threadId) {
const normalizedSubject = parsedEmail.subject
.replace(/^(re|fwd|fw):\s*/i, '')
.trim();
const normalizedSubject = parsedEmail.subject.replace(/^(re|fwd|fw):\s*/i, '').trim();
if (normalizedSubject) {
const matchingThread = await db.query.emailThreads.findFirst({
@@ -187,7 +185,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
}
}
// No thread found — create a new one
// No thread found - create a new one
if (!threadId) {
const newThreadRows = await db
.insert(emailThreads)
@@ -276,7 +274,7 @@ export async function syncInbox(accountId: string): Promise<void> {
logger.info({ accountId, exists: mailbox.exists }, 'IMAP INBOX opened');
// Search for messages since the last sync date
// client.search() returns false | number[] — false means nothing found
// client.search() returns false | number[] - false means nothing found
const searchResult = await client.search({ since });
const uids: number[] = searchResult === false ? [] : searchResult;
@@ -291,21 +289,15 @@ export async function syncInbox(accountId: string): Promise<void> {
const parsed = await simpleParser(message.source);
// Normalise messageId — mailparser returns string | string[] | undefined
// Normalise messageId - mailparser returns string | string[] | undefined
const rawMsgId = parsed.messageId;
const messageId =
rawMsgId == null
? ''
: Array.isArray(rawMsgId)
? (rawMsgId[0] ?? '')
: rawMsgId;
rawMsgId == null ? '' : Array.isArray(rawMsgId) ? (rawMsgId[0] ?? '') : rawMsgId;
const from = parsed.from?.text ?? '';
// Normalise to/cc — mailparser AddressObject can be an array
const resolveAddresses = (
field: typeof parsed.to,
): string[] => {
// Normalise to/cc - mailparser AddressObject can be an array
const resolveAddresses = (field: typeof parsed.to): string[] => {
if (!field) return [];
const arr = Array.isArray(field) ? field : [field];
return arr.flatMap((a) =>
@@ -321,8 +313,8 @@ export async function syncInbox(accountId: string): Promise<void> {
rawRefs == null
? []
: typeof rawRefs === 'string'
? rawRefs.split(/\s+/).filter(Boolean)
: rawRefs;
? rawRefs.split(/\s+/).filter(Boolean)
: rawRefs;
await ingestMessage(account.portId, {
messageId,

View File

@@ -24,7 +24,7 @@ import { ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
interface SetEntityTagsArgs<TJoin extends PgTable> {
/** Join table — e.g. `clientTags`, `yachtTags`. */
/** Join table - e.g. `clientTags`, `yachtTags`. */
joinTable: TJoin;
/**
* Column on the join table that points back at the parent entity (e.g.
@@ -56,7 +56,7 @@ export async function setEntityTags<TJoin extends PgTable>(
// Tenant scope: every supplied tagId must belong to the caller's port.
// The tags table is per-port (`tags.port_id`) but the join tables only
// have a single-column FK to `tags.id` — without this guard, a port-A
// have a single-column FK to `tags.id` - without this guard, a port-A
// caller could splice a port-B tag UUID onto their own entity. The
// entity's GET handler joins `tags ON join.tag_id = tags.id` with no
// port filter, so the foreign tag's name and color render in port A.

View File

@@ -83,8 +83,8 @@ export type EoiContext = {
*
* Tenant-scoped: every fetch is gated by `portId`, and missing rows surface
* as NotFoundError. The hard gate matches the EOI document's top paragraph
* (Section 2 — name, address, email): without those the EOI is unsignable
* and we throw. Yacht and berth (Section 3) are optional — the rendered PDF
* (Section 2 - name, address, email): without those the EOI is unsignable
* and we throw. Yacht and berth (Section 3) are optional - the rendered PDF
* leaves those fields blank when not set.
*/
export async function buildEoiContext(interestId: string, portId: string): Promise<EoiContext> {
@@ -96,7 +96,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
throw new NotFoundError('Interest');
}
// Parallelise independent reads. Yacht and berth are both nullable —
// Parallelise independent reads. Yacht and berth are both nullable -
// the EOI's Section 3 stays blank when they're absent.
const [yacht, berth, client, port] = await Promise.all([
interest.yachtId
@@ -120,7 +120,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
if (!client) throw new NotFoundError('Client');
if (!port) throw new NotFoundError('Port');
// 5. Primary contacts — email + phone for the interest's client.
// 5. Primary contacts - email + phone for the interest's client.
const contactRows = await db
.select({
channel: clientContacts.channel,
@@ -166,7 +166,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
if (!clientAddress || !clientAddress.street.trim()) missing.push('client address');
if (missing.length > 0) {
throw new ValidationError(
`Cannot generate EOI — missing required client details: ${missing.join(', ')}.`,
`Cannot generate EOI - missing required client details: ${missing.join(', ')}.`,
);
}

View File

@@ -1,5 +1,5 @@
/**
* Expense duplicate detection — heuristic match on
* Expense duplicate detection - heuristic match on
* (port + vendor + amount + date ± 3d). PR1 ships the function shape;
* PR8 wires the BullMQ trigger and the merge service.
*/
@@ -71,7 +71,7 @@ export async function markBestDuplicate(expenseId: string): Promise<string | nul
}
/**
* Clear the duplicate flag — operator confirmed this is a real expense.
* Clear the duplicate flag - operator confirmed this is a real expense.
* Leaves `dedupScannedAt` populated so the engine doesn't re-flag it.
*/
export async function clearDuplicate(expenseId: string, portId: string): Promise<void> {
@@ -118,7 +118,7 @@ export async function mergeDuplicate(
.set({ receiptFileIds: mergedReceipts })
.where(eq(expenses.id, targetId));
// Archive the source — preserves audit history, keeps any FKs alive.
// Archive the source - preserves audit history, keeps any FKs alive.
await tx
.update(expenses)
.set({ archivedAt: sql`now()`, duplicateOf: null })

View File

@@ -33,7 +33,7 @@ export interface OcrContext {
export const OCR_MAX_TOKENS = 1024;
export const OCR_LOW_CONFIDENCE_THRESHOLD = 0.6;
/** Stub — returns "pending" shape so callers can wire UI in PR1 without
/** Stub - returns "pending" shape so callers can wire UI in PR1 without
* Anthropic credentials. */
export async function ocrReceipt(_ctx: OcrContext): Promise<OcrResult> {
return {

View File

@@ -135,7 +135,7 @@ export async function createExpense(portId: string, data: CreateExpenseInput, me
category: expense.category ?? '',
});
// Schedule a duplicate-detection sweep. Best-effort — we don't want a
// Schedule a duplicate-detection sweep. Best-effort - we don't want a
// queue-side hiccup to fail the user's create.
try {
const { getQueue } = await import('@/lib/queue');

View File

@@ -231,7 +231,7 @@ export async function listFiles(portId: string, query: ListFilesInput) {
sort: sort ? { column: sortColumn, direction: order } : undefined,
page,
pageSize: limit,
// no archivedAtColumn — files are immutable records
// no archivedAtColumn - files are immutable records
});
}

View File

@@ -1,6 +1,6 @@
/**
* Builds the structured payload that becomes the JSON + HTML inside a
* GDPR client-data export. Pure read-side — no writes, no I/O outside
* GDPR client-data export. Pure read-side - no writes, no I/O outside
* Drizzle. The worker pairs this with the actual ZIP/upload/email work.
*
* GDPR Article 15 (right of access) requires that we hand the data
@@ -223,8 +223,8 @@ function tableSection(title: string, rows: Record<string, unknown>[]): string {
}
/**
* Renders the bundle as a self-contained HTML document — no external
* resources, no JS — so it opens in any browser including offline.
* Renders the bundle as a self-contained HTML document - no external
* resources, no JS - so it opens in any browser including offline.
*/
export function renderBundleHtml(bundle: GdprBundle): string {
const clientName = String(bundle.client.fullName ?? bundle.meta.clientId ?? 'Unknown');
@@ -255,7 +255,7 @@ export function renderBundleHtml(bundle: GdprBundle): string {
<html lang="en">
<head>
<meta charset="utf-8" />
<title>Personal data export — ${escapeHtml(clientName)}</title>
<title>Personal data export - ${escapeHtml(clientName)}</title>
<style>
body { font: 14px/1.5 -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, sans-serif; margin: 2rem; max-width: 1200px; }
h1 { border-bottom: 2px solid #333; padding-bottom: 0.5rem; }

View File

@@ -69,7 +69,7 @@ export async function requestGdprExport(input: RequestExportInput): Promise<Requ
});
if (!primary) {
throw new ValidationError(
'Client has no primary email contact — provide an emailOverride or add one before exporting.',
'Client has no primary email contact - provide an emailOverride or add one before exporting.',
);
}
}
@@ -132,7 +132,7 @@ export async function processGdprExportJob(input: ProcessJobInput): Promise<void
const html = renderBundleHtml(bundle);
// Stream a ZIP into a buffer. Receipts/contracts are not included
// here — they live on file rows referenced by the bundle and would
// here - they live on file rows referenced by the bundle and would
// bloat the archive. Add them later if Article-15 requests demand.
const zip = archiver('zip', { zlib: { level: 9 } });
const sink = new PassThrough();
@@ -212,7 +212,7 @@ async function emailExport(input: ProcessJobInput, storageKey: string): Promise<
if (!recipient) {
logger.warn(
{ exportId: input.exportId, clientId: input.clientId },
'GDPR export ready but no email recipient — skipping send',
'GDPR export ready but no email recipient - skipping send',
);
return;
}
@@ -227,8 +227,8 @@ async function emailExport(input: ProcessJobInput, storageKey: string): Promise<
<p>Hello ${escapeHtml(name)},</p>
<p>You requested a copy of the personal data we hold about you. The export is ready and contains:</p>
<ul>
<li><code>client.json</code> — machine-readable data dump</li>
<li><code>client.html</code> — same data as a printable web page</li>
<li><code>client.json</code> - machine-readable data dump</li>
<li><code>client.html</code> - same data as a printable web page</li>
</ul>
<p><a href="${url}">Download the export (ZIP, expires ${escapeHtml(expiry)})</a></p>
<p>If you have any questions, reply to this email.</p>
@@ -254,7 +254,7 @@ function escapeHtml(s: unknown): string {
.replace(/'/g, '&#39;');
}
/** Lists exports for a client (most-recent first) — feeds the admin "history" UI. */
/** Lists exports for a client (most-recent first) - feeds the admin "history" UI. */
export async function listClientExports(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({ where: eq(clients.id, clientId) });
if (!client || client.portId !== portId) throw new NotFoundError('Client');

View File

@@ -46,7 +46,7 @@ function scoreStageSpeed(createdAt: Date, pipelineStage: string): number {
const idx = PIPELINE_STAGES.indexOf(pipelineStage as (typeof PIPELINE_STAGES)[number]);
const stageIndex = idx === -1 ? 0 : idx;
if (stageIndex === 0) {
// Still at open — no progression
// Still at open - no progression
return 0;
}
@@ -131,7 +131,7 @@ export async function calculateInterestScore(
// 3. Document completeness
const documentCompleteness = scoreDocumentCompleteness(interest);
// 4. Engagement — notes, emails, reminders in last 30 days
// 4. Engagement - notes, emails, reminders in last 30 days
const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
const [notesResult, remindersResult, emailResult] = await Promise.all([

View File

@@ -29,8 +29,8 @@ import type {
// ─── Port-scope FK validator ─────────────────────────────────────────────────
// Tenant scope: every FK referenced from an interest body — clientId, berthId,
// and yachtId — must belong to the caller's port. Without this, a body-supplied
// Tenant scope: every FK referenced from an interest body - clientId, berthId,
// and yachtId - must belong to the caller's port. Without this, a body-supplied
// foreign-port id would create an interest that joins through these FKs and
// surfaces foreign-tenant data on subsequent reads (clientName, berth mooring
// number, yacht ownership). assertYachtBelongsToClient still runs separately to
@@ -370,7 +370,7 @@ export async function getInterestById(id: string, portId: string) {
.where(eq(interestNotes.interestId, id));
// Active reminder count for the interest's bell badge. Counts reminders
// directly linked via interestId — `pending` and `snoozed` only;
// directly linked via interestId - `pending` and `snoozed` only;
// completed/dismissed don't surface.
const [{ count: activeReminderCount } = { count: 0 }] = await db
.select({ count: sql<number>`count(*)::int` })
@@ -633,7 +633,7 @@ export async function changeInterestStage(
// Moves an interest forward to `target` if (and only if) it is currently behind
// it in the pipeline order. Used by lifecycle events (EOI sent, EOI signed,
// deposit recorded, contract signed) so the user-visible stage tracks reality
// without overwriting a more advanced state — e.g. a late-arriving signed-EOI
// without overwriting a more advanced state - e.g. a late-arriving signed-EOI
// webhook on an interest that has already moved on to `contract_sent` is a
// no-op rather than a regression.
//
@@ -658,7 +658,7 @@ export async function advanceStageIfBehind(
// yachtId gate: changeInterestStage requires a yacht before leaving `open`.
// EOI events imply a yacht is in the picture, but if the data is missing we
// bail rather than throw — the EOI itself shouldn't fail because of this.
// bail rather than throw - the EOI itself shouldn't fail because of this.
if (existing.pipelineStage === 'open' && !existing.yachtId) {
return false;
}
@@ -671,7 +671,7 @@ export async function advanceStageIfBehind(
//
// Records a terminal outcome for the interest and moves the pipelineStage to
// `completed` so the funnel/kanban reflect the final state. The outcome
// distinguishes won deals (they made it through) from lost variants — funnel
// distinguishes won deals (they made it through) from lost variants - funnel
// math and reports key off the `outcome` column to compute true conversion.
//
// Both the stage advance and the outcome write happen in one transaction so

View File

@@ -139,7 +139,7 @@ async function resolveBillingEntity(
* Verify every supplied expense ID belongs to the caller's port. Without
* this gate, a caller could link foreign-port expenses into their own
* draft invoice and read those expenses back via getInvoiceById's
* `linkedExpenses` join — a cross-tenant data leak.
* `linkedExpenses` join - a cross-tenant data leak.
*/
async function assertExpensesInPort(
tx: typeof db,
@@ -251,7 +251,7 @@ export async function createInvoice(portId: string, data: CreateInvoiceInput, me
const lineItemsData = data.lineItems ?? [];
const subtotal = lineItemsData.reduce((sum, li) => sum + (li.quantity ?? 1) * li.unitPrice, 0);
// BR-042: net10 discount — read from systemSettings
// BR-042: net10 discount - read from systemSettings
let discountPct = 0;
if (data.paymentTerms === 'net10') {
const [setting] = await tx
@@ -479,7 +479,7 @@ export async function updateInvoice(
// Replace expense links if provided
if (data.expenseIds !== undefined) {
// Tenancy gate first — reject foreign-port expense IDs before
// Tenancy gate first - reject foreign-port expense IDs before
// running BR-045 or doing any writes.
await assertExpensesInPort(tx, portId, data.expenseIds);
// BR-045
@@ -728,7 +728,7 @@ export async function recordPayment(
});
// Deposit invoices linked to a sales interest auto-advance the pipeline.
// Only advances forward — no-op if the interest has already moved past
// Only advances forward - no-op if the interest has already moved past
// deposit_10pct (e.g. straight-to-contract flows).
if (updated.kind === 'deposit' && updated.interestId) {
const { advanceStageIfBehind } = await import('@/lib/services/interests.service');

View File

@@ -83,7 +83,7 @@ export async function createNotification(
}
}
// 2. Preference check (skip for system_alert type — always delivered)
// 2. Preference check (skip for system_alert type - always delivered)
if (type !== 'system_alert') {
const [pref] = await db
.select({
@@ -101,12 +101,12 @@ export async function createNotification(
.limit(1);
if (pref && pref.inApp === false) {
// Check if email is enabled — if neither, skip entirely
// Check if email is enabled - if neither, skip entirely
if (pref.email === false) {
return null;
}
// inApp disabled but email enabled: still enqueue email but skip insert
// We can't insert and emit, so just enqueue if there were a row — but we need an ID.
// We can't insert and emit, so just enqueue if there were a row - but we need an ID.
// Per spec: if inApp=false, skip insert. Email requires notificationId so skip email too.
return null;
}

View File

@@ -1,5 +1,5 @@
/**
* OCR provider config — stored in `system_settings` under the key
* OCR provider config - stored in `system_settings` under the key
* `ocr.config`. Each port can either have its own row (port_id = port.id)
* or opt into the global row (port_id = null) by setting `useGlobal: true`.
*/
@@ -22,7 +22,7 @@ export const DEFAULT_MODEL: Record<OcrProvider, string> = {
claude: 'claude-haiku-4-5',
};
/** Public shape that admin UIs read — never includes the raw key. */
/** Public shape that admin UIs read - never includes the raw key. */
export interface OcrConfigPublic {
provider: OcrProvider;
model: string;
@@ -38,7 +38,7 @@ export interface OcrConfigPublic {
aiEnabled: boolean;
}
/** Internal shape including the decrypted key — server-side only. */
/** Internal shape including the decrypted key - server-side only. */
export interface OcrConfigResolved extends OcrConfigPublic {
apiKey: string | null;
/** Source of the resolved row: 'port' | 'global' | 'none'. */
@@ -115,7 +115,7 @@ export async function getResolvedOcrConfig(portId: string): Promise<OcrConfigRes
};
}
/** Public-safe view for the admin UI — same shape but never the key. */
/** Public-safe view for the admin UI - same shape but never the key. */
export async function getPublicOcrConfig(portId: string | null): Promise<OcrConfigPublic> {
const row = await readRow(portId);
if (!row) {

View File

@@ -108,7 +108,7 @@ async function issueActivationToken(
await sendEmail(email, subject, html, undefined, text);
} catch (err) {
logger.error({ err, email }, 'Failed to send portal activation email');
// Re-throw — the admin should know if their invite mail bounced.
// Re-throw - the admin should know if their invite mail bounced.
throw err;
}
}
@@ -162,7 +162,7 @@ export async function signIn(args: {
where: eq(portalUsers.email, normalizedEmail),
});
// Dummy hash with the right shape — used to keep verifyPassword's compute
// Dummy hash with the right shape - used to keep verifyPassword's compute
// cost identical when the user doesn't exist.
const dummyHash =
'0000000000000000000000000000000000000000000000000000000000000000:00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000';
@@ -177,7 +177,7 @@ export async function signIn(args: {
// Disabled-port check happens AFTER the credential check so that a wrong
// password on a disabled-port account still surfaces "invalid email or
// password" we never leak which ports have the portal turned off.
// password" - we never leak which ports have the portal turned off.
if (!(await isPortalEnabledForPort(user.portId))) {
throw new UnauthorizedError('Invalid email or password');
}
@@ -209,7 +209,7 @@ export async function requestPasswordReset(email: string): Promise<void> {
return;
}
// Same silent no-op when the port has the portal disabled — keeps the
// Same silent no-op when the port has the portal disabled - keeps the
// disabled-state from leaking through the public reset endpoint.
if (!(await isPortalEnabledForPort(user.portId))) {
logger.debug({ portId: user.portId }, 'Password reset on disabled-portal port');
@@ -239,7 +239,7 @@ export async function requestPasswordReset(email: string): Promise<void> {
await sendEmail(user.email, subject, html, undefined, text);
} catch (err) {
logger.error({ err, email: user.email }, 'Failed to send password-reset email');
// Don't propagate — the public route returns 200 either way.
// Don't propagate - the public route returns 200 either way.
}
}

View File

@@ -151,7 +151,7 @@ export async function listRecommendations(interestId: string, portId: string) {
// Verify the interest belongs to the caller's port. Without this gate,
// any user with `interests:view` could pass a foreign-port interestId
// and receive that tenant's recommended berths (mooring numbers,
// dimensions, status — operational data they should not see).
// dimensions, status - operational data they should not see).
const interest = await db.query.interests.findFirst({
where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
});

View File

@@ -113,7 +113,7 @@ export async function getUpcomingReminders(portId: string, days: number = 14) {
* all point at rows inside the caller's port. Without this guard, a
* reminder created with a foreign-port FK would later be hydrated with
* `with: { client, interest, berth }` joins (no port filter on the
* relation) — leaking the foreign-tenant rows back to the attacker.
* relation) - leaking the foreign-tenant rows back to the attacker.
*/
async function assertReminderFksInPort(
portId: string,
@@ -444,7 +444,7 @@ export async function processFollowUpReminders() {
const title = client ? `Follow up with ${client.fullName}` : 'Follow up on interest';
// Find the assigned user (first userPortRole for this port, or fallback)
// For now, leave assignedTo null — the notification goes to the port room
// For now, leave assignedTo null - the notification goes to the port room
await db.insert(reminders).values({
portId: port.id,
title,

View File

@@ -235,7 +235,7 @@ export async function createResidentialInterest(
data: CreateResidentialInterestInput,
meta: AuditMeta,
) {
// Validate the residential client belongs to this port — prevents
// Validate the residential client belongs to this port - prevents
// cross-port linking.
const client = await db.query.residentialClients.findFirst({
where: and(

View File

@@ -194,7 +194,7 @@ function recentSearchKey(userId: string, portId: string): string {
}
/**
* Fire-and-forget saves a search term to the user's recent searches sorted set.
* Fire-and-forget - saves a search term to the user's recent searches sorted set.
*/
export function saveRecentSearch(userId: string, portId: string, searchTerm: string): void {
const key = recentSearchKey(userId, portId);
@@ -203,7 +203,7 @@ export function saveRecentSearch(userId: string, portId: string, searchTerm: str
.then(() => redis.zremrangebyrank(key, 0, -(RECENT_SEARCH_MAX + 1)))
.then(() => redis.expire(key, RECENT_SEARCH_TTL))
.catch(() => {
// Intentionally swallowed recent searches are non-critical
// Intentionally swallowed - recent searches are non-critical
});
}

View File

@@ -237,9 +237,7 @@ export async function getQueueJobs(
try {
const dataStr = JSON.stringify(job.data);
truncatedData =
dataStr.length > 500
? JSON.parse(dataStr.slice(0, 500) + '...(truncated)')
: job.data;
dataStr.length > 500 ? JSON.parse(dataStr.slice(0, 500) + '...(truncated)') : job.data;
} catch {
truncatedData = '[unparseable]';
}
@@ -261,11 +259,7 @@ export async function getQueueJobs(
// ─── retryJob ─────────────────────────────────────────────────────────────────
export async function retryJob(
queueName: QueueName,
jobId: string,
userId: string,
): Promise<void> {
export async function retryJob(queueName: QueueName, jobId: string, userId: string): Promise<void> {
const queue = getQueue(queueName);
const job = await queue.getJob(jobId);
if (!job) throw new Error(`Job ${jobId} not found in queue ${queueName}`);
@@ -348,19 +342,21 @@ export async function getRecentErrors(limit = 20): Promise<RecentError[]> {
metadata: (row.metadata as Record<string, unknown>) ?? {},
}));
// Fetch failed jobs from all queues (sample top 5 per queue)
// Fetch failed jobs from all queues (sample - top 5 per queue)
const queueNames = Object.keys(QUEUE_CONFIGS) as QueueName[];
const failedJobResults = await Promise.allSettled(
queueNames.map(async (name) => {
const queue = getQueue(name);
const jobs = await queue.getJobs(['failed'], 0, 4);
return jobs.map((job): RecentError => ({
id: `${name}:${job.id ?? ''}`,
source: 'queue',
message: `Queue job failed: ${job.name} in ${name}`,
timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
metadata: { queueName: name, failedReason: job.failedReason },
}));
return jobs.map(
(job): RecentError => ({
id: `${name}:${job.id ?? ''}`,
source: 'queue',
message: `Queue job failed: ${job.name} in ${name}`,
timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
metadata: { queueName: name, failedReason: job.failedReason },
}),
);
}),
);

View File

@@ -11,7 +11,7 @@ import { INTERNAL_TO_WEBHOOK_MAP } from '@/lib/services/webhook-event-map';
* queries all active webhooks for the given port that are subscribed to that
* event, and enqueues a BullMQ delivery job for each one.
*
* This function is fire-and-forget callers should use `void dispatchWebhookEvent(...)`.
* This function is fire-and-forget - callers should use `void dispatchWebhookEvent(...)`.
*/
export async function dispatchWebhookEvent(
portId: string,
@@ -20,7 +20,7 @@ export async function dispatchWebhookEvent(
): Promise<void> {
const webhookEvent = INTERNAL_TO_WEBHOOK_MAP[internalEvent];
if (!webhookEvent) {
// No mapping for this event skip silently
// No mapping for this event - skip silently
return;
}
@@ -65,10 +65,7 @@ export async function dispatchWebhookEvent(
});
}
} catch (err) {
// Never block callers log and swallow
logger.error(
{ portId, internalEvent, webhookEvent, err },
'Failed to dispatch webhook event',
);
// Never block callers - log and swallow
logger.error({ portId, internalEvent, webhookEvent, err }, 'Failed to dispatch webhook event');
}
}

View File

@@ -67,7 +67,7 @@ export async function createWebhook(
userAgent: meta.userAgent,
});
// Return with plaintext secret shown ONCE only on creation
// Return with plaintext secret - shown ONCE only on creation
return {
...webhook!,
secret: plaintextSecret,
@@ -220,7 +220,7 @@ export async function regenerateSecret(portId: string, webhookId: string, meta:
userAgent: meta.userAgent,
});
// Return new plaintext secret shown ONCE
// Return new plaintext secret - shown ONCE
return {
webhookId,
secret: plaintextSecret,

View File

@@ -211,7 +211,7 @@ export async function transferOwnership(
yacht.currentOwnerType === data.newOwner.type &&
yacht.currentOwnerId === data.newOwner.id
) {
throw new ValidationError('same owner nothing to transfer');
throw new ValidationError('same owner - nothing to transfer');
}
await assertOwnerExists(portId, data.newOwner, tx);