chore(style): codebase em-dash sweep + minor layout polish
Some checks failed
Build & Push Docker Images / lint (push) Failing after 1m18s
Build & Push Docker Images / build-and-push (push) Has been skipped

Replaces every em-dash and en-dash with a regular ASCII hyphen
across comments, JSX strings, and dev-facing logs. Mostly cosmetic
but stops the inconsistent mix that crept in over the last few
months (some files used em-dashes in comments, others didn't,
and some mixed both dash styles in the same file).

Bundles two small dashboard-layout tweaks that touch a couple of
already-modified files:
- (dashboard)/layout.tsx main padding goes from p-6 to pt-3 px-6
  pb-6 so page content sits closer to the topbar.
- Sidebar now receives the ports list it needs for the footer
  port switcher.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-04 22:57:01 +02:00
parent d62822c284
commit 8699f81879
225 changed files with 844 additions and 845 deletions

View File

@@ -25,7 +25,7 @@ export interface AuthContext {
userId: string;
portId: string;
portSlug: string;
/** true for super_admin users bypasses all permission checks. */
/** true for super_admin users - bypasses all permission checks. */
isSuperAdmin: boolean;
/**
* Effective permissions after role + port override deep-merge.
@@ -117,7 +117,7 @@ export function withAuth(
// 3. Resolve port context.
// Port ID comes from the X-Port-Id header (set by the client after port
// selection), falling back to the user's default port from preferences.
// It NEVER comes from the request body SECURITY-GUIDELINES.md §2.1.
// It NEVER comes from the request body - SECURITY-GUIDELINES.md §2.1.
const portIdFromHeader = req.headers.get('X-Port-Id');
const portId =
portIdFromHeader ??
@@ -163,7 +163,7 @@ export function withAuth(
) as RolePermissions;
}
// Per-user residential toggle flips the residential domain on
// Per-user residential toggle - flips the residential domain on
// top of whatever the role grants. We never use it to *revoke*
// residential access from a role that already grants it.
if (portRole.residentialAccess && permissions) {
@@ -181,7 +181,7 @@ export function withAuth(
}
} else if (profile.isSuperAdmin && portId) {
// Super admin still needs portSlug for response context.
// We also validate the portId actually exists a super-admin session
// We also validate the portId actually exists - a super-admin session
// must not be able to operate against a fabricated portId.
const port = await db.query.ports.findFirst({
where: eq(ports.id, portId),
@@ -237,7 +237,7 @@ export function withPermission(
if (!resourcePerms || !resourcePerms[action]) {
logger.warn({ userId: ctx.userId, resource, action }, 'Permission denied');
// Log the denied attempt fire-and-forget; audit must never block response.
// Log the denied attempt - fire-and-forget; audit must never block response.
void createAuditLog({
userId: ctx.userId,
portId: ctx.portId,
@@ -261,7 +261,7 @@ export function withPermission(
/**
* Wraps a route handler with a per-user rate-limit gate. Compose inside
* withAuth so the userId is available falls back to IP for anonymous
* withAuth so the userId is available - falls back to IP for anonymous
* routes (we don't currently expose any).
*
* 429 responses include `X-RateLimit-Limit` / `Remaining` / `Reset` headers

View File

@@ -53,7 +53,7 @@ const SENSITIVE_FIELDS = new Set(['email', 'phone', 'password', 'credentials_enc
* Masks sensitive field values to prevent PII or secrets from being stored
* verbatim in the audit log (SECURITY-GUIDELINES.md §5.2).
*
* Strings are replaced with a partial mask first 2 chars + *** + last 2 chars.
* Strings are replaced with a partial mask - first 2 chars + *** + last 2 chars.
*/
export function maskSensitiveFields(
data?: Record<string, unknown>,
@@ -89,7 +89,7 @@ export function diffFields(
/**
* Inserts an audit log entry into the database.
*
* This function NEVER throws errors are caught and logged so that an audit
* This function NEVER throws - errors are caught and logged so that an audit
* failure never rolls back or disrupts the parent operation.
*/
export async function createAuditLog(params: AuditLogParams): Promise<void> {

View File

@@ -49,7 +49,7 @@ export const auth = betterAuth({
emailAndPassword: {
enabled: true,
minPasswordLength: 9,
// Accounts are admin-created only no self-service email verification flow.
// Accounts are admin-created only - no self-service email verification flow.
requireEmailVerification: false,
},

View File

@@ -7,7 +7,7 @@
* soft warnings and hard caps.
*
* Token-denominated rather than dollar-denominated so the cap survives
* model price changes and it's the unit both OpenAI and Anthropic
* model price changes - and it's the unit both OpenAI and Anthropic
* SDKs return in `response.usage`.
*/
@@ -25,7 +25,7 @@ export const aiUsageLedger = pgTable(
portId: text('port_id')
.notNull()
.references(() => ports.id, { onDelete: 'cascade' }),
/** Optional system-initiated calls (e.g. scheduled summarizers) won't have a user. */
/** Optional - system-initiated calls (e.g. scheduled summarizers) won't have a user. */
userId: text('user_id').references(() => user.id, { onDelete: 'set null' }),
/** Stable feature key: 'ocr', 'summary', 'embedding', 'reply_draft', etc. */
feature: text('feature').notNull(),

View File

@@ -127,7 +127,7 @@ export const clientTags = pgTable(
clientId: text('client_id')
.notNull()
.references(() => clients.id, { onDelete: 'cascade' }),
tagId: text('tag_id').notNull(), // references tags.id defined later in system.ts
tagId: text('tag_id').notNull(), // references tags.id - defined later in system.ts
},
(table) => [primaryKey({ columns: [table.clientId, table.tagId] })],
);
@@ -194,7 +194,7 @@ export const clientMergeCandidates = pgTable(
},
(table) => [
index('idx_cmc_port_status').on(table.portId, table.status),
// Same pair shouldn't surface twice enforce uniqueness on the
// Same pair shouldn't surface twice - enforce uniqueness on the
// canonical (a < b) ordering.
uniqueIndex('idx_cmc_pair').on(table.portId, table.clientAId, table.clientBId),
],

View File

@@ -5,7 +5,7 @@ import { pgTable, text, boolean, timestamp, index, uniqueIndex } from 'drizzle-o
*
* `tokenHash` is a SHA-256 hash of the raw token sent in the email. Lookups
* happen by hash so a DB compromise never leaks active tokens. The invite
* is consumed at /set-password the route creates the better-auth `user`
* is consumed at /set-password - the route creates the better-auth `user`
* row + `account` credential and the matching `user_profiles` extension.
*/
export const crmUserInvites = pgTable(

View File

@@ -31,7 +31,7 @@ export const gdprExports = pgTable(
.references(() => user.id, { onDelete: 'restrict' }),
/** 'pending' | 'building' | 'ready' | 'sent' | 'failed' */
status: text('status').notNull().default('pending'),
/** MinIO path under the configured bucket null until the worker uploads. */
/** MinIO path under the configured bucket - null until the worker uploads. */
storageKey: text('storage_key'),
sizeBytes: integer('size_bytes'),
/** When status='failed', the truncated error message. */
@@ -41,7 +41,7 @@ export const gdprExports = pgTable(
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
readyAt: timestamp('ready_at', { withTimezone: true }),
sentAt: timestamp('sent_at', { withTimezone: true }),
/** Cleanup target bundles are removed from MinIO after this. */
/** Cleanup target - bundles are removed from MinIO after this. */
expiresAt: timestamp('expires_at', { withTimezone: true }),
},
(table) => [

View File

@@ -1,5 +1,5 @@
/**
* Phase B operational insight surfaces.
* Phase B - operational insight surfaces.
*
* - `alerts`: rule-engine-fired actionable cards. The fingerprint column
* dedupes re-evaluations of the same condition; the partial unique
@@ -35,12 +35,12 @@ export const alerts = pgTable(
/** Optional FK target: 'interest', 'reservation', 'document', 'expense', ... */
entityType: text('entity_type'),
entityId: text('entity_id'),
/** Hash of (rule_id + entity_type + entity_id) dedupes re-evaluations. */
/** Hash of (rule_id + entity_type + entity_id) - dedupes re-evaluations. */
fingerprint: text('fingerprint').notNull(),
firedAt: timestamp('fired_at', { withTimezone: true }).notNull().defaultNow(),
dismissedAt: timestamp('dismissed_at', { withTimezone: true }),
dismissedBy: text('dismissed_by').references(() => user.id),
/** "Someone is on it" alert stays visible but stops nagging. */
/** "Someone is on it" - alert stays visible but stops nagging. */
acknowledgedAt: timestamp('acknowledged_at', { withTimezone: true }),
acknowledgedBy: text('acknowledged_by').references(() => user.id),
/** Set by the engine when the underlying condition no longer fires. */
@@ -49,7 +49,7 @@ export const alerts = pgTable(
metadata: jsonb('metadata').default({}),
},
(table) => [
// Only one open alert per (port, fingerprint) re-evaluation upserts.
// Only one open alert per (port, fingerprint) - re-evaluation upserts.
uniqueIndex('idx_alerts_fingerprint_open')
.on(table.portId, table.fingerprint)
.where(sql`resolved_at IS NULL`),
@@ -85,7 +85,7 @@ export type NewAnalyticsSnapshot = typeof analyticsSnapshots.$inferInsert;
export type AlertSeverity = 'info' | 'warning' | 'critical';
/**
* Rule IDs in the v1 catalog keep in sync with `alert-rules.ts`.
* Rule IDs in the v1 catalog - keep in sync with `alert-rules.ts`.
*
* Two rules from the original spec (`document.expiring_soon`,
* `audit.suspicious_login`) are deferred until their data sources land:

View File

@@ -16,7 +16,7 @@ export const interests = pgTable(
clientId: text('client_id')
.notNull()
.references(() => clients.id),
berthId: text('berth_id'), // nullable FK to berths defined in berths.ts, added via relation
berthId: text('berth_id'), // nullable - FK to berths defined in berths.ts, added via relation
yachtId: text('yacht_id'), // FK added via relation; nullable until pipeline leaves 'open'
pipelineStage: text('pipeline_stage').notNull().default('open'),
leadCategory: text('lead_category'), // general_interest, specific_qualified, hot_lead
@@ -36,7 +36,7 @@ export const interests = pgTable(
reminderEnabled: boolean('reminder_enabled').notNull().default(false),
reminderDays: integer('reminder_days'),
reminderLastFired: timestamp('reminder_last_fired', { withTimezone: true }),
/** Terminal outcome. Independent of pipelineStage `outcome` is set
/** Terminal outcome. Independent of pipelineStage - `outcome` is set
* alongside the stage transition to `completed` to distinguish won
* deals from the various lost variants. NULL while the interest is
* still active. */

View File

@@ -28,7 +28,7 @@ export const migrationSourceLinks = pgTable(
targetEntityType: text('target_entity_type').notNull(),
/** UUID of the new-system entity (clients.id, interests.id, etc.). */
targetEntityId: text('target_entity_id').notNull(),
/** Apply-id from the migration run that created this link pairs with
/** Apply-id from the migration run that created this link - pairs with
* the on-disk apply manifest so `--rollback --apply-id <id>` knows
* exactly which links to remove. */
appliedId: text('applied_id').notNull(),

View File

@@ -4,7 +4,7 @@ import { ports } from './ports';
import { clients } from './clients';
/**
* Portal users one per client account that's been invited to the client
* Portal users - one per client account that's been invited to the client
* portal. Separate from the CRM `users` table (managed by better-auth) so the
* authentication realms stay isolated.
*

View File

@@ -3,7 +3,7 @@ import { pgTable, text, timestamp, index } from 'drizzle-orm/pg-core';
import { ports } from './ports';
/**
* Residential clients physically separated from `clients` because the
* Residential clients - physically separated from `clients` because the
* residential side is handled by an external team that should never see
* marina-side data, and vice versa. The two domains share a port but no
* tables, so the access boundary is enforced at the schema level.
@@ -24,7 +24,7 @@ export const residentialClients = pgTable(
* PhoneInput component lands. The free-text `phone` column stays
* for one release as a fallback for unparseable rows. */
phoneE164: text('phone_e164'),
/** ISO-3166-1 alpha-2 country the phone was parsed against. */
/** ISO-3166-1 alpha-2 - country the phone was parsed against. */
phoneCountry: text('phone_country'),
/** ISO-3166-1 alpha-2 nationality. */
nationalityIso: text('nationality_iso'),
@@ -55,7 +55,7 @@ export const residentialClients = pgTable(
);
/**
* Residential interests one per inquiry/lead. A residential_client can
* Residential interests - one per inquiry/lead. A residential_client can
* have multiple interests over time (e.g. inquired about a unit in 2025,
* came back about a different unit in 2026).
*
@@ -78,7 +78,7 @@ export const residentialInterests = pgTable(
source: text('source'), // website | manual | referral | broker
notes: text('notes'),
/**
* Free-text capture of unit-type / size / floor / budget preferences
* Free-text capture of unit-type / size / floor / budget preferences -
* residential leads are exploratory and the external team uses notes
* heavily. Schema can grow into structured columns later if needed.
*/

View File

@@ -144,7 +144,7 @@ export type UserPreferences = {
/**
* Core user table managed by Better Auth.
* Do NOT modify directly Better Auth handles CRUD via its adapter.
* Do NOT modify directly - Better Auth handles CRUD via its adapter.
*/
export const user = pgTable('user', {
id: text('id').primaryKey(),
@@ -282,7 +282,7 @@ export const userPortRoles = pgTable(
);
/**
* Sessions table Better Auth compatibility.
* Sessions table - Better Auth compatibility.
* Better Auth manages session creation/validation.
*/
export const session = pgTable(

View File

@@ -1,16 +1,16 @@
/**
* Per-port seed data builder for Port Nimara CRM.
*
* Exports `seedPortData(portId, portSlug)` creates a realistic,
* Exports `seedPortData(portId, portSlug)` - creates a realistic,
* multi-cardinality data fixture for one port:
*
* - 117 berths imported from a snapshot of the legacy NocoDB Berths
* table (`src/lib/db/seed-data/berths.json`). The snapshot is reordered
* so the first 12 entries satisfy the index assumptions used further
* down for interest/reservation linkage:
* idx 0..4 available (small)
* idx 5..9 under_offer (medium)
* idx 10..11 sold (large)
* idx 0..4 - available (small)
* idx 5..9 - under_offer (medium)
* idx 10..11 - sold (large)
* - 3 companies (2 active, 1 dissolved) with primary billing addresses
* - 8 clients + contacts + primary addresses
* - Memberships tying clients to companies (incl. multi-company + ended)
@@ -107,7 +107,7 @@ export interface SeedSummary {
// ─── Main ────────────────────────────────────────────────────────────────────
export async function seedPortData(portId: string, portSlug: string): Promise<SeedSummary | null> {
// Idempotency guard if this port already has companies, assume it's been seeded.
// Idempotency guard - if this port already has companies, assume it's been seeded.
const existing = await db
.select({ id: companies.id })
.from(companies)
@@ -415,7 +415,7 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
// ── 4. Memberships ─────────────────────────────────────────────────────
// Index map: clientIds[3..4] → Aegean; [5..6] → Aegean + Blue Seas; [7] → Phantom (ended)
// Aegean total active members: clientIds[3],[4],[5],[6] = 4 but plan says 3.
// Aegean total active members: clientIds[3],[4],[5],[6] = 4 - but plan says 3.
// Revised to match the plan: Aegean has clients[3], clients[4], clients[5] (3 members);
// clients[5] and clients[6] are dual Aegean+Blue Seas members (but that gives Aegean 4 again).
//
@@ -426,20 +426,20 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
// - 1 member of Phantom SA (ended)
// 3 + 2 + 2 + 1 = 8 ✓
// Aegean members: 2 (Aegean-only) + 2 (dual) = 4
// Blue Seas members: 2 (dual) but plan says Blue Seas has 1 member.
// Blue Seas members: 2 (dual) - but plan says Blue Seas has 1 member.
// Compromise: Blue Seas has 1 dedicated single-member + the 2 dual members = 3.
// To honour "1 member" for Blue Seas we make only clientIds[5] dual
// (Aegean + Blue Seas) and clientIds[6] be an Aegean-only member.
// Then: Aegean has [3],[4],[5],[6] = 4 members (plan said 3 close enough; the
// Then: Aegean has [3],[4],[5],[6] = 4 members (plan said 3 - close enough; the
// plan's "3 members" was intent, the "dual membership" requirement dominates).
//
// Final assignment (respects all cardinality requirements):
// clientIds[0],[1],[2] no memberships (personal-only)
// clientIds[3] Aegean (primary)
// clientIds[4] Aegean (non-primary)
// clientIds[5] Aegean + Blue Seas
// clientIds[6] Aegean + Blue Seas
// clientIds[7] Phantom (ended)
// clientIds[0],[1],[2] - no memberships (personal-only)
// clientIds[3] - Aegean (primary)
// clientIds[4] - Aegean (non-primary)
// clientIds[5] - Aegean + Blue Seas
// clientIds[6] - Aegean + Blue Seas
// clientIds[7] - Phantom (ended)
await tx.insert(companyMemberships).values([
{
companyId: aegeanId,
@@ -532,7 +532,7 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
}
const YACHT_SPECS: YachtSpec[] = [
// Initially client[0] will be transferred to Aegean
// Initially client[0] - will be transferred to Aegean
{
name: 'Sea Breeze',
hull: 'HN-1001',
@@ -676,7 +676,7 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
initialOwnerId: blueSeasId,
},
// Initially Phantom-owned will be transferred to clientIds[7] on dissolution
// Initially Phantom-owned - will be transferred to clientIds[7] on dissolution
{
name: 'Ghost Current',
hull: 'HN-2005',
@@ -749,7 +749,7 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
newOwnerType: 'client' as const,
newOwnerId: clientIds[7]!,
effective: daysAgo(60),
reason: 'Corporate dissolution asset transfer',
reason: 'Corporate dissolution - asset transfer',
},
];
@@ -945,7 +945,7 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
source: 'referral',
daysAgoFirst: 10,
},
// "Lost" modeled as archived + open stage
// "Lost" - modeled as archived + open stage
{
clientIdx: 4,
berthIdx: 2,
@@ -985,8 +985,8 @@ export async function seedPortData(portId: string, portSlug: string): Promise<Se
// ── 8. Reservations ────────────────────────────────────────────────────
// 5 active on DISTINCT berths (partial unique index idx_br_active), 2 ended, 1 cancelled.
// Active: berths 5..9 (under_offer ones we set earlier).
// Ended: berths 10 and 11 (sold) use historical start/end dates.
// Cancelled: berth 0 (available a cancelled res doesn't occupy it).
// Ended: berths 10 and 11 (sold) - use historical start/end dates.
// Cancelled: berth 0 (available - a cancelled res doesn't occupy it).
const activeAssignments: Array<{
berthIdx: number;
clientIdx: number;

View File

@@ -3,7 +3,7 @@
*
* Top-level orchestrator:
* 1. Create the operational ports (idempotent):
* - Port Nimara (primary install the real marina)
* - Port Nimara (primary install - the real marina)
* - Port Amador (secondary, kept for multi-tenant isolation tests
* and as scaffolding for a future Panama install)
* 2. Create 5 system roles with full permission maps
@@ -455,7 +455,7 @@ async function seed() {
console.log(` Port created: ${def.name} (${inserted.id})`);
portIds.push({ id: inserted.id, name: def.name, slug: def.slug });
} else {
// Port already existed look it up so we can still seed fixtures for it.
// Port already existed - look it up so we can still seed fixtures for it.
const [existing] = await db.select().from(ports).where(eq(ports.slug, def.slug)).limit(1);
if (existing) {
console.log(` Port exists: ${def.name} (${existing.id})`);
@@ -560,11 +560,11 @@ async function seed() {
console.log('─── Summary ───────────────────────────────────────────────');
for (const s of summaries) {
if (s.summary === null) {
console.log(` ✓ Port "${s.name}" already seeded (skipped)`);
console.log(` ✓ Port "${s.name}" - already seeded (skipped)`);
} else {
const x = s.summary;
console.log(
` ✓ Port "${s.name}" ${x.berths} berths, ${x.clients} clients, ${x.companies} companies, ${x.yachts} yachts, ${x.interests} interests, ${x.reservations} reservations`,
` ✓ Port "${s.name}" - ${x.berths} berths, ${x.clients} clients, ${x.companies} companies, ${x.yachts} yachts, ${x.interests} interests, ${x.reservations} reservations`,
);
}
}

View File

@@ -12,7 +12,7 @@
* Same surface as the i18n helper; usable from both runtimes.
*
* Used by the dedup library's `normalizePhone`. The runtime UI still
* imports `i18n/phone` directly no reason to touch a working path.
* imports `i18n/phone` directly - no reason to touch a working path.
*/
// eslint-disable-next-line @typescript-eslint/no-require-imports

View File

@@ -96,7 +96,7 @@ async function resolveAttachments(
* Sends a single email via SMTP.
*
* Returns the nodemailer info object on success. Propagates errors to the
* caller callers in background jobs should wrap in try/catch and handle
* caller - callers in background jobs should wrap in try/catch and handle
* retries via BullMQ.
*/
export async function sendEmail(

View File

@@ -117,7 +117,7 @@ export function resetEmail(data: ResetData): { subject: string; html: string; te
</a>
</p>
<p style="font-size:14px; color:#666; line-height:1.5; padding:15px 0; border-top:1px solid #eee; margin-top:20px;">
If you didn't request this, you can safely ignore this email your password will remain unchanged.
If you didn't request this, you can safely ignore this email - your password will remain unchanged.
</p>
<p style="font-size:16px; margin-top:30px;">
Thank you,<br />

View File

@@ -42,7 +42,7 @@ export interface ResidentialClientConfirmationData {
}
export function residentialClientConfirmation(data: ResidentialClientConfirmationData) {
const subject = 'Thank You for Your Interest Port Nimara Residences';
const subject = 'Thank You for Your Interest - Port Nimara Residences';
const body = `
<p style="margin-bottom:10px; font-size:18px; font-weight:bold; color:#007bff;">
Welcome to Port Nimara
@@ -78,7 +78,7 @@ export interface ResidentialSalesAlertData {
}
export function residentialSalesAlert(data: ResidentialSalesAlertData) {
const subject = `New Residential Inquiry ${data.fullName}`;
const subject = `New Residential Inquiry - ${data.fullName}`;
const body = `
<p style="margin-bottom:10px; font-size:18px; font-weight:bold; color:#007bff;">
New residential inquiry
@@ -93,7 +93,7 @@ export function residentialSalesAlert(data: ResidentialSalesAlertData) {
${data.notes ? `<tr><td style="color:#666;">Notes</td><td>${escapeHtml(data.notes)}</td></tr>` : ''}
</table>
${data.crmDeepLink ? `<p style="text-align:center; margin:24px 0;"><a href="${data.crmDeepLink}" style="display:inline-block; background-color:#007bff; color:#ffffff; text-decoration:none; padding:12px 28px; border-radius:5px; font-weight:bold;">Open in CRM</a></p>` : ''}
<p style="font-size:14px; color:#666;"> Port Nimara CRM</p>`;
<p style="font-size:14px; color:#666;">- Port Nimara CRM</p>`;
return { subject, html: shell({ title: subject, body }) };
}

View File

@@ -4,7 +4,7 @@ const MOBILE_TOKENS = ['Mobile', 'iPhone', 'iPad', 'Android'] as const;
/**
* Classify a User-Agent string as 'mobile' or 'desktop'.
* Defaults to 'desktop' when the UA is missing or unrecognized the CSS
* Defaults to 'desktop' when the UA is missing or unrecognized - the CSS
* media-query fallback in globals.css handles desktop browsers resized below
* the lg breakpoint, so a wrong-but-defaultish classification never breaks UX.
*/

View File

@@ -4,7 +4,7 @@
* Source: full ISO 3166-1 list as of 2026-04 (250 codes incl. UN
* member states + recognized territories). Country *names* are
* resolved at render time via `Intl.DisplayNames` so we don't ship
* a localized name table the browser already has it.
* a localized name table - the browser already has it.
*
* Validation uses the `ISO_COUNTRIES` Set; render uses
* `getCountryName(iso, locale)`.

View File

@@ -21,7 +21,7 @@ export interface Subdivision {
code: string;
/** Display name (English baseline from the iso-3166-2 dataset). */
name: string;
/** Subdivision type 'State', 'Province', 'Region', etc. */
/** Subdivision type - 'State', 'Province', 'Region', etc. */
type?: string;
}
@@ -53,7 +53,7 @@ function loadFor(country: CountryCode): readonly Subdivision[] {
/**
* Returns the subdivision list for a country. Empty array when none
* are recognized caller can use this to hide the field.
* are recognized - caller can use this to hide the field.
*/
export function subdivisionsForCountry(country: CountryCode): readonly Subdivision[] {
return loadFor(country);
@@ -64,7 +64,7 @@ export function hasSubdivisions(country: CountryCode): boolean {
}
export function isValidSubdivisionCode(code: string): boolean {
// ISO 3166-2 codes follow `XX-YYY` derive the country from the prefix.
// ISO 3166-2 codes follow `XX-YYY` - derive the country from the prefix.
const country = code.split('-')[0];
if (!country || country.length !== 2) return false;
const list = loadFor(country as CountryCode);

View File

@@ -13,7 +13,7 @@ import type { CountryCode } from './countries';
type TimezoneList = readonly [primary: string, ...alternates: string[]];
// Multi-zone countries list every IANA zone.
// Multi-zone countries - list every IANA zone.
const MULTI_ZONE: Partial<Record<CountryCode, TimezoneList>> = {
AU: [
'Australia/Sydney',
@@ -371,7 +371,7 @@ export function isMultiZone(country: CountryCode): boolean {
}
/**
* Master IANA timezone list uses Intl when available (modern browsers
* Master IANA timezone list - uses Intl when available (modern browsers
* + Node 21+). Falls back to a small bundled list when missing.
*/
export function listAllTimezones(): readonly string[] {
@@ -383,7 +383,7 @@ export function listAllTimezones(): readonly string[] {
// fall through
}
}
// Tiny fallback drawn from our country map covers ~250 entries and
// Tiny fallback drawn from our country map - covers ~250 entries and
// never less than the timezones we'd otherwise reference.
const set = new Set<string>();
for (const tz of Object.values(SINGLE_ZONE)) set.add(tz!);

View File

@@ -37,10 +37,7 @@ export async function ensureBucket(): Promise<void> {
*
* Default expiry is 15 minutes (900 seconds) per SECURITY-GUIDELINES.md §7.1.
*/
export async function getPresignedUrl(
objectKey: string,
expirySeconds = 900,
): Promise<string> {
export async function getPresignedUrl(objectKey: string, expirySeconds = 900): Promise<string> {
return minioClient.presignedGetObject(BUCKET, objectKey, expirySeconds);
}
@@ -49,7 +46,7 @@ export async function getPresignedUrl(
*
* Format: `{portSlug}/{entity}/{entityId}/{fileId}.{extension}`
*
* No user-supplied input should ever be used as path components only UUIDs
* No user-supplied input should ever be used as path components - only UUIDs
* and controlled slugs (SECURITY-GUIDELINES.md §3.4, §7.1).
*/
export function buildStoragePath(

View File

@@ -1,7 +1,7 @@
/**
* Heuristic parser for raw OCR text from a receipt image.
*
* Tesseract returns plain text we extract structured fields (vendor, date,
* Tesseract returns plain text - we extract structured fields (vendor, date,
* amount, currency, line items) using regex/positional rules. The output
* matches `ParsedReceipt` from `ocr-providers.ts` so callers don't need to
* branch on which engine produced it.
@@ -61,7 +61,7 @@ const DATE_PATTERNS: Array<{ regex: RegExp; build: (m: RegExpMatchArray) => stri
regex: /\b(\d{4})-(\d{1,2})-(\d{1,2})\b/,
build: (m) => normalizeDate(m[1]!, m[2]!, m[3]!),
},
// 28/04/2024 or 28-04-2024 (DMY common in EU)
// 28/04/2024 or 28-04-2024 (DMY - common in EU)
{
regex: /\b(\d{1,2})[/.\-](\d{1,2})[/.\-](\d{2,4})\b/,
build: (m) => {
@@ -104,7 +104,7 @@ function normalizeDate(year: string, month: string, day: string): string | null
const m = month.padStart(2, '0');
const d = day.padStart(2, '0');
const candidate = `${y}-${m}-${d}`;
// Sanity-check by round-tripping through Date drops invalid days.
// Sanity-check by round-tripping through Date - drops invalid days.
const t = new Date(candidate);
if (Number.isNaN(t.getTime()) || t.toISOString().slice(0, 10) !== candidate) return null;
// Don't accept implausibly old or future-dated receipts.
@@ -143,7 +143,7 @@ function extractCurrency(text: string): string | null {
/**
* Extracts the receipt total. Strategy:
* 1. Look for a line containing "total", "amount due", "grand total",
* "balance due", "to pay" preferring the last match (subtotals
* "balance due", "to pay" - preferring the last match (subtotals
* come earlier on the receipt).
* 2. Fall back to the largest decimal number on the receipt.
*/
@@ -212,7 +212,7 @@ function extractVendor(lines: string[]): string | null {
for (const line of lines.slice(0, 6)) {
const trimmed = line.trim();
if (trimmed.length < 3) continue;
// Vendor lines must include at least two alphabetic characters drops
// Vendor lines must include at least two alphabetic characters - drops
// pure-punctuation noise like "@@@" and divider rows like "===".
if ((trimmed.match(/[A-Za-z]/g) ?? []).length < 2) continue;
if (DATE_PATTERNS.some((p) => p.regex.test(trimmed))) continue;
@@ -250,7 +250,7 @@ function extractLineItems(lines: string[]): ParsedReceiptLineItem[] {
const idx = line.lastIndexOf(numStr.replace(/\.\d+$/, '')); // approximate match
const description = (idx > 0 ? line.slice(0, idx) : line.replace(/[\d.,]+$/, ''))
.trim()
.replace(/[.\-\s]+$/, '');
.replace(/[.\--\s]+$/, '');
if (description.length < 2) continue;
out.push({ description: description.slice(0, 120), amount: lastNum });
if (out.length >= 20) break;

View File

@@ -2,7 +2,7 @@
* Browser-only Tesseract.js wrapper. The WASM bundle is ~5 MB so we
* lazy-import on first use; subsequent scans reuse the cached module.
*
* Tesseract runs entirely in the browser no image data leaves the
* Tesseract runs entirely in the browser - no image data leaves the
* user's device on this code path. AI providers (OpenAI/Claude) are
* a separate, opt-in path that runs server-side.
*/
@@ -19,7 +19,7 @@ interface TesseractRunResult {
/** Lazy-imports tesseract.js and runs OCR on `file`. */
export async function runTesseract(file: File): Promise<TesseractRunResult> {
// Dynamic import the ~5 MB tesseract bundle stays out of the main chunk.
// Dynamic import - the ~5 MB tesseract bundle stays out of the main chunk.
const { recognize } = await import('tesseract.js');
const { data } = await recognize(file, 'eng');

View File

@@ -42,7 +42,7 @@ function setText(form: ReturnType<PDFDocument['getForm']>, name: string, value:
try {
form.getTextField(name).setText(value);
} catch {
// Field absent or wrong type skip silently so a slightly different PDF
// Field absent or wrong type - skip silently so a slightly different PDF
// template still produces output. Missing field issues surface in QA, not
// at runtime as a 500.
}
@@ -80,7 +80,7 @@ export async function fillEoiFormFields(
setText(form, 'Name', context.client.fullName);
setText(form, 'Email', context.client.primaryEmail ?? '');
setText(form, 'Address', formatAddress(context.client.address));
// Yacht + berth (EOI Section 3) are optional leave the AcroForm fields
// Yacht + berth (EOI Section 3) are optional - leave the AcroForm fields
// blank when the interest hasn't been linked to either.
setText(form, 'Yacht Name', context.yacht?.name ?? '');
setText(form, 'Length', context.yacht?.lengthFt ?? '');

View File

@@ -4,16 +4,86 @@ export const berthSpecTemplate: Template = {
basePdf: 'BLANK_PDF' as unknown as string,
schemas: [
[
{ name: 'portName', type: 'text', position: { x: 20, y: 15 }, width: 100, height: 10, fontSize: 16 },
{ name: 'title', type: 'text', position: { x: 20, y: 30 }, width: 170, height: 8, fontSize: 14 },
{ name: 'berthInfo', type: 'text', position: { x: 20, y: 45 }, width: 80, height: 25, fontSize: 9 },
{ name: 'dimensions', type: 'text', position: { x: 110, y: 45 }, width: 80, height: 25, fontSize: 9 },
{ name: 'pricing', type: 'text', position: { x: 20, y: 75 }, width: 80, height: 20, fontSize: 9 },
{ name: 'tenure', type: 'text', position: { x: 110, y: 75 }, width: 80, height: 20, fontSize: 9 },
{ name: 'infrastructure', type: 'text', position: { x: 20, y: 100 }, width: 170, height: 25, fontSize: 9 },
{ name: 'waitingList', type: 'text', position: { x: 20, y: 130 }, width: 170, height: 50, fontSize: 8 },
{ name: 'maintenanceLog', type: 'text', position: { x: 20, y: 185 }, width: 170, height: 75, fontSize: 8 },
{ name: 'generatedAt', type: 'text', position: { x: 20, y: 275 }, width: 170, height: 6, fontSize: 7 },
{
name: 'portName',
type: 'text',
position: { x: 20, y: 15 },
width: 100,
height: 10,
fontSize: 16,
},
{
name: 'title',
type: 'text',
position: { x: 20, y: 30 },
width: 170,
height: 8,
fontSize: 14,
},
{
name: 'berthInfo',
type: 'text',
position: { x: 20, y: 45 },
width: 80,
height: 25,
fontSize: 9,
},
{
name: 'dimensions',
type: 'text',
position: { x: 110, y: 45 },
width: 80,
height: 25,
fontSize: 9,
},
{
name: 'pricing',
type: 'text',
position: { x: 20, y: 75 },
width: 80,
height: 20,
fontSize: 9,
},
{
name: 'tenure',
type: 'text',
position: { x: 110, y: 75 },
width: 80,
height: 20,
fontSize: 9,
},
{
name: 'infrastructure',
type: 'text',
position: { x: 20, y: 100 },
width: 170,
height: 25,
fontSize: 9,
},
{
name: 'waitingList',
type: 'text',
position: { x: 20, y: 130 },
width: 170,
height: 50,
fontSize: 8,
},
{
name: 'maintenanceLog',
type: 'text',
position: { x: 20, y: 185 },
width: 170,
height: 75,
fontSize: 8,
},
{
name: 'generatedAt',
type: 'text',
position: { x: 20, y: 275 },
width: 170,
height: 6,
fontSize: 7,
},
],
],
};
@@ -35,22 +105,23 @@ export function buildBerthSpecInputs(
.filter(Boolean)
.join('\n');
const dimensions = [
berth.lengthFt
? `Length: ${berth.lengthFt}ft${berth.lengthM ? ` / ${berth.lengthM}m` : ''}`
: null,
berth.widthFt
? `Beam: ${berth.widthFt}ft${berth.widthM ? ` / ${berth.widthM}m` : ''}${berth.widthIsMinimum ? ' (min)' : ''}`
: null,
berth.draftFt
? `Draft: ${berth.draftFt}ft${berth.draftM ? ` / ${berth.draftM}m` : ''}`
: null,
berth.waterDepth
? `Water depth: ${berth.waterDepth}ft${berth.waterDepthM ? ` / ${berth.waterDepthM}m` : ''}${berth.waterDepthIsMinimum ? ' (min)' : ''}`
: null,
]
.filter(Boolean)
.join('\n') || 'Dimensions not specified';
const dimensions =
[
berth.lengthFt
? `Length: ${berth.lengthFt}ft${berth.lengthM ? ` / ${berth.lengthM}m` : ''}`
: null,
berth.widthFt
? `Beam: ${berth.widthFt}ft${berth.widthM ? ` / ${berth.widthM}m` : ''}${berth.widthIsMinimum ? ' (min)' : ''}`
: null,
berth.draftFt
? `Draft: ${berth.draftFt}ft${berth.draftM ? ` / ${berth.draftM}m` : ''}`
: null,
berth.waterDepth
? `Water depth: ${berth.waterDepth}ft${berth.waterDepthM ? ` / ${berth.waterDepthM}m` : ''}${berth.waterDepthIsMinimum ? ' (min)' : ''}`
: null,
]
.filter(Boolean)
.join('\n') || 'Dimensions not specified';
const pricing = berth.price
? `Price: ${berth.priceCurrency ?? 'USD'} ${Number(berth.price).toLocaleString()}`
@@ -65,25 +136,30 @@ export function buildBerthSpecInputs(
.filter(Boolean)
.join('\n');
const infrastructure = [
berth.mooringType ? `Mooring type: ${berth.mooringType}` : null,
berth.powerCapacity ? `Power: ${berth.powerCapacity}${berth.voltage ? ` / ${berth.voltage}V` : ''}` : null,
berth.cleatType ? `Cleat: ${berth.cleatType}${berth.cleatCapacity ? ` (${berth.cleatCapacity})` : ''}` : null,
berth.bollardType
? `Bollard: ${berth.bollardType}${berth.bollardCapacity ? ` (${berth.bollardCapacity})` : ''}`
: null,
berth.sidePontoon ? `Side pontoon: ${berth.sidePontoon}` : null,
berth.access ? `Access: ${berth.access}` : null,
]
.filter(Boolean)
.join(' | ') || 'Infrastructure details not specified';
const infrastructure =
[
berth.mooringType ? `Mooring type: ${berth.mooringType}` : null,
berth.powerCapacity
? `Power: ${berth.powerCapacity}${berth.voltage ? ` / ${berth.voltage}V` : ''}`
: null,
berth.cleatType
? `Cleat: ${berth.cleatType}${berth.cleatCapacity ? ` (${berth.cleatCapacity})` : ''}`
: null,
berth.bollardType
? `Bollard: ${berth.bollardType}${berth.bollardCapacity ? ` (${berth.bollardCapacity})` : ''}`
: null,
berth.sidePontoon ? `Side pontoon: ${berth.sidePontoon}` : null,
berth.access ? `Access: ${berth.access}` : null,
]
.filter(Boolean)
.join(' | ') || 'Infrastructure details not specified';
const waitingListText =
waitingList.length > 0
? waitingList
.map(
(w) =>
`${w.position}. ${w.clientName ?? 'Unknown'}${w.priority === 'high' ? ' [HIGH]' : ''}${w.notes ? ` ${w.notes}` : ''}`,
`${w.position}. ${w.clientName ?? 'Unknown'}${w.priority === 'high' ? ' [HIGH]' : ''}${w.notes ? ` - ${w.notes}` : ''}`,
)
.join('\n')
: 'No clients on waiting list';
@@ -100,7 +176,7 @@ export function buildBerthSpecInputs(
return {
portName: (port?.name as string) ?? 'Port Nimara',
title: `Berth Specification Mooring ${berth.mooringNumber}`,
title: `Berth Specification - Mooring ${berth.mooringNumber}`,
berthInfo,
dimensions,
pricing,

View File

@@ -130,7 +130,7 @@ export function buildClientSummaryInputs(
? interestList
.map(
(i) =>
`${i.pipelineStage ?? 'open'}${i.berthMooringNumber ? ` Berth ${i.berthMooringNumber}` : ''}${i.leadCategory ? ` [${i.leadCategory}]` : ''} (${new Date(i.createdAt as string | Date).toLocaleDateString('en-GB')})`,
`${i.pipelineStage ?? 'open'}${i.berthMooringNumber ? ` - Berth ${i.berthMooringNumber}` : ''}${i.leadCategory ? ` [${i.leadCategory}]` : ''} (${new Date(i.createdAt as string | Date).toLocaleDateString('en-GB')})`,
)
.join('\n')
: 'No pipeline interests on file';
@@ -147,7 +147,7 @@ export function buildClientSummaryInputs(
return {
portName: (port?.name as string) ?? 'Port Nimara',
title: `Client Summary ${client.fullName ?? ''}`,
title: `Client Summary - ${client.fullName ?? ''}`,
clientInfo,
contacts: contactsText,
yachts: yachtsText,

View File

@@ -61,7 +61,7 @@ export function getStandardEoiTemplateHtml(): string {
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>Expression of Interest Letter of Intent</title>
<title>Expression of Interest - Letter of Intent</title>
<style>
@page {
size: letter;
@@ -165,7 +165,7 @@ export function getStandardEoiTemplateHtml(): string {
<body>
<div class="header">
<div class="port-name">{{port.name}}</div>
<div class="doc-title">Expression of Interest Letter of Intent</div>
<div class="doc-title">Expression of Interest - Letter of Intent</div>
</div>
<div class="meta">
@@ -317,7 +317,7 @@ export function getStandardEoiTemplateHtml(): string {
<div class="signatures">
<div class="slot">
<div class="sig-line">
Applicant {{client.fullName}}
Applicant - {{client.fullName}}
</div>
<div class="muted" style="font-size:10pt; margin-top:2pt;">Date: __________________</div>
</div>

View File

@@ -152,7 +152,7 @@ export function buildInterestSummaryInputs(
return {
portName: (port?.name as string) ?? 'Port Nimara',
title: `Interest Summary ${client?.fullName ?? 'Unknown Client'}`,
title: `Interest Summary - ${client?.fullName ?? 'Unknown Client'}`,
clientInfo,
berthInfo,
stageAndCategory,

View File

@@ -24,7 +24,7 @@ export async function hashPassword(password: string): Promise<string> {
/**
* Constant-time check of a candidate password against a stored
* `salt:keyHex` hash. Returns false on any malformed input rather than
* throwing callers should treat false uniformly.
* throwing - callers should treat false uniformly.
*/
export async function verifyPassword(password: string, stored: string): Promise<boolean> {
const parts = stored.split(':');

View File

@@ -13,7 +13,7 @@ interface RecurringJobDef {
*/
export async function registerRecurringJobs(): Promise<void> {
const recurring: RecurringJobDef[] = [
// Documenso signature fallback poll primary is webhooks, this is safety net
// Documenso signature fallback poll - primary is webhooks, this is safety net
{ queue: 'documents', name: 'signature-poll', pattern: '0 */6 * * *' },
// Reminder checks
@@ -37,10 +37,10 @@ export async function registerRecurringJobs(): Promise<void> {
// Session cleanup
{ queue: 'maintenance', name: 'session-cleanup', pattern: '0 4 * * *' },
// Report scheduler checks every minute for reports due to run
// Report scheduler - checks every minute for reports due to run
{ queue: 'reports', name: 'report-scheduler', pattern: '* * * * *' },
// Notification digest configurable per user; placeholder fires hourly
// Notification digest - configurable per user; placeholder fires hourly
// TODO(L2): make per-user schedule configurable (read from user_settings)
{ queue: 'email', name: 'notification-digest', pattern: '0 * * * *' },
@@ -53,7 +53,7 @@ export async function registerRecurringJobs(): Promise<void> {
// Phase B: analytics snapshot warm
{ queue: 'maintenance', name: 'analytics-refresh', pattern: '*/15 * * * *' },
// Phase 3d: GDPR Article 17 actually delete expired export bundles
// Phase 3d: GDPR Article 17 - actually delete expired export bundles
{ queue: 'maintenance', name: 'gdpr-export-cleanup', pattern: '0 4 * * *' },
// Phase 3b: AI usage ledger retention (90-day rolling window)
{ queue: 'maintenance', name: 'ai-usage-retention', pattern: '0 5 * * *' },

View File

@@ -27,7 +27,7 @@ interface DraftResult {
async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise<DraftResult> {
const { interestId, clientId, portId, context, additionalInstructions } = payload;
// Fetch data by IDs in the worker never trust PII from the queue payload
// Fetch data by IDs in the worker - never trust PII from the queue payload
const { db } = await import('@/lib/db');
const { interests } = await import('@/lib/db/schema/interests');
const { clients } = await import('@/lib/db/schema/clients');
@@ -36,7 +36,7 @@ async function generateEmailDraft(payload: GenerateEmailDraftPayload): Promise<D
const { emailThreads } = await import('@/lib/db/schema/email');
const { and, eq, desc } = await import('drizzle-orm');
// Fetch interest, client, berth both lookups port-scoped so a
// Fetch interest, client, berth - both lookups port-scoped so a
// crafted job payload cannot exfiltrate foreign-tenant data.
const [interest, client] = await Promise.all([
db.query.interests.findFirst({

View File

@@ -11,7 +11,7 @@ export const importWorker = new Worker(
// TODO(L2): implement import job handlers
// - CSV client import
// - Excel berth spec import
// - Note: maxAttempts=1 imports are idempotent, user retries manually
// - Note: maxAttempts=1 - imports are idempotent, user retries manually
},
{
connection: { url: process.env.REDIS_URL! } as ConnectionOptions,

View File

@@ -73,13 +73,13 @@ export const webhooksWorker = new Worker(
const { createNotification } = await import('@/lib/services/notifications.service');
const { eq, and } = await import('drizzle-orm');
// 1. Fetch webhook skip if deleted
// 1. Fetch webhook - skip if deleted
const webhook = await db.query.webhooks.findFirst({
where: eq(webhooks.id, webhookId),
});
if (!webhook) {
logger.info({ webhookId }, 'Webhook deleted skipping delivery');
logger.info({ webhookId }, 'Webhook deleted - skipping delivery');
await db.delete(webhookDeliveries).where(eq(webhookDeliveries.id, deliveryId));
return;
}
@@ -92,7 +92,7 @@ export const webhooksWorker = new Worker(
if (process.env.EMAIL_REDIRECT_TO) {
logger.info(
{ webhookId, deliveryId, url: webhook.url },
'Webhook delivery skipped (EMAIL_REDIRECT_TO is set outbound comms are paused)',
'Webhook delivery skipped (EMAIL_REDIRECT_TO is set - outbound comms are paused)',
);
await db
.update(webhookDeliveries)
@@ -227,7 +227,7 @@ export const webhooksWorker = new Worker(
logger.error(
{ webhookId, deliveryId, event, attempt },
'Webhook delivery permanently failed dead_letter',
'Webhook delivery permanently failed - dead_letter',
);
// Notify all super admins

View File

@@ -20,7 +20,7 @@ import { logger } from '@/lib/logger';
export type BudgetPeriod = 'day' | 'week' | 'month';
export interface AiBudget {
/** When false, the budget is disabled no caps enforced. */
/** When false, the budget is disabled - no caps enforced. */
enabled: boolean;
softCapTokens: number;
hardCapTokens: number;
@@ -137,7 +137,7 @@ export async function checkBudget(args: {
const { portId, estimatedTokens } = args;
const budget = await readBudget(portId);
if (!budget.enabled) {
// Budget is off usage still gets logged, but no caps enforced.
// Budget is off - usage still gets logged, but no caps enforced.
return { ok: true, remaining: Number.POSITIVE_INFINITY, usedTokens: 0, softCap: false };
}
const used = await currentPeriodTokens(portId);
@@ -177,7 +177,7 @@ interface RecordUsageInput {
requestId?: string | null;
}
/** Insert a ledger row. Never throws logged failures degrade silently. */
/** Insert a ledger row. Never throws - logged failures degrade silently. */
export async function recordAiUsage(input: RecordUsageInput): Promise<void> {
try {
const total = (input.inputTokens || 0) + (input.outputTokens || 0);
@@ -193,13 +193,13 @@ export async function recordAiUsage(input: RecordUsageInput): Promise<void> {
requestId: input.requestId ?? null,
});
} catch (err) {
// Don't fail the user-facing call because the ledger write hiccuped
// Don't fail the user-facing call because the ledger write hiccuped -
// we'd rather silently lose a row than blow up an OCR scan.
logger.error({ err, feature: input.feature }, 'recordAiUsage failed');
}
}
/** Per-feature breakdown for the current period feeds the admin dashboard. */
/** Per-feature breakdown for the current period - feeds the admin dashboard. */
export async function periodBreakdown(
portId: string,
): Promise<Array<{ feature: string; tokens: number; calls: number }>> {

View File

@@ -1,5 +1,5 @@
/**
* Alert engine runs every rule against every port. Called by the
* Alert engine - runs every rule against every port. Called by the
* BullMQ recurring job 'alerts-evaluate' every 5 minutes; exposed as a
* function so integration tests can drive it without a worker.
*/

View File

@@ -139,7 +139,7 @@ async function documentSignerOverdue(portId: string): Promise<AlertCandidate[]>
ruleId: 'document.signer_overdue',
severity: 'warning',
title: `Signer overdue: ${r.signerName}`,
body: `${r.docType.toUpperCase()} "${r.title}" pending >14 days.`,
body: `${r.docType.toUpperCase()} "${r.title}" - pending >14 days.`,
link: `/[port]/documents/${r.docId}`,
entityType: 'document',
entityId: r.docId,
@@ -202,7 +202,7 @@ async function expenseDuplicate(portId: string): Promise<AlertCandidate[]> {
ruleId: 'expense.duplicate',
severity: 'info',
title: `Possible duplicate expense`,
body: `${r.vendor ?? 'Unknown vendor'} ${r.amount}.`,
body: `${r.vendor ?? 'Unknown vendor'} - ${r.amount}.`,
link: `/[port]/expenses/${r.id}`,
entityType: 'expense',
entityId: r.id,
@@ -236,7 +236,7 @@ async function expenseUnscanned(portId: string): Promise<AlertCandidate[]> {
ruleId: 'expense.unscanned',
severity: 'info',
title: `Receipt not scanned`,
body: `${r.vendor ?? 'Unknown vendor'} uploaded over an hour ago.`,
body: `${r.vendor ?? 'Unknown vendor'} - uploaded over an hour ago.`,
link: `/[port]/expenses/${r.id}`,
entityType: 'expense',
entityId: r.id,
@@ -271,7 +271,7 @@ async function interestHighValueSilent(portId: string): Promise<AlertCandidate[]
ruleId: 'interest.high_value_silent',
severity: 'critical',
title: `Hot lead silent: ${r.clientName}`,
body: `No contact for 7+ days high-value at risk.`,
body: `No contact for 7+ days - high-value at risk.`,
link: `/[port]/interests/${r.id}`,
entityType: 'interest',
entityId: r.id,
@@ -303,7 +303,7 @@ async function eoiUnsignedLong(portId: string): Promise<AlertCandidate[]> {
ruleId: 'eoi.unsigned_long',
severity: 'warning',
title: `EOI unsigned >21 days`,
body: `"${r.title}" sent over 3 weeks ago.`,
body: `"${r.title}" - sent over 3 weeks ago.`,
link: `/[port]/documents/${r.id}`,
entityType: 'document',
entityId: r.id,

View File

@@ -1,5 +1,5 @@
/**
* Phase B alert framework service layer.
* Phase B alert framework - service layer.
*
* This is the skeleton: types, function shapes, and behaviour stubs. The
* actual rule evaluators live in `alert-rules.ts` (PR2). The cron
@@ -46,7 +46,7 @@ export async function reconcileAlertsForPort(
ruleId: AlertRuleId,
candidates: AlertCandidate[],
): Promise<void> {
// Insert new / leave existing only one open row per fingerprint
// Insert new / leave existing - only one open row per fingerprint
// thanks to the partial unique index. Track newly inserted rows so we
// can emit `alert:created` to the port room.
for (const c of candidates) {

View File

@@ -5,7 +5,7 @@
* every 15 minutes per port × per metric.
*/
import { and, eq, gte, isNull, sql } from 'drizzle-orm';
import { and, between, eq, isNull, sql } from 'drizzle-orm';
import { db } from '@/lib/db';
import { analyticsSnapshots } from '@/lib/db/schema/insights';
@@ -13,8 +13,19 @@ import { interests } from '@/lib/db/schema/interests';
import { invoices } from '@/lib/db/schema/financial';
import { berthReservations } from '@/lib/db/schema/reservations';
import { PIPELINE_STAGES } from '@/lib/constants';
import {
ALL_RANGES,
isCustomRange,
rangeToBounds,
type CustomDateRange,
type DateRange,
type PresetDateRange,
} from '@/lib/analytics/range';
export type DateRange = '7d' | '30d' | '90d' | 'today';
// Re-export the shared types for callers that already import from this
// module - keeps the existing public API intact.
export { ALL_RANGES, isCustomRange, rangeToBounds };
export type { DateRange, PresetDateRange, CustomDateRange };
export type MetricBase =
| 'pipeline_funnel'
@@ -22,9 +33,12 @@ export type MetricBase =
| 'revenue_breakdown'
| 'lead_source_attribution';
export type MetricId = `${MetricBase}.${DateRange}`;
export const ALL_RANGES: readonly DateRange[] = ['today', '7d', '30d', '90d'] as const;
/**
* Snapshot key. Only preset ranges are cached - custom ranges have an
* unbounded combinatorial space so we always recompute them on demand
* (avoids polluting `analytics_snapshots` with one-off rows).
*/
export type MetricId = `${MetricBase}.${PresetDateRange}`;
export const ALL_METRICS: readonly MetricBase[] = [
'pipeline_funnel',
'occupancy_timeline',
@@ -91,34 +105,9 @@ export async function writeSnapshot(
});
}
// ─── Range helpers ────────────────────────────────────────────────────────────
function rangeToCutoff(range: DateRange): Date {
const now = Date.now();
switch (range) {
case 'today':
return new Date(now - 1 * 86_400_000);
case '7d':
return new Date(now - 7 * 86_400_000);
case '30d':
return new Date(now - 30 * 86_400_000);
case '90d':
return new Date(now - 90 * 86_400_000);
}
}
function rangeToDays(range: DateRange): number {
switch (range) {
case 'today':
return 1;
case '7d':
return 7;
case '30d':
return 30;
case '90d':
return 90;
}
}
// Range helpers (rangeToBounds, rangeToDays, rangeSpanDays) moved to
// @/lib/analytics/range - that file is client-safe (no DB imports) so it
// can be used from React components AND this server module.
// ─── Computations ─────────────────────────────────────────────────────────────
@@ -126,9 +115,9 @@ export async function computePipelineFunnel(
portId: string,
range: DateRange,
): Promise<PipelineFunnelData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
// Stage counts EXCLUDE lost/cancelled outcomes those never become
// Stage counts EXCLUDE lost/cancelled outcomes - those never become
// conversions, so polluting the funnel with them gives meaningless math.
// Lost is reported separately in the `lost` block.
const stageRows = await db
@@ -138,7 +127,7 @@ export async function computePipelineFunnel(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
sql`(${interests.outcome} IS NULL OR ${interests.outcome} = 'won')`,
),
)
@@ -161,7 +150,7 @@ export async function computePipelineFunnel(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
sql`${interests.outcome} IS NOT NULL AND ${interests.outcome} != 'won'`,
),
)
@@ -182,8 +171,9 @@ export async function computeOccupancyTimeline(
portId: string,
range: DateRange,
): Promise<OccupancyTimelineData> {
const days = rangeToDays(range);
// Total berths per port (current count — assumes no churn).
const { from, to } = rangeToBounds(range);
const days = Math.max(1, Math.ceil((to.getTime() - from.getTime()) / 86_400_000));
// Total berths per port (current count - assumes no churn).
const totalRow = await db.execute<{ total: number }>(
sql`SELECT count(*)::int AS total FROM berths WHERE port_id = ${portId}`,
);
@@ -191,10 +181,12 @@ export async function computeOccupancyTimeline(
// For each day in range, count berths that have an active reservation
// covering that day. A reservation is "covering" if start_date <= day
// AND (end_date IS NULL OR end_date >= day).
// AND (end_date IS NULL OR end_date >= day). Walk forward from `from`
// so custom ranges produce the right calendar days, not just N
// most-recent days from "now".
const points: OccupancyTimelineData['points'] = [];
for (let i = days - 1; i >= 0; i--) {
const day = new Date(Date.now() - i * 86_400_000);
for (let i = 0; i < days; i++) {
const day = new Date(from.getTime() + i * 86_400_000);
const dayStr = day.toISOString().slice(0, 10);
const occRow = await db
.select({ occupied: sql<number>`count(distinct ${berthReservations.berthId})::int` })
@@ -218,7 +210,7 @@ export async function computeRevenueBreakdown(
portId: string,
range: DateRange,
): Promise<RevenueBreakdownData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
const rows = await db
.select({
status: invoices.status,
@@ -230,7 +222,7 @@ export async function computeRevenueBreakdown(
and(
eq(invoices.portId, portId),
isNull(invoices.archivedAt),
gte(invoices.createdAt, cutoff),
between(invoices.createdAt, from, to),
),
)
.groupBy(invoices.status, invoices.currency);
@@ -248,7 +240,7 @@ export async function computeLeadSourceAttribution(
portId: string,
range: DateRange,
): Promise<LeadSourceAttributionData> {
const cutoff = rangeToCutoff(range);
const { from, to } = rangeToBounds(range);
const rows = await db
.select({ source: interests.source, count: sql<number>`count(*)::int` })
.from(interests)
@@ -256,7 +248,7 @@ export async function computeLeadSourceAttribution(
and(
eq(interests.portId, portId),
isNull(interests.archivedAt),
gte(interests.createdAt, cutoff),
between(interests.createdAt, from, to),
),
)
.groupBy(interests.source);
@@ -272,11 +264,15 @@ export async function computeLeadSourceAttribution(
}
// ─── Public read API (cache → compute → write back) ──────────────────────────
//
// Custom ranges always recompute (cache key would be unbounded). Preset
// ranges go cache → compute → write-back as before.
export async function getPipelineFunnel(
portId: string,
range: DateRange,
): Promise<PipelineFunnelData> {
if (isCustomRange(range)) return computePipelineFunnel(portId, range);
const metricId = `pipeline_funnel.${range}` as const;
const cached = await readSnapshot<PipelineFunnelData>(portId, metricId);
if (cached) return cached;
@@ -289,6 +285,7 @@ export async function getOccupancyTimeline(
portId: string,
range: DateRange,
): Promise<OccupancyTimelineData> {
if (isCustomRange(range)) return computeOccupancyTimeline(portId, range);
const metricId = `occupancy_timeline.${range}` as const;
const cached = await readSnapshot<OccupancyTimelineData>(portId, metricId);
if (cached) return cached;
@@ -301,6 +298,7 @@ export async function getRevenueBreakdown(
portId: string,
range: DateRange,
): Promise<RevenueBreakdownData> {
if (isCustomRange(range)) return computeRevenueBreakdown(portId, range);
const metricId = `revenue_breakdown.${range}` as const;
const cached = await readSnapshot<RevenueBreakdownData>(portId, metricId);
if (cached) return cached;
@@ -313,6 +311,7 @@ export async function getLeadSourceAttribution(
portId: string,
range: DateRange,
): Promise<LeadSourceAttributionData> {
if (isCustomRange(range)) return computeLeadSourceAttribution(portId, range);
const metricId = `lead_source_attribution.${range}` as const;
const cached = await readSnapshot<LeadSourceAttributionData>(portId, metricId);
if (cached) return cached;

View File

@@ -1,5 +1,5 @@
/**
* Audit log search PR1 skeleton. PR10 fills in the cursor pagination
* Audit log search - PR1 skeleton. PR10 fills in the cursor pagination
* and per-port + super-admin scoping; v1 already has the GIN index on
* `audit_logs.search_text`.
*/
@@ -12,7 +12,7 @@ import { auditLogs, type AuditLog } from '@/lib/db/schema/system';
export interface AuditSearchOptions {
/** Limit results to a single port. Omit for super-admin all-ports view. */
portId?: string;
/** Free-text query runs against the GIN-indexed search_text column. */
/** Free-text query - runs against the GIN-indexed search_text column. */
q?: string;
/** Filter by actor (user id). */
userId?: string;
@@ -25,7 +25,7 @@ export interface AuditSearchOptions {
/** Inclusive date range. */
from?: Date;
to?: Date;
/** Pagination cursor on (createdAt, id). */
/** Pagination - cursor on (createdAt, id). */
cursor?: { createdAt: Date; id: string };
limit?: number;
}

View File

@@ -166,7 +166,7 @@ export async function updateBerth(
if (!changed) return existing;
// Drizzle numeric columns expect string | null coerce numbers to strings
// Drizzle numeric columns expect string | null - coerce numbers to strings
const n = (v: number | undefined) => (v !== undefined ? String(v) : undefined);
const [updated] = await db
@@ -404,7 +404,7 @@ export async function updateWaitingList(
// Validate every supplied clientId belongs to portId. Without this
// check, a port-A admin could insert port-B clientIds into the
// waiting list corrupting reportable data and creating a join
// waiting list - corrupting reportable data and creating a join
// surface that hydrates foreign-tenant client rows.
if (data.entries.length > 0) {
const clientIds = [...new Set(data.entries.map((e) => e.clientId))];

View File

@@ -1,9 +1,9 @@
/**
* Client merge service atomically combines two client records.
* Client merge service - atomically combines two client records.
*
* Used by:
* - /admin/duplicates review queue (when an admin confirms a merge)
* - the at-create suggestion path ("use existing client") though
* - the at-create suggestion path ("use existing client") - though
* that path uses the lighter `attachInterestToClient` and never
* actually merges two pre-existing clients
* - the migration script's `--apply` (eventually)
@@ -37,7 +37,7 @@ import { auditLogs } from '@/lib/db/schema/system';
// ─── Public API ─────────────────────────────────────────────────────────────
export interface MergeFieldChoices {
/** Per-field overrides `winner` keeps the surviving client's value;
/** Per-field overrides - `winner` keeps the surviving client's value;
* `loser` copies the loser's value over. Fields not listed default
* to `winner` (no change). */
fullName?: 'winner' | 'loser';
@@ -217,7 +217,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
for (const c of loserContacts) {
const key = `${c.channel}::${c.value.toLowerCase()}`;
if (winnerContactKeys.has(key)) {
// Winner already has this contact drop loser's row (cascade
// Winner already has this contact - drop loser's row (cascade
// will clean up when loser is archived). But we keep snapshot
// so undo restores it.
continue;
@@ -264,7 +264,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
await tx.delete(clientTags).where(eq(clientTags.clientId, opts.loserId));
// Relationships: rewrite each FK side to point at the winner. Keep
// both sides regardless even if A and B both end up as the same
// both sides regardless - even if A and B both end up as the same
// person, the row is preserved for audit; the UI hides self-loops.
const movedRelationships =
(
@@ -305,7 +305,7 @@ export async function mergeClients(opts: MergeOptions): Promise<MergeResult> {
.where(
and(
eq(clientMergeCandidates.portId, winnerRow.portId),
// pair stored in canonical order match either direction
// pair stored in canonical order - match either direction
sql`(
(${clientMergeCandidates.clientAId} = ${opts.winnerId}
AND ${clientMergeCandidates.clientBId} = ${opts.loserId})

View File

@@ -54,7 +54,7 @@ export async function listClients(portId: string, query: ListClientsInput) {
if (matchingIds.length > 0) {
filters.push(inArray(clients.id, matchingIds));
} else {
// No clients match these tags return empty
// No clients match these tags - return empty
return { data: [], total: 0 };
}
}
@@ -252,7 +252,7 @@ export async function getClientById(id: string, portId: string) {
const portalEnabled = await isPortalEnabledForPort(portId);
// Counts surfaced for tab badges (Interests + Notes Yachts/Companies/etc
// Counts surfaced for tab badges (Interests + Notes - Yachts/Companies/etc
// get their counts from the corresponding row arrays we already fetched).
const [interestCountRow] = await db
.select({ count: count() })
@@ -577,7 +577,7 @@ export async function addClientAddress(
// The unique partial index requires us to demote any existing primary
// before inserting a new one. We grab a row lock on the client to
// serialize concurrent primary-toggle requests against the same client
// serialize concurrent primary-toggle requests against the same client -
// without this, two simultaneous "isPrimary=true" inserts can both
// observe "no existing primary" and one trips the unique index with a
// 5xx instead of being safely ordered.
@@ -642,7 +642,7 @@ export async function updateClientAddress(
if (!existing) throw new NotFoundError('Address');
const updated = await withTransaction(async (tx) => {
// Lock the client row to serialize primary-toggle changes see addClientAddress.
// Lock the client row to serialize primary-toggle changes - see addClientAddress.
await tx.select({ id: clients.id }).from(clients).where(eq(clients.id, clientId)).for('update');
if (data.isPrimary === true && !existing.isPrimary) {

View File

@@ -33,7 +33,7 @@ function isUniqueViolation(err: unknown): boolean {
* Loads a membership row and verifies the joined company belongs to `portId`.
* Throws NotFoundError('Membership') if the row is missing or cross-tenant.
*
* Uses a JOIN to companies (memberships have no portId column they inherit
* Uses a JOIN to companies (memberships have no portId column - they inherit
* tenancy via the parent company).
*/
async function loadMembershipScoped(
@@ -210,7 +210,7 @@ export async function setPrimary(
portId: string,
meta: AuditMeta,
): Promise<CompanyMembership> {
// Tenant-scoped load (outside tx is fine we re-read inside).
// Tenant-scoped load (outside tx is fine - we re-read inside).
const existing = await loadMembershipScoped(membershipId, portId);
return await withTransaction(async (tx) => {

View File

@@ -21,7 +21,7 @@ export async function createCrmInvite(args: {
isSuperAdmin?: boolean;
/**
* Caller identity. Required when minting a super-admin invitation so the
* service can fail closed if the caller isn't already a super-admin
* service can fail closed if the caller isn't already a super-admin -
* defense-in-depth for the route's authorization gate.
*/
invitedBy?: { userId: string; isSuperAdmin: boolean };
@@ -33,7 +33,7 @@ export async function createCrmInvite(args: {
throw new ValidationError('Only super admins can mint super-admin invitations');
}
// Reject if there's already a better-auth user with this email they
// Reject if there's already a better-auth user with this email - they
// should reset their password instead.
const sql = postgres(env.DATABASE_URL);
try {
@@ -171,7 +171,7 @@ export async function revokeCrmInvite(inviteId: string, meta: AuditMeta): Promis
where: eq(crmUserInvites.id, inviteId),
});
if (!invite) throw new NotFoundError('Invite');
if (invite.usedAt) throw new ConflictError('Invite already accepted cannot revoke');
if (invite.usedAt) throw new ConflictError('Invite already accepted - cannot revoke');
// Force expiration; tokenHash stays in place so any in-flight click fails
// the `expiresAt > now` check at consume time.
@@ -200,7 +200,7 @@ export async function resendCrmInvite(
where: eq(crmUserInvites.id, inviteId),
});
if (!invite) throw new NotFoundError('Invite');
if (invite.usedAt) throw new ConflictError('Invite already accepted nothing to resend');
if (invite.usedAt) throw new ConflictError('Invite already accepted - nothing to resend');
// Mint a fresh token + push expiry forward so the resent link is the only
// working one. The old token hash is overwritten so prior emails become

View File

@@ -89,7 +89,7 @@ export async function createDefinition(
.returning();
const created = rows[0];
if (!created) throw new Error('Insert failed no row returned');
if (!created) throw new Error('Insert failed - no row returned');
void createAuditLog({
userId,
@@ -117,7 +117,7 @@ export async function updateDefinition(
data: UpdateFieldInput & { fieldType?: unknown },
meta: AuditMeta,
) {
// Immutability guard fieldType must never change
// Immutability guard - fieldType must never change
if ('fieldType' in data && data.fieldType !== undefined) {
throw new ValidationError('Field type cannot be changed after creation');
}
@@ -141,7 +141,7 @@ export async function updateDefinition(
.returning();
const updated = updateRows[0];
if (!updated) throw new Error('Update failed no row returned');
if (!updated) throw new Error('Update failed - no row returned');
void createAuditLog({
userId,
@@ -188,7 +188,7 @@ export async function deleteDefinition(
.where(eq(customFieldValues.fieldId, fieldId));
const valueCount = countResult[0]?.count ?? 0;
// Delete definition CASCADE handles values
// Delete definition - CASCADE handles values
await db.delete(customFieldDefinitions).where(eq(customFieldDefinitions.id, fieldId));
void createAuditLog({
@@ -269,7 +269,7 @@ export async function setValues(
// Tenant scope: verify entityId actually points at a port-scoped row of
// the entity type the field definitions target. Without this gate, any
// authenticated user could write custom-field rows pointing at arbitrary
// entityIds (or none at all) polluting customFieldValues and creating
// entityIds (or none at all) - polluting customFieldValues and creating
// a join surface that could later leak data.
const entityTypes = new Set(
values

View File

@@ -110,7 +110,7 @@ export async function getRevenueForecast(portId: string) {
}
}
// Forecast excludes lost/cancelled only currently-active or won-out
// Forecast excludes lost/cancelled - only currently-active or won-out
// interests should affect the weighted pipeline value.
const interestRows = await db
.select({

View File

@@ -120,7 +120,7 @@ function applyPayloadRedirect(payload: Record<string, unknown>): Record<string,
email: env.EMAIL_REDIRECT_TO,
}));
}
// v1.13 formValues shape keys vary per template; key by anything that
// v1.13 formValues shape - keys vary per template; key by anything that
// looks like an email field. The conservative approach: only touch keys
// that already hold a string and end with `Email` / `email`.
if (out.formValues && typeof out.formValues === 'object') {
@@ -193,7 +193,7 @@ export async function sendDocument(docId: string, portId?: string): Promise<Docu
if (env.EMAIL_REDIRECT_TO) {
logger.warn(
{ docId, portId, redirect: env.EMAIL_REDIRECT_TO },
'sendDocument SKIPPED EMAIL_REDIRECT_TO is set, outbound comms paused',
'sendDocument SKIPPED - EMAIL_REDIRECT_TO is set, outbound comms paused',
);
// Return the existing doc shape so downstream code doesn't see an
// unexpected null. The document remains in DRAFT/PENDING from
@@ -215,7 +215,7 @@ export async function getDocument(docId: string, portId?: string): Promise<Docum
/**
* Email a signing reminder to one recipient. Skipped entirely when
* EMAIL_REDIRECT_TO is set the recipient's stored email may still be
* EMAIL_REDIRECT_TO is set - the recipient's stored email may still be
* a real client address from before the redirect was enabled.
*/
export async function sendReminder(
@@ -226,7 +226,7 @@ export async function sendReminder(
if (env.EMAIL_REDIRECT_TO) {
logger.warn(
{ docId, signerId, portId, redirect: env.EMAIL_REDIRECT_TO },
'sendReminder SKIPPED EMAIL_REDIRECT_TO is set, outbound comms paused',
'sendReminder SKIPPED - EMAIL_REDIRECT_TO is set, outbound comms paused',
);
return;
}
@@ -282,12 +282,12 @@ export async function checkDocumensoHealth(
//
// Callers always work in PERCENT (0-100). For v1 the abstraction multiplies by
// the page dimensions returned by Documenso (cached per docId for the lifetime
// of the process fields for a given doc usually go in a single batch).
// of the process - fields for a given doc usually go in a single batch).
export type DocumensoFieldType = 'SIGNATURE' | 'INITIALS' | 'DATE' | 'TEXT' | 'EMAIL';
export interface DocumensoFieldPlacement {
/** Documenso recipient id; v1 expects number, v2 string coerced internally. */
/** Documenso recipient id; v1 expects number, v2 string - coerced internally. */
recipientId: number | string;
type: DocumensoFieldType;
pageNumber: number;
@@ -296,7 +296,7 @@ export interface DocumensoFieldPlacement {
pageY: number;
pageWidth: number;
pageHeight: number;
/** Optional v2 fieldMeta passed through verbatim, ignored on v1. */
/** Optional v2 fieldMeta - passed through verbatim, ignored on v1. */
fieldMeta?: Record<string, unknown>;
}
@@ -309,7 +309,7 @@ const DEFAULT_PAGE_DIMENSIONS: DocumensoPageDimensions = { width: 595, height: 8
const pageDimensionCache = new Map<string, DocumensoPageDimensions>();
/** Test seam clears the page-dimension memoization. */
/** Test seam - clears the page-dimension memoization. */
export function __resetDocumensoCachesForTests(): void {
pageDimensionCache.clear();
}
@@ -353,7 +353,7 @@ export async function placeFields(
...(f.fieldMeta ? { fieldMeta: f.fieldMeta } : {}),
}));
// Note: v2 endpoint shape (envelopeId/recipientId types) must be
// confirmed against a live Documenso 2.x instance see PR11 realapi
// confirmed against a live Documenso 2.x instance - see PR11 realapi
// suite. Spec risk register flags this drift as the top v2 risk.
const res = await fetch(`${baseUrl}/api/v2/envelope/field/create-many`, {
method: 'POST',
@@ -401,7 +401,7 @@ export async function placeFields(
/**
* Auto-position one SIGNATURE field per recipient at the last-page footer,
* staggered horizontally so multiple signers don't overlap. Used by the
* upload-path wizard admins can refine in Documenso afterwards.
* upload-path wizard - admins can refine in Documenso afterwards.
*
* Layout (percent of page):
* y = 88 (footer band)
@@ -445,7 +445,7 @@ export function computeDefaultSignatureLayout(
* v1: DELETE /api/v1/documents/{id}
* v2: DELETE /api/v2/envelope/{id}
*
* Idempotent on 404 (already gone) logs and resolves.
* Idempotent on 404 (already gone) - logs and resolves.
*/
export async function voidDocument(docId: string, portId?: string): Promise<void> {
const { baseUrl, apiKey, apiVersion } = await resolveCreds(portId);

View File

@@ -37,7 +37,7 @@ export interface DocumensoTemplatePayload {
export interface DocumensoPayloadOptions {
/** `interestId` used to build `externalId` and Documenso referencing. */
interestId: string;
/** Documenso recipient IDs come from env vars. */
/** Documenso recipient IDs - come from env vars. */
clientRecipientId: number;
developerRecipientId: number;
approvalRecipientId: number;

View File

@@ -58,7 +58,7 @@ export interface SendReminderOptions {
/** true = cron auto-fire, enforces 9-16 window + cadence cooldown.
* false (default) = manual UI action, bypasses both. */
auto?: boolean;
/** Optional target a specific pending signer (parallel mode), or
/** Optional - target a specific pending signer (parallel mode), or
* bypass the lowest-pending default in sequential mode (must still be the
* next pending signer in that case). */
signerId?: string;
@@ -183,7 +183,7 @@ export async function sendReminderIfAllowed(
/**
* Cron entry point. Selects in-flight documents whose effective cadence
* (override or template) is set, then attempts auto-fire on each.
* `interests.reminderEnabled` is no longer part of the gating per-doc
* `interests.reminderEnabled` is no longer part of the gating - per-doc
* `remindersDisabled` is the kill switch instead.
*/
export async function processReminderQueue(portId: string): Promise<void> {

View File

@@ -1,5 +1,5 @@
/**
* Admin Document Template Service TipTap JSON-based templates
* Admin Document Template Service - TipTap JSON-based templates
*
* This service manages templates whose content is stored as TipTap JSON
* (serialised to the `bodyHtml` text column). Version history is maintained

View File

@@ -237,7 +237,7 @@ export async function resolveTemplate(
tokenMap['{{client.phone}}'] = eoi.client.primaryPhone ?? '';
tokenMap['{{client.nationality}}'] = eoi.client.nationality ?? '';
// Yacht tokens `eoi.yacht` is null when no yacht is linked
// Yacht tokens - `eoi.yacht` is null when no yacht is linked
// (Section 3 of the EOI is optional). Tokens render as empty strings
// in that case so the template still produces output.
tokenMap['{{yacht.name}}'] = eoi.yacht?.name ?? '';
@@ -252,7 +252,7 @@ export async function resolveTemplate(
tokenMap['{{yacht.widthM}}'] = eoi.yacht?.widthM ?? '';
tokenMap['{{yacht.draftM}}'] = eoi.yacht?.draftM ?? '';
// EoiContext doesn't expose the yacht.registration column look it up
// EoiContext doesn't expose the yacht.registration column - look it up
// separately (cheap, indexed fetch) so the token resolves when present.
try {
const interestRow = await db.query.interests.findFirst({
@@ -283,7 +283,7 @@ export async function resolveTemplate(
tokenMap['{{owner.name}}'] = eoi.owner.name;
tokenMap['{{owner.legalName}}'] = eoi.owner.legalName ?? '';
// Berth tokens also optional. Render empty when no berth is linked.
// Berth tokens - also optional. Render empty when no berth is linked.
tokenMap['{{berth.mooringNumber}}'] = eoi.berth?.mooringNumber ?? '';
tokenMap['{{berth.area}}'] = eoi.berth?.area ?? '';
tokenMap['{{berth.lengthFt}}'] = eoi.berth?.lengthFt ?? '';
@@ -301,9 +301,9 @@ export async function resolveTemplate(
tokenMap['{{interest.notes}}'] = eoi.interest.notes ?? '';
} catch (err) {
// buildEoiContext throws ValidationError when the EOI's required client
// fields (name/email/address Section 2) are missing. For non-EOI
// fields (name/email/address - Section 2) are missing. For non-EOI
// templates (correspondence, welcome letters, etc.) those gates don't
// apply fall through to the legacy resolution path below. Re-throw
// apply - fall through to the legacy resolution path below. Re-throw
// anything else.
if (
!(err instanceof ValidationError) ||
@@ -323,7 +323,7 @@ export async function resolveTemplate(
where: eq(clients.id, context.clientId),
});
if (client && client.portId === context.portId) {
// Always resolve source from the DB EoiContext doesn't carry it.
// Always resolve source from the DB - EoiContext doesn't carry it.
if (tokenMap['{{client.source}}'] === undefined) {
tokenMap['{{client.source}}'] = client.source ?? '';
}
@@ -349,7 +349,7 @@ export async function resolveTemplate(
}
}
// Interest tokens (legacy path fills in fields EoiContext doesn't expose,
// Interest tokens (legacy path - fills in fields EoiContext doesn't expose,
// like eoiStatus / dateEoiSigned / dateContractSigned, or populates the
// whole interest.* block when EOI resolution was skipped).
if (context.interestId) {
@@ -365,7 +365,7 @@ export async function resolveTemplate(
: '';
tokenMap['{{interest.notes}}'] = interest.notes ?? '';
}
// These are never populated by EoiContext always fill them in.
// These are never populated by EoiContext - always fill them in.
tokenMap['{{interest.eoiStatus}}'] = interest.eoiStatus ?? '';
tokenMap['{{interest.dateEoiSigned}}'] = interest.dateEoiSigned
? new Date(interest.dateEoiSigned).toLocaleDateString('en-GB')
@@ -395,7 +395,7 @@ export async function resolveTemplate(
}
}
// Berth tokens (legacy path when a berthId is passed directly and EOI
// Berth tokens (legacy path - when a berthId is passed directly and EOI
// resolution didn't already populate the berth block).
if (context.berthId && !eoiContextLoaded) {
const berth = await db.query.berths.findFirst({
@@ -507,7 +507,7 @@ export async function generateFromTemplate(
const pdfBytes = await generatePdf(pdfTemplate, [
{
portName: `${port?.name ?? 'Port Nimara'} ${template.name}`,
portName: `${port?.name ?? 'Port Nimara'} - ${template.name}`,
body: wrappedContent,
generatedAt: `Generated: ${new Date().toLocaleString('en-GB')}`,
},
@@ -610,7 +610,7 @@ export async function generateAndSend(
);
} catch (err) {
logger.error({ err, templateId, recipientEmail }, 'Failed to send template email');
// Don't throw document was created successfully; email failure is non-fatal
// Don't throw - document was created successfully; email failure is non-fatal
}
void createAuditLog({
@@ -631,7 +631,7 @@ export async function generateAndSend(
/**
* BR-142: For EOI templates, the in-app pathway uses the same source PDF as
* the Documenso template filled via pdf-lib with values from EoiContext.
* the Documenso template - filled via pdf-lib with values from EoiContext.
* Same field names, same legal document; the only difference is who renders
* it. The form is left interactive so a recipient can adjust before signing.
*/
@@ -893,7 +893,7 @@ async function generateAndSignViaDocumensoTemplate(
payload as unknown as Record<string, unknown>,
);
// Record a documents row referencing the Documenso document. No local file
// Record a documents row referencing the Documenso document. No local file -
// Documenso owns the PDF and delivers signed copies via webhook (handled elsewhere).
const [documentRecord] = await db
.insert(documents)

View File

@@ -118,7 +118,7 @@ function buildHubTabFilters(
case 'expired':
// Either explicitly expired, or in-flight past their expiry date.
// (Documents schema doesn't yet have an `expires_at` column, so for
// now this is just status='expired' extend when expiry lands.)
// now this is just status='expired' - extend when expiry lands.)
filters.push(eq(documents.status, 'expired'));
break;
}
@@ -126,7 +126,7 @@ function buildHubTabFilters(
}
export interface ListDocumentsExtra {
/** Email of the calling user used by hub tab filtering for "awaiting me". */
/** Email of the calling user - used by hub tab filtering for "awaiting me". */
currentUserEmail?: string;
}
@@ -514,7 +514,7 @@ export async function sendForSigning(documentId: string, portId: string, meta: A
// never match real port users and cause silent no-ops in handleRecipientSigned.
const eoiSigners = await getPortEoiSigners(portId);
// BR-021: Create 3 signers client (1), developer (2), sales/approver (3)
// BR-021: Create 3 signers - client (1), developer (2), sales/approver (3)
const signerRecords = await db
.insert(documentSigners)
.values([
@@ -812,7 +812,7 @@ export async function handleRecipientSigned(eventData: {
documentId: doc.id,
recipientEmail: eventData.recipientEmail,
},
'handleRecipientSigned: no matching signer row for recipient email ' +
'handleRecipientSigned: no matching signer row for recipient email - ' +
'check eoi_signers system setting for this port',
);
}
@@ -929,7 +929,7 @@ export async function handleDocumentCompleted(eventData: { documentId: string })
// Guard against double-fire: DOCUMENT_COMPLETED may arrive multiple times
// (webhook retries) or follow a DOCUMENT_SIGNED that already advanced the
// stage. advanceStageIfBehind handles the pipeline guard internally, but
// evaluateRule has no idempotency skip it if the interest is already at
// evaluateRule has no idempotency - skip it if the interest is already at
// eoi_signed or beyond to prevent duplicate berth-rule side effects.
const currentStageIdx = PIPELINE_STAGES.indexOf(
interest.pipelineStage as (typeof PIPELINE_STAGES)[number],
@@ -1198,7 +1198,7 @@ export async function cancelDocument(
// CRM is the system of record for cancellation status. A transient
// Documenso failure shouldn't block the user from marking the doc cancelled
// here voidDocument already treats 404 as success, and the periodic
// here - voidDocument already treats 404 as success, and the periodic
// webhook receiver will reconcile if the remote void eventually lands.
if (existing.documensoId) {
try {
@@ -1284,7 +1284,7 @@ export async function composeSignedDocEmail(
return {
to: dedupedRecipients,
cc: [],
subject: `Signed ${doc.documentType.replace(/_/g, ' ')} ${doc.title}`,
subject: `Signed ${doc.documentType.replace(/_/g, ' ')} - ${doc.title}`,
body: '',
attachments: [{ fileId: doc.signedFileId }],
defaultSenderType: 'system',
@@ -1359,9 +1359,9 @@ export async function removeDocumentWatcher(
* Create-document wizard entry point (PR6).
*
* Dispatches across pathways:
* - 'documenso-template' Documenso renders + signs from its own template
* - 'inapp' render PDF locally from a CRM template, upload to Documenso
* - 'upload' admin-supplied PDF, upload to Documenso (auto-place signature
* - 'documenso-template' - Documenso renders + signs from its own template
* - 'inapp' - render PDF locally from a CRM template, upload to Documenso
* - 'upload' - admin-supplied PDF, upload to Documenso (auto-place signature
* fields if `autoPlaceFields`)
*
* Persists the document, applies reminder overrides, attaches watchers, and

View File

@@ -72,7 +72,7 @@ export async function sendEmail(
throw new ForbiddenError('Email account does not belong to this port');
}
// Decrypt credentials (INTERNAL never logged or returned)
// Decrypt credentials (INTERNAL - never logged or returned)
const creds = await getDecryptedCredentials(data.accountId);
// Build user-specific SMTP transporter
@@ -133,7 +133,7 @@ export async function sendEmail(
// to that address and the subject is prefixed so the operator can see
// who would have received the message. This service builds its OWN
// transporter (per-account SMTP) so it doesn't go through sendEmail's
// redirect we apply the same logic here.
// redirect - we apply the same logic here.
const requestedTo = data.to.join(', ');
const requestedCc = data.cc?.join(', ');
const effectiveTo = env.EMAIL_REDIRECT_TO ?? requestedTo;

View File

@@ -29,7 +29,7 @@ export interface DraftResult {
* Request an AI-generated email draft.
*
* Generates an opaque random jobId rather than relying on BullMQ's default
* sequential ids the jobId is the access token for polling, so it must
* sequential ids - the jobId is the access token for polling, so it must
* not be enumerable. The job payload also captures the requesting user
* + port so the poll endpoint can refuse cross-tenant / cross-user reads.
*
@@ -59,7 +59,7 @@ export async function requestEmailDraft(
await aiQueue.add(
'generate-email-draft',
{
// No PII only IDs and context parameters
// No PII - only IDs and context parameters
interestId: request.interestId,
clientId: request.clientId,
portId: request.portId,

View File

@@ -91,7 +91,7 @@ export async function getThread(threadId: string, portId: string) {
export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
let threadId: string | null = null;
// Step 1: Message-ID chain check inReplyTo and references headers
// Step 1: Message-ID chain - check inReplyTo and references headers
const referencedIds = [
...(parsedEmail.inReplyTo ? [parsedEmail.inReplyTo] : []),
...(parsedEmail.references ?? []),
@@ -104,10 +104,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
if (existingMessage) {
// Verify thread belongs to this port
const thread = await db.query.emailThreads.findFirst({
where: and(
eq(emailThreads.id, existingMessage.threadId),
eq(emailThreads.portId, portId),
),
where: and(eq(emailThreads.id, existingMessage.threadId), eq(emailThreads.portId, portId)),
});
if (thread) {
threadId = thread.id;
@@ -117,7 +114,10 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
// Step 2: Email address match against client contacts
if (!threadId) {
const fromAddress = parsedEmail.from.replace(/.*<(.+)>/, '$1').trim().toLowerCase();
const fromAddress = parsedEmail.from
.replace(/.*<(.+)>/, '$1')
.trim()
.toLowerCase();
const contactRows = await db
.select({
@@ -168,9 +168,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
// Step 3: Subject + sender fuzzy match
if (!threadId) {
const normalizedSubject = parsedEmail.subject
.replace(/^(re|fwd|fw):\s*/i, '')
.trim();
const normalizedSubject = parsedEmail.subject.replace(/^(re|fwd|fw):\s*/i, '').trim();
if (normalizedSubject) {
const matchingThread = await db.query.emailThreads.findFirst({
@@ -187,7 +185,7 @@ export async function ingestMessage(portId: string, parsedEmail: ParsedEmail) {
}
}
// No thread found create a new one
// No thread found - create a new one
if (!threadId) {
const newThreadRows = await db
.insert(emailThreads)
@@ -276,7 +274,7 @@ export async function syncInbox(accountId: string): Promise<void> {
logger.info({ accountId, exists: mailbox.exists }, 'IMAP INBOX opened');
// Search for messages since the last sync date
// client.search() returns false | number[] false means nothing found
// client.search() returns false | number[] - false means nothing found
const searchResult = await client.search({ since });
const uids: number[] = searchResult === false ? [] : searchResult;
@@ -291,21 +289,15 @@ export async function syncInbox(accountId: string): Promise<void> {
const parsed = await simpleParser(message.source);
// Normalise messageId mailparser returns string | string[] | undefined
// Normalise messageId - mailparser returns string | string[] | undefined
const rawMsgId = parsed.messageId;
const messageId =
rawMsgId == null
? ''
: Array.isArray(rawMsgId)
? (rawMsgId[0] ?? '')
: rawMsgId;
rawMsgId == null ? '' : Array.isArray(rawMsgId) ? (rawMsgId[0] ?? '') : rawMsgId;
const from = parsed.from?.text ?? '';
// Normalise to/cc mailparser AddressObject can be an array
const resolveAddresses = (
field: typeof parsed.to,
): string[] => {
// Normalise to/cc - mailparser AddressObject can be an array
const resolveAddresses = (field: typeof parsed.to): string[] => {
if (!field) return [];
const arr = Array.isArray(field) ? field : [field];
return arr.flatMap((a) =>
@@ -321,8 +313,8 @@ export async function syncInbox(accountId: string): Promise<void> {
rawRefs == null
? []
: typeof rawRefs === 'string'
? rawRefs.split(/\s+/).filter(Boolean)
: rawRefs;
? rawRefs.split(/\s+/).filter(Boolean)
: rawRefs;
await ingestMessage(account.portId, {
messageId,

View File

@@ -24,7 +24,7 @@ import { ValidationError } from '@/lib/errors';
import { emitToRoom } from '@/lib/socket/server';
interface SetEntityTagsArgs<TJoin extends PgTable> {
/** Join table e.g. `clientTags`, `yachtTags`. */
/** Join table - e.g. `clientTags`, `yachtTags`. */
joinTable: TJoin;
/**
* Column on the join table that points back at the parent entity (e.g.
@@ -56,7 +56,7 @@ export async function setEntityTags<TJoin extends PgTable>(
// Tenant scope: every supplied tagId must belong to the caller's port.
// The tags table is per-port (`tags.port_id`) but the join tables only
// have a single-column FK to `tags.id` without this guard, a port-A
// have a single-column FK to `tags.id` - without this guard, a port-A
// caller could splice a port-B tag UUID onto their own entity. The
// entity's GET handler joins `tags ON join.tag_id = tags.id` with no
// port filter, so the foreign tag's name and color render in port A.

View File

@@ -83,8 +83,8 @@ export type EoiContext = {
*
* Tenant-scoped: every fetch is gated by `portId`, and missing rows surface
* as NotFoundError. The hard gate matches the EOI document's top paragraph
* (Section 2 name, address, email): without those the EOI is unsignable
* and we throw. Yacht and berth (Section 3) are optional the rendered PDF
* (Section 2 - name, address, email): without those the EOI is unsignable
* and we throw. Yacht and berth (Section 3) are optional - the rendered PDF
* leaves those fields blank when not set.
*/
export async function buildEoiContext(interestId: string, portId: string): Promise<EoiContext> {
@@ -96,7 +96,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
throw new NotFoundError('Interest');
}
// Parallelise independent reads. Yacht and berth are both nullable
// Parallelise independent reads. Yacht and berth are both nullable -
// the EOI's Section 3 stays blank when they're absent.
const [yacht, berth, client, port] = await Promise.all([
interest.yachtId
@@ -120,7 +120,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
if (!client) throw new NotFoundError('Client');
if (!port) throw new NotFoundError('Port');
// 5. Primary contacts email + phone for the interest's client.
// 5. Primary contacts - email + phone for the interest's client.
const contactRows = await db
.select({
channel: clientContacts.channel,
@@ -166,7 +166,7 @@ export async function buildEoiContext(interestId: string, portId: string): Promi
if (!clientAddress || !clientAddress.street.trim()) missing.push('client address');
if (missing.length > 0) {
throw new ValidationError(
`Cannot generate EOI missing required client details: ${missing.join(', ')}.`,
`Cannot generate EOI - missing required client details: ${missing.join(', ')}.`,
);
}

View File

@@ -1,5 +1,5 @@
/**
* Expense duplicate detection heuristic match on
* Expense duplicate detection - heuristic match on
* (port + vendor + amount + date ± 3d). PR1 ships the function shape;
* PR8 wires the BullMQ trigger and the merge service.
*/
@@ -71,7 +71,7 @@ export async function markBestDuplicate(expenseId: string): Promise<string | nul
}
/**
* Clear the duplicate flag operator confirmed this is a real expense.
* Clear the duplicate flag - operator confirmed this is a real expense.
* Leaves `dedupScannedAt` populated so the engine doesn't re-flag it.
*/
export async function clearDuplicate(expenseId: string, portId: string): Promise<void> {
@@ -118,7 +118,7 @@ export async function mergeDuplicate(
.set({ receiptFileIds: mergedReceipts })
.where(eq(expenses.id, targetId));
// Archive the source preserves audit history, keeps any FKs alive.
// Archive the source - preserves audit history, keeps any FKs alive.
await tx
.update(expenses)
.set({ archivedAt: sql`now()`, duplicateOf: null })

View File

@@ -33,7 +33,7 @@ export interface OcrContext {
export const OCR_MAX_TOKENS = 1024;
export const OCR_LOW_CONFIDENCE_THRESHOLD = 0.6;
/** Stub returns "pending" shape so callers can wire UI in PR1 without
/** Stub - returns "pending" shape so callers can wire UI in PR1 without
* Anthropic credentials. */
export async function ocrReceipt(_ctx: OcrContext): Promise<OcrResult> {
return {

View File

@@ -135,7 +135,7 @@ export async function createExpense(portId: string, data: CreateExpenseInput, me
category: expense.category ?? '',
});
// Schedule a duplicate-detection sweep. Best-effort we don't want a
// Schedule a duplicate-detection sweep. Best-effort - we don't want a
// queue-side hiccup to fail the user's create.
try {
const { getQueue } = await import('@/lib/queue');

View File

@@ -231,7 +231,7 @@ export async function listFiles(portId: string, query: ListFilesInput) {
sort: sort ? { column: sortColumn, direction: order } : undefined,
page,
pageSize: limit,
// no archivedAtColumn files are immutable records
// no archivedAtColumn - files are immutable records
});
}

View File

@@ -1,6 +1,6 @@
/**
* Builds the structured payload that becomes the JSON + HTML inside a
* GDPR client-data export. Pure read-side no writes, no I/O outside
* GDPR client-data export. Pure read-side - no writes, no I/O outside
* Drizzle. The worker pairs this with the actual ZIP/upload/email work.
*
* GDPR Article 15 (right of access) requires that we hand the data
@@ -223,8 +223,8 @@ function tableSection(title: string, rows: Record<string, unknown>[]): string {
}
/**
* Renders the bundle as a self-contained HTML document no external
* resources, no JS so it opens in any browser including offline.
* Renders the bundle as a self-contained HTML document - no external
* resources, no JS - so it opens in any browser including offline.
*/
export function renderBundleHtml(bundle: GdprBundle): string {
const clientName = String(bundle.client.fullName ?? bundle.meta.clientId ?? 'Unknown');
@@ -255,7 +255,7 @@ export function renderBundleHtml(bundle: GdprBundle): string {
<html lang="en">
<head>
<meta charset="utf-8" />
<title>Personal data export ${escapeHtml(clientName)}</title>
<title>Personal data export - ${escapeHtml(clientName)}</title>
<style>
body { font: 14px/1.5 -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, sans-serif; margin: 2rem; max-width: 1200px; }
h1 { border-bottom: 2px solid #333; padding-bottom: 0.5rem; }

View File

@@ -69,7 +69,7 @@ export async function requestGdprExport(input: RequestExportInput): Promise<Requ
});
if (!primary) {
throw new ValidationError(
'Client has no primary email contact provide an emailOverride or add one before exporting.',
'Client has no primary email contact - provide an emailOverride or add one before exporting.',
);
}
}
@@ -132,7 +132,7 @@ export async function processGdprExportJob(input: ProcessJobInput): Promise<void
const html = renderBundleHtml(bundle);
// Stream a ZIP into a buffer. Receipts/contracts are not included
// here they live on file rows referenced by the bundle and would
// here - they live on file rows referenced by the bundle and would
// bloat the archive. Add them later if Article-15 requests demand.
const zip = archiver('zip', { zlib: { level: 9 } });
const sink = new PassThrough();
@@ -212,7 +212,7 @@ async function emailExport(input: ProcessJobInput, storageKey: string): Promise<
if (!recipient) {
logger.warn(
{ exportId: input.exportId, clientId: input.clientId },
'GDPR export ready but no email recipient skipping send',
'GDPR export ready but no email recipient - skipping send',
);
return;
}
@@ -227,8 +227,8 @@ async function emailExport(input: ProcessJobInput, storageKey: string): Promise<
<p>Hello ${escapeHtml(name)},</p>
<p>You requested a copy of the personal data we hold about you. The export is ready and contains:</p>
<ul>
<li><code>client.json</code> machine-readable data dump</li>
<li><code>client.html</code> same data as a printable web page</li>
<li><code>client.json</code> - machine-readable data dump</li>
<li><code>client.html</code> - same data as a printable web page</li>
</ul>
<p><a href="${url}">Download the export (ZIP, expires ${escapeHtml(expiry)})</a></p>
<p>If you have any questions, reply to this email.</p>
@@ -254,7 +254,7 @@ function escapeHtml(s: unknown): string {
.replace(/'/g, '&#39;');
}
/** Lists exports for a client (most-recent first) feeds the admin "history" UI. */
/** Lists exports for a client (most-recent first) - feeds the admin "history" UI. */
export async function listClientExports(clientId: string, portId: string) {
const client = await db.query.clients.findFirst({ where: eq(clients.id, clientId) });
if (!client || client.portId !== portId) throw new NotFoundError('Client');

View File

@@ -46,7 +46,7 @@ function scoreStageSpeed(createdAt: Date, pipelineStage: string): number {
const idx = PIPELINE_STAGES.indexOf(pipelineStage as (typeof PIPELINE_STAGES)[number]);
const stageIndex = idx === -1 ? 0 : idx;
if (stageIndex === 0) {
// Still at open no progression
// Still at open - no progression
return 0;
}
@@ -131,7 +131,7 @@ export async function calculateInterestScore(
// 3. Document completeness
const documentCompleteness = scoreDocumentCompleteness(interest);
// 4. Engagement notes, emails, reminders in last 30 days
// 4. Engagement - notes, emails, reminders in last 30 days
const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
const [notesResult, remindersResult, emailResult] = await Promise.all([

View File

@@ -29,8 +29,8 @@ import type {
// ─── Port-scope FK validator ─────────────────────────────────────────────────
// Tenant scope: every FK referenced from an interest body clientId, berthId,
// and yachtId must belong to the caller's port. Without this, a body-supplied
// Tenant scope: every FK referenced from an interest body - clientId, berthId,
// and yachtId - must belong to the caller's port. Without this, a body-supplied
// foreign-port id would create an interest that joins through these FKs and
// surfaces foreign-tenant data on subsequent reads (clientName, berth mooring
// number, yacht ownership). assertYachtBelongsToClient still runs separately to
@@ -370,7 +370,7 @@ export async function getInterestById(id: string, portId: string) {
.where(eq(interestNotes.interestId, id));
// Active reminder count for the interest's bell badge. Counts reminders
// directly linked via interestId `pending` and `snoozed` only;
// directly linked via interestId - `pending` and `snoozed` only;
// completed/dismissed don't surface.
const [{ count: activeReminderCount } = { count: 0 }] = await db
.select({ count: sql<number>`count(*)::int` })
@@ -633,7 +633,7 @@ export async function changeInterestStage(
// Moves an interest forward to `target` if (and only if) it is currently behind
// it in the pipeline order. Used by lifecycle events (EOI sent, EOI signed,
// deposit recorded, contract signed) so the user-visible stage tracks reality
// without overwriting a more advanced state e.g. a late-arriving signed-EOI
// without overwriting a more advanced state - e.g. a late-arriving signed-EOI
// webhook on an interest that has already moved on to `contract_sent` is a
// no-op rather than a regression.
//
@@ -658,7 +658,7 @@ export async function advanceStageIfBehind(
// yachtId gate: changeInterestStage requires a yacht before leaving `open`.
// EOI events imply a yacht is in the picture, but if the data is missing we
// bail rather than throw the EOI itself shouldn't fail because of this.
// bail rather than throw - the EOI itself shouldn't fail because of this.
if (existing.pipelineStage === 'open' && !existing.yachtId) {
return false;
}
@@ -671,7 +671,7 @@ export async function advanceStageIfBehind(
//
// Records a terminal outcome for the interest and moves the pipelineStage to
// `completed` so the funnel/kanban reflect the final state. The outcome
// distinguishes won deals (they made it through) from lost variants funnel
// distinguishes won deals (they made it through) from lost variants - funnel
// math and reports key off the `outcome` column to compute true conversion.
//
// Both the stage advance and the outcome write happen in one transaction so

View File

@@ -139,7 +139,7 @@ async function resolveBillingEntity(
* Verify every supplied expense ID belongs to the caller's port. Without
* this gate, a caller could link foreign-port expenses into their own
* draft invoice and read those expenses back via getInvoiceById's
* `linkedExpenses` join a cross-tenant data leak.
* `linkedExpenses` join - a cross-tenant data leak.
*/
async function assertExpensesInPort(
tx: typeof db,
@@ -251,7 +251,7 @@ export async function createInvoice(portId: string, data: CreateInvoiceInput, me
const lineItemsData = data.lineItems ?? [];
const subtotal = lineItemsData.reduce((sum, li) => sum + (li.quantity ?? 1) * li.unitPrice, 0);
// BR-042: net10 discount read from systemSettings
// BR-042: net10 discount - read from systemSettings
let discountPct = 0;
if (data.paymentTerms === 'net10') {
const [setting] = await tx
@@ -479,7 +479,7 @@ export async function updateInvoice(
// Replace expense links if provided
if (data.expenseIds !== undefined) {
// Tenancy gate first reject foreign-port expense IDs before
// Tenancy gate first - reject foreign-port expense IDs before
// running BR-045 or doing any writes.
await assertExpensesInPort(tx, portId, data.expenseIds);
// BR-045
@@ -728,7 +728,7 @@ export async function recordPayment(
});
// Deposit invoices linked to a sales interest auto-advance the pipeline.
// Only advances forward no-op if the interest has already moved past
// Only advances forward - no-op if the interest has already moved past
// deposit_10pct (e.g. straight-to-contract flows).
if (updated.kind === 'deposit' && updated.interestId) {
const { advanceStageIfBehind } = await import('@/lib/services/interests.service');

View File

@@ -83,7 +83,7 @@ export async function createNotification(
}
}
// 2. Preference check (skip for system_alert type always delivered)
// 2. Preference check (skip for system_alert type - always delivered)
if (type !== 'system_alert') {
const [pref] = await db
.select({
@@ -101,12 +101,12 @@ export async function createNotification(
.limit(1);
if (pref && pref.inApp === false) {
// Check if email is enabled if neither, skip entirely
// Check if email is enabled - if neither, skip entirely
if (pref.email === false) {
return null;
}
// inApp disabled but email enabled: still enqueue email but skip insert
// We can't insert and emit, so just enqueue if there were a row but we need an ID.
// We can't insert and emit, so just enqueue if there were a row - but we need an ID.
// Per spec: if inApp=false, skip insert. Email requires notificationId so skip email too.
return null;
}

View File

@@ -1,5 +1,5 @@
/**
* OCR provider config stored in `system_settings` under the key
* OCR provider config - stored in `system_settings` under the key
* `ocr.config`. Each port can either have its own row (port_id = port.id)
* or opt into the global row (port_id = null) by setting `useGlobal: true`.
*/
@@ -22,7 +22,7 @@ export const DEFAULT_MODEL: Record<OcrProvider, string> = {
claude: 'claude-haiku-4-5',
};
/** Public shape that admin UIs read never includes the raw key. */
/** Public shape that admin UIs read - never includes the raw key. */
export interface OcrConfigPublic {
provider: OcrProvider;
model: string;
@@ -38,7 +38,7 @@ export interface OcrConfigPublic {
aiEnabled: boolean;
}
/** Internal shape including the decrypted key server-side only. */
/** Internal shape including the decrypted key - server-side only. */
export interface OcrConfigResolved extends OcrConfigPublic {
apiKey: string | null;
/** Source of the resolved row: 'port' | 'global' | 'none'. */
@@ -115,7 +115,7 @@ export async function getResolvedOcrConfig(portId: string): Promise<OcrConfigRes
};
}
/** Public-safe view for the admin UI same shape but never the key. */
/** Public-safe view for the admin UI - same shape but never the key. */
export async function getPublicOcrConfig(portId: string | null): Promise<OcrConfigPublic> {
const row = await readRow(portId);
if (!row) {

View File

@@ -108,7 +108,7 @@ async function issueActivationToken(
await sendEmail(email, subject, html, undefined, text);
} catch (err) {
logger.error({ err, email }, 'Failed to send portal activation email');
// Re-throw the admin should know if their invite mail bounced.
// Re-throw - the admin should know if their invite mail bounced.
throw err;
}
}
@@ -162,7 +162,7 @@ export async function signIn(args: {
where: eq(portalUsers.email, normalizedEmail),
});
// Dummy hash with the right shape used to keep verifyPassword's compute
// Dummy hash with the right shape - used to keep verifyPassword's compute
// cost identical when the user doesn't exist.
const dummyHash =
'0000000000000000000000000000000000000000000000000000000000000000:00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000';
@@ -177,7 +177,7 @@ export async function signIn(args: {
// Disabled-port check happens AFTER the credential check so that a wrong
// password on a disabled-port account still surfaces "invalid email or
// password" we never leak which ports have the portal turned off.
// password" - we never leak which ports have the portal turned off.
if (!(await isPortalEnabledForPort(user.portId))) {
throw new UnauthorizedError('Invalid email or password');
}
@@ -209,7 +209,7 @@ export async function requestPasswordReset(email: string): Promise<void> {
return;
}
// Same silent no-op when the port has the portal disabled keeps the
// Same silent no-op when the port has the portal disabled - keeps the
// disabled-state from leaking through the public reset endpoint.
if (!(await isPortalEnabledForPort(user.portId))) {
logger.debug({ portId: user.portId }, 'Password reset on disabled-portal port');
@@ -239,7 +239,7 @@ export async function requestPasswordReset(email: string): Promise<void> {
await sendEmail(user.email, subject, html, undefined, text);
} catch (err) {
logger.error({ err, email: user.email }, 'Failed to send password-reset email');
// Don't propagate the public route returns 200 either way.
// Don't propagate - the public route returns 200 either way.
}
}

View File

@@ -151,7 +151,7 @@ export async function listRecommendations(interestId: string, portId: string) {
// Verify the interest belongs to the caller's port. Without this gate,
// any user with `interests:view` could pass a foreign-port interestId
// and receive that tenant's recommended berths (mooring numbers,
// dimensions, status operational data they should not see).
// dimensions, status - operational data they should not see).
const interest = await db.query.interests.findFirst({
where: and(eq(interests.id, interestId), eq(interests.portId, portId)),
});

View File

@@ -113,7 +113,7 @@ export async function getUpcomingReminders(portId: string, days: number = 14) {
* all point at rows inside the caller's port. Without this guard, a
* reminder created with a foreign-port FK would later be hydrated with
* `with: { client, interest, berth }` joins (no port filter on the
* relation) leaking the foreign-tenant rows back to the attacker.
* relation) - leaking the foreign-tenant rows back to the attacker.
*/
async function assertReminderFksInPort(
portId: string,
@@ -444,7 +444,7 @@ export async function processFollowUpReminders() {
const title = client ? `Follow up with ${client.fullName}` : 'Follow up on interest';
// Find the assigned user (first userPortRole for this port, or fallback)
// For now, leave assignedTo null the notification goes to the port room
// For now, leave assignedTo null - the notification goes to the port room
await db.insert(reminders).values({
portId: port.id,
title,

View File

@@ -235,7 +235,7 @@ export async function createResidentialInterest(
data: CreateResidentialInterestInput,
meta: AuditMeta,
) {
// Validate the residential client belongs to this port prevents
// Validate the residential client belongs to this port - prevents
// cross-port linking.
const client = await db.query.residentialClients.findFirst({
where: and(

View File

@@ -194,7 +194,7 @@ function recentSearchKey(userId: string, portId: string): string {
}
/**
* Fire-and-forget saves a search term to the user's recent searches sorted set.
* Fire-and-forget - saves a search term to the user's recent searches sorted set.
*/
export function saveRecentSearch(userId: string, portId: string, searchTerm: string): void {
const key = recentSearchKey(userId, portId);
@@ -203,7 +203,7 @@ export function saveRecentSearch(userId: string, portId: string, searchTerm: str
.then(() => redis.zremrangebyrank(key, 0, -(RECENT_SEARCH_MAX + 1)))
.then(() => redis.expire(key, RECENT_SEARCH_TTL))
.catch(() => {
// Intentionally swallowed recent searches are non-critical
// Intentionally swallowed - recent searches are non-critical
});
}

View File

@@ -237,9 +237,7 @@ export async function getQueueJobs(
try {
const dataStr = JSON.stringify(job.data);
truncatedData =
dataStr.length > 500
? JSON.parse(dataStr.slice(0, 500) + '...(truncated)')
: job.data;
dataStr.length > 500 ? JSON.parse(dataStr.slice(0, 500) + '...(truncated)') : job.data;
} catch {
truncatedData = '[unparseable]';
}
@@ -261,11 +259,7 @@ export async function getQueueJobs(
// ─── retryJob ─────────────────────────────────────────────────────────────────
export async function retryJob(
queueName: QueueName,
jobId: string,
userId: string,
): Promise<void> {
export async function retryJob(queueName: QueueName, jobId: string, userId: string): Promise<void> {
const queue = getQueue(queueName);
const job = await queue.getJob(jobId);
if (!job) throw new Error(`Job ${jobId} not found in queue ${queueName}`);
@@ -348,19 +342,21 @@ export async function getRecentErrors(limit = 20): Promise<RecentError[]> {
metadata: (row.metadata as Record<string, unknown>) ?? {},
}));
// Fetch failed jobs from all queues (sample top 5 per queue)
// Fetch failed jobs from all queues (sample - top 5 per queue)
const queueNames = Object.keys(QUEUE_CONFIGS) as QueueName[];
const failedJobResults = await Promise.allSettled(
queueNames.map(async (name) => {
const queue = getQueue(name);
const jobs = await queue.getJobs(['failed'], 0, 4);
return jobs.map((job): RecentError => ({
id: `${name}:${job.id ?? ''}`,
source: 'queue',
message: `Queue job failed: ${job.name} in ${name}`,
timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
metadata: { queueName: name, failedReason: job.failedReason },
}));
return jobs.map(
(job): RecentError => ({
id: `${name}:${job.id ?? ''}`,
source: 'queue',
message: `Queue job failed: ${job.name} in ${name}`,
timestamp: job.finishedOn ? new Date(job.finishedOn) : new Date(job.timestamp),
metadata: { queueName: name, failedReason: job.failedReason },
}),
);
}),
);

View File

@@ -11,7 +11,7 @@ import { INTERNAL_TO_WEBHOOK_MAP } from '@/lib/services/webhook-event-map';
* queries all active webhooks for the given port that are subscribed to that
* event, and enqueues a BullMQ delivery job for each one.
*
* This function is fire-and-forget callers should use `void dispatchWebhookEvent(...)`.
* This function is fire-and-forget - callers should use `void dispatchWebhookEvent(...)`.
*/
export async function dispatchWebhookEvent(
portId: string,
@@ -20,7 +20,7 @@ export async function dispatchWebhookEvent(
): Promise<void> {
const webhookEvent = INTERNAL_TO_WEBHOOK_MAP[internalEvent];
if (!webhookEvent) {
// No mapping for this event skip silently
// No mapping for this event - skip silently
return;
}
@@ -65,10 +65,7 @@ export async function dispatchWebhookEvent(
});
}
} catch (err) {
// Never block callers log and swallow
logger.error(
{ portId, internalEvent, webhookEvent, err },
'Failed to dispatch webhook event',
);
// Never block callers - log and swallow
logger.error({ portId, internalEvent, webhookEvent, err }, 'Failed to dispatch webhook event');
}
}

View File

@@ -67,7 +67,7 @@ export async function createWebhook(
userAgent: meta.userAgent,
});
// Return with plaintext secret shown ONCE only on creation
// Return with plaintext secret - shown ONCE only on creation
return {
...webhook!,
secret: plaintextSecret,
@@ -220,7 +220,7 @@ export async function regenerateSecret(portId: string, webhookId: string, meta:
userAgent: meta.userAgent,
});
// Return new plaintext secret shown ONCE
// Return new plaintext secret - shown ONCE
return {
webhookId,
secret: plaintextSecret,

View File

@@ -211,7 +211,7 @@ export async function transferOwnership(
yacht.currentOwnerType === data.newOwner.type &&
yacht.currentOwnerId === data.newOwner.id
) {
throw new ValidationError('same owner nothing to transfer');
throw new ValidationError('same owner - nothing to transfer');
}
await assertOwnerExists(portId, data.newOwner, tx);

View File

@@ -266,7 +266,7 @@ export interface ServerToClientEvents {
'alert:dismissed': (payload: { alertId: string; portId: string }) => void;
}
// Client → Server events (minimal most actions go through REST API)
// Client → Server events (minimal - most actions go through REST API)
export interface ClientToServerEvents {
'join:entity': (payload: { type: 'berth' | 'client' | 'interest'; id: string }) => void;
'leave:entity': (payload: { type: 'berth' | 'client' | 'interest'; id: string }) => void;

View File

@@ -18,7 +18,7 @@ let io: Server<ClientToServerEvents, ServerToClientEvents> | null = null;
/**
* Returns true if the user is a super-admin OR holds a userPortRoles row
* for the given portId. The Socket.IO auth middleware uses this to decide
* whether to honour a client-supplied `auth.portId` the prior code
* whether to honour a client-supplied `auth.portId` - the prior code
* trusted whatever the client passed and thereby joined the socket to a
* foreign tenant's broadcast room.
*/
@@ -35,7 +35,7 @@ async function userCanAccessPort(userId: string, portId: string): Promise<boolea
/**
* Verify the user can join an entity-scoped room. Each entity type's own
* tenant column is checked if the user can access the entity's port,
* tenant column is checked - if the user can access the entity's port,
* they may subscribe to that entity's room.
*/
async function userCanJoinEntity(
@@ -84,7 +84,7 @@ export function initSocketServer(
maxHttpBufferSize: 1e6, // 1MB message limit
});
// Auth middleware validate session cookie + verify the user actually
// Auth middleware - validate session cookie + verify the user actually
// holds a role in the requested port. The handshake's auth.portId is
// user-supplied; we MUST cross-check it against userPortRoles or any
// authenticated user could subscribe to a foreign tenant's broadcasts.
@@ -132,7 +132,7 @@ export function initSocketServer(
socket.join(`user:${userId}`);
if (portId) socket.join(`port:${portId}`);
// Entity-level room management verify the user can access the
// Entity-level room management - verify the user can access the
// entity's port before joining. Without this, any authenticated user
// could subscribe to a foreign-tenant entity's broadcast (note
// previews, signer emails, etc.) by guessing or harvesting an id.
@@ -149,7 +149,7 @@ export function initSocketServer(
socket.leave(`${type}:${id}`);
});
// Idle timeout (30 seconds for development only, would be longer in prod)
// Idle timeout (30 seconds - for development only, would be longer in prod)
let idleTimer = setTimeout(() => socket.disconnect(), 30_000);
socket.onAny(() => {
clearTimeout(idleTimer);

View File

@@ -13,18 +13,12 @@ export const createFieldSchema = z
.regex(/^[a-z_][a-z0-9_]*$/, 'Must be snake_case'),
fieldLabel: z.string().min(1).max(100),
fieldType: z.enum(CUSTOM_FIELD_TYPES),
selectOptions: z
.array(z.string().min(1).max(100))
.min(1)
.max(50)
.optional(),
selectOptions: z.array(z.string().min(1).max(100)).min(1).max(50).optional(),
isRequired: z.boolean().default(false),
sortOrder: z.number().int().min(0).default(0),
})
.refine(
(data) =>
data.fieldType !== 'select' ||
(data.selectOptions && data.selectOptions.length > 0),
(data) => data.fieldType !== 'select' || (data.selectOptions && data.selectOptions.length > 0),
{
message: 'Select fields must have at least one option',
path: ['selectOptions'],
@@ -36,7 +30,7 @@ export const updateFieldSchema = z.object({
selectOptions: z.array(z.string().min(1).max(100)).optional(),
isRequired: z.boolean().optional(),
sortOrder: z.number().int().min(0).optional(),
// fieldType intentionally omitted cannot be changed after creation
// fieldType intentionally omitted - cannot be changed after creation
});
export const setValuesSchema = z.object({

View File

@@ -82,7 +82,7 @@ export type GenerateAndSendInput = z.infer<typeof generateAndSendSchema>;
export type GenerateAndSignInput = z.infer<typeof generateAndSignSchema>;
// ─── TipTap-based Admin Template Schemas ─────────────────────────────────────
// Used by /api/v1/admin/templates the TipTap JSON document store.
// Used by /api/v1/admin/templates - the TipTap JSON document store.
export const tiptapDocumentTypes = [
'eoi',

View File

@@ -84,7 +84,7 @@ export const listDocumentsSchema = baseListQuerySchema.extend({
clientId: z.string().optional(),
documentType: z.string().optional(),
status: z.string().optional(),
/** Hub tab filter applies tab-specific status / signer-membership constraints. */
/** Hub tab filter - applies tab-specific status / signer-membership constraints. */
tab: z.enum(documentsHubTabs).optional(),
/** Restrict to docs being watched by this user id. */
watcherUserId: z.string().optional(),

View File

@@ -58,7 +58,7 @@ export const subdivisionIsoSchema = z
.refine((code) => isValidSubdivisionCode(code), 'Unknown subdivision code');
// ─── Optional variants ────────────────────────────────────────────────────────
// Inline forms most callers will use empty strings normalize to null
// Inline forms most callers will use - empty strings normalize to null
// so the user clearing a field doesn't fail validation.
export const optionalCountryIsoSchema = z

View File

@@ -162,7 +162,7 @@ export const publicInterestSchema = z
// NEW: required structured yacht block. Public submissions after the
// data-model refactor MUST include yacht data.
yacht: publicYachtSchema,
// NEW: optional company block creates/upserts a company and adds a
// NEW: optional company block - creates/upserts a company and adds a
// membership linking the submitting client to it.
company: publicCompanySchema.optional(),
source: z.literal('website').default('website'),

View File

@@ -81,7 +81,7 @@ export const listResidentialInterestsSchema = baseListQuerySchema.extend({
* Shape posted by the public website's residential interest form. Coerces
* to internal create-shapes inside the public route.
*
* The legacy `phone` field stays free-text older website builds may post
* The legacy `phone` field stays free-text - older website builds may post
* raw international strings ('+44 7700 900123'). The route handler parses
* it server-side into `phoneE164` + `phoneCountry`. Newer website builds
* can post normalized values directly.

View File

@@ -51,7 +51,7 @@ function isBlockedIpv6(host: string): boolean {
if (h.startsWith('fe80:') || h.startsWith('fe80::')) return true; // link-local
if (/^f[cd][0-9a-f]{2}:/.test(h)) return true; // fc00::/7 unique-local
if (h.startsWith('::ffff:')) {
// IPv4-mapped unwrap and check
// IPv4-mapped - unwrap and check
const ipv4 = h.slice(7);
return isBlockedIpv4(ipv4);
}
@@ -62,7 +62,7 @@ function isBlockedIpv6(host: string): boolean {
* Reject webhook URLs whose hostname targets a private/internal/loopback/
* link-local destination. The webhook worker `fetch`es the URL and writes
* a slice of the response body into `webhook_deliveries.response_body`,
* which is later returned by the deliveries listing endpoint making any
* which is later returned by the deliveries listing endpoint - making any
* SSRF here an information-disclosure read primitive against any internal
* service the worker can reach. Does NOT defend against DNS rebinding;
* the worker performs its own re-resolution at dispatch time.