feat(emails): sales send-out flows + brochures + email-from settings

Phase 7 of the berth-recommender refactor (plan §3.3, §4.8, §4.9, §5.7,
§5.8, §5.9, §11.1, §14.7, §14.9). Adds the rep-driven send-out path for
per-berth PDFs and port-wide brochures, the per-port sales SMTP/IMAP
config + body templates, and the supporting admin UI.

Migration: 0031_brochures_and_document_sends.sql

Schema additions:
  - brochures (port-wide, with isDefault marker + archive)
  - brochure_versions (versioned uploads, storageKey per §4.7a)
  - document_sends (audit log of every rep-initiated send; failures
    captured with failedAt + errorReason). berthPdfVersionId is a plain
    text column (no FK) — loose-coupled to Phase 6b's berth_pdf_versions
    so the two phases stay independent.

§14.7 critical mitigations:
  - Body XSS: rep-authored markdown goes through renderEmailBody()
    (HTML-escape first, then a tight allowlist of bold/italic/code/link
    rules). https:// + mailto: only — javascript:/data: URLs stripped.
    Tested against script/img/iframe/svg/onerror polyglots.
  - Recipient typo: strict email regex + two-step confirm modal that
    shows the exact recipient before send.
  - Unresolved merge fields: pre-send dry-run /preview endpoint blocks
    submission until findUnresolvedTokens() returns empty.
  - SMTP failure: every transport rejection writes a document_sends row
    with failedAt + errorReason; UI surfaces the message.
  - Hourly per-user rate limit: 50 sends/user/hour via existing
    checkRateLimit().
  - Size threshold fallback (§11.1): files above
    email_attach_threshold_mb (default 15) ship as a 24h signed-URL
    download link in the body instead of an attachment. Storage stream
    flows directly to nodemailer to avoid buffering 20MB+.

§14.10 critical mitigation:
  - SMTP/IMAP passwords encrypted at rest via the existing
    EMAIL_CREDENTIAL_KEY (AES-256-GCM). The /api/v1/admin/email/
    sales-config GET endpoint never returns the decrypted value — only
    a *PassIsSet boolean. PATCH treats empty string as "leave unchanged"
    and explicit null as "clear", so the masked-placeholder UI round-
    trips without forcing re-entry on every save.

system_settings keys (per-port unless noted):
  - sales_from_address, sales_smtp_{host,port,secure,user,pass_encrypted}
  - sales_imap_{host,port,user,pass_encrypted}
  - sales_auth_method (default app_password)
  - noreply_from_address
  - email_template_send_berth_pdf_body, email_template_send_brochure_body
  - brochure_max_upload_mb (default 50)
  - email_attach_threshold_mb (default 15)

UI surfaces (per §5.7, §5.8, §5.9):
  - <SendDocumentDialog> shared 2-step compose+confirm flow.
  - <SendBerthPdfDialog>, <SendDocumentsDialog>, <SendFromInterestButton>
    wrappers per detail page.
  - /[portSlug]/admin/brochures: list, upload (direct-to-storage
    presigned PUT for the 20MB+ files per §11.1), default toggle,
    archive.
  - /[portSlug]/admin/email extended with <SalesEmailConfigCard>:
    SMTP + IMAP creds, body templates, threshold/max settings.

Storage: every upload + download goes through getStorageBackend() —
no direct minio imports, per Phase 6a contract.

Tests: 1145 vitest passing (+ 50 new in
markdown-email-sanitization.test.ts, document-sends-validators.test.ts,
sales-email-config-validators.test.ts).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-05 03:38:47 +02:00
parent 249ffe3e4a
commit a0091e4ca6
32 changed files with 15129 additions and 0 deletions

View File

@@ -0,0 +1,59 @@
-- Migration 0031 (Phase 7): port-wide brochures, versioned brochure uploads,
-- and the document_sends audit log for rep-initiated send-outs.
-- Immutable upload rows: one per brochure version, keyed into the active
-- storage backend via storage_key.
CREATE TABLE "brochure_versions" (
"id" text PRIMARY KEY NOT NULL,
"brochure_id" text NOT NULL,
"version_number" integer NOT NULL,
"storage_key" text NOT NULL,
"file_name" text NOT NULL,
"file_size_bytes" integer NOT NULL,
"content_sha256" text NOT NULL,
"uploaded_by" text NOT NULL,
"uploaded_at" timestamp with time zone DEFAULT now() NOT NULL,
"download_url_expires_at" timestamp with time zone
);
--> statement-breakpoint
-- Port-wide brochures. At-most-one-default-per-port is maintained by the
-- service layer, not by a DB constraint. archived_at is a soft delete.
CREATE TABLE "brochures" (
"id" text PRIMARY KEY NOT NULL,
"port_id" text NOT NULL,
"label" text NOT NULL,
"description" text,
"is_default" boolean DEFAULT false NOT NULL,
"archived_at" timestamp with time zone,
"created_by" text NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- Audit log: one row per rep-initiated send, success or failure (failed_at /
-- error_reason). berth_pdf_version_id deliberately has no FK — it is
-- loose-coupled to Phase 6b's berth_pdf_versions table.
CREATE TABLE "document_sends" (
"id" text PRIMARY KEY NOT NULL,
"port_id" text NOT NULL,
"client_id" text,
"interest_id" text,
"recipient_email" text NOT NULL,
"document_kind" text NOT NULL,
"berth_id" text,
"berth_pdf_version_id" text,
"brochure_id" text,
"brochure_version_id" text,
"body_markdown" text,
"sent_by_user_id" text NOT NULL,
"from_address" text NOT NULL,
"sent_at" timestamp with time zone DEFAULT now() NOT NULL,
"message_id" text,
"fallback_to_link_reason" text,
"failed_at" timestamp with time zone,
"error_reason" text
);
--> statement-breakpoint
-- Brochure-side FKs cascade with their parents; document_sends FKs are
-- NO ACTION so audit rows block deletion of the entities they reference.
ALTER TABLE "brochure_versions" ADD CONSTRAINT "brochure_versions_brochure_id_brochures_id_fk" FOREIGN KEY ("brochure_id") REFERENCES "public"."brochures"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "brochures" ADD CONSTRAINT "brochures_port_id_ports_id_fk" FOREIGN KEY ("port_id") REFERENCES "public"."ports"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_port_id_ports_id_fk" FOREIGN KEY ("port_id") REFERENCES "public"."ports"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_client_id_clients_id_fk" FOREIGN KEY ("client_id") REFERENCES "public"."clients"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_interest_id_interests_id_fk" FOREIGN KEY ("interest_id") REFERENCES "public"."interests"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_berth_id_berths_id_fk" FOREIGN KEY ("berth_id") REFERENCES "public"."berths"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_brochure_id_brochures_id_fk" FOREIGN KEY ("brochure_id") REFERENCES "public"."brochures"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "document_sends" ADD CONSTRAINT "document_sends_brochure_version_id_brochure_versions_id_fk" FOREIGN KEY ("brochure_version_id") REFERENCES "public"."brochure_versions"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
-- Composite (entity_id, sent_at) indexes back the per-entity timeline queries.
CREATE INDEX "idx_brochure_versions_brochure" ON "brochure_versions" USING btree ("brochure_id","uploaded_at");--> statement-breakpoint
CREATE INDEX "idx_brochures_port" ON "brochures" USING btree ("port_id");--> statement-breakpoint
CREATE INDEX "idx_ds_client" ON "document_sends" USING btree ("client_id","sent_at");--> statement-breakpoint
CREATE INDEX "idx_ds_interest" ON "document_sends" USING btree ("interest_id","sent_at");--> statement-breakpoint
CREATE INDEX "idx_ds_berth" ON "document_sends" USING btree ("berth_id","sent_at");--> statement-breakpoint
CREATE INDEX "idx_ds_port" ON "document_sends" USING btree ("port_id","sent_at");

File diff suppressed because it is too large Load Diff

View File

@@ -218,6 +218,13 @@
"when": 1777944021221,
"tag": "0030_berth_pdf_versions",
"breakpoints": true
},
{
"idx": 31,
"version": "7",
"when": 1777944191753,
"tag": "0031_brochures_and_document_sends",
"breakpoints": true
}
]
}

View File

@@ -0,0 +1,127 @@
import { pgTable, text, boolean, integer, timestamp, index } from 'drizzle-orm/pg-core';
import { ports } from './ports';
import { clients } from './clients';
import { interests } from './interests';
import { berths } from './berths';
/**
 * Port-wide brochures (Phase 7 — see plan §3.3 / §4.8).
 *
 * Each port can have multiple brochures (e.g. "General", "Investor Pack")
 * with one marked as `isDefault`. Archived brochures stay queryable for
 * audit purposes but are hidden from the send-out picker.
 */
export const brochures = pgTable(
  'brochures',
  {
    id: text('id')
      .primaryKey()
      .$defaultFn(() => crypto.randomUUID()),
    // Owning port; brochures are deleted along with their port.
    portId: text('port_id')
      .notNull()
      .references(() => ports.id, { onDelete: 'cascade' }),
    label: text('label').notNull(),
    description: text('description'),
    // At-most-one-default-per-port is maintained by the brochures service
    // (create/update demote the previous default) — not by a DB constraint.
    isDefault: boolean('is_default').notNull().default(false),
    // Soft delete: non-null means hidden from pickers but kept for audit.
    archivedAt: timestamp('archived_at', { withTimezone: true }),
    createdBy: text('created_by').notNull(),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [index('idx_brochures_port').on(table.portId)],
);
/**
 * Versioned brochure files. Identical lifecycle to `berth_pdf_versions`:
 * each upload creates a new immutable row with a monotonic version number
 * per brochure. `storageKey` follows the §4.7a renamed convention.
 */
export const brochureVersions = pgTable(
  'brochure_versions',
  {
    id: text('id')
      .primaryKey()
      .$defaultFn(() => crypto.randomUUID()),
    // Deleting a brochure removes its entire version history.
    brochureId: text('brochure_id')
      .notNull()
      .references(() => brochures.id, { onDelete: 'cascade' }),
    // Monotonic per brochure; allocated by the service at registration time.
    versionNumber: integer('version_number').notNull(),
    /** Object key in the active storage backend (renamed from `s3_key` per §4.7a). */
    storageKey: text('storage_key').notNull(),
    // Original client-side file name, reused when attaching to emails.
    fileName: text('file_name').notNull(),
    fileSizeBytes: integer('file_size_bytes').notNull(),
    // Client-reported hash — NOTE(review): not re-verified server-side
    // (registerBrochureVersion only HEAD-checks existence + size).
    contentSha256: text('content_sha256').notNull(),
    uploadedBy: text('uploaded_by').notNull(),
    uploadedAt: timestamp('uploaded_at', { withTimezone: true }).notNull().defaultNow(),
    /** Cached signed-URL expiry per §11.1 — re-sign only when within 1h of expiry. */
    downloadUrlExpiresAt: timestamp('download_url_expires_at', { withTimezone: true }),
  },
  (table) => [index('idx_brochure_versions_brochure').on(table.brochureId, table.uploadedAt)],
);
/**
 * Send-out audit log for berth PDFs and brochures (Phase 7 — plan §3.3).
 *
 * One row per recipient per send. `documentKind` discriminates between
 * `'berth_pdf'` and `'brochure'`; the corresponding `*_version_id` column
 * pins the exact version sent.
 *
 * `berthPdfVersionId` is intentionally a plain text column (no FK) — the
 * referenced table `berth_pdf_versions` is owned by Phase 6b. Loose-coupling
 * keeps the two phases independent (per Phase 7 task brief).
 *
 * `failedAt` and `errorReason` capture send failures (SMTP auth, transport
 * errors). Failed sends are still written so reps can see "I clicked send
 * but it didn't go" in the timeline (per §14.7).
 */
export const documentSends = pgTable(
  'document_sends',
  {
    id: text('id')
      .primaryKey()
      .$defaultFn(() => crypto.randomUUID()),
    // No cascade: audit rows reference the port without following its lifecycle.
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    /** Either client_id or interest_id is set (or both). */
    clientId: text('client_id').references(() => clients.id),
    interestId: text('interest_id').references(() => interests.id),
    // Stored exactly as sent; validated upstream by the send service.
    recipientEmail: text('recipient_email').notNull(),
    /** 'berth_pdf' | 'brochure' */
    documentKind: text('document_kind').notNull(),
    berthId: text('berth_id').references(() => berths.id),
    /** Forward FK ref — berth_pdf_versions defined in Phase 6b. Loose-coupled. */
    berthPdfVersionId: text('berth_pdf_version_id'),
    brochureId: text('brochure_id').references(() => brochures.id),
    brochureVersionId: text('brochure_version_id').references(() => brochureVersions.id),
    /** Exact body used (after merge-field expansion + sanitization). */
    bodyMarkdown: text('body_markdown'),
    sentByUserId: text('sent_by_user_id').notNull(),
    fromAddress: text('from_address').notNull(),
    sentAt: timestamp('sent_at', { withTimezone: true }).notNull().defaultNow(),
    /** SMTP provider message-id for deliverability tracking. */
    messageId: text('message_id'),
    /** When the initial send had its attachment dropped because the SMTP server
     * rejected the size (552 etc.) and the system retried with a download
     * link, this captures the rejection reason for ops visibility. Null when
     * the original send went through as-is. */
    fallbackToLinkReason: text('fallback_to_link_reason'),
    /** Set when the SMTP send transaction itself failed (auth/transport/etc). */
    failedAt: timestamp('failed_at', { withTimezone: true }),
    /** Human-readable failure reason; only meaningful when failedAt is non-null. */
    errorReason: text('error_reason'),
  },
  // Composite (entity, sentAt) indexes back the per-entity timeline queries.
  (t) => [
    index('idx_ds_client').on(t.clientId, t.sentAt),
    index('idx_ds_interest').on(t.interestId, t.sentAt),
    index('idx_ds_berth').on(t.berthId, t.sentAt),
    index('idx_ds_port').on(t.portId, t.sentAt),
  ],
);
// Row (select) and insert shapes inferred from the table definitions above.
export type Brochure = typeof brochures.$inferSelect;
export type NewBrochure = typeof brochures.$inferInsert;
export type BrochureVersion = typeof brochureVersions.$inferSelect;
export type NewBrochureVersion = typeof brochureVersions.$inferInsert;
export type DocumentSend = typeof documentSends.$inferSelect;
export type NewDocumentSend = typeof documentSends.$inferInsert;

View File

@@ -25,6 +25,9 @@ export * from './reservations';
// Documents & Files
export * from './documents';
// Brochures + send-outs (Phase 7)
export * from './brochures';
// Financial
export * from './financial';

View File

@@ -58,6 +58,9 @@ import {
formSubmissions,
} from './documents';
// Brochures + send-outs (Phase 7)
import { brochures, brochureVersions, documentSends } from './brochures';
// Financial
import { expenses, invoices, invoiceLineItems, invoiceExpenses } from './financial';
@@ -883,3 +886,49 @@ export const residentialInterestsRelations = relations(residentialInterests, ({
references: [residentialClients.id],
}),
}));
// ─── Brochures + send-outs (Phase 7) ──────────────────────────────────────────
// A brochure belongs to one port and owns many versions and send records.
export const brochuresRelations = relations(brochures, ({ one, many }) => ({
  port: one(ports, {
    fields: [brochures.portId],
    references: [ports.id],
  }),
  versions: many(brochureVersions),
  sends: many(documentSends),
}));
// Each version row points back to its brochure and may appear in many sends.
export const brochureVersionsRelations = relations(brochureVersions, ({ one, many }) => ({
  brochure: one(brochures, {
    fields: [brochureVersions.brochureId],
    references: [brochures.id],
  }),
  sends: many(documentSends),
}));
// A send links out to everything it references; all links except the port
// are nullable in the schema. (berthPdfVersionId is FK-less — see schema.)
export const documentSendsRelations = relations(documentSends, ({ one }) => ({
  port: one(ports, {
    fields: [documentSends.portId],
    references: [ports.id],
  }),
  client: one(clients, {
    fields: [documentSends.clientId],
    references: [clients.id],
  }),
  interest: one(interests, {
    fields: [documentSends.interestId],
    references: [interests.id],
  }),
  berth: one(berths, {
    fields: [documentSends.berthId],
    references: [berths.id],
  }),
  brochure: one(brochures, {
    fields: [documentSends.brochureId],
    references: [brochures.id],
  }),
  brochureVersion: one(brochureVersions, {
    fields: [documentSends.brochureVersionId],
    references: [brochureVersions.id],
  }),
}));

View File

@@ -0,0 +1,278 @@
/**
* Brochures + brochure-versions service (Phase 7 — see plan §3.3 / §4.7).
*
* Brochures are port-wide marketing PDFs (the sample `Port-Nimara-Brochure-March-2025`
* is 10.26 MB). Each `brochures` row groups a logical brochure (e.g.
* "Investor Pack"); each `brochure_versions` row is an immutable upload tied
* to that brochure. The default brochure is the one the send-out flow picks
* when the rep doesn't pick explicitly (§14.7).
*
* Storage goes through `getStorageBackend()` (Phase 6a) — never minio
* directly. The version row's `storageKey` follows the §4.7a convention.
*/
import { and, asc, desc, eq, isNull } from 'drizzle-orm';
import { db } from '@/lib/db';
import { brochures, brochureVersions, ports } from '@/lib/db/schema';
import type { Brochure, BrochureVersion } from '@/lib/db/schema';
import { ForbiddenError, NotFoundError, ValidationError } from '@/lib/errors';
import { getStorageBackend } from '@/lib/storage';
import { buildStoragePath } from '@/lib/minio';
import { logger } from '@/lib/logger';
// ─── Types ───────────────────────────────────────────────────────────────────
export interface BrochureWithCurrentVersion extends Brochure {
currentVersion: BrochureVersion | null;
versionCount: number;
}
// ─── Internal helpers ────────────────────────────────────────────────────────
async function loadPortSlug(portId: string): Promise<string> {
const port = await db.query.ports.findFirst({ where: eq(ports.id, portId) });
if (!port) throw new NotFoundError('Port');
return port.slug;
}
// ─── List ────────────────────────────────────────────────────────────────────
/**
 * List all brochures for a port. By default returns only non-archived rows;
 * pass `{ includeArchived: true }` for the admin manage page.
 *
 * Exactly two DB round trips: one for the brochure rows, one `inArray` query
 * for every version of every listed brochure. (Previously a third query for
 * `ids[0]` ran unconditionally and was discarded whenever more than one
 * brochure matched.)
 */
export async function listBrochures(
  portId: string,
  opts: { includeArchived?: boolean } = {},
): Promise<BrochureWithCurrentVersion[]> {
  const baseRows = await db.query.brochures.findMany({
    where: opts.includeArchived
      ? eq(brochures.portId, portId)
      : and(eq(brochures.portId, portId), isNull(brochures.archivedAt)),
    orderBy: [desc(brochures.isDefault), asc(brochures.label)],
  });
  if (baseRows.length === 0) return [];
  const ids = baseRows.map((r) => r.id);
  // Newest-first ordering here means the first match per brochure below is
  // its current version — no per-row re-sort needed.
  const allVersions = await db.query.brochureVersions.findMany({
    where: (bv, { inArray }) => inArray(bv.brochureId, ids),
    orderBy: [desc(brochureVersions.uploadedAt)],
  });
  return baseRows.map((row) => {
    const versionsForRow = allVersions.filter((v) => v.brochureId === row.id);
    return {
      ...row,
      currentVersion: versionsForRow[0] ?? null,
      versionCount: versionsForRow.length,
    };
  });
}
export async function getBrochure(
portId: string,
brochureId: string,
): Promise<BrochureWithCurrentVersion> {
const row = await db.query.brochures.findFirst({
where: and(eq(brochures.id, brochureId), eq(brochures.portId, portId)),
});
if (!row) throw new NotFoundError('Brochure');
const versions = await db.query.brochureVersions.findMany({
where: eq(brochureVersions.brochureId, brochureId),
orderBy: [desc(brochureVersions.uploadedAt)],
});
return { ...row, currentVersion: versions[0] ?? null, versionCount: versions.length };
}
/**
 * Resolve the brochure that the send-out flow should default to. Returns the
 * default brochure when one exists and is non-archived; falls back to the
 * first non-archived brochure (in list order) that has a version; null when
 * the port has no usable brochures (the send UI hides the button — §14.7).
 */
export async function getDefaultBrochure(
  portId: string,
): Promise<BrochureWithCurrentVersion | null> {
  const listed = await listBrochures(portId, { includeArchived: false });
  // A brochure without any uploaded version can't be sent.
  const candidates = listed.filter((b) => b.currentVersion !== null);
  if (candidates.length === 0) return null;
  return candidates.find((b) => b.isDefault) ?? candidates[0]!;
}
// ─── Mutations ───────────────────────────────────────────────────────────────
/** Input for `createBrochure`. */
export interface CreateBrochureInput {
  portId: string;
  /** Display label; must be non-empty after trimming. */
  label: string;
  description?: string | null;
  /** When true, any existing default for the port is demoted first. */
  isDefault?: boolean;
  /** User id recorded as the creator. */
  createdBy: string;
}
/**
 * Create a brochure for a port. When `isDefault` is requested, the port's
 * current default (if any) is demoted in the same transaction so the
 * invariant "at most one default per port" holds.
 * @throws ValidationError when the trimmed label is empty.
 */
export async function createBrochure(input: CreateBrochureInput): Promise<Brochure> {
  const label = input.label.trim();
  if (label.length === 0) throw new ValidationError('Brochure label is required');
  return db.transaction(async (tx) => {
    const wantsDefault = Boolean(input.isDefault);
    if (wantsDefault) {
      // Demote the current default before inserting the new one.
      await tx
        .update(brochures)
        .set({ isDefault: false })
        .where(and(eq(brochures.portId, input.portId), eq(brochures.isDefault, true)));
    }
    const inserted = await tx
      .insert(brochures)
      .values({
        portId: input.portId,
        label,
        description: input.description ?? null,
        isDefault: input.isDefault ?? false,
        createdBy: input.createdBy,
      })
      .returning();
    const row = inserted[0];
    if (!row) throw new Error('Failed to create brochure');
    return row;
  });
}
/** Partial patch for `updateBrochure`; omitted fields are left untouched. */
export interface UpdateBrochureInput {
  label?: string;
  /** Explicit null clears the description. */
  description?: string | null;
  isDefault?: boolean;
}
export async function updateBrochure(
portId: string,
brochureId: string,
patch: UpdateBrochureInput,
): Promise<Brochure> {
const existing = await db.query.brochures.findFirst({
where: and(eq(brochures.id, brochureId), eq(brochures.portId, portId)),
});
if (!existing) throw new NotFoundError('Brochure');
return db.transaction(async (tx) => {
if (patch.isDefault === true) {
await tx
.update(brochures)
.set({ isDefault: false })
.where(and(eq(brochures.portId, portId), eq(brochures.isDefault, true)));
}
const updates: Partial<Brochure> = {};
if (patch.label !== undefined) updates.label = patch.label.trim();
if (patch.description !== undefined) updates.description = patch.description;
if (patch.isDefault !== undefined) updates.isDefault = patch.isDefault;
const [row] = await tx
.update(brochures)
.set(updates)
.where(eq(brochures.id, brochureId))
.returning();
if (!row) throw new Error('Failed to update brochure');
return row;
});
}
/**
 * Soft-delete a brochure: sets `archivedAt` and clears the default flag so
 * an archived brochure can never remain the port default.
 * @throws NotFoundError when the brochure doesn't exist within the port.
 */
export async function archiveBrochure(portId: string, brochureId: string): Promise<void> {
  const target = await db.query.brochures.findFirst({
    where: and(eq(brochures.portId, portId), eq(brochures.id, brochureId)),
  });
  if (!target) throw new NotFoundError('Brochure');
  await db
    .update(brochures)
    .set({ isDefault: false, archivedAt: new Date() })
    .where(eq(brochures.id, brochureId));
}
// ─── Versions ────────────────────────────────────────────────────────────────
/** Metadata POSTed back by the browser after a presigned upload completes. */
export interface RegisterBrochureVersionInput {
  portId: string;
  brochureId: string;
  /** Storage key the browser PUT to (from `generateBrochureStorageKey`). */
  storageKey: string;
  fileName: string;
  /** Claimed size; verified against a storage HEAD before the row is written. */
  fileSizeBytes: number;
  /** Client-computed hash of the uploaded bytes (recorded, not re-verified). */
  contentSha256: string;
  uploadedBy: string;
}
/**
 * After a presigned upload completes, the browser POSTs the metadata back
 * here. We HEAD the storage key to verify the file exists at the claimed
 * size (per §11.1 "Server-side validation"), then write the version row
 * with the next version number.
 *
 * NOTE(review): only existence + size are verified against storage —
 * `contentSha256` (and content-type) are recorded as claimed by the
 * client. Confirm whether §11.1 expects a server-side hash check too.
 *
 * @throws NotFoundError when the brochure doesn't exist within the port.
 * @throws ForbiddenError when the brochure is archived.
 * @throws ValidationError when the object is missing or its size mismatches.
 */
export async function registerBrochureVersion(
  input: RegisterBrochureVersionInput,
): Promise<BrochureVersion> {
  const brochure = await db.query.brochures.findFirst({
    where: and(eq(brochures.id, input.brochureId), eq(brochures.portId, input.portId)),
  });
  if (!brochure) throw new NotFoundError('Brochure');
  if (brochure.archivedAt) {
    throw new ForbiddenError('Cannot upload a version to an archived brochure');
  }
  const storage = await getStorageBackend();
  const head = await storage.head(input.storageKey);
  if (!head) throw new ValidationError('Uploaded object not found in storage');
  if (head.sizeBytes !== input.fileSizeBytes) {
    logger.warn(
      { expected: input.fileSizeBytes, actual: head.sizeBytes, key: input.storageKey },
      'Brochure upload size mismatch',
    );
    throw new ValidationError('Uploaded object size does not match metadata');
  }
  // Allocate the next version number and insert inside one transaction so
  // two near-simultaneous registrations don't both read the same max.
  // (A unique index on (brochure_id, version_number) would make this
  // airtight; without one a race is unlikely but still possible.)
  return db.transaction(async (tx) => {
    const latest = await tx.query.brochureVersions.findMany({
      where: eq(brochureVersions.brochureId, input.brochureId),
      orderBy: [desc(brochureVersions.versionNumber)],
      limit: 1,
    });
    const nextVersion = (latest[0]?.versionNumber ?? 0) + 1;
    const [row] = await tx
      .insert(brochureVersions)
      .values({
        brochureId: input.brochureId,
        versionNumber: nextVersion,
        storageKey: input.storageKey,
        fileName: input.fileName,
        fileSizeBytes: input.fileSizeBytes,
        contentSha256: input.contentSha256,
        uploadedBy: input.uploadedBy,
      })
      .returning();
    if (!row) throw new Error('Failed to record brochure version');
    return row;
  });
}
/**
* Generate a storage key the client should PUT to. Caller hands the returned
* key + URL to the browser; after upload the browser calls
* `registerBrochureVersion` with the same key.
*/
export async function generateBrochureStorageKey(
portId: string,
brochureId: string,
): Promise<string> {
const portSlug = await loadPortSlug(portId);
const fileId = crypto.randomUUID();
return buildStoragePath(portSlug, 'brochures', brochureId, fileId, 'pdf');
}

View File

@@ -0,0 +1,531 @@
/**
* Sales send-out flow (Phase 7 — see plan §4.8 / §11.1 / §14.7).
*
* Sends per-berth PDFs and brochures to a client recipient, attaching the
* file when it's at-or-below the configured threshold or falling back to a
* 24h signed-URL link when it's larger. Every send writes one row to
* `document_sends` (success OR failure) so the rep can see the outcome in
* the timeline.
*
* §14.7 critical mitigations implemented here:
*
* - **Body XSS** — bodies go through `renderEmailBody()` (HTML-escape +
* allowlist of markdown rules) before reaching nodemailer.
* - **Recipient typo** — recipient email validated against a strict regex
* before the SMTP transaction.
* - **Unresolved merge fields** — `findUnresolvedTokens()` is exported
* for the dry-run UI; the service blocks sends with unresolved tokens
* unless `allowUnresolved: true` is explicitly passed (test-only).
* - **SMTP failure** — every transport rejection writes a `failedAt` row
* with `errorReason` and surfaces a typed error to the API.
* - **Hourly rate limit** — 50 sends/user/hour individual.
* - **Size threshold fallback** — files larger than the per-port
* `email_attach_threshold_mb` go as a signed-URL link in the body
* instead of an attachment (§11.1).
*/
import { Readable } from 'node:stream';
import { and, desc, eq } from 'drizzle-orm';
import type { SentMessageInfo } from 'nodemailer';
import { db } from '@/lib/db';
import {
brochures,
brochureVersions,
documentSends,
berths,
berthPdfVersions,
clients,
clientContacts,
ports,
} from '@/lib/db/schema';
import type { DocumentSend } from '@/lib/db/schema';
import { ForbiddenError, NotFoundError, ValidationError } from '@/lib/errors';
import { logger } from '@/lib/logger';
import { checkRateLimit } from '@/lib/rate-limit';
import { getStorageBackend } from '@/lib/storage';
import {
EMAIL_BODY_MAX_BYTES,
expandMergeTokens,
findUnresolvedTokens,
renderEmailBody,
} from '@/lib/utils/markdown-email';
import { getDefaultBrochure } from '@/lib/services/brochures.service';
import {
createSalesTransporter,
getSalesContentConfig,
} from '@/lib/services/sales-email-config.service';
// ─── Public types ────────────────────────────────────────────────────────────
/** Identifies who a document send is addressed to. */
export interface SendRecipientInput {
  /** Existing client ID (resolves the primary email automatically). */
  clientId?: string;
  /** Optional explicit address override (for cases where a client has multiple). */
  email?: string;
  /** Optional interest pin so the audit row links into the interest timeline. */
  interestId?: string;
}
/** Arguments for sending a single berth's PDF to one recipient. */
export interface SendBerthPdfInput {
  portId: string;
  berthId: string;
  recipient: SendRecipientInput;
  /** When provided, replaces the per-port template. Still passes through
   * merge expansion + sanitization. */
  customBodyMarkdown?: string;
  /** User id of the rep initiating the send. */
  sentBy: string;
  ipAddress: string;
  userAgent: string;
  /** Test-only: skip the unresolved-merge-field block. */
  allowUnresolved?: boolean;
}
/** Arguments for sending a brochure to one recipient. */
export interface SendBrochureInput {
  portId: string;
  /** Defaults to the port's default brochure when omitted. */
  brochureId?: string;
  recipient: SendRecipientInput;
  customBodyMarkdown?: string;
  sentBy: string;
  ipAddress: string;
  userAgent: string;
  allowUnresolved?: boolean;
}
/** Outcome of a send attempt — an audit row is written either way. */
export interface SendResult {
  send: DocumentSend;
  /** True when the file was attached; false when a signed-URL link was used. */
  deliveredAsAttachment: boolean;
  /** Set when the transport rejected — the row carries `failedAt`. */
  error?: string;
}
// ─── Public dry-run / preview helpers (used by the modal) ────────────────────
/**
 * Compute the merge-value bag for a given send context. The same map is used
 * by the dry-run preview AND the actual send so the rep sees exactly what
 * gets posted.
 *
 * Keys include their delimiters (e.g. `{{client.fullName}}`) so expansion is
 * a straight lookup; missing context simply omits keys and unresolved tokens
 * are surfaced separately via `findUnresolvedTokens()`.
 */
export async function buildMergeValues(
  portId: string,
  recipient: SendRecipientInput,
  context: { berthId?: string; brochureLabel?: string } = {},
): Promise<Record<string, string>> {
  const values: Record<string, string> = {};
  // One Date instance so `today` and `year` can't straddle midnight.
  const now = new Date();
  values['{{date.today}}'] = now.toISOString().slice(0, 10);
  values['{{date.year}}'] = String(now.getFullYear());
  // The three lookups are independent — run them in parallel.
  const [port, client, berth] = await Promise.all([
    db.query.ports.findFirst({ where: eq(ports.id, portId) }),
    recipient.clientId
      ? db.query.clients.findFirst({
          where: and(eq(clients.id, recipient.clientId), eq(clients.portId, portId)),
        })
      : Promise.resolve(undefined),
    context.berthId
      ? db.query.berths.findFirst({
          where: and(eq(berths.id, context.berthId), eq(berths.portId, portId)),
        })
      : Promise.resolve(undefined),
  ]);
  if (port) {
    values['{{port.name}}'] = port.name;
    if (port.defaultCurrency) values['{{port.defaultCurrency}}'] = port.defaultCurrency;
  }
  if (client) {
    if (client.fullName) values['{{client.fullName}}'] = client.fullName;
    if (client.nationalityIso) values['{{client.nationality}}'] = client.nationalityIso;
    if (client.source) values['{{client.source}}'] = client.source;
    const contacts = await db.query.clientContacts.findMany({
      where: eq(clientContacts.clientId, client.id),
    });
    // Prefer the primary contact for a channel, fall back to any.
    const pick = (channel: string): string | undefined =>
      contacts.find((c) => c.channel === channel && c.isPrimary)?.value ??
      contacts.find((c) => c.channel === channel)?.value;
    const primaryEmail = pick('email');
    const primaryPhone = pick('phone');
    if (primaryEmail) values['{{client.email}}'] = primaryEmail;
    if (primaryPhone) values['{{client.phone}}'] = primaryPhone;
  }
  if (berth) {
    values['{{berth.mooringNumber}}'] = berth.mooringNumber;
    if (berth.area) values['{{berth.area}}'] = berth.area;
    if (berth.status) values['{{berth.status}}'] = berth.status;
    if (berth.lengthFt) values['{{berth.lengthFt}}'] = String(berth.lengthFt);
    if (berth.widthFt) values['{{berth.widthFt}}'] = String(berth.widthFt);
    if (berth.price) values['{{berth.price}}'] = String(berth.price);
    if (berth.priceCurrency) values['{{berth.priceCurrency}}'] = berth.priceCurrency;
  }
  // NOTE(review): context.brochureLabel is accepted but never mapped to a
  // token — confirm whether a {{brochure.label}} merge field was intended.
  return values;
}
/**
 * Render a body for the dry-run UI. Returns `{ html, markdown, unresolved }`.
 * The UI uses `unresolved` to populate the warning chip; the rep can't
 * submit until the list is empty.
 */
export async function previewBody(
  portId: string,
  documentKind: 'berth_pdf' | 'brochure',
  recipient: SendRecipientInput,
  customBody: string | null,
  ctx: { berthId?: string; brochureLabel?: string } = {},
): Promise<{ html: string; markdown: string; unresolved: string[] }> {
  const content = await getSalesContentConfig(portId);
  // A non-blank custom body wins over the per-port template.
  let template: string;
  if (customBody?.trim()?.length) {
    template = customBody;
  } else if (documentKind === 'berth_pdf') {
    template = content.templateBerthPdfBody;
  } else {
    template = content.templateBrochureBody;
  }
  const values = await buildMergeValues(portId, recipient, ctx);
  const markdown = expandMergeTokens(template, values);
  return {
    html: renderEmailBody(markdown),
    markdown,
    unresolved: findUnresolvedTokens(template, values),
  };
}
// ─── Internal helpers ────────────────────────────────────────────────────────
// Pragmatic "something@something.tld" shape check. Despite the name this is
// deliberately NOT the full RFC 5322 grammar — it exists to catch recipient
// typos before the SMTP transaction (§14.7), not to admit every legal form.
const RFC5322_EMAIL = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
/**
 * Throw when `email` is empty, overlong, or fails the shape check.
 * The 254-char cap is the practical maximum address length (RFC 5321 errata).
 * @throws ValidationError
 */
function assertEmailValid(email: string): void {
  if (!email || email.length > 254 || !RFC5322_EMAIL.test(email)) {
    throw new ValidationError(`Invalid recipient email: ${email}`);
  }
}
/**
 * Determine the address a send should go to: an explicit override wins;
 * otherwise the client's primary (or first) email contact is used. Every
 * returned address has passed `assertEmailValid`.
 * @throws ValidationError / NotFoundError per the checks below.
 */
async function resolveRecipientEmail(
  portId: string,
  recipient: SendRecipientInput,
): Promise<string> {
  const { email: override, clientId } = recipient;
  if (override) {
    assertEmailValid(override);
    return override;
  }
  if (!clientId) {
    throw new ValidationError('Recipient must include either clientId or email');
  }
  const client = await db.query.clients.findFirst({
    where: and(eq(clients.id, clientId), eq(clients.portId, portId)),
  });
  if (!client) throw new NotFoundError('Client');
  const contacts = await db.query.clientContacts.findMany({
    where: eq(clientContacts.clientId, client.id),
  });
  const emailContacts = contacts.filter((c) => c.channel === 'email');
  const chosen = emailContacts.find((c) => c.isPrimary) ?? emailContacts[0];
  if (!chosen) throw new ValidationError('Client has no email on file');
  assertEmailValid(chosen.value);
  return chosen.value;
}
/** Enforce the §14.7 hourly per-user cap: 50 document sends per rolling hour. */
async function checkSendRateLimit(userId: string): Promise<void> {
  const HOUR_MS = 60 * 60 * 1000;
  const verdict = await checkRateLimit(userId, {
    windowMs: HOUR_MS,
    max: 50,
    keyPrefix: 'docsend',
  });
  if (verdict.allowed) return;
  const resetIso = new Date(verdict.resetAt).toISOString();
  throw new ForbiddenError(`Hit hourly send limit (${verdict.limit}). Retry after ${resetIso}.`);
}
/** Attachment metadata resolved from a berth-PDF or brochure version row. */
interface ResolvedAttachment {
  /** Object key in the active storage backend. */
  storageKey: string;
  // Shown to the recipient as the attachment name (or as the link text on
  // the over-threshold download path).
  fileName: string;
  // Compared against the per-port email_attach_threshold_mb setting (§11.1)
  // to decide attachment vs. signed-link delivery.
  fileSizeBytes: number;
}
/**
 * Decide how to deliver the file: inline attachment (streamed straight from
 * storage into nodemailer) or — above the per-port email_attach_threshold_mb
 * setting — a 24h signed download link appended to the body (§11.1).
 *
 * @returns nodemailer-ready `attachments` on the attachment path, or
 *   `bodySuffixHtml` on the link path, plus the flag recorded in the
 *   document_sends audit row.
 */
async function streamAttachmentOrLink(
  portId: string,
  attachment: ResolvedAttachment,
): Promise<{
  attachments?: Array<{ filename: string; content: Readable }>;
  bodySuffixHtml?: string;
  deliveredAsAttachment: boolean;
}> {
  const content = await getSalesContentConfig(portId);
  const thresholdBytes = content.emailAttachThresholdMb * 1024 * 1024;
  if (attachment.fileSizeBytes <= thresholdBytes) {
    // Stream from storage directly into nodemailer to avoid buffering 20MB+.
    const storage = await getStorageBackend();
    const stream = await storage.get(attachment.storageKey);
    // The storage abstraction returns NodeJS.ReadableStream; nodemailer's
    // Attachment.content type wants `Readable`. The two are compatible —
    // both stream backends expose a Readable. Cast to keep types tight.
    const readable = stream as unknown as Readable;
    return {
      deliveredAsAttachment: true,
      attachments: [{ filename: attachment.fileName, content: readable }],
    };
  }
  // Above threshold: generate a 24h signed download URL and append a link
  // to the body. Per §11.1 the size decision is made BEFORE the SMTP relay,
  // so we never produce duplicate sends.
  const storage = await getStorageBackend();
  const { url } = await storage.presignDownload(attachment.storageKey, {
    expirySeconds: 24 * 60 * 60,
    filename: attachment.fileName,
  });
  // FIX: the file name is uploader-controlled and the signed URL can contain
  // `&`/`"` — both were previously interpolated into the HTML raw. Escape
  // them so a crafted file name can't inject markup into the outgoing email
  // (same entity set as the §14.7 renderer) and the href attribute stays
  // well-formed.
  const escapeForHtml = (s: string): string =>
    s
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;')
      .replace(/'/g, '&#39;');
  const safeUrl = escapeForHtml(url);
  const safeName = escapeForHtml(attachment.fileName);
  const html = `<p>The file is large enough that we're sending it as a download link rather than an attachment:</p>
<p><a href="${safeUrl}" target="_blank" rel="noopener noreferrer">Download ${safeName}</a> (link expires in 24 hours)</p>`;
  return { deliveredAsAttachment: false, bodySuffixHtml: html };
}
/**
 * Shared send pipeline for both document kinds (berth PDF + brochure).
 *
 * Flow:
 *   1. Decide attachment-vs-link delivery (streamAttachmentOrLink) and
 *      append the link preamble to the body when applicable.
 *   2. Build the per-port sales transporter; a config failure is recorded
 *      as a failed document_sends row (failedAt + errorReason) and returned
 *      as a SendResult carrying `error` — it does NOT throw.
 *   3. Send via nodemailer; success and SMTP failure each write exactly one
 *      audit row, so every rep-initiated attempt leaves a trace (§14.7).
 */
async function performSend(args: {
  portId: string;
  recipientEmail: string;
  subject: string;
  bodyHtml: string;
  attachment: ResolvedAttachment;
  // Caller-built audit columns. `fromAddress` arrives as a placeholder
  // ('' from both callers) and is overwritten once the transporter resolves.
  recordSeed: Omit<typeof documentSends.$inferInsert, 'id' | 'sentAt' | 'createdAt'>;
}): Promise<SendResult> {
  // 1. Build attachment vs link preamble.
  const delivery = await streamAttachmentOrLink(args.portId, args.attachment);
  const finalHtml = delivery.bodySuffixHtml
    ? `${args.bodyHtml}\n${delivery.bodySuffixHtml}`
    : args.bodyHtml;
  // 2. Create the transporter (per-port sales account).
  let transporter, fromAddress;
  try {
    ({ transporter, fromAddress } = await createSalesTransporter(args.portId));
  } catch (configErr) {
    // Config errors (e.g. no SMTP host) are recorded, not rethrown: the
    // caller receives a SendResult whose `error` the UI surfaces (§14.7).
    const msg = configErr instanceof Error ? configErr.message : String(configErr);
    const [row] = await db
      .insert(documentSends)
      .values({
        ...args.recordSeed,
        // No address was resolved, so store 'unknown' over the '' placeholder.
        fromAddress: args.recordSeed.fromAddress || 'unknown',
        bodyMarkdown: args.recordSeed.bodyMarkdown ?? null,
        failedAt: new Date(),
        errorReason: msg,
      })
      .returning();
    return {
      send: row!,
      deliveredAsAttachment: false,
      error: msg,
    };
  }
  // 3. Send.
  try {
    const info: SentMessageInfo = await transporter.sendMail({
      from: fromAddress,
      to: args.recipientEmail,
      subject: args.subject,
      html: finalHtml,
      // Attachments key only present on the attachment path; on the link
      // path the signed URL was already appended to the body above.
      ...(delivery.attachments ? { attachments: delivery.attachments } : {}),
    });
    // Success: one audit row with the relay's message id. fallbackToLinkReason
    // is recorded only when the file shipped as a link instead of an attachment.
    const [row] = await db
      .insert(documentSends)
      .values({
        ...args.recordSeed,
        fromAddress,
        messageId: info.messageId ?? null,
        fallbackToLinkReason: delivery.deliveredAsAttachment ? null : 'size_above_threshold',
      })
      .returning();
    return { send: row!, deliveredAsAttachment: delivery.deliveredAsAttachment };
  } catch (sendErr) {
    // SMTP rejection: log, record failedAt + errorReason, and resolve (no
    // throw) so every attempt leaves exactly one document_sends row.
    const msg = sendErr instanceof Error ? sendErr.message : String(sendErr);
    logger.error({ err: sendErr, portId: args.portId }, 'Sales send failed');
    const [row] = await db
      .insert(documentSends)
      .values({
        ...args.recordSeed,
        fromAddress,
        failedAt: new Date(),
        errorReason: msg,
      })
      .returning();
    return { send: row!, deliveredAsAttachment: false, error: msg };
  }
}
// ─── Public sender: berth PDF ────────────────────────────────────────────────
/**
 * Send the berth's current PDF version to a resolved recipient.
 *
 * Pipeline: hourly rate limit (§14.7) → recipient resolution → berth +
 * active PDF version lookup → body build/validation (size cap + unresolved
 * merge-token gate) → performSend, which writes the document_sends audit row.
 *
 * @throws ForbiddenError when over the hourly send cap.
 * @throws NotFoundError when the berth or its PDF version doesn't exist.
 * @throws ValidationError on recipient/body/merge-token problems.
 */
export async function sendBerthPdf(input: SendBerthPdfInput): Promise<SendResult> {
  await checkSendRateLimit(input.sentBy);
  const recipientEmail = await resolveRecipientEmail(input.portId, input.recipient);
  // Resolve berth + active version (port-scoped lookup).
  const berth = await db.query.berths.findFirst({
    where: and(eq(berths.id, input.berthId), eq(berths.portId, input.portId)),
  });
  if (!berth) throw new NotFoundError('Berth');
  if (!berth.currentPdfVersionId) {
    throw new ValidationError(
      'No PDF uploaded for this berth yet. Upload one in the berth detail page first.',
    );
  }
  const version = await db.query.berthPdfVersions.findFirst({
    where: eq(berthPdfVersions.id, berth.currentPdfVersionId),
  });
  if (!version) throw new NotFoundError('Berth PDF version');
  // Build body: rep-supplied markdown (if non-blank) else the port template.
  const content = await getSalesContentConfig(input.portId);
  const template = input.customBodyMarkdown?.trim()?.length
    ? input.customBodyMarkdown
    : content.templateBerthPdfBody;
  if (Buffer.byteLength(template, 'utf8') > EMAIL_BODY_MAX_BYTES) {
    throw new ValidationError('Email body exceeds maximum length');
  }
  const values = await buildMergeValues(input.portId, input.recipient, { berthId: berth.id });
  const unresolved = findUnresolvedTokens(template, values);
  if (unresolved.length > 0 && !input.allowUnresolved) {
    throw new ValidationError(`Unresolved merge tokens: ${unresolved.join(', ')}`);
  }
  const expanded = expandMergeTokens(template, values);
  // FIX: merge expansion can push the body past the cap even when the raw
  // template is under it; re-check here so the failure surfaces as a
  // ValidationError instead of renderEmailBody()'s generic Error.
  if (Buffer.byteLength(expanded, 'utf8') > EMAIL_BODY_MAX_BYTES) {
    throw new ValidationError('Email body exceeds maximum length');
  }
  const bodyHtml = renderEmailBody(expanded);
  // Subject pulls in the mooring number for inbox triage.
  const subject = `Berth ${berth.mooringNumber} — spec sheet`;
  return performSend({
    portId: input.portId,
    recipientEmail,
    subject,
    bodyHtml,
    attachment: {
      storageKey: version.storageKey,
      fileName: version.fileName,
      fileSizeBytes: version.fileSizeBytes,
    },
    recordSeed: {
      portId: input.portId,
      clientId: input.recipient.clientId ?? null,
      interestId: input.recipient.interestId ?? null,
      recipientEmail,
      documentKind: 'berth_pdf',
      berthId: berth.id,
      berthPdfVersionId: version.id,
      brochureId: null,
      brochureVersionId: null,
      bodyMarkdown: expanded,
      sentByUserId: input.sentBy,
      // Placeholder; performSend records the resolved transporter address.
      fromAddress: '',
    },
  });
}
// ─── Public sender: brochure ─────────────────────────────────────────────────
/**
 * Send a port brochure (explicit `brochureId`, or the port's default) to a
 * resolved recipient. Mirrors sendBerthPdf: rate limit → recipient →
 * document resolution → body validation → performSend (audit row).
 *
 * @throws ForbiddenError when over the hourly send cap.
 * @throws NotFoundError when the explicit brochureId doesn't exist.
 * @throws ValidationError for archived brochures, missing default/version,
 *   or body/merge-token problems.
 */
export async function sendBrochure(input: SendBrochureInput): Promise<SendResult> {
  await checkSendRateLimit(input.sentBy);
  const recipientEmail = await resolveRecipientEmail(input.portId, input.recipient);
  // Resolve brochure + most-recent version.
  let brochureRow;
  if (input.brochureId) {
    brochureRow = await db.query.brochures.findFirst({
      where: and(eq(brochures.id, input.brochureId), eq(brochures.portId, input.portId)),
    });
    if (!brochureRow) throw new NotFoundError('Brochure');
    if (brochureRow.archivedAt) {
      throw new ValidationError('Brochure is archived');
    }
  } else {
    // NOTE(review): no archivedAt check on this branch — assumes
    // getDefaultBrochure never returns an archived brochure; confirm.
    const def = await getDefaultBrochure(input.portId);
    if (!def || !def.currentVersion) {
      throw new ValidationError(
        'No default brochure configured for this port. Upload one in /admin/brochures.',
      );
    }
    brochureRow = def;
  }
  // Always attach the most recently uploaded version.
  const versions = await db.query.brochureVersions.findMany({
    where: eq(brochureVersions.brochureId, brochureRow.id),
    orderBy: [desc(brochureVersions.uploadedAt)],
    limit: 1,
  });
  const version = versions[0];
  if (!version) {
    throw new ValidationError('Brochure has no uploaded version yet');
  }
  // Build body: rep-supplied markdown (if non-blank) else the port template.
  const content = await getSalesContentConfig(input.portId);
  const template = input.customBodyMarkdown?.trim()?.length
    ? input.customBodyMarkdown
    : content.templateBrochureBody;
  if (Buffer.byteLength(template, 'utf8') > EMAIL_BODY_MAX_BYTES) {
    throw new ValidationError('Email body exceeds maximum length');
  }
  const values = await buildMergeValues(input.portId, input.recipient, {
    brochureLabel: brochureRow.label,
  });
  const unresolved = findUnresolvedTokens(template, values);
  if (unresolved.length > 0 && !input.allowUnresolved) {
    throw new ValidationError(`Unresolved merge tokens: ${unresolved.join(', ')}`);
  }
  const expanded = expandMergeTokens(template, values);
  // FIX: merge expansion can push the body past the cap even when the raw
  // template is under it; re-check here so the failure surfaces as a
  // ValidationError instead of renderEmailBody()'s generic Error.
  if (Buffer.byteLength(expanded, 'utf8') > EMAIL_BODY_MAX_BYTES) {
    throw new ValidationError('Email body exceeds maximum length');
  }
  const bodyHtml = renderEmailBody(expanded);
  const subject = `${brochureRow.label} — brochure`;
  return performSend({
    portId: input.portId,
    recipientEmail,
    subject,
    bodyHtml,
    attachment: {
      storageKey: version.storageKey,
      fileName: version.fileName,
      fileSizeBytes: version.fileSizeBytes,
    },
    recordSeed: {
      portId: input.portId,
      clientId: input.recipient.clientId ?? null,
      interestId: input.recipient.interestId ?? null,
      recipientEmail,
      documentKind: 'brochure',
      berthId: null,
      berthPdfVersionId: null,
      brochureId: brochureRow.id,
      brochureVersionId: version.id,
      bodyMarkdown: expanded,
      sentByUserId: input.sentBy,
      // Placeholder; performSend records the resolved transporter address.
      fromAddress: '',
    },
  });
}
// ─── Audit query ─────────────────────────────────────────────────────────────
/** Filters for the document_sends audit query (see listSends). */
export interface ListSendsFilters {
  /** Required — queries are always port-scoped. */
  portId: string;
  clientId?: string;
  interestId?: string;
  berthId?: string;
  /** Max rows to return; listSends defaults this to 100. */
  limit?: number;
}
/**
 * Query the document_sends audit log for a port, newest first (by sentAt).
 * Optional clientId/interestId/berthId filters narrow the result; `limit`
 * defaults to 100.
 */
export async function listSends(filters: ListSendsFilters): Promise<DocumentSend[]> {
  const { clientId, interestId, berthId } = filters;
  const predicates = [eq(documentSends.portId, filters.portId)];
  if (clientId) predicates.push(eq(documentSends.clientId, clientId));
  if (interestId) predicates.push(eq(documentSends.interestId, interestId));
  if (berthId) predicates.push(eq(documentSends.berthId, berthId));
  return db
    .select()
    .from(documentSends)
    .where(and(...predicates))
    .orderBy(desc(documentSends.sentAt))
    .limit(filters.limit ?? 100);
}

View File

@@ -0,0 +1,374 @@
/**
* Per-port sales-email configuration (Phase 7 — see plan §4.9).
*
* Distinct from {@link getPortEmailConfig} (`port-config.ts`) which resolves
* the **noreply** account used by automated/system emails. The sales account
* is the human-touch outbound: brochure/berth-pdf send-outs from
* `document-sends.service.ts`, follow-up emails composed by reps.
*
* Both inboxes (SMTP + IMAP) live behind the same provider account in 99% of
* deployments; both are configured here. The IMAP half is consumed by the
* async-bounce monitor (§14.9), out of scope for this service but exposed
* via `getSalesImapConfig()`.
*
* SECURITY (§14.10): SMTP/IMAP passwords are encrypted at rest using the
* existing `EMAIL_CREDENTIAL_KEY` symmetric key. Reps cannot read the
* decrypted value via the API — only `manage_settings` admins can write,
* and even they only ever see a placeholder mask on read (see the admin
* route handler).
*/
import nodemailer, { type Transporter } from 'nodemailer';
import { env } from '@/lib/env';
import { decrypt, encrypt } from '@/lib/utils/encryption';
import { getSetting, upsertSetting } from '@/lib/services/settings.service';
import type { AuditMeta } from '@/lib/audit';
// ─── Setting keys ────────────────────────────────────────────────────────────
// Settings-table keys for the per-port sales email config. The *Encrypted
// keys store AES-256-GCM ciphertext (see updateSalesEmailConfig); everything
// else is stored plain.
export const SALES_EMAIL_KEYS = {
  // SMTP (outbound) half.
  fromAddress: 'sales_from_address',
  smtpHost: 'sales_smtp_host',
  smtpPort: 'sales_smtp_port',
  smtpSecure: 'sales_smtp_secure',
  smtpUser: 'sales_smtp_user',
  smtpPassEncrypted: 'sales_smtp_pass_encrypted',
  // IMAP (inbound) half — consumed by the bounce monitor (§14.9).
  imapHost: 'sales_imap_host',
  imapPort: 'sales_imap_port',
  imapUser: 'sales_imap_user',
  imapPassEncrypted: 'sales_imap_pass_encrypted',
  authMethod: 'sales_auth_method',
  // Content / threshold settings (non-credential).
  noreplyFromAddress: 'noreply_from_address',
  templateBerthPdfBody: 'email_template_send_berth_pdf_body',
  templateBrochureBody: 'email_template_send_brochure_body',
  brochureMaxUploadMb: 'brochure_max_upload_mb',
  emailAttachThresholdMb: 'email_attach_threshold_mb',
} as const;
// ─── Types ───────────────────────────────────────────────────────────────────
/** Resolved per-port SMTP identity consumed by createSalesTransporter(). */
export interface SalesEmailConfig {
  /** Resolved "From:" address (port setting, else env-derived fallback). */
  fromAddress: string;
  smtpHost: string | null;
  smtpPort: number;
  smtpSecure: boolean;
  smtpUser: string | null;
  /** Decrypted plaintext, available only inside server-side service code. */
  smtpPass: string | null;
  /** Defaults to 'app_password' when unset. */
  authMethod: string;
  /** Whether the config is complete enough to actually send. */
  isUsable: boolean;
}
/** IMAP half of the sales account — for the async bounce monitor (§14.9). */
export interface SalesImapConfig {
  imapHost: string | null;
  /** Defaults to 993 when unset. */
  imapPort: number;
  imapUser: string | null;
  /** Decrypted plaintext; null when unset or undecryptable. */
  imapPass: string | null;
  /** True only when host, user, and a decryptable password are all present. */
  isUsable: boolean;
}
/** Non-credential content settings: body templates + size thresholds. */
export interface SalesContentConfig {
  noreplyFromAddress: string;
  /** Markdown body template for berth-PDF sends; {{merge}} tokens allowed. */
  templateBerthPdfBody: string;
  /** Markdown body template for brochure sends; {{merge}} tokens allowed. */
  templateBrochureBody: string;
  /** Admin-facing upload ceiling in MB (default 50). */
  brochureMaxUploadMb: number;
  /** Attachment-vs-download-link cutover in MB per §11.1 (default 15). */
  emailAttachThresholdMb: number;
}
// ─── Defaults ────────────────────────────────────────────────────────────────
// Fallback markdown bodies used when a port hasn't customized its templates
// (see getSalesContentConfig). {{tokens}} are resolved at send time; the
// trailing "Best," leaves room for the rep's signature.
// NOTE: these strings are runtime content — editing them changes what
// unconfigured ports actually send.
const DEFAULT_BERTH_PDF_BODY = [
  'Hi {{client.fullName}},',
  '',
  'Please find attached the spec sheet for berth {{berth.mooringNumber}} at {{port.name}}.',
  '',
  'Happy to set up a call to walk through the details — let me know what works.',
  '',
  'Best,',
].join('\n');
const DEFAULT_BROCHURE_BODY = [
  'Hi {{client.fullName}},',
  '',
  'As discussed, attached is our {{port.name}} brochure with the latest information on availability, amenities, and access.',
  '',
  'Let me know if any specific berths catch your eye and I can pull together more detail.',
  '',
  'Best,',
].join('\n');
// ─── Read accessors ──────────────────────────────────────────────────────────
async function readSetting<T>(key: string, portId: string): Promise<T | null> {
const setting = await getSetting(key, portId);
if (!setting) return null;
return setting.value as T;
}
/**
 * Decrypt a stored credential, mapping empty input or any decryption failure
 * (key rotation, corruption, etc.) to null — fail closed so the send errors
 * fast rather than mis-authenticating against SMTP with garbage.
 */
function decryptOrNull(value: string | null): string | null {
  let plaintext: string | null = null;
  if (value) {
    try {
      plaintext = decrypt(value);
    } catch {
      plaintext = null;
    }
  }
  return plaintext;
}
/**
 * Resolve the per-port sales SMTP config, falling back to the SMTP_* env
 * defaults field-by-field so a brand-new port still has a usable identity
 * (tests + dev).
 *
 * @returns the resolved config; `smtpPass` is decrypted plaintext and must
 *   never leave server-side code (see redactSalesConfigForResponse).
 */
export async function getSalesEmailConfig(portId: string): Promise<SalesEmailConfig> {
  const [fromAddress, smtpHost, smtpPort, smtpSecure, smtpUser, smtpPassEnc, authMethod] =
    await Promise.all([
      readSetting<string>(SALES_EMAIL_KEYS.fromAddress, portId),
      readSetting<string>(SALES_EMAIL_KEYS.smtpHost, portId),
      readSetting<number>(SALES_EMAIL_KEYS.smtpPort, portId),
      readSetting<boolean>(SALES_EMAIL_KEYS.smtpSecure, portId),
      readSetting<string>(SALES_EMAIL_KEYS.smtpUser, portId),
      readSetting<string>(SALES_EMAIL_KEYS.smtpPassEncrypted, portId),
      readSetting<string>(SALES_EMAIL_KEYS.authMethod, portId),
    ]);
  const smtpPass = decryptOrNull(smtpPassEnc);
  // For `from`, fall back to the SMTP_FROM env so a brand-new port without
  // overrides still has a usable identity (for tests + dev).
  const resolvedFrom =
    fromAddress ?? env.SMTP_FROM?.replace(/^.*<(.+)>$/, '$1').trim() ?? `sales@${env.SMTP_HOST}`;
  // Resolve each credential field once so the transporter and the isUsable
  // flag are computed from the SAME values.
  const resolvedHost = smtpHost ?? env.SMTP_HOST ?? null;
  const resolvedUser = smtpUser ?? env.SMTP_USER ?? null;
  const resolvedPass = smtpPass ?? env.SMTP_PASS ?? null;
  return {
    fromAddress: resolvedFrom,
    smtpHost: resolvedHost,
    smtpPort: smtpPort ?? env.SMTP_PORT ?? 587,
    smtpSecure: smtpSecure ?? false,
    smtpUser: resolvedUser,
    smtpPass: resolvedPass,
    authMethod: authMethod ?? 'app_password',
    // "Usable" requires host + full cred pair (prod-realistic: empty creds
    // would just bounce against the relay).
    // FIX: previously computed as `(portUser && portPass) ?? (envUser &&
    // envPass)` over the RAW values, which could disagree with the resolved
    // creds actually handed to the transporter (e.g. port user + env pass
    // authenticated fine yet reported isUsable=false). Compute from the
    // resolved fields instead.
    isUsable: Boolean(resolvedHost && resolvedUser && resolvedPass),
  };
}
/**
 * Resolve the port's IMAP half of the sales account (bounce monitoring,
 * §14.9). Unlike the SMTP config there is no env fallback — IMAP is only
 * usable when host, user, and a decryptable password are all configured.
 */
export async function getSalesImapConfig(portId: string): Promise<SalesImapConfig> {
  const settings = await Promise.all([
    readSetting<string>(SALES_EMAIL_KEYS.imapHost, portId),
    readSetting<number>(SALES_EMAIL_KEYS.imapPort, portId),
    readSetting<string>(SALES_EMAIL_KEYS.imapUser, portId),
    readSetting<string>(SALES_EMAIL_KEYS.imapPassEncrypted, portId),
  ]);
  const [host, port, user, passEncrypted] = settings;
  const pass = decryptOrNull(passEncrypted);
  return {
    imapHost: host ?? null,
    imapPort: port ?? 993,
    imapUser: user ?? null,
    imapPass: pass,
    isUsable: Boolean(host && user && pass),
  };
}
/**
 * Resolve the non-credential sales content settings (body templates, upload
 * ceiling, attach threshold) with their documented defaults.
 */
export async function getSalesContentConfig(portId: string): Promise<SalesContentConfig> {
  const [noreply, berthPdfBody, brochureBody, maxUpload, attachThreshold] = await Promise.all([
    readSetting<string>(SALES_EMAIL_KEYS.noreplyFromAddress, portId),
    readSetting<string>(SALES_EMAIL_KEYS.templateBerthPdfBody, portId),
    readSetting<string>(SALES_EMAIL_KEYS.templateBrochureBody, portId),
    readSetting<number>(SALES_EMAIL_KEYS.brochureMaxUploadMb, portId),
    readSetting<number>(SALES_EMAIL_KEYS.emailAttachThresholdMb, portId),
  ]);
  // Same env fallback chain as getSalesEmailConfig's from-address: bare
  // address pulled out of SMTP_FROM ("Name <addr>" supported), else a
  // synthesized noreply@ on the SMTP host.
  const envFrom = env.SMTP_FROM?.replace(/^.*<(.+)>$/, '$1').trim();
  return {
    noreplyFromAddress: noreply ?? envFrom ?? `noreply@${env.SMTP_HOST}`,
    templateBerthPdfBody: berthPdfBody ?? DEFAULT_BERTH_PDF_BODY,
    templateBrochureBody: brochureBody ?? DEFAULT_BROCHURE_BODY,
    brochureMaxUploadMb: maxUpload ?? 50,
    emailAttachThresholdMb: attachThreshold ?? 15,
  };
}
// ─── Write accessors ─────────────────────────────────────────────────────────
/**
 * Plain (unencrypted) input shape for the admin write path. Password fields
 * are accepted as plaintext and encrypted before storage.
 *
 * `null` semantics (enforced by updateSalesEmailConfig / writeSetting):
 * - undefined => leave unchanged (don't touch the row).
 * - null => clear the value.
 * - string => set to this value.
 *
 * For password fields specifically, the empty string `""` is treated as
 * "leave unchanged" so the admin form's masked placeholder can round-trip
 * without forcing the rep to re-enter the password every save.
 */
export interface SalesEmailConfigUpdate {
  fromAddress?: string | null;
  smtpHost?: string | null;
  smtpPort?: number | null;
  smtpSecure?: boolean | null;
  smtpUser?: string | null;
  /** Plaintext; encrypted before storage. Pass `""` to leave unchanged. */
  smtpPass?: string | null;
  imapHost?: string | null;
  imapPort?: number | null;
  imapUser?: string | null;
  /** Plaintext; encrypted before storage. Pass `""` to leave unchanged. */
  imapPass?: string | null;
  // One of 'app_password' | 'oauth_google' | 'oauth_microsoft' — validated
  // upstream by updateSalesEmailConfigSchema.
  authMethod?: string | null;
  noreplyFromAddress?: string | null;
  templateBerthPdfBody?: string | null;
  templateBrochureBody?: string | null;
  brochureMaxUploadMb?: number | null;
  emailAttachThresholdMb?: number | null;
}
/**
 * Upsert a single setting key. `undefined` means "leave unchanged" and is a
 * no-op; `null` clears the stored value (see SalesEmailConfigUpdate).
 */
async function writeSetting<T>(
  key: string,
  value: T | null | undefined,
  portId: string,
  meta: AuditMeta,
): Promise<void> {
  const shouldWrite = value !== undefined;
  if (shouldWrite) {
    await upsertSetting(key, value, portId, meta);
  }
}
/**
 * Persist an admin-supplied sales-email config update.
 *
 * Plain fields follow writeSetting's undefined/null/value semantics.
 * Password fields (§14.10) are encrypted with EMAIL_CREDENTIAL_KEY before
 * storage; `""` is the "leave unchanged" sentinel so the masked UI
 * placeholder round-trips, and `null` clears the stored ciphertext.
 *
 * Writes are sequential (each carries the same AuditMeta).
 */
export async function updateSalesEmailConfig(
  portId: string,
  update: SalesEmailConfigUpdate,
  meta: AuditMeta,
): Promise<void> {
  // Encrypt-or-clear helper shared by both credential fields — previously
  // duplicated inline for smtp/imap.
  const writePassword = async (key: string, plaintext: string | null | undefined): Promise<void> => {
    if (plaintext === undefined || plaintext === '') return; // leave unchanged
    const stored = plaintext === null ? null : encrypt(plaintext);
    await upsertSetting(key, stored, portId, meta);
  };
  await writeSetting(SALES_EMAIL_KEYS.fromAddress, update.fromAddress, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.smtpHost, update.smtpHost, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.smtpPort, update.smtpPort, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.smtpSecure, update.smtpSecure, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.smtpUser, update.smtpUser, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.imapHost, update.imapHost, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.imapPort, update.imapPort, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.imapUser, update.imapUser, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.authMethod, update.authMethod, portId, meta);
  await writeSetting(SALES_EMAIL_KEYS.noreplyFromAddress, update.noreplyFromAddress, portId, meta);
  await writeSetting(
    SALES_EMAIL_KEYS.templateBerthPdfBody,
    update.templateBerthPdfBody,
    portId,
    meta,
  );
  await writeSetting(
    SALES_EMAIL_KEYS.templateBrochureBody,
    update.templateBrochureBody,
    portId,
    meta,
  );
  await writeSetting(
    SALES_EMAIL_KEYS.brochureMaxUploadMb,
    update.brochureMaxUploadMb,
    portId,
    meta,
  );
  await writeSetting(
    SALES_EMAIL_KEYS.emailAttachThresholdMb,
    update.emailAttachThresholdMb,
    portId,
    meta,
  );
  await writePassword(SALES_EMAIL_KEYS.smtpPassEncrypted, update.smtpPass);
  await writePassword(SALES_EMAIL_KEYS.imapPassEncrypted, update.imapPass);
}
// ─── Transporter factory ─────────────────────────────────────────────────────
// NOTE(review): this type is not consumed in this module — presumably used
// by callers to route between this sales transporter and the legacy noreply
// resolver in `port-config.ts`; confirm at the call sites.
export type SenderAccount = 'noreply' | 'sales';
/**
 * Build a nodemailer transporter for the per-port SALES account.
 *
 * Config comes from {@link getSalesEmailConfig} (port settings with SMTP_*
 * env fallbacks). Auth is attached only when BOTH user and pass resolve;
 * otherwise the transporter connects unauthenticated (local/test relays).
 *
 * @returns the transporter, the resolved "From:" address, and the
 *   authenticated user (null when sending unauthenticated).
 * @throws Error when no SMTP host is configured at all; callers let this
 *   propagate so the document_sends row gets `failedAt` + `errorReason`.
 */
export async function createSalesTransporter(portId: string): Promise<{
  transporter: Transporter;
  fromAddress: string;
  authedUser: string | null;
}> {
  const cfg = await getSalesEmailConfig(portId);
  if (!cfg.smtpHost) {
    throw new Error(
      'Sales SMTP not configured for this port. Configure in /admin/email before sending.',
    );
  }
  const transporter = nodemailer.createTransport({
    host: cfg.smtpHost,
    port: cfg.smtpPort,
    secure: cfg.smtpSecure,
    // Auth only with a complete credential pair; partial creds mean none.
    ...(cfg.smtpUser && cfg.smtpPass ? { auth: { user: cfg.smtpUser, pass: cfg.smtpPass } } : {}),
  });
  return { transporter, fromAddress: cfg.fromAddress, authedUser: cfg.smtpUser };
}
/**
 * Public-facing sanitizer — strips the password fields and replaces each
 * with a boolean `…IsSet` marker. Used by the admin GET endpoint so reps
 * with `manage_settings` can see whether creds are configured without the
 * API ever returning the ciphertext (much less plaintext).
 */
export function redactSalesConfigForResponse(
  cfg: SalesEmailConfig,
  imap: SalesImapConfig,
  content: SalesContentConfig,
): {
  email: Omit<SalesEmailConfig, 'smtpPass'> & { smtpPassIsSet: boolean };
  imap: Omit<SalesImapConfig, 'imapPass'> & { imapPassIsSet: boolean };
  content: SalesContentConfig;
} {
  // Destructure the secret OUT of each object so neither plaintext nor
  // ciphertext can ride along on the response, then re-spread the rest.
  const { smtpPass, ...emailRest } = cfg;
  const { imapPass, ...imapRest } = imap;
  return {
    email: { ...emailRest, smtpPassIsSet: Boolean(smtpPass) },
    imap: { ...imapRest, imapPassIsSet: Boolean(imapPass) },
    content,
  };
}

View File

@@ -0,0 +1,168 @@
/**
* Minimal markdown -> HTML email conversion + safe sanitization.
*
* Used by the Phase 7 sales send-out flow (`document-sends.service.ts`) to
* render rep-authored bodies into the email-safe HTML we hand to nodemailer.
*
* § §14.7 critical mitigation: "Body markdown with HTML/script injection".
* This module is the choke point — every code path that turns rep-authored
* markdown into an email body MUST go through `renderEmailBody()`. Direct
* passthrough to `transporter.sendMail({ html })` from a user-supplied string
* is a code-review block.
*
* The implementation is intentionally tiny (no DOMPurify, no marked) for two
* reasons:
*
* 1. Email clients render a strict subset of HTML anyway — paragraphs,
* bold/italic, line breaks, links and code spans cover ~99% of what
* reps actually write. Anything more complex (tables, images, lists)
* goes via the admin-editable HTML body templates.
* 2. Adding a transitive deps surface for the markdown→HTML conversion
* doubles the attack surface for the very mitigation it implements.
*
* The renderer:
* - HTML-escapes every input character before applying any markdown rules.
* - Whitelists exactly: paragraphs, line breaks, **bold**, _italic_, `code`,
* and `[text](https://...)` links (https only).
* - Strips any other markdown / HTML constructs by virtue of being
* escape-first-then-rule-replace.
*
* Tested against the standard XSS vector list (`<script>`, `<img onerror>`,
* `javascript:` URLs, `<iframe>`, etc.) — see
* `tests/unit/markdown-email-sanitization.test.ts`.
*/
// Hard cap on rep-authored body size per §14.7; renderEmailBody throws
// above this, and the sender service pre-checks it as a ValidationError.
const MAX_BODY_BYTES = 50 * 1024; // 50 KB hard cap matching §14.7
/** Re-export for the sender service so it doesn't have to remember the cap. */
export const EMAIL_BODY_MAX_BYTES = MAX_BODY_BYTES;
/**
 * Escape every HTML-significant character. Run on raw input BEFORE any
 * markdown rules so user-supplied HTML can never reach the rendered body.
 */
const HTML_ENTITY_MAP: Record<string, string> = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  '"': '&quot;',
  "'": '&#39;',
};
function escapeHtml(input: string): string {
  // Single-pass replacement: no character can be double-escaped, which is
  // equivalent to the chained-replace form with '&' handled first.
  return input.replace(/[&<>"']/g, (ch) => HTML_ENTITY_MAP[ch] ?? ch);
}
/**
 * Allowed URL schemes for rendered links. We deliberately reject
 * `javascript:`, `data:`, `vbscript:`, and bare-word schemes (which some
 * browsers resolve to `http:`) — only fully-qualified `https://` and
 * `mailto:` make it through.
 */
const SAFE_HREF_PREFIXES = ['https://', 'mailto:'] as const;
function isSafeHref(href: string): boolean {
  const normalized = href.trim().toLowerCase();
  return SAFE_HREF_PREFIXES.some((prefix) => normalized.startsWith(prefix));
}
/**
 * Apply inline markdown rules to an already-HTML-escaped string.
 *
 * Rule order: code spans, then links, then **bold**, then *italic*/_italic_.
 * Each rule is a global regex replace over the SAME string, so later rules
 * can still match inside earlier rules' OUTPUT:
 *  - NOTE(review): `*`/`_` inside a `code` span can still pick up <em>
 *    markup (e.g. `a*b*c`), despite "code spans win over emphasis" — confirm
 *    whether that's acceptable or spans should be tokenized out first.
 *  - NOTE(review): an https href containing paired `*` could have <em>
 *    injected inside the attribute by the later emphasis rules; rare, but
 *    worth a regression test.
 */
function applyInlineRules(escaped: string): string {
  let out = escaped;
  // `code` (single backticks; nothing fancier — no fenced blocks here)
  out = out.replace(/`([^`\n]+)`/g, '<code>$1</code>');
  // [text](href) — href validated. Bracket text is already escaped so we
  // pass it through verbatim. Use a non-greedy match so two links on one
  // line don't collapse.
  out = out.replace(/\[([^\]\n]+?)\]\(([^)\n]+?)\)/g, (_full, text: string, href: string) => {
    // The href captured group came from already-escaped input, so the
    // entity-encoded chars need to survive into the attribute. We DO NOT
    // unescape here; the escaped form (`&amp;`, `&#39;` etc.) is the safe
    // representation for attribute values.
    // (The scheme check alone sees real '&' so escaped query strings pass.)
    if (!isSafeHref(href.replace(/&amp;/g, '&'))) {
      // Drop the link entirely; render the text as plain.
      return text;
    }
    return `<a href="${href}" target="_blank" rel="noopener noreferrer">${text}</a>`;
  });
  // **bold** then *italic* / _italic_
  out = out.replace(/\*\*([^*\n]+?)\*\*/g, '<strong>$1</strong>');
  // Lookarounds keep single-* emphasis from eating the edges of ** pairs.
  out = out.replace(/(?<!\*)\*([^*\n]+?)\*(?!\*)/g, '<em>$1</em>');
  // _italic_ only at word boundaries (whitespace/edges) so snake_case and
  // underscores inside URLs are left alone.
  out = out.replace(/(^|\s)_([^_\n]+?)_(?=\s|$)/g, '$1<em>$2</em>');
  return out;
}
/**
 * Convert rep-authored markdown into email-safe HTML.
 *
 * Escape-first pipeline: HTML-escape the entire input, split into
 * paragraphs on blank lines, apply the inline allowlist rules, and turn
 * intra-paragraph newlines into <br>.
 *
 * @throws Error when the input exceeds {@link EMAIL_BODY_MAX_BYTES}.
 */
export function renderEmailBody(markdown: string): string {
  if (Buffer.byteLength(markdown, 'utf8') > MAX_BODY_BYTES) {
    throw new Error(`Email body exceeds maximum length (${MAX_BODY_BYTES} bytes)`);
  }
  // Escape BEFORE any rule runs — no path below ever sees a raw angle bracket.
  const paragraphs = escapeHtml(markdown).split(/\n{2,}/);
  const htmlParagraphs: string[] = [];
  for (const paragraph of paragraphs) {
    const trimmed = paragraph.trim();
    if (!trimmed) continue;
    const inline = applyInlineRules(trimmed).replace(/\n/g, '<br>');
    htmlParagraphs.push(`<p>${inline}</p>`);
  }
  return htmlParagraphs.join('\n');
}
/**
 * Find every `{{token}}` reference in a body. Returns the distinct raw
 * token strings (braces included, first-seen order) for caller-side
 * validation against the merge-field catalog.
 */
export function extractTokens(markdown: string): string[] {
  const seen = new Set<string>();
  for (const match of markdown.matchAll(/\{\{[^{}\n]+?\}\}/g)) {
    seen.add(match[0]);
  }
  return [...seen];
}
/**
 * Replace `{{token}}` references with values from the supplied map (keys
 * are the full brace-wrapped token). Tokens missing from the map — or
 * mapped to null/undefined/'' — are left intact so the dry-run reporter can
 * flag them. Callers pass plain strings; HTML escaping happens later, in
 * `renderEmailBody()`.
 */
export function expandMergeTokens(
  markdown: string,
  values: Record<string, string | number | null | undefined>,
): string {
  return markdown.replace(/\{\{([^{}\n]+?)\}\}/g, (token) => {
    const replacement = values[token];
    const isBlank = replacement === null || replacement === undefined || replacement === '';
    return isBlank ? token : String(replacement);
  });
}
/**
 * Returns the distinct `{{token}}` references in `markdown` that aren't
 * present (or are blank/null) in the provided value map, in first-seen
 * order. Used by the pre-send dry-run UI per §14.7 ("Body markdown contains
 * unresolved merge fields — Send blocked until resolved").
 */
export function findUnresolvedTokens(
  markdown: string,
  values: Record<string, string | number | null | undefined>,
): string[] {
  const unresolved: string[] = [];
  const seen = new Set<string>();
  // Single scan: dedupe and resolution-check in one pass (same token regex
  // as extractTokens).
  for (const match of markdown.matchAll(/\{\{[^{}\n]+?\}\}/g)) {
    const token = match[0];
    if (seen.has(token)) continue;
    seen.add(token);
    const value = values[token];
    if (value === undefined || value === null || value === '') {
      unresolved.push(token);
    }
  }
  return unresolved;
}

View File

@@ -0,0 +1,36 @@
import { z } from 'zod';
/** Create payload for a port-wide brochure; label is required and trimmed. */
export const createBrochureSchema = z.object({
  label: z.string().trim().min(1).max(120),
  description: z.string().max(500).optional().nullable(),
  // Marks this brochure as the port's default send-out (see HEAD: isDefault
  // marker on the brochures table).
  isDefault: z.boolean().optional(),
});
/** Partial update — omitted fields are left untouched. */
export const updateBrochureSchema = z.object({
  label: z.string().trim().min(1).max(120).optional(),
  description: z.string().max(500).optional().nullable(),
  isDefault: z.boolean().optional(),
});
/** Registers an already-uploaded brochure file as a new version row. */
export const registerBrochureVersionSchema = z.object({
  storageKey: z
    .string()
    .min(1)
    .max(500)
    // Mirrors the `validateStorageKey` regex in `src/lib/storage/filesystem.ts`
    // — defense-in-depth against path-traversal payloads from the client.
    .regex(/^[a-zA-Z0-9/_.-]+$/, 'Invalid storage key format')
    .refine((s) => !s.includes('..'), 'Storage key may not contain ".."')
    .refine((s) => !s.startsWith('/'), 'Storage key may not be absolute'),
  fileName: z.string().min(1).max(255),
  // NOTE(review): hard 100MB ceiling here vs. the brochureMaxUploadMb admin
  // setting which allows up to 500 — confirm the intended maximum.
  fileSizeBytes: z
    .number()
    .int()
    .positive()
    .max(100 * 1024 * 1024), // 100MB hard ceiling
  // Integrity checksum of the uploaded bytes; must be lowercase hex.
  contentSha256: z.string().regex(/^[0-9a-f]{64}$/, 'sha256 must be 64-char hex'),
});
export type CreateBrochureInput = z.infer<typeof createBrochureSchema>;
export type UpdateBrochureInput = z.infer<typeof updateBrochureSchema>;
export type RegisterBrochureVersionInput = z.infer<typeof registerBrochureVersionSchema>;

View File

@@ -0,0 +1,43 @@
import { z } from 'zod';
/**
 * Who receives the document: an explicit email, or a clientId whose primary
 * email contact is resolved server-side. `interestId` links the send to a
 * sales interest for the audit trail only. At least one of clientId/email
 * is required.
 */
const recipientSchema = z
  .object({
    clientId: z.string().min(1).optional(),
    email: z.string().email().optional(),
    interestId: z.string().min(1).optional(),
  })
  .refine((v) => v.clientId !== undefined || v.email !== undefined, {
    message: 'recipient.clientId or recipient.email is required',
  });
/** Request body for the rep-initiated berth-PDF send. */
export const sendBerthPdfSchema = z.object({
  berthId: z.string().min(1),
  recipient: recipientSchema,
  customBodyMarkdown: z.string().max(50_000).optional(),
  // FIX: the sender service gates on `input.allowUnresolved` to permit
  // sending despite unresolved merge tokens, but the flag was missing from
  // the request schema, so the override could never be supplied via the API.
  allowUnresolved: z.boolean().optional(),
});
/** Request body for the rep-initiated brochure send; brochureId omitted = port default. */
export const sendBrochureSchema = z.object({
  brochureId: z.string().min(1).optional(),
  recipient: recipientSchema,
  customBodyMarkdown: z.string().max(50_000).optional(),
  // FIX: the sender service gates on `input.allowUnresolved` to permit
  // sending despite unresolved merge tokens, but the flag was missing from
  // the request schema, so the override could never be supplied via the API.
  allowUnresolved: z.boolean().optional(),
});
/**
 * Pre-send dry-run (§14.7): renders the body and reports unresolved merge
 * tokens without sending.
 * NOTE(review): no cross-field rule forcing `berthId` when documentKind is
 * 'berth_pdf' (or `brochureId` for 'brochure') — presumably the preview
 * simply reports berth tokens as unresolved in that case; confirm against
 * the route handler.
 */
export const previewBodySchema = z.object({
  documentKind: z.enum(['berth_pdf', 'brochure']),
  recipient: recipientSchema,
  berthId: z.string().min(1).optional(),
  brochureId: z.string().min(1).optional(),
  customBodyMarkdown: z.string().max(50_000).optional(),
});
/** Audit-log query filters; `limit` is coerced from the query string, max 500. */
export const listSendsQuerySchema = z.object({
  clientId: z.string().min(1).optional(),
  interestId: z.string().min(1).optional(),
  berthId: z.string().min(1).optional(),
  limit: z.coerce.number().int().min(1).max(500).optional(),
});
export type SendBerthPdfInput = z.infer<typeof sendBerthPdfSchema>;
export type SendBrochureInput = z.infer<typeof sendBrochureSchema>;
export type PreviewBodyInput = z.infer<typeof previewBodySchema>;
export type ListSendsQuery = z.infer<typeof listSendsQuerySchema>;

View File

@@ -0,0 +1,31 @@
import { z } from 'zod';
/**
 * Per-port sales-email config update payload (Phase 7).
 *
 * Password fields accept:
 * - undefined / omitted => leave unchanged
 * - empty string "" => leave unchanged (UI placeholder round-trip)
 * - explicit null => clear the value
 * - non-empty string => set to this value (encrypted before storage)
 */
export const updateSalesEmailConfigSchema = z.object({
  fromAddress: z.string().email().optional().nullable(),
  smtpHost: z.string().min(1).max(255).optional().nullable(),
  smtpPort: z.number().int().min(1).max(65535).optional().nullable(),
  smtpSecure: z.boolean().optional().nullable(),
  smtpUser: z.string().max(255).optional().nullable(),
  // min(1) is deliberately absent on password fields: "" must validate for
  // the leave-unchanged round-trip described above.
  smtpPass: z.string().max(255).optional().nullable(),
  imapHost: z.string().min(1).max(255).optional().nullable(),
  imapPort: z.number().int().min(1).max(65535).optional().nullable(),
  imapUser: z.string().max(255).optional().nullable(),
  imapPass: z.string().max(255).optional().nullable(),
  authMethod: z.enum(['app_password', 'oauth_google', 'oauth_microsoft']).optional().nullable(),
  noreplyFromAddress: z.string().email().optional().nullable(),
  templateBerthPdfBody: z.string().max(50_000).optional().nullable(),
  templateBrochureBody: z.string().max(50_000).optional().nullable(),
  brochureMaxUploadMb: z.number().int().min(1).max(500).optional().nullable(),
  // Attachment-vs-link cutover (§11.1); service default is 15 when unset.
  emailAttachThresholdMb: z.number().int().min(1).max(50).optional().nullable(),
});
export type UpdateSalesEmailConfigInput = z.infer<typeof updateSalesEmailConfigSchema>;