feat(website-intake): dual-write endpoint + migration chain repair

Adds website_submissions table + shared-secret POST endpoint so the
marketing site can dual-write inquiries alongside its NocoDB write.
Race-safe via INSERT ... ON CONFLICT, idempotent on submission_id,
refuses every request when WEBSITE_INTAKE_SECRET is unset. Also
repairs pre-existing 0020/0021/0022 prevId collision (renumbered +
journal re-sorted) so db:generate works again. 11 unit tests.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Matt Ciaccio
2026-05-04 22:52:33 +02:00
parent c612bbdfd9
commit 49d34e00c8
16 changed files with 11556 additions and 28 deletions

View File

@@ -0,0 +1,177 @@
import { createHash, timingSafeEqual } from 'node:crypto';
import { NextRequest, NextResponse } from 'next/server';
import { eq } from 'drizzle-orm';
import { z } from 'zod';
import { db } from '@/lib/db';
import { ports } from '@/lib/db/schema/ports';
import { websiteSubmissions } from '@/lib/db/schema/website-submissions';
import { env } from '@/lib/env';
import { logger } from '@/lib/logger';
import { checkRateLimit, rateLimiters } from '@/lib/rate-limit';
/**
* POST /api/public/website-inquiries
*
* Capture endpoint for the marketing website's dual-write. The website
* server (`/server/api/register.ts`, `/server/api/contact.ts`) calls this
* AFTER its existing NocoDB write succeeds, sending the same payload as a
* server-to-server fire-and-forget POST. The CRM stores the raw payload
* in `website_submissions` for later analysis / promotion to entities.
*
* Auth: shared-secret in `X-Webhook-Secret` header, timing-safe compared
* against `WEBSITE_INTAKE_SECRET`. If the env var is unset on this
* instance, the endpoint refuses every request with 503 - the correct
* posture for dev/staging that hasn't been wired up yet.
*
* Idempotency: payload carries a `submission_id` UUID. The unique index
* on `website_submissions.submission_id` makes redelivery a no-op; the
* handler returns 200 + the existing record's id instead of erroring.
*
* No emails / no `interests` rows are created here. The endpoint's job is
* pure data capture. A separate "promote" step (future) will turn captured
* submissions into proper `clients` + `interests` rows once we trust the
* pipeline.
*/
/**
 * Wire format of the website's dual-write POST body. Parsed with
 * `.parse()` (throws on mismatch) — the caller is a known server, so any
 * deviation is treated as tampering and answered with 400.
 */
const SubmissionSchema = z.object({
  /** Idempotency key: UUID minted by the website per form submission. */
  submission_id: z.string().uuid(),
  /** Form-type discriminator; mirrors the website's form endpoints. */
  kind: z.enum(['berth_inquiry', 'residence_inquiry', 'contact_form']),
  /** Verbatim form payload; stored as JSONB without field-level validation. */
  payload: z.record(z.unknown()),
  /** Optional back-reference to the NocoDB row the same submission created. */
  legacy_nocodb_id: z.string().optional(),
  /** Defaults to port-nimara since that's currently the only port with a
   * public marketing site. Future ports can override per-submission. */
  port_slug: z.string().default('port-nimara'),
});
/**
 * Timing-safe comparison of the `X-Webhook-Secret` header value against
 * `WEBSITE_INTAKE_SECRET`.
 *
 * Both sides are hashed to fixed-length SHA-256 digests before the
 * comparison. This satisfies `timingSafeEqual`'s equal-length requirement
 * without the previous pad/concat/subarray bookkeeping, and removes the
 * trailing `length ===` check that was short-circuited behind the compare.
 * Digest equality implies input equality for all practical purposes.
 *
 * @param header - raw header value, or null when the header was absent.
 * @returns true only when the env secret is configured AND matches.
 */
function verifySecret(header: string | null): boolean {
  const expected = env.WEBSITE_INTAKE_SECRET;
  if (!expected || !header) return false;
  const headerDigest = createHash('sha256').update(header).digest();
  const expectedDigest = createHash('sha256').update(expected).digest();
  return timingSafeEqual(headerDigest, expectedDigest);
}
export async function POST(req: NextRequest) {
// Refuse outright if the CRM hasn't been wired up - safer than letting
// unauthenticated traffic in just because the env var was forgotten.
if (!env.WEBSITE_INTAKE_SECRET) {
return NextResponse.json(
{ error: 'Website intake is not configured on this server.' },
{ status: 503 },
);
}
// Auth gate - shared secret in header, timing-safe compare.
const secretHeader = req.headers.get('x-webhook-secret');
if (!verifySecret(secretHeader)) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
// Rate limit. All website-side traffic shares the website's egress IP,
// so we use a dedicated bucket sized to accommodate normal traffic
// (500/hr) rather than the 5/hr publicForm bucket meant for individual
// human submissions. The shared-secret header is the real abuse
// boundary; this limiter is just a backstop if the secret ever leaks.
const ip = req.headers.get('x-forwarded-for')?.split(',')[0]?.trim() ?? 'unknown';
const rl = await checkRateLimit(ip, rateLimiters.websiteIntake);
if (!rl.allowed) {
const retryAfter = Math.max(1, Math.ceil((rl.resetAt - Date.now()) / 1000));
return NextResponse.json(
{ error: 'Rate limit exceeded' },
{ status: 429, headers: { 'Retry-After': String(retryAfter) } },
);
}
// Parse + validate body. Reject anything that doesn't conform — the
// website is a known caller; a malformed payload signals tampering.
let parsed;
try {
const body = await req.json();
parsed = SubmissionSchema.parse(body);
} catch (err) {
return NextResponse.json(
{ error: 'Invalid payload', details: err instanceof Error ? err.message : 'parse error' },
{ status: 400 },
);
}
// Resolve port. We require the slug to exist; can't capture submissions
// for a port the CRM doesn't know about.
const [port] = await db
.select({ id: ports.id })
.from(ports)
.where(eq(ports.slug, parsed.port_slug))
.limit(1);
if (!port) {
// Don't echo the input slug back in the error - generic message is
// sufficient and avoids the input-reflection pattern that complicates
// log-injection / audit reviews. The slug is logged server-side
// for debugging.
logger.warn(
{ portSlug: parsed.port_slug, submissionId: parsed.submission_id },
'website-inquiry rejected: unknown port',
);
return NextResponse.json({ error: 'Unknown port' }, { status: 400 });
}
// Idempotent insert. Two parallel requests carrying the same submission_id
// could both pass any pre-check, so we don't pre-check at all - the unique
// index on submission_id is the source of truth, and `onConflictDoNothing`
// keeps the second request's INSERT from raising 23505. When the conflict
// hits, `returning()` yields zero rows and we look up the existing row to
// return its id, mirroring the first-delivery shape so the website never
// sees a difference between fresh and dup.
const insertResult = await db
.insert(websiteSubmissions)
.values({
portId: port.id,
submissionId: parsed.submission_id,
kind: parsed.kind,
payload: parsed.payload,
legacyNocodbId: parsed.legacy_nocodb_id ?? null,
sourceIp: ip,
userAgent: req.headers.get('user-agent') ?? null,
})
.onConflictDoNothing({ target: websiteSubmissions.submissionId })
.returning({ id: websiteSubmissions.id });
if (insertResult[0]) {
logger.info(
{
submissionId: parsed.submission_id,
kind: parsed.kind,
portSlug: parsed.port_slug,
legacyNocodbId: parsed.legacy_nocodb_id,
},
'website inquiry captured',
);
return NextResponse.json({ id: insertResult[0].id, deduped: false });
}
// Conflict path: row already exists. Fetch its id so the response shape
// stays identical regardless of which request "won" the race.
const existing = await db
.select({ id: websiteSubmissions.id })
.from(websiteSubmissions)
.where(eq(websiteSubmissions.submissionId, parsed.submission_id))
.limit(1);
if (existing[0]) {
return NextResponse.json({ id: existing[0].id, deduped: true });
}
// Should be unreachable - the conflict means a row exists, so the lookup
// above should always find it. If it doesn't (e.g. simultaneous DELETE),
// surface a 500 explicitly rather than silently 200ing a missing id.
logger.error(
{ submissionId: parsed.submission_id },
'website-inquiry conflict but row not found on lookup',
);
return NextResponse.json({ error: 'Insert failed' }, { status: 500 });
}

View File

@@ -0,0 +1,16 @@
-- Raw capture of website dual-write submissions (one row per form POST).
-- Written by /api/public/website-inquiries; payload is stored verbatim.
CREATE TABLE "website_submissions" (
"id" text PRIMARY KEY NOT NULL,
"port_id" text NOT NULL,
"submission_id" text NOT NULL,
"kind" text NOT NULL,
"payload" jsonb NOT NULL,
"legacy_nocodb_id" text,
"source_ip" text,
"user_agent" text,
"received_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "website_submissions" ADD CONSTRAINT "website_submissions_port_id_ports_id_fk" FOREIGN KEY ("port_id") REFERENCES "public"."ports"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
-- Unique index enforces submission_id idempotency: a redelivered
-- submission conflicts here and the endpoint treats it as a no-op.
CREATE UNIQUE INDEX "idx_ws_submission_id" ON "website_submissions" USING btree ("submission_id");--> statement-breakpoint
-- Supports per-port, time-ordered listing of captured submissions.
CREATE INDEX "idx_ws_port_received" ON "website_submissions" USING btree ("port_id","received_at");--> statement-breakpoint
CREATE INDEX "idx_ws_kind" ON "website_submissions" USING btree ("kind");

View File

@@ -1,5 +1,5 @@
{
"id": "e0e6a819-cf9f-45d3-b65b-19da27890f0b",
"id": "e9d830fc-ec81-42ab-bea6-232dd99d20d1",
"prevId": "6326a9a7-0b30-4647-bf86-b3d79e6a08bf",
"version": "7",
"dialect": "postgresql",
@@ -762,13 +762,13 @@
},
"nominal_boat_size": {
"name": "nominal_boat_size",
"type": "numeric",
"type": "text",
"primaryKey": false,
"notNull": false
},
"nominal_boat_size_m": {
"name": "nominal_boat_size_m",
"type": "numeric",
"type": "text",
"primaryKey": false,
"notNull": false
},
@@ -799,13 +799,13 @@
},
"power_capacity": {
"name": "power_capacity",
"type": "numeric",
"type": "text",
"primaryKey": false,
"notNull": false
},
"voltage": {
"name": "voltage",
"type": "numeric",
"type": "text",
"primaryKey": false,
"notNull": false
},
@@ -914,12 +914,6 @@
"primaryKey": false,
"notNull": false
},
"status_override_mode": {
"name": "status_override_mode",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp with time zone",
@@ -1339,6 +1333,158 @@
"checkConstraints": {},
"isRLSEnabled": false
},
"public.client_merge_candidates": {
"name": "client_merge_candidates",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"port_id": {
"name": "port_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"client_a_id": {
"name": "client_a_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"client_b_id": {
"name": "client_b_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"score": {
"name": "score",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"reasons": {
"name": "reasons",
"type": "jsonb",
"primaryKey": false,
"notNull": true
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"default": "'pending'"
},
"created_at": {
"name": "created_at",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"resolved_at": {
"name": "resolved_at",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": false
},
"resolved_by": {
"name": "resolved_by",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {
"idx_cmc_port_status": {
"name": "idx_cmc_port_status",
"columns": [
{
"expression": "port_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "status",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": false,
"concurrently": false,
"method": "btree",
"with": {}
},
"idx_cmc_pair": {
"name": "idx_cmc_pair",
"columns": [
{
"expression": "port_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "client_a_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "client_b_id",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"client_merge_candidates_port_id_ports_id_fk": {
"name": "client_merge_candidates_port_id_ports_id_fk",
"tableFrom": "client_merge_candidates",
"tableTo": "ports",
"columnsFrom": ["port_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
},
"client_merge_candidates_client_a_id_clients_id_fk": {
"name": "client_merge_candidates_client_a_id_clients_id_fk",
"tableFrom": "client_merge_candidates",
"tableTo": "clients",
"columnsFrom": ["client_a_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
},
"client_merge_candidates_client_b_id_clients_id_fk": {
"name": "client_merge_candidates_client_b_id_clients_id_fk",
"tableFrom": "client_merge_candidates",
"tableTo": "clients",
"columnsFrom": ["client_b_id"],
"columnsTo": ["id"],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.client_merge_log": {
"name": "client_merge_log",
"schema": "",
@@ -10230,6 +10376,96 @@
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.migration_source_links": {
"name": "migration_source_links",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"source_system": {
"name": "source_system",
"type": "text",
"primaryKey": false,
"notNull": true
},
"source_id": {
"name": "source_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"target_entity_type": {
"name": "target_entity_type",
"type": "text",
"primaryKey": false,
"notNull": true
},
"target_entity_id": {
"name": "target_entity_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"applied_id": {
"name": "applied_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"applied_by": {
"name": "applied_by",
"type": "text",
"primaryKey": false,
"notNull": false
},
"applied_at": {
"name": "applied_at",
"type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {
"idx_msl_source_target": {
"name": "idx_msl_source_target",
"columns": [
{
"expression": "source_system",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "source_id",
"isExpression": false,
"asc": true,
"nulls": "last"
},
{
"expression": "target_entity_type",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": true,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},

View File

@@ -1,6 +1,6 @@
{
"id": "e9d830fc-ec81-42ab-bea6-232dd99d20d1",
"prevId": "6326a9a7-0b30-4647-bf86-b3d79e6a08bf",
"id": "9f6ae433-f075-4348-8109-3cd368344fa8",
"prevId": "e9d830fc-ec81-42ab-bea6-232dd99d20d1",
"version": "7",
"dialect": "postgresql",
"tables": {
@@ -1870,6 +1870,12 @@
"primaryKey": false,
"notNull": false
},
"merged_into_client_id": {
"name": "merged_into_client_id",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp with time zone",
@@ -1957,6 +1963,21 @@
"concurrently": false,
"method": "btree",
"with": {}
},
"idx_clients_merged_into": {
"name": "idx_clients_merged_into",
"columns": [
{
"expression": "merged_into_client_id",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": false,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {

View File

@@ -1,6 +1,6 @@
{
"id": "9f6ae433-f075-4348-8109-3cd368344fa8",
"prevId": "e9d830fc-ec81-42ab-bea6-232dd99d20d1",
"id": "e0e6a819-cf9f-45d3-b65b-19da27890f0b",
"prevId": "9f6ae433-f075-4348-8109-3cd368344fa8",
"version": "7",
"dialect": "postgresql",
"tables": {
@@ -762,13 +762,13 @@
},
"nominal_boat_size": {
"name": "nominal_boat_size",
"type": "text",
"type": "numeric",
"primaryKey": false,
"notNull": false
},
"nominal_boat_size_m": {
"name": "nominal_boat_size_m",
"type": "text",
"type": "numeric",
"primaryKey": false,
"notNull": false
},
@@ -799,13 +799,13 @@
},
"power_capacity": {
"name": "power_capacity",
"type": "text",
"type": "numeric",
"primaryKey": false,
"notNull": false
},
"voltage": {
"name": "voltage",
"type": "text",
"type": "numeric",
"primaryKey": false,
"notNull": false
},
@@ -914,6 +914,12 @@
"primaryKey": false,
"notNull": false
},
"status_override_mode": {
"name": "status_override_mode",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp with time zone",

File diff suppressed because it is too large Load Diff

View File

@@ -145,22 +145,29 @@
{
"idx": 20,
"version": "7",
"when": 1777814682110,
"tag": "0020_medical_betty_brant",
"when": 1777811835982,
"tag": "0020_unusual_azazel",
"breakpoints": true
},
{
"idx": 21,
"version": "7",
"when": 1777811835982,
"tag": "0021_unusual_azazel",
"when": 1777812671833,
"tag": "0021_magenta_madame_hydra",
"breakpoints": true
},
{
"idx": 22,
"version": "7",
"when": 1777812671833,
"tag": "0022_magenta_madame_hydra",
"when": 1777814682110,
"tag": "0022_medical_betty_brant",
"breakpoints": true
},
{
"idx": 23,
"version": "7",
"when": 1777927586934,
"tag": "0023_omniscient_reaper",
"breakpoints": true
}
]

View File

@@ -37,7 +37,7 @@ export * from './portal';
// CRM admin invites (better-auth realm)
export * from './crm-invites';
// Residential (parallel domain separate clients & interests for the
// Residential (parallel domain - separate clients & interests for the
// external residential team)
export * from './residential';
@@ -56,8 +56,11 @@ export * from './ai-usage';
// GDPR export tracking (Phase 3d)
export * from './gdpr';
// Migration ledger (one-shot scripts NocoDB import etc.)
// Migration ledger (one-shot scripts - NocoDB import etc.)
export * from './migration';
// Relations (must come last — references all tables)
// Website submissions (dual-write capture from the marketing site)
export * from './website-submissions';
// Relations (must come last - references all tables)
export * from './relations';

View File

@@ -0,0 +1,67 @@
import { pgTable, text, jsonb, timestamp, index, uniqueIndex } from 'drizzle-orm/pg-core';
import { ports } from './ports';
/**
* Raw capture of every website inquiry submission, dual-written from the
* marketing site alongside its existing NocoDB write. Acts as a passive
* collector while the website still uses NocoDB as its primary system of
* record — the new CRM observes incoming traffic without altering it,
* letting us validate the data flow before any cutover.
*
* v1 deliberately stores the raw payload as JSON without promoting to
* `clients` / `interests` rows. Once we trust the pipeline, a separate
* "promote" job can transform these submissions into proper entities
* with full dedup / merge logic.
*
* Idempotency: each submission carries a `submission_id` UUID minted by
* the website's server. Re-delivery (network retry, double-click) hits
* the unique index and is treated as a no-op.
*/
export const websiteSubmissions = pgTable(
  'website_submissions',
  {
    // Surrogate primary key, generated app-side as a random UUID.
    id: text('id')
      .primaryKey()
      .$defaultFn(() => crypto.randomUUID()),
    /** Multi-tenant: every submission belongs to a port. Resolved from
     * `port_slug` in the request payload (defaults to port-nimara if
     * unspecified, since it's currently the only port with a public
     * marketing site). */
    portId: text('port_id')
      .notNull()
      .references(() => ports.id),
    /** UUID v4 minted by the website's server when the form is processed.
     * The unique index on this column enforces idempotency: retries from
     * the website resolve to the existing row instead of creating a
     * duplicate. */
    submissionId: text('submission_id').notNull(),
    /** Discriminator for the form type. Mirrors the website's existing
     * branches (`berth_inquiry` from /api/register?interest=berths,
     * `residence_inquiry` from /api/register?interest=residences,
     * `contact_form` from /api/contact). Add new kinds as the website
     * grows new form types. */
    kind: text('kind').notNull(),
    /** Verbatim form payload, including any reCAPTCHA / IP / user-agent
     * metadata the website chose to forward. Stored as JSONB so the
     * capture stays schema-flexible while we figure out which fields
     * matter for the eventual promote step. */
    payload: jsonb('payload').notNull(),
    /** Cross-reference back to the legacy NocoDB row id created by the
     * same form submission. Useful for reconciling: pick any submission
     * here, look up the matching NocoDB row, confirm both halves agree. */
    legacyNocodbId: text('legacy_nocodb_id'),
    /** Capture-time metadata for debugging. */
    sourceIp: text('source_ip'),
    userAgent: text('user_agent'),
    /** Server-side arrival timestamp; defaults to now() in the database. */
    receivedAt: timestamp('received_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    // Idempotency boundary: redeliveries conflict here (see route handler).
    uniqueIndex('idx_ws_submission_id').on(table.submissionId),
    // Per-port, time-ordered listing of captured submissions.
    index('idx_ws_port_received').on(table.portId, table.receivedAt),
    index('idx_ws_kind').on(table.kind),
  ],
);
// Row types derived from the table definition (select vs. insert shapes).
export type WebsiteSubmission = typeof websiteSubmissions.$inferSelect;
export type NewWebsiteSubmission = typeof websiteSubmissions.$inferInsert;

View File

@@ -50,6 +50,13 @@ const envSchema = z.object({
GOOGLE_CLIENT_ID: z.string().optional(),
GOOGLE_CLIENT_SECRET: z.string().optional(),
// Shared secret used by the marketing website's server-side dual-write
// helper (POST to /api/public/website-inquiries). Set the SAME value on
// the website's CRM_INTAKE_SECRET env. Leave unset in dev/staging until
// the website's CRM_INTAKE_URL is also set — without this, the public
// intake endpoint refuses every request.
WEBSITE_INTAKE_SECRET: z.string().min(16).optional(),
// OpenAI (optional)
OPENAI_API_KEY: z.string().optional(),

View File

@@ -85,6 +85,12 @@ export const rateLimiters = {
exports: { windowMs: 60 * 60 * 1000, max: 30, keyPrefix: 'export' },
/** Public unauthenticated form posts (interest, residential inquiry): 5 per hour per IP. */
publicForm: { windowMs: 60 * 60 * 1000, max: 5, keyPrefix: 'publicform' },
/** Server-to-server intake from the marketing website's dual-write helper.
* All traffic shares the website's egress IP, so the bucket has to
* accommodate every legitimate inquiry the site can produce in an hour
* without dropping data. The shared-secret header gates abuse; this
* limiter is just a defensive backstop in case the secret leaks. */
websiteIntake: { windowMs: 60 * 60 * 1000, max: 500, keyPrefix: 'websiteintake' },
} as const satisfies Record<string, RateLimitConfig>;
export type RateLimiterName = keyof typeof rateLimiters;

View File

@@ -0,0 +1,69 @@
/**
* Dedicated test for the 503 path on /api/public/website-inquiries.
*
* Lives in its own file rather than sharing the main test file because
* the test mocks `@/lib/env` to return an empty object - that mock would
* leak into other tests in the same file via Vitest's module cache,
* making the rest of the suite return 503 instead of the expected
* status. Isolating to a single-test file sidesteps that entirely.
*
* Asserts the security-critical contract: when WEBSITE_INTAKE_SECRET is
* unset, every request gets 503, regardless of headers or payload. This
* is the dev/staging posture; without it, the endpoint would be
* unauthenticated.
*/
import { describe, expect, it, vi } from 'vitest';
vi.mock('@/lib/env', () => ({
  env: {}, // WEBSITE_INTAKE_SECRET intentionally unset
}));
// The route should return 503 before consulting the limiter, logger, or
// db — these are mocked anyway so an accidental fall-through fails on an
// assertion rather than on module resolution.
vi.mock('@/lib/rate-limit', () => ({
  rateLimiters: { websiteIntake: { limit: 10, window: 60_000 } },
  checkRateLimit: vi.fn(async () => ({ allowed: true, resetAt: Date.now() })),
}));
vi.mock('@/lib/logger', () => ({
  logger: { info: vi.fn(), warn: vi.fn(), error: vi.fn() },
}));
// Minimal db stub: every select resolves to no rows, every insert to an
// empty returning() set — enough shape for the route to load.
vi.mock('@/lib/db', () => ({
  db: {
    select: () => ({ from: () => ({ where: () => ({ limit: async () => [] }) }) }),
    insert: () => ({
      values: () => ({
        onConflictDoNothing: () => ({ returning: async () => [] }),
      }),
    }),
  },
}));
/** Build a minimal NextRequest stand-in: header lookup lowercases the
 * requested name (so fixture keys must already be lowercase) and json()
 * resolves to the supplied body. */
function makeReq(body: unknown, headers: Record<string, string> = {}) {
  const lookup = (name: string) => headers[name.toLowerCase()] ?? null;
  return {
    headers: { get: lookup },
    json: () => Promise.resolve(body),
  } as unknown as import('next/server').NextRequest;
}
describe('POST /api/public/website-inquiries — 503 when secret unset', () => {
  it('returns 503 even when a "valid" header + payload are supplied', async () => {
    // Dynamic import so the hoisted env mock above is installed before the
    // route module first evaluates.
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: '11111111-1111-4111-8111-111111111111',
          kind: 'berth_inquiry',
          payload: {},
        },
        // A plausible secret header must NOT bypass the unconfigured check.
        { 'x-webhook-secret': 'anything-here-doesnt-matter' },
      ),
    );
    expect(res.status).toBe(503);
    const body = await res.json();
    expect(body.error).toMatch(/not configured/i);
  });
});

View File

@@ -0,0 +1,274 @@
/**
 * /api/public/website-inquiries route — unit tests.
 *
 * Asserts:
 * 1. Auth: rejects missing/wrong X-Webhook-Secret with 401.
 * 2. Validation: rejects malformed payloads (bad UUID, unknown kind,
 *    unknown port) with 400.
 * 3. Idempotency: a repeat submission_id returns the existing row id
 *    instead of inserting a duplicate.
 *
 * Uses HOISTED `vi.mock` (top of file) so mock state is established
 * before any module loads. Earlier attempts with `vi.doMock` inside
 * beforeEach proved flaky under parallel test-file execution because
 * other files' mocks leaked into ours via the shared module cache.
 *
 * The 503 path (WEBSITE_INTAKE_SECRET unset) is covered by a dedicated
 * sibling test file that mocks `@/lib/env` as an empty object —
 * exercising it here would conflict with this file's hoisted env mock,
 * which must provide the secret for every other test.
 */
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
// Well-formed v4-style UUID so zod's .uuid() validator accepts it.
const VALID_UUID = '11111111-1111-4111-8111-111111111111';
// 16+ characters — matches the env schema's min-length constraint.
const SECRET = 'test-secret-at-least-16-chars-long';
// ─── Mock state — module-scoped so test-level mutations are visible ────
interface MockState {
  // Result of the route's port-lookup SELECT; empty array = unknown port.
  portRow: Array<{ id: string }>;
  // Result of the existing-submission SELECT; non-empty simulates a
  // submission_id conflict (dedup path).
  existingRow: Array<{ id: string }>;
  // Rows the mocked insert "wrote" — asserted on by the capture tests.
  inserted: Array<Record<string, unknown>>;
  // Toggled to false to drive the 429 rate-limit test.
  rateLimitAllowed: boolean;
  /** Counts select(...).limit(...) calls made by the route within a test.
   * Call #1 = port lookup, call #2 = existing-submission lookup. Reset
   * in beforeEach so each test starts fresh. */
  queryCount: number;
}
const state: MockState = {
  portRow: [{ id: 'port-uuid-port-nimara' }],
  existingRow: [],
  inserted: [],
  rateLimitAllowed: true,
  queryCount: 0,
};
// ─── Hoisted mocks — apply for the entire file ────────────────────────
vi.mock('@/lib/env', () => ({
  env: { WEBSITE_INTAKE_SECRET: SECRET },
}));
vi.mock('@/lib/rate-limit', () => ({
  // FIX: the route reads `rateLimiters.websiteIntake`, but this mock
  // previously only defined `publicForm`, so `undefined` was passed into
  // the mocked checkRateLimit and the suite passed by accident. Define the
  // bucket the route actually uses (publicForm kept for completeness).
  rateLimiters: {
    publicForm: { limit: 10, window: 60_000 },
    websiteIntake: { limit: 10, window: 60_000 },
  },
  checkRateLimit: vi.fn(async () => ({
    allowed: state.rateLimitAllowed,
    resetAt: Date.now() + 60_000,
  })),
}));
vi.mock('@/lib/logger', () => ({
  logger: { info: vi.fn(), warn: vi.fn(), error: vi.fn() },
}));
vi.mock('@/lib/db', () => {
  // Self-referencing chain so select().from().where().limit() all resolve
  // to the same object regardless of call order.
  const selectChain = {
    from: () => selectChain,
    where: () => selectChain,
    limit: async () => {
      // First select call in a test = port lookup; second = existing
      // submission lookup (only reached on conflict path now). Counter
      // is reset by beforeEach.
      state.queryCount += 1;
      return state.queryCount === 1 ? state.portRow : state.existingRow;
    },
  };
  // Insert chain mirrors Drizzle's `insert(...).values(...).onConflictDoNothing(...).returning()`.
  // When `state.existingRow` is non-empty (simulated existing row), the
  // returning() resolves to []; the route then falls back to the SELECT
  // for the existing row id. Otherwise returning() yields the new row.
  const insertChain = {
    values: (vals: Record<string, unknown>) => {
      const onConflictChain = {
        returning: async () => {
          if (state.existingRow.length > 0) {
            return []; // simulate conflict (no row inserted)
          }
          state.inserted.push(vals);
          return [{ id: 'generated-row-id' }];
        },
      };
      return {
        onConflictDoNothing: () => onConflictChain,
        // Backwards-compat: tests that don't go through the conflict
        // path can still call .returning() directly. Same semantics.
        returning: async () => {
          state.inserted.push(vals);
          return [{ id: 'generated-row-id' }];
        },
      };
    },
  };
  return {
    db: {
      select: () => selectChain,
      insert: () => insertChain,
    },
  };
});
// ─── Helpers ──────────────────────────────────────────────────────────
/** Build a minimal NextRequest stand-in: header lookup lowercases the
 * requested name (so fixture keys must already be lowercase) and json()
 * resolves to the supplied body. */
function makeReq(body: unknown, headers: Record<string, string> = {}) {
  const lookup = (name: string) => headers[name.toLowerCase()] ?? null;
  return {
    headers: { get: lookup },
    json: () => Promise.resolve(body),
  } as unknown as import('next/server').NextRequest;
}
// Reset the shared mock state so every test starts from a clean database
// view; afterEach clears call bookkeeping on the vi.fn() mocks.
beforeEach(() => {
  Object.assign(state, {
    portRow: [{ id: 'port-uuid-port-nimara' }],
    existingRow: [],
    inserted: [],
    rateLimitAllowed: true,
    queryCount: 0,
  });
});
afterEach(() => {
  vi.clearAllMocks();
});
// ─── Tests ────────────────────────────────────────────────────────────
describe('POST /api/public/website-inquiries — auth + capture', () => {
  it('returns 401 when the X-Webhook-Secret header is missing', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(makeReq({}));
    expect(res.status).toBe(401);
  });
  it('returns 401 when the X-Webhook-Secret header is wrong', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(makeReq({}, { 'x-webhook-secret': 'wrong-value-but-same-length-aaaa' }));
    expect(res.status).toBe(401);
  });
  it('returns 400 when the body fails validation', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        // submission_id fails zod's .uuid() check.
        { submission_id: 'not-a-uuid', kind: 'berth_inquiry', payload: {} },
        {
          'x-webhook-secret': SECRET,
        },
      ),
    );
    expect(res.status).toBe(400);
  });
  it('returns 400 when port_slug references a non-existent port', async () => {
    state.portRow = []; // port lookup returns no rows
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'berth_inquiry',
          payload: { foo: 'bar' },
          port_slug: 'no-such-port',
        },
        { 'x-webhook-secret': SECRET },
      ),
    );
    expect(res.status).toBe(400);
    const body = await res.json();
    expect(body.error).toMatch(/Unknown port/);
  });
  // Happy path: fresh submission_id inserts a row, asserting the route's
  // snake_case wire fields map to the camelCase column names.
  it('captures a fresh submission and returns its id', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'berth_inquiry',
          payload: { firstName: 'Jane', email: 'jane@example.com' },
          legacy_nocodb_id: '12345',
        },
        { 'x-webhook-secret': SECRET, 'user-agent': 'TestAgent/1.0' },
      ),
    );
    expect(res.status).toBe(200);
    const body = await res.json();
    expect(body).toEqual({ id: 'generated-row-id', deduped: false });
    expect(state.inserted).toHaveLength(1);
    const row = state.inserted[0]!;
    expect(row.submissionId).toBe(VALID_UUID);
    expect(row.kind).toBe('berth_inquiry');
    expect(row.legacyNocodbId).toBe('12345');
    expect(row.userAgent).toBe('TestAgent/1.0');
    expect(row.payload).toEqual({ firstName: 'Jane', email: 'jane@example.com' });
  });
  it('returns 200 with deduped=true when the submission_id is already on file', async () => {
    state.existingRow = [{ id: 'existing-row-id' }]; // drives the conflict path
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'contact_form',
          payload: {},
        },
        { 'x-webhook-secret': SECRET },
      ),
    );
    expect(res.status).toBe(200);
    const body = await res.json();
    expect(body).toEqual({ id: 'existing-row-id', deduped: true });
    // No duplicate row may be written on redelivery.
    expect(state.inserted).toHaveLength(0);
  });
  it('defaults port_slug to port-nimara when omitted', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'residence_inquiry',
          payload: { foo: 'bar' },
        },
        { 'x-webhook-secret': SECRET },
      ),
    );
    expect(res.status).toBe(200);
    expect(state.inserted).toHaveLength(1);
    expect(state.inserted[0]!.portId).toBe('port-uuid-port-nimara');
  });
  it('rejects unknown kinds', async () => {
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'newsletter_signup', // not in the zod enum
          payload: {},
        },
        { 'x-webhook-secret': SECRET },
      ),
    );
    expect(res.status).toBe(400);
  });
  it('returns 429 when the rate limiter trips', async () => {
    state.rateLimitAllowed = false; // mocked limiter reads this flag
    const { POST } = await import('@/app/api/public/website-inquiries/route');
    const res = await POST(
      makeReq(
        {
          submission_id: VALID_UUID,
          kind: 'berth_inquiry',
          payload: {},
        },
        { 'x-webhook-secret': SECRET },
      ),
    );
    expect(res.status).toBe(429);
  });
});