fix(audit-final): pre-merge hardening + expense receipt UI
Final audit pass on feat/berth-recommender (3 parallel Opus agents) caught 5 critical and ~12 high-severity findings. All addressed in-branch; medium/low items deferred to docs/audit-final-deferred.md. Critical: - Add filesystem-backend PUT handler at /api/storage/[token] so presigned uploads stop 405-ing in filesystem mode (every browser-driven berth-PDF + brochure upload was broken). Same token-verify + replay protection as GET, plus magic-byte gate when c=application/pdf. - Forward req.signal into streamExpensePdf so an aborted 1000-receipt export no longer keeps grinding for minutes. - Strengthen Content-Disposition filename sanitization: \s matches CR/LF which would let documentName forge headers; restrict to [\w. -]+ and add filename* RFC 5987 fallback. - Lock public berths feed behind an explicit slug allowlist instead of ?portSlug= enumeration. - Reject cross-port interest_berths upserts (defense-in-depth on top of the recommender SQL port filter). High: - Recommender: width-only feasibility now caps length via L/W ratio so a 200ft berth doesn't surface for a 30ft beam request; total_interest_count filters out junction rows whose interest is in another port. - Mooring normalization follow-up migration (0034) catches un-hyphenated padded forms (A01) the original 0024 WHERE missed. - Send-out rate limit moved AFTER validation and scoped per-(port, user) so typos don't burn a slot and a multi-port rep can't be DoS'd by another tenant. - Default-brochure path now blocks an archived row from sneaking through the partial unique index. - NocoDB import --update-snapshot honoured under --dry-run so reps can refresh the seed JSON without committing DB writes. - PDF export: orderBy desc(expenseDate); apply isNull(archivedAt) when expenseIds are passed (was bypassed); flag rate-unavailable rows with an amber footer instead of silently treating them as 1:1; skip the USD->EUR chain when source already matches target. 
- expense-form-dialog: revokeObjectURL captures the URL in the closure instead of revoking the still-displayed one; reset upload state on close. - scan/page: handleClearReceipt resets in-flight scan/upload mutations; Save disabled while upload pending. - updateExpense re-asserts receipt-or-acknowledgement at the merged row so PATCH can't slip past the create-time refine. Plus the in-progress receipt upload UI for the expense form dialog (receipt picker + "I have no receipt" checkbox + warning banner) and a noReceiptAcknowledged flag on ExpenseRow for edit-mode hydration. Includes the canonical plan doc (referenced in CLAUDE.md), the handoff prompt, and a deferred-findings index for follow-up issues. 1163/1163 vitest passing. Typecheck clean. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,29 @@
-- Audit follow-up: 0024 only normalized rows that contained a literal
-- hyphen (`A-01`), but the audit caught that legacy NocoDB exports also
-- produced un-hyphenated padded forms (`A01`). Those rows skipped the
-- 0024 rewrite and remained non-canonical, which would break the public
-- /berths/:mooringNumber lookup (the route gates on `^[A-Z]+\d+$`).
--
-- This migration re-runs the rewrite with a WHERE clause broadened to
-- catch BOTH variants:
--   - hyphenated padded ("A-01") ← redundant after 0024 but harmless
--   - un-hyphenated padded ("A01")
-- Rows already in canonical form are untouched: the second WHERE predicate
-- (`!~ '^[A-Z]+\d+$'`) excludes them outright, so regexp_replace never even
-- runs against a canonical mooring number.
-- Rewrite non-canonical mooring numbers to canonical "A1" form:
--   "A-01" / "A-1" / "A01"  ->  "A1"
-- The capture groups keep the letter prefix and the significant digits;
-- `-?0*` drops an optional hyphen and any leading zeros in between.
-- The first WHERE predicate selects every normalizable shape; the second
-- (`!~` canonical) excludes rows that are already canonical, so the UPDATE
-- touches only rows that actually change.
UPDATE berths
SET mooring_number = regexp_replace(mooring_number, '^([A-Z]+)-?0*(\d+)$', '\1\2')
WHERE mooring_number ~ '^[A-Z]+-?0*\d+$'
  AND mooring_number !~ '^[A-Z]+\d+$';
|
||||
-- Post-rewrite sanity check. Counts rows still outside canonical form
-- (e.g. lowercase prefixes or other shapes the rewrite pattern cannot
-- normalize) and emits a non-fatal NOTICE so the migration still commits
-- while flagging the residue for manual review.
DO $$
DECLARE
    bad_count integer;
BEGIN
    SELECT count(*) INTO bad_count
    FROM berths
    WHERE mooring_number !~ '^[A-Z]+\d+$';

    IF bad_count > 0 THEN
        -- NOTICE (not EXCEPTION): leftover rows are a data-quality issue,
        -- not a reason to roll back the normalization that did succeed.
        RAISE NOTICE 'Post-rewrite: % rows still do not match ^[A-Z]+\d+$ - manual review needed', bad_count;
    END IF;
END $$;
@@ -239,6 +239,13 @@
|
||||
"when": 1777948521076,
|
||||
"tag": "0033_expense_no_receipt_acknowledged",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 34,
|
||||
"version": "7",
|
||||
"when": 1778000000000,
|
||||
"tag": "0034_normalize_mooring_numbers_broaden",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -415,6 +415,14 @@ export async function recommendBerths(args: RecommendBerthsArgs): Promise<Recomm
|
||||
predicates.push(
|
||||
sql`b.length_ft::numeric <= ${interestInput.desiredLengthFt}::numeric * ${oversizeMultiplier}::numeric`,
|
||||
);
|
||||
} else if (interestInput.desiredWidthFt !== null) {
|
||||
// Width-only feasibility: cap the length using a generous L/W ratio
|
||||
// so the recommender doesn't surface a 200 ft berth for a 30 ft beam
|
||||
// request. Plan §4.4 promised an upper bound; without this branch the
|
||||
// null-length path skipped the cap entirely.
|
||||
predicates.push(
|
||||
sql`b.length_ft::numeric <= ${interestInput.desiredWidthFt}::numeric * 8::numeric * ${oversizeMultiplier}::numeric`,
|
||||
);
|
||||
}
|
||||
if (interestInput.desiredWidthFt !== null) {
|
||||
predicates.push(sql`b.width_ft::numeric >= ${interestInput.desiredWidthFt}`);
|
||||
@@ -495,7 +503,11 @@ export async function recommendBerths(args: RecommendBerthsArgs): Promise<Recomm
|
||||
-- rows reports 0 — the LEFT JOIN otherwise produces a single
|
||||
-- NULL-right-side row that COUNT(*) would tally as 1 and inflate
|
||||
-- the heat interest-count component for berths with no history.
|
||||
COUNT(ib.berth_id) AS total_interest_count,
|
||||
-- The FILTER also enforces port isolation defense-in-depth: an
|
||||
-- orphan junction row whose interest belongs to a different port
|
||||
-- (which the new cross-port guard now prevents but pre-existing
|
||||
-- data may carry) shouldn't inflate this count.
|
||||
COUNT(ib.berth_id) FILTER (WHERE i.id IS NOT NULL) AS total_interest_count,
|
||||
COUNT(*) FILTER (WHERE i.eoi_status = 'signed') AS eoi_signed_count
|
||||
FROM feasible f
|
||||
LEFT JOIN interest_berths ib ON ib.berth_id = f.id
|
||||
|
||||
@@ -225,8 +225,11 @@ async function resolveRecipientEmail(
|
||||
return primary.value;
|
||||
}
|
||||
|
||||
async function checkSendRateLimit(userId: string): Promise<void> {
|
||||
const result = await checkRateLimit(userId, {
|
||||
async function checkSendRateLimit(portId: string, userId: string): Promise<void> {
|
||||
// Per-(port, user) so a multi-port rep can't be DoS'd by another tenant
|
||||
// burning their global cap. Audit caught this — the original
|
||||
// single-key version locked a user out across every port they touched.
|
||||
const result = await checkRateLimit(`${portId}:${userId}`, {
|
||||
windowMs: 60 * 60 * 1000,
|
||||
max: 50,
|
||||
keyPrefix: 'docsend',
|
||||
@@ -369,7 +372,8 @@ async function performSend(args: {
|
||||
// ─── Public sender: berth PDF ────────────────────────────────────────────────
|
||||
|
||||
export async function sendBerthPdf(input: SendBerthPdfInput): Promise<SendResult> {
|
||||
await checkSendRateLimit(input.sentBy);
|
||||
// Rate-limit AFTER validation so a typo'd recipient or missing-PDF rep
|
||||
// doesn't burn a slot on a send that would have failed anyway.
|
||||
const recipientEmail = await resolveRecipientEmail(input.portId, input.recipient);
|
||||
|
||||
// Resolve berth + active version.
|
||||
@@ -406,6 +410,8 @@ export async function sendBerthPdf(input: SendBerthPdfInput): Promise<SendResult
|
||||
// Subject pulls in the mooring number for inbox triage.
|
||||
const subject = `Berth ${berth.mooringNumber} — spec sheet`;
|
||||
|
||||
await checkSendRateLimit(input.portId, input.sentBy);
|
||||
|
||||
return performSend({
|
||||
portId: input.portId,
|
||||
recipientEmail,
|
||||
@@ -436,7 +442,7 @@ export async function sendBerthPdf(input: SendBerthPdfInput): Promise<SendResult
|
||||
// ─── Public sender: brochure ─────────────────────────────────────────────────
|
||||
|
||||
export async function sendBrochure(input: SendBrochureInput): Promise<SendResult> {
|
||||
await checkSendRateLimit(input.sentBy);
|
||||
// Rate-limit AFTER validation (audit finding); typos shouldn't burn slots.
|
||||
const recipientEmail = await resolveRecipientEmail(input.portId, input.recipient);
|
||||
|
||||
// Resolve brochure + most-recent version.
|
||||
@@ -456,6 +462,14 @@ export async function sendBrochure(input: SendBrochureInput): Promise<SendResult
|
||||
'No default brochure configured for this port. Upload one in /admin/brochures.',
|
||||
);
|
||||
}
|
||||
// The partial unique index on `is_default` only enforces uniqueness when
|
||||
// archived_at IS NULL — an archived row can still carry is_default=true
|
||||
// and would silently be returned here without this guard.
|
||||
if (def.archivedAt) {
|
||||
throw new ValidationError(
|
||||
'Default brochure is archived. Choose a non-archived brochure as the default first.',
|
||||
);
|
||||
}
|
||||
brochureRow = def;
|
||||
}
|
||||
|
||||
@@ -488,6 +502,8 @@ export async function sendBrochure(input: SendBrochureInput): Promise<SendResult
|
||||
const bodyHtml = renderEmailBody(expanded);
|
||||
const subject = `${brochureRow.label} — brochure`;
|
||||
|
||||
await checkSendRateLimit(input.portId, input.sentBy);
|
||||
|
||||
return performSend({
|
||||
portId: input.portId,
|
||||
recipientEmail,
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
*/
|
||||
|
||||
import { Readable } from 'node:stream';
|
||||
import { eq, inArray, and, gte, lte, isNull } from 'drizzle-orm';
|
||||
import { eq, inArray, and, gte, lte, isNull, desc } from 'drizzle-orm';
|
||||
import PDFDocument from 'pdfkit';
|
||||
import sharp from 'sharp';
|
||||
|
||||
@@ -87,6 +87,13 @@ export interface ExpensePdfArgs {
|
||||
includeArchived?: boolean;
|
||||
};
|
||||
options: ExpensePdfOptions;
|
||||
/**
|
||||
* Caller's abort signal. When the client disconnects mid-stream we stop
|
||||
* pulling receipts off the storage backend rather than burning CPU/IO on
|
||||
* an export nobody's reading. Without this, a 1000-receipt export aborted
|
||||
* at byte 0 keeps the process busy for minutes.
|
||||
*/
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
|
||||
// ─── Image resize gate ──────────────────────────────────────────────────────
|
||||
@@ -170,6 +177,10 @@ interface ProcessedExpense extends ExpenseRow {
|
||||
amountTarget: number;
|
||||
amountUsdNumeric: number;
|
||||
amountEurNumeric: number;
|
||||
/** True when ANY rate lookup for this row fell back to 1:1 (e.g. the
|
||||
* exchange-rate cache was cold and the upstream API returned null).
|
||||
* Surfaced via an asterisk in the table + a footnote in the summary. */
|
||||
rateUnavailable: boolean;
|
||||
}
|
||||
|
||||
interface Totals {
|
||||
@@ -187,6 +198,10 @@ interface Totals {
|
||||
/** Sum of the no-receipt expenses' targetTotal — the amount at risk
|
||||
* of being denied reimbursement. */
|
||||
noReceiptAmount: number;
|
||||
/** Number of rows whose conversion fell back to 1:1 — surfaces as an
|
||||
* amber footer so reps know the totals are approximate. Audit caught
|
||||
* the silent 1:1 fallback; users were getting EUR-labelled USD totals. */
|
||||
rateUnavailableCount: number;
|
||||
}
|
||||
|
||||
async function processExpenses(
|
||||
@@ -194,31 +209,58 @@ async function processExpenses(
|
||||
target: TargetCurrency,
|
||||
): Promise<ProcessedExpense[]> {
|
||||
// Resolve rate ONCE per source currency (cached by getRate). Avoids the
|
||||
// legacy code's per-row API call.
|
||||
const rateCache = new Map<string, number>();
|
||||
const ensureRate = async (from: string, to: string): Promise<number> => {
|
||||
if (from === to) return 1;
|
||||
// legacy code's per-row API call. We also track *which* lookups failed
|
||||
// (returned null upstream) so the PDF can surface a warning rather than
|
||||
// silently treating EUR as USD.
|
||||
const rateCache = new Map<string, { rate: number; ok: boolean }>();
|
||||
const ensureRate = async (from: string, to: string): Promise<{ rate: number; ok: boolean }> => {
|
||||
if (from === to) return { rate: 1, ok: true };
|
||||
const key = `${from}->${to}`;
|
||||
if (rateCache.has(key)) return rateCache.get(key)!;
|
||||
const rate = (await getRate(from, to)) ?? 1;
|
||||
rateCache.set(key, rate);
|
||||
return rate;
|
||||
const fetched = await getRate(from, to);
|
||||
const entry = fetched != null ? { rate: fetched, ok: true } : { rate: 1, ok: false };
|
||||
rateCache.set(key, entry);
|
||||
if (!entry.ok) {
|
||||
logger.warn({ from, to }, 'Expense PDF: exchange rate unavailable, falling back to 1:1');
|
||||
}
|
||||
return entry;
|
||||
};
|
||||
|
||||
const out: ProcessedExpense[] = [];
|
||||
for (const row of rows) {
|
||||
const raw = parseFloat(row.amount);
|
||||
const usd =
|
||||
row.amountUsd != null
|
||||
? parseFloat(row.amountUsd)
|
||||
: raw * (await ensureRate(row.currency.toUpperCase(), 'USD'));
|
||||
const eur = usd * (await ensureRate('USD', 'EUR'));
|
||||
let rateUnavailable = false;
|
||||
let usd: number;
|
||||
if (row.amountUsd != null) {
|
||||
usd = parseFloat(row.amountUsd);
|
||||
} else if (row.currency.toUpperCase() === 'USD') {
|
||||
usd = raw;
|
||||
} else {
|
||||
const { rate, ok } = await ensureRate(row.currency.toUpperCase(), 'USD');
|
||||
usd = raw * rate;
|
||||
if (!ok) rateUnavailable = true;
|
||||
}
|
||||
|
||||
// Skip the USD->EUR chain when the source already matches the target —
|
||||
// every redundant rate lookup adds rounding noise on top of the network
|
||||
// round-trip. EUR-source + EUR-target should land back exactly at the
|
||||
// input amount, not raw * USD-rate * USD-rate-inverse.
|
||||
let eur: number;
|
||||
if (row.currency.toUpperCase() === 'EUR') {
|
||||
eur = raw;
|
||||
} else {
|
||||
const { rate, ok } = await ensureRate('USD', 'EUR');
|
||||
eur = usd * rate;
|
||||
if (!ok) rateUnavailable = true;
|
||||
}
|
||||
|
||||
const targetVal = target === 'USD' ? usd : eur;
|
||||
out.push({
|
||||
...row,
|
||||
amountUsdNumeric: usd,
|
||||
amountEurNumeric: eur,
|
||||
amountTarget: targetVal,
|
||||
rateUnavailable,
|
||||
});
|
||||
}
|
||||
return out;
|
||||
@@ -234,6 +276,7 @@ function computeTotals(
|
||||
const eurTotal = rows.reduce((s, r) => s + r.amountEurNumeric, 0);
|
||||
const processingFee = includeProcessingFee ? targetTotal * 0.05 : 0;
|
||||
const receiptlessRows = rows.filter((r) => r.noReceiptAcknowledged);
|
||||
const rateUnavailableCount = rows.reduce((n, r) => n + (r.rateUnavailable ? 1 : 0), 0);
|
||||
return {
|
||||
count: rows.length,
|
||||
targetTotal,
|
||||
@@ -244,6 +287,7 @@ function computeTotals(
|
||||
targetCurrency: target,
|
||||
noReceiptCount: receiptlessRows.length,
|
||||
noReceiptAmount: receiptlessRows.reduce((s, r) => s + r.amountTarget, 0),
|
||||
rateUnavailableCount,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -311,12 +355,17 @@ function groupRows(
|
||||
|
||||
async function fetchExpenseRows(args: ExpensePdfArgs): Promise<ExpenseRow[]> {
|
||||
const conditions = [eq(expenses.portId, args.portId)];
|
||||
// Soft-delete filter applies regardless of which path produced the
|
||||
// expense list. The audit caught a regression where an `expenseIds`
|
||||
// selection would happily export archived rows because the
|
||||
// `isNull(archivedAt)` predicate sat inside the `else` branch — that
|
||||
// violates the soft-delete contract used everywhere else.
|
||||
if (!args.filter?.includeArchived) {
|
||||
conditions.push(isNull(expenses.archivedAt));
|
||||
}
|
||||
if (args.expenseIds?.length) {
|
||||
conditions.push(inArray(expenses.id, args.expenseIds));
|
||||
} else {
|
||||
if (!args.filter?.includeArchived) {
|
||||
conditions.push(isNull(expenses.archivedAt));
|
||||
}
|
||||
if (args.filter?.dateFrom) {
|
||||
conditions.push(
|
||||
gte(
|
||||
@@ -359,7 +408,7 @@ async function fetchExpenseRows(args: ExpensePdfArgs): Promise<ExpenseRow[]> {
|
||||
})
|
||||
.from(expenses)
|
||||
.where(and(...conditions))
|
||||
.orderBy(expenses.expenseDate);
|
||||
.orderBy(desc(expenses.expenseDate));
|
||||
return rows as ExpenseRow[];
|
||||
}
|
||||
|
||||
@@ -474,7 +523,10 @@ export async function streamExpensePdf(
|
||||
if (opts.includeDetails) addExpenseTable(doc, processed, opts);
|
||||
|
||||
if (opts.includeReceipts) {
|
||||
await addReceiptPages(doc, processed, filesById, opts);
|
||||
await addReceiptPages(doc, processed, filesById, {
|
||||
targetCurrency: opts.targetCurrency,
|
||||
signal: args.signal,
|
||||
});
|
||||
}
|
||||
|
||||
addFooter(doc);
|
||||
@@ -485,7 +537,10 @@ export async function streamExpensePdf(
|
||||
}
|
||||
})();
|
||||
|
||||
const safeName = opts.documentName.replace(/[^a-zA-Z0-9-_\s]/g, '_').trim() || 'expenses';
|
||||
// `\s` includes CR/LF; using it lets a malicious documentName forge
|
||||
// additional response headers via Content-Disposition. Restrict to
|
||||
// word/dot/dash/space (single-line space only — \s would let \n through).
|
||||
const safeName = opts.documentName.replace(/[^\w. \-]+/g, '_').trim() || 'expenses';
|
||||
return {
|
||||
stream: webStream,
|
||||
suggestedFilename: `${safeName}.pdf`,
|
||||
@@ -540,7 +595,19 @@ function addSummaryBox(
|
||||
]
|
||||
: [];
|
||||
|
||||
const boxHeight = (lines.length + warningLines.length) * 16 + 20;
|
||||
// Second warning band: any row whose currency conversion fell back to
|
||||
// 1:1 because the upstream rate was unavailable. Without this surface,
|
||||
// an EUR-source row would appear as `targetCurrency=EUR, amount=USD`
|
||||
// and reps would never know the totals are wrong.
|
||||
const showRateWarning = totals.rateUnavailableCount > 0;
|
||||
const rateWarningLines = showRateWarning
|
||||
? [
|
||||
`Note: ${totals.rateUnavailableCount} expense${totals.rateUnavailableCount === 1 ? '' : 's'} could not be converted (rate unavailable);`,
|
||||
`the displayed amount${totals.rateUnavailableCount === 1 ? ' is' : 's are'} approximate (1:1 fallback).`,
|
||||
]
|
||||
: [];
|
||||
|
||||
const boxHeight = (lines.length + warningLines.length + rateWarningLines.length) * 16 + 20;
|
||||
doc
|
||||
.rect(60, lineY, doc.page.width - 120, boxHeight)
|
||||
.fillColor('#f5f5f5')
|
||||
@@ -561,6 +628,14 @@ function addSummaryBox(
|
||||
}
|
||||
doc.fillColor('#000000').font('Helvetica');
|
||||
}
|
||||
if (showRateWarning) {
|
||||
doc.fillColor('#92400e').font('Helvetica-Oblique');
|
||||
for (const line of rateWarningLines) {
|
||||
doc.text(line, 75, y);
|
||||
y += 16;
|
||||
}
|
||||
doc.fillColor('#000000').font('Helvetica');
|
||||
}
|
||||
doc.y = lineY + boxHeight + 12;
|
||||
}
|
||||
|
||||
@@ -695,7 +770,7 @@ async function addReceiptPages(
|
||||
doc: PDFKit.PDFDocument,
|
||||
rows: ProcessedExpense[],
|
||||
filesById: Map<string, ResolvedFile>,
|
||||
opts: { targetCurrency: TargetCurrency },
|
||||
opts: { targetCurrency: TargetCurrency; signal?: AbortSignal },
|
||||
) {
|
||||
const expensesWithReceipts = rows.filter(
|
||||
(r) => Array.isArray(r.receiptFileIds) && r.receiptFileIds.length > 0,
|
||||
@@ -715,6 +790,17 @@ async function addReceiptPages(
|
||||
|
||||
for (const expense of expensesWithReceipts) {
|
||||
for (const fileId of expense.receiptFileIds ?? []) {
|
||||
// Bail out the moment the client disconnects. Without this, an
|
||||
// export aborted on the wire would keep grinding through the
|
||||
// remaining receipts and only stop when the doc.end() write
|
||||
// failed — minutes later for a 1000-row export.
|
||||
if (opts.signal?.aborted) {
|
||||
logger.info(
|
||||
{ receiptCounter, totalReceipts },
|
||||
'Expense PDF stream aborted by client; halting receipt loop',
|
||||
);
|
||||
return;
|
||||
}
|
||||
receiptCounter += 1;
|
||||
const file = filesById.get(fileId);
|
||||
if (!file) {
|
||||
|
||||
@@ -156,6 +156,24 @@ export async function updateExpense(
|
||||
) {
|
||||
const existing = await getExpenseById(id, portId);
|
||||
|
||||
// The create-time validator enforces "receipt OR no-receipt-ack" via
|
||||
// `.refine`, but `updateExpenseSchema` is `.partial()` so the rule is
|
||||
// dropped. Re-assert it here against the merged (existing + patch)
|
||||
// shape so a PATCH can't slip an unexplained receipt-less expense
|
||||
// past the create-time guard.
|
||||
const mergedReceiptIds =
|
||||
data.receiptFileIds !== undefined ? data.receiptFileIds : existing.receiptFileIds;
|
||||
const mergedAck =
|
||||
data.noReceiptAcknowledged !== undefined
|
||||
? data.noReceiptAcknowledged
|
||||
: existing.noReceiptAcknowledged;
|
||||
const hasReceipts = Array.isArray(mergedReceiptIds) && mergedReceiptIds.length > 0;
|
||||
if (!hasReceipts && !mergedAck) {
|
||||
throw new ConflictError(
|
||||
'Expense must either link a receipt file or acknowledge the no-receipt warning.',
|
||||
);
|
||||
}
|
||||
|
||||
const updateData: Record<string, unknown> = { ...data, updatedAt: new Date() };
|
||||
|
||||
// Re-convert to USD if amount or currency changed
|
||||
|
||||
@@ -19,8 +19,9 @@
|
||||
import { and, desc, eq, inArray } from 'drizzle-orm';
|
||||
|
||||
import { db } from '@/lib/db';
|
||||
import { interestBerths, type InterestBerth } from '@/lib/db/schema/interests';
|
||||
import { interestBerths, interests, type InterestBerth } from '@/lib/db/schema/interests';
|
||||
import { berths } from '@/lib/db/schema/berths';
|
||||
import { ValidationError } from '@/lib/errors';
|
||||
|
||||
type DbOrTx = typeof db | Parameters<Parameters<typeof db.transaction>[0]>[0];
|
||||
|
||||
@@ -197,6 +198,26 @@ export async function upsertInterestBerthTx(
|
||||
berthId: string,
|
||||
opts: AddOrUpdateOpts = {},
|
||||
): Promise<InterestBerth> {
|
||||
// Cross-port guard. The junction is silently multi-port-shaped (it has
|
||||
// no port_id of its own — it inherits via the FKs) so a caller wiring
|
||||
// an interest from one port to a berth from another would corrupt the
|
||||
// recommender + public-berth aggregates with phantom rows. We assert
|
||||
// both rows live in the same port BEFORE inserting; if either side is
|
||||
// missing, the FK constraint will surface that on insert.
|
||||
const sides = await tx
|
||||
.select({
|
||||
interestPortId: interests.portId,
|
||||
berthPortId: berths.portId,
|
||||
})
|
||||
.from(interests)
|
||||
.innerJoin(berths, eq(berths.id, berthId))
|
||||
.where(eq(interests.id, interestId))
|
||||
.limit(1);
|
||||
const side = sides[0];
|
||||
if (side && side.interestPortId !== side.berthPortId) {
|
||||
throw new ValidationError('Cannot link an interest and a berth from different ports.');
|
||||
}
|
||||
|
||||
if (opts.isPrimary === true) {
|
||||
await tx
|
||||
.update(interestBerths)
|
||||
|
||||
Reference in New Issue
Block a user