fix(security): scope berth-pdf service entrypoints by portId
Post-merge security review caught a cross-tenant authorization bypass
in the per-berth PDF endpoints (HIGH severity, confidence 10):
GET /api/v1/berths/[id]/pdf-versions
POST /api/v1/berths/[id]/pdf-versions
POST /api/v1/berths/[id]/pdf-upload-url
POST /api/v1/berths/[id]/pdf-versions/[versionId]/rollback
POST /api/v1/berths/[id]/pdf-versions/parse-results/apply
Each handler looked up the target berth by id only — `eq(berths.id, ...)`.
withAuth resolves ctx.portId from the user-controlled X-Port-Id header
(only verifying the user has SOME role on that port), and
withPermission('berths', 'view'|'edit', ...) is a coarse capability
check, not a row-level grant. A rep with berths:edit on Port A could
supply a Port B berth UUID and:
- list + receive 15-min presigned download URLs to every PDF version
- mint an upload URL targeting `berths/<port-B-id>/uploads/...`
- POST a new version (overwriting current_pdf_version_id on foreign berth)
- rollback to any prior version on a foreign berth
- apply rep-confirmed parse-result fields onto a foreign berth's columns
Sibling routes (waiting-list etc.) already pair the id filter with
`eq(berths.portId, ctx.portId)`, so this was an omission, not design.
Fix:
- Push `portId: string` into uploadBerthPdf, listBerthPdfVersions,
rollbackToVersion, applyParseResults, reconcilePdfWithBerth.
- Each function now filters the berth lookup with
`and(eq(berths.id, ...), eq(berths.portId, portId))` and throws
NotFoundError on mismatch (no foreign-port disclosure).
- Inline the same `and(...)` filter in the pdf-upload-url handler.
- Every handler passes ctx.portId through.
Coverage:
- New `cross-port tenant guard` test exercises every entrypoint with a
foreign-port id and asserts NotFoundError.
- 1164/1164 vitest passing. Typecheck clean.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -16,6 +16,7 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
import {
|
||||
applyParseResults,
|
||||
listBerthPdfVersions,
|
||||
reconcilePdfWithBerth,
|
||||
rollbackToVersion,
|
||||
uploadBerthPdf,
|
||||
@@ -69,6 +70,7 @@ describe('uploadBerthPdf', () => {
|
||||
|
||||
const result = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'A1.pdf',
|
||||
uploadedBy: 'test-user',
|
||||
@@ -94,6 +96,7 @@ describe('uploadBerthPdf', () => {
|
||||
await expect(
|
||||
uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: Buffer.from('not a pdf at all'),
|
||||
fileName: 'spoof.pdf',
|
||||
uploadedBy: 'test-user',
|
||||
@@ -106,12 +109,14 @@ describe('uploadBerthPdf', () => {
|
||||
const berth = await makeBerth({ portId: port.id });
|
||||
await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'v1.pdf',
|
||||
uploadedBy: 'test',
|
||||
});
|
||||
const second = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'v2.pdf',
|
||||
uploadedBy: 'test',
|
||||
@@ -127,15 +132,19 @@ describe('reconcilePdfWithBerth', () => {
|
||||
portId: port.id,
|
||||
overrides: { mooringNumber: 'A1', lengthFt: null, bowFacing: null },
|
||||
});
|
||||
const result = await reconcilePdfWithBerth(berth.id, {
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 200, confidence: 0.9, engine: 'ocr' },
|
||||
bowFacing: { value: 'East', confidence: 0.9, engine: 'ocr' },
|
||||
const result = await reconcilePdfWithBerth(
|
||||
berth.id,
|
||||
{
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 200, confidence: 0.9, engine: 'ocr' },
|
||||
bowFacing: { value: 'East', confidence: 0.9, engine: 'ocr' },
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
});
|
||||
port.id,
|
||||
);
|
||||
const fields = result.autoApplied.map((a) => a.field).sort();
|
||||
expect(fields).toEqual(['bowFacing', 'lengthFt']);
|
||||
expect(result.conflicts).toHaveLength(0);
|
||||
@@ -147,15 +156,19 @@ describe('reconcilePdfWithBerth', () => {
|
||||
portId: port.id,
|
||||
overrides: { mooringNumber: 'A1', lengthFt: '100', bowFacing: 'West' },
|
||||
});
|
||||
const result = await reconcilePdfWithBerth(berth.id, {
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 200, confidence: 0.8, engine: 'ocr' },
|
||||
bowFacing: { value: 'East', confidence: 0.8, engine: 'ocr' },
|
||||
const result = await reconcilePdfWithBerth(
|
||||
berth.id,
|
||||
{
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 200, confidence: 0.8, engine: 'ocr' },
|
||||
bowFacing: { value: 'East', confidence: 0.8, engine: 'ocr' },
|
||||
},
|
||||
meanConfidence: 0.8,
|
||||
warnings: [],
|
||||
},
|
||||
meanConfidence: 0.8,
|
||||
warnings: [],
|
||||
});
|
||||
port.id,
|
||||
);
|
||||
expect(result.conflicts.map((c) => c.field).sort()).toEqual(['bowFacing', 'lengthFt']);
|
||||
});
|
||||
|
||||
@@ -165,14 +178,18 @@ describe('reconcilePdfWithBerth', () => {
|
||||
portId: port.id,
|
||||
overrides: { mooringNumber: 'A1', lengthFt: '200' },
|
||||
});
|
||||
const result = await reconcilePdfWithBerth(berth.id, {
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 201, confidence: 0.9, engine: 'ocr' }, // +0.5%
|
||||
const result = await reconcilePdfWithBerth(
|
||||
berth.id,
|
||||
{
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
lengthFt: { value: 201, confidence: 0.9, engine: 'ocr' }, // +0.5%
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
});
|
||||
port.id,
|
||||
);
|
||||
expect(result.conflicts).toHaveLength(0);
|
||||
expect(result.autoApplied).toHaveLength(0);
|
||||
});
|
||||
@@ -183,14 +200,18 @@ describe('reconcilePdfWithBerth', () => {
|
||||
portId: port.id,
|
||||
overrides: { mooringNumber: 'A1' },
|
||||
});
|
||||
const result = await reconcilePdfWithBerth(berth.id, {
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
mooringNumber: { value: 'B5', confidence: 0.9, engine: 'ocr' },
|
||||
const result = await reconcilePdfWithBerth(
|
||||
berth.id,
|
||||
{
|
||||
engine: 'ocr',
|
||||
fields: {
|
||||
mooringNumber: { value: 'B5', confidence: 0.9, engine: 'ocr' },
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
},
|
||||
meanConfidence: 0.9,
|
||||
warnings: [],
|
||||
});
|
||||
port.id,
|
||||
);
|
||||
expect(result.warnings.some((w) => /B5/.test(w) && /A1/.test(w))).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -204,18 +225,24 @@ describe('applyParseResults', () => {
|
||||
});
|
||||
const upload = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'A1.pdf',
|
||||
uploadedBy: 'test',
|
||||
});
|
||||
|
||||
await applyParseResults(berth.id, upload.versionId, {
|
||||
lengthFt: 200,
|
||||
bowFacing: 'East',
|
||||
// unknown / non-allowlisted column should be silently dropped:
|
||||
// @ts-expect-error — testing the allowlist
|
||||
hackThePlanet: 'pwn',
|
||||
});
|
||||
await applyParseResults(
|
||||
berth.id,
|
||||
upload.versionId,
|
||||
{
|
||||
lengthFt: 200,
|
||||
bowFacing: 'East',
|
||||
// unknown / non-allowlisted column should be silently dropped:
|
||||
// @ts-expect-error — testing the allowlist
|
||||
hackThePlanet: 'pwn',
|
||||
},
|
||||
port.id,
|
||||
);
|
||||
|
||||
const refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
|
||||
expect(refreshed?.lengthFt).toBe('200');
|
||||
@@ -236,12 +263,14 @@ describe('rollbackToVersion', () => {
|
||||
const berth = await makeBerth({ portId: port.id });
|
||||
const v1 = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'v1.pdf',
|
||||
uploadedBy: 'test',
|
||||
});
|
||||
const v2 = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'v2.pdf',
|
||||
uploadedBy: 'test',
|
||||
@@ -250,7 +279,7 @@ describe('rollbackToVersion', () => {
|
||||
let refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
|
||||
expect(refreshed?.currentPdfVersionId).toBe(v2.versionId);
|
||||
|
||||
const result = await rollbackToVersion(berth.id, v1.versionId);
|
||||
const result = await rollbackToVersion(berth.id, v1.versionId, port.id);
|
||||
expect(result.versionNumber).toBe(1);
|
||||
|
||||
refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
|
||||
@@ -262,10 +291,56 @@ describe('rollbackToVersion', () => {
|
||||
const berth = await makeBerth({ portId: port.id });
|
||||
const v1 = await uploadBerthPdf({
|
||||
berthId: berth.id,
|
||||
portId: port.id,
|
||||
buffer: fakePdf(),
|
||||
fileName: 'v1.pdf',
|
||||
uploadedBy: 'test',
|
||||
});
|
||||
await expect(rollbackToVersion(berth.id, v1.versionId)).rejects.toThrow(/already current/);
|
||||
await expect(rollbackToVersion(berth.id, v1.versionId, port.id)).rejects.toThrow(
|
||||
/already current/,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// Regression suite for the cross-tenant authorization bypass described in the
// commit message: every berth-pdf service entrypoint must scope its berth
// lookup by portId and throw NotFoundError (message matching /Berth/) when the
// berth belongs to a different port — with no disclosure that the foreign
// berth exists.
describe('cross-port tenant guard', () => {
  it('rejects every berth-pdf service call when berthId belongs to a different port', async () => {
    const portA = await makePort();
    const portB = await makePort();
    const berthA = await makeBerth({ portId: portA.id });

    // Seed a version under port A so list/apply/rollback have something
    // they could in theory return.
    const v1 = await uploadBerthPdf({
      berthId: berthA.id,
      portId: portA.id,
      buffer: fakePdf(),
      fileName: 'A.pdf',
      uploadedBy: 'test',
    });

    // Port B caller passing port A's berth id must hit NotFoundError on
    // every entrypoint — including read-only listing, which previously
    // returned 15-min presigned download URLs to the foreign port's PDFs.
    await expect(listBerthPdfVersions(berthA.id, portB.id)).rejects.toThrow(/Berth/);
    await expect(rollbackToVersion(berthA.id, v1.versionId, portB.id)).rejects.toThrow(/Berth/);
    await expect(
      applyParseResults(berthA.id, v1.versionId, { lengthFt: 99 }, portB.id),
    ).rejects.toThrow(/Berth/);
    await expect(
      uploadBerthPdf({
        berthId: berthA.id,
        portId: portB.id,
        buffer: fakePdf(),
        fileName: 'B-cross.pdf',
        uploadedBy: 'test',
      }),
    ).rejects.toThrow(/Berth/);
    await expect(
      reconcilePdfWithBerth(
        berthA.id,
        { engine: 'ocr', fields: {}, meanConfidence: 1, warnings: [] },
        portB.id,
      ),
    ).rejects.toThrow(/Berth/);
  });
});
|
||||
|
||||
Reference in New Issue
Block a user