/**
 * Integration tests for the per-berth PDF service (Phase 6b).
 *
 * Covers:
 * - uploadBerthPdf creates a row + bumps the berth pointer.
 * - Magic-byte rejection deletes the storage object.
 * - reconcilePdfWithBerth classifies CRM-null → autoApplied, mismatch →
 *   conflicts, and respects the ±1% numeric tolerance.
 * - Mooring-number mismatch surfaces as a warning (§14.6).
 * - applyParseResults writes only allowlisted fields.
 * - rollbackToVersion flips the current pointer without re-parsing.
 * - Cross-port tenant guard: every entrypoint rejects a foreign-port berthId.
 */
import { eq } from 'drizzle-orm';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';

import {
  applyParseResults,
  listBerthPdfVersions,
  reconcilePdfWithBerth,
  rollbackToVersion,
  uploadBerthPdf,
} from '@/lib/services/berth-pdf.service';
import type { ParseResult } from '@/lib/services/berth-pdf-parser';
import { db } from '@/lib/db';
import { berths, berthPdfVersions } from '@/lib/db/schema/berths';
import { systemSettings } from '@/lib/db/schema/system';
import { makeBerth, makePort } from '../helpers/factories';

// Seed the global `storage_backend` setting with the filesystem default so
// these tests are deterministic when run in isolation.
// NOTE(review): the previous comment said this "drops" the row, but the code
// *inserts* one — and `.onConflictDoNothing()` means a value written by an
// earlier suite is kept, not replaced. Confirm that is intended; if the tests
// must force `filesystem`, this should be an upsert or a delete-then-insert.
beforeEach(async () => {
  await db
    .insert(systemSettings)
    .values({
      key: 'storage_backend',
      value: 'filesystem',
      portId: null,
      updatedBy: null,
    })
    .onConflictDoNothing();
});

afterEach(async () => {
  // No file cleanup needed — the filesystem backend writes to a tmp root.
});

/**
 * Returns the smallest buffer the service's magic-byte check accepts:
 * a `%PDF-1.7` header followed by 64 bytes of padding (spaces).
 */
function fakePdf(): Buffer {
  return Buffer.concat([Buffer.from('%PDF-1.7\n'), Buffer.alloc(64, 0x20)]);
}

/**
 * A canned OCR ParseResult with two high-confidence fields, used by the
 * upload tests that need a pre-attached parse payload.
 */
function parseResult(): ParseResult {
  return {
    engine: 'ocr',
    fields: {
      lengthFt: { value: 200, confidence: 0.9, engine: 'ocr' },
      bowFacing: { value: 'East', confidence: 0.9, engine: 'ocr' },
    },
    meanConfidence: 0.9,
    warnings: [],
  };
}

describe('uploadBerthPdf', () => {
  it('writes a version and updates currentPdfVersionId', async () => {
    const port = await makePort();
    const berth = await makeBerth({ portId: port.id });

    const result = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'A1.pdf',
      uploadedBy: 'test-user',
      parseResult: parseResult(),
    });

    expect(result.versionNumber).toBe(1);
    // versionId is expected to be a UUID (36 chars of hex + hyphens).
    expect(result.versionId).toMatch(/^[0-9a-f-]{36}$/);

    // The berth row's pointer must now reference the new version…
    const refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
    expect(refreshed?.currentPdfVersionId).toBe(result.versionId);

    // …and the version row itself must carry the metadata + parse payload.
    const versionRow = await db.query.berthPdfVersions.findFirst({
      where: eq(berthPdfVersions.id, result.versionId),
    });
    expect(versionRow?.versionNumber).toBe(1);
    expect(versionRow?.fileName).toBe('A1.pdf');
    expect((versionRow?.parseResults as { engine: string }).engine).toBe('ocr');
  });

  it('rejects a buffer that fails the magic-byte check', async () => {
    const port = await makePort();
    const berth = await makeBerth({ portId: port.id });

    await expect(
      uploadBerthPdf({
        berthId: berth.id,
        portId: port.id,
        buffer: Buffer.from('not a pdf at all'),
        fileName: 'spoof.pdf',
        uploadedBy: 'test-user',
      }),
      // Plain-text user message replaced "magic-byte" wording; assert the
      // stable error code instead so this test survives copy edits.
    ).rejects.toMatchObject({ code: 'BERTHS_PDF_MAGIC_BYTE' });
  });

  it('increments versionNumber on the second upload', async () => {
    const port = await makePort();
    const berth = await makeBerth({ portId: port.id });

    await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'v1.pdf',
      uploadedBy: 'test',
    });
    const second = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'v2.pdf',
      uploadedBy: 'test',
    });
    expect(second.versionNumber).toBe(2);
  });
});

describe('reconcilePdfWithBerth', () => {
  it('auto-applies fields where the CRM column is null', async () => {
    const port = await makePort();
    const berth = await makeBerth({
      portId: port.id,
      overrides: { mooringNumber: 'A1', lengthFt: null, bowFacing: null },
    });

    const result = await reconcilePdfWithBerth(
      berth.id,
      {
        engine: 'ocr',
        fields: {
          lengthFt: { value: 200, confidence: 0.9, engine: 'ocr' },
          bowFacing: { value: 'East', confidence: 0.9, engine: 'ocr' },
        },
        meanConfidence: 0.9,
        warnings: [],
      },
      port.id,
    );

    const fields = result.autoApplied.map((a) => a.field).sort();
    expect(fields).toEqual(['bowFacing', 'lengthFt']);
    expect(result.conflicts).toHaveLength(0);
  });

  it('flags conflicts when CRM and PDF disagree on a non-null value', async () => {
    const port = await makePort();
    const berth = await makeBerth({
      portId: port.id,
      overrides: { mooringNumber: 'A1', lengthFt: '100', bowFacing: 'West' },
    });

    const result = await reconcilePdfWithBerth(
      berth.id,
      {
        engine: 'ocr',
        fields: {
          lengthFt: { value: 200, confidence: 0.8, engine: 'ocr' },
          bowFacing: { value: 'East', confidence: 0.8, engine: 'ocr' },
        },
        meanConfidence: 0.8,
        warnings: [],
      },
      port.id,
    );

    expect(result.conflicts.map((c) => c.field).sort()).toEqual(['bowFacing', 'lengthFt']);
  });

  it('treats a 0.5% numeric difference as equal (±1% tolerance)', async () => {
    const port = await makePort();
    const berth = await makeBerth({
      portId: port.id,
      overrides: { mooringNumber: 'A1', lengthFt: '200' },
    });

    const result = await reconcilePdfWithBerth(
      berth.id,
      {
        engine: 'ocr',
        fields: {
          lengthFt: { value: 201, confidence: 0.9, engine: 'ocr' }, // +0.5%
        },
        meanConfidence: 0.9,
        warnings: [],
      },
      port.id,
    );

    // Within tolerance: neither a conflict nor an auto-apply.
    expect(result.conflicts).toHaveLength(0);
    expect(result.autoApplied).toHaveLength(0);
  });

  it('warns when the PDF mooring number does not match the berth', async () => {
    const port = await makePort();
    const berth = await makeBerth({
      portId: port.id,
      overrides: { mooringNumber: 'A1' },
    });

    const result = await reconcilePdfWithBerth(
      berth.id,
      {
        engine: 'ocr',
        fields: {
          mooringNumber: { value: 'B5', confidence: 0.9, engine: 'ocr' },
        },
        meanConfidence: 0.9,
        warnings: [],
      },
      port.id,
    );

    // The warning text must name both the PDF value and the berth's value.
    expect(result.warnings.some((w) => /B5/.test(w) && /A1/.test(w))).toBe(true);
  });
});

describe('applyParseResults', () => {
  it('updates only allowlisted fields and stamps appliedFields onto the version', async () => {
    const port = await makePort();
    const berth = await makeBerth({
      portId: port.id,
      overrides: { mooringNumber: 'A1', lengthFt: null, bowFacing: null },
    });
    const upload = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'A1.pdf',
      uploadedBy: 'test',
    });

    await applyParseResults(
      berth.id,
      upload.versionId,
      {
        lengthFt: 200,
        bowFacing: 'East',
        // unknown / non-allowlisted column should be silently dropped:
        // @ts-expect-error — testing the allowlist
        hackThePlanet: 'pwn',
      },
      port.id,
    );

    // lengthFt comes back as a string — presumably a numeric/decimal column
    // that drizzle surfaces as string; the same convention is used by the
    // factory overrides above.
    const refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
    expect(refreshed?.lengthFt).toBe('200');
    expect(refreshed?.bowFacing).toBe('East');

    const versionRow = await db.query.berthPdfVersions.findFirst({
      where: eq(berthPdfVersions.id, upload.versionId),
    });
    const applied = (versionRow?.parseResults as { appliedFields?: string[] }).appliedFields;
    expect(applied).toEqual(expect.arrayContaining(['lengthFt', 'bowFacing']));
    expect(applied).not.toContain('hackThePlanet');
  });
});

describe('rollbackToVersion', () => {
  it('flips current_pdf_version_id to the requested version without re-parsing', async () => {
    const port = await makePort();
    const berth = await makeBerth({ portId: port.id });
    const v1 = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'v1.pdf',
      uploadedBy: 'test',
    });
    const v2 = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'v2.pdf',
      uploadedBy: 'test',
    });

    // Sanity: the second upload is current before the rollback.
    let refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
    expect(refreshed?.currentPdfVersionId).toBe(v2.versionId);

    const result = await rollbackToVersion(berth.id, v1.versionId, port.id);
    expect(result.versionNumber).toBe(1);

    refreshed = await db.query.berths.findFirst({ where: eq(berths.id, berth.id) });
    expect(refreshed?.currentPdfVersionId).toBe(v1.versionId);
  });

  it('refuses to roll back to the already-current version', async () => {
    const port = await makePort();
    const berth = await makeBerth({ portId: port.id });
    const v1 = await uploadBerthPdf({
      berthId: berth.id,
      portId: port.id,
      buffer: fakePdf(),
      fileName: 'v1.pdf',
      uploadedBy: 'test',
    });

    // Plain-text user message replaced "already current" wording; assert
    // the stable error code instead.
    await expect(rollbackToVersion(berth.id, v1.versionId, port.id)).rejects.toMatchObject({
      code: 'BERTHS_VERSION_ALREADY_CURRENT',
    });
  });
});

describe('cross-port tenant guard', () => {
  it('rejects every berth-pdf service call when berthId belongs to a different port', async () => {
    const portA = await makePort();
    const portB = await makePort();
    const berthA = await makeBerth({ portId: portA.id });

    // Seed a version under port A so list/apply/rollback have something
    // they could in theory return.
    const v1 = await uploadBerthPdf({
      berthId: berthA.id,
      portId: portA.id,
      buffer: fakePdf(),
      fileName: 'A.pdf',
      uploadedBy: 'test',
    });

    // Port B caller passing port A's berth id must hit NotFoundError on
    // every entrypoint — including read-only listing, which previously
    // returned 15-min presigned download URLs to the foreign port's PDFs.
    await expect(listBerthPdfVersions(berthA.id, portB.id)).rejects.toThrow(/berth/i);
    await expect(rollbackToVersion(berthA.id, v1.versionId, portB.id)).rejects.toThrow(/berth/i);
    await expect(
      applyParseResults(berthA.id, v1.versionId, { lengthFt: 99 }, portB.id),
    ).rejects.toThrow(/berth/i);
    await expect(
      uploadBerthPdf({
        berthId: berthA.id,
        portId: portB.id,
        buffer: fakePdf(),
        fileName: 'B-cross.pdf',
        uploadedBy: 'test',
      }),
    ).rejects.toThrow(/berth/i);
    await expect(
      reconcilePdfWithBerth(
        berthA.id,
        { engine: 'ocr', fields: {}, meanConfidence: 1, warnings: [] },
        portB.id,
      ),
    ).rejects.toThrow(/berth/i);
  });
});