Initial commit: Port Nimara CRM (Layers 0-4)
Full CRM rebuild with Next.js 15, TypeScript, Tailwind, Drizzle ORM, PostgreSQL, Redis, BullMQ, MinIO, and Socket.io. Includes 461 source files covering clients, berths, interests/pipeline, documents/EOI, expenses/invoices, email, notifications, dashboard, admin, and client portal. CI/CD via Gitea Actions with Docker builds. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
320
tests/integration/crud-audit.test.ts
Normal file
320
tests/integration/crud-audit.test.ts
Normal file
@@ -0,0 +1,320 @@
|
||||
/**
|
||||
* CRUD audit log integration tests.
|
||||
*
|
||||
* For each entity type (clients, interests, berths):
|
||||
* - Create → verify audit log entry with action='create'
|
||||
* - Update → verify audit log with action='update' and old/new values
|
||||
* - Archive → verify audit log with action='archive'
|
||||
* - Restore → verify audit log with action='restore'
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
|
||||
import { makeAuditMeta, makeCreateClientInput, makeCreateInterestInput } from '../helpers/factories';
|
||||
|
||||
// Connection string for the integration-test database; falls back to the
// local docker-compose test instance when TEST_DATABASE_URL is not set.
const TEST_DB_URL =
  process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';

// Flipped to true by the connectivity probe in beforeAll; itDb() consults it
// so every test becomes a silent no-op when the database is unreachable.
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn('[crud-audit] Test database not available — skipping integration tests');
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPort(): Promise<string> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const portId = crypto.randomUUID();
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES (${portId}, 'Audit Test Port', ${'audit-' + portId.slice(0, 8)}, 'AU', 'AUD', 'UTC')
|
||||
`;
|
||||
await sql.end();
|
||||
return portId;
|
||||
}
|
||||
|
||||
async function cleanupPort(portId: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ${portId}`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
async function getAuditEntries(
|
||||
portId: string,
|
||||
entityId: string,
|
||||
action?: string,
|
||||
): Promise<Array<Record<string, unknown>>> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
let rows: Array<Record<string, unknown>>;
|
||||
|
||||
if (action) {
|
||||
rows = await sql<Array<Record<string, unknown>>>`
|
||||
SELECT * FROM audit_logs
|
||||
WHERE port_id = ${portId}
|
||||
AND entity_id = ${entityId}
|
||||
AND action = ${action}
|
||||
ORDER BY created_at ASC
|
||||
`;
|
||||
} else {
|
||||
rows = await sql<Array<Record<string, unknown>>>`
|
||||
SELECT * FROM audit_logs
|
||||
WHERE port_id = ${portId}
|
||||
AND entity_id = ${entityId}
|
||||
ORDER BY created_at ASC
|
||||
`;
|
||||
}
|
||||
|
||||
await sql.end();
|
||||
return rows;
|
||||
}
|
||||
|
||||
// ─── Client Audit Tests ───────────────────────────────────────────────────────
|
||||
|
||||
// Verifies that each client CRUD operation leaves a matching audit_logs row.
describe('CRUD Audit — Clients', () => {
  let portId: string;

  // NOTE(review): vitest hoists vi.mock() calls to the top of the module, so
  // these mocks apply file-wide rather than to this describe only — confirm
  // that is intended (the same mocks are repeated in the other suites).
  vi.mock('@/lib/socket/server', () => ({ emitToRoom: vi.fn() }));
  vi.mock('@/lib/queue', () => ({
    getQueue: () => ({ add: vi.fn().mockResolvedValue(undefined) }),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    portId = await seedPort();
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPort(portId);
  });

  itDb('create generates an audit log entry with action=create', async () => {
    const { createClient } = await import('@/lib/services/clients.service');
    const meta = makeAuditMeta({ portId });

    const client = await createClient(portId, makeCreateClientInput({ fullName: 'Audit Create Client' }), meta);

    // Brief pause before reading audit_logs — presumably the audit write is
    // asynchronous; confirm against the service implementation.
    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, client.id, 'create');
    expect(logs.length).toBeGreaterThanOrEqual(1);

    const log = logs[0]!;
    expect(log.entity_type).toBe('client');
    expect(log.action).toBe('create');
    const newVal = log.new_value as Record<string, unknown>;
    expect(newVal.fullName).toBe('Audit Create Client');
  });

  itDb('update generates an audit log entry with action=update', async () => {
    const { createClient, updateClient } = await import('@/lib/services/clients.service');
    const meta = makeAuditMeta({ portId });

    const client = await createClient(portId, makeCreateClientInput({ fullName: 'Before Update' }), meta);

    await updateClient(client.id, portId, { fullName: 'After Update' }, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, client.id, 'update');
    expect(logs.length).toBeGreaterThanOrEqual(1);

    // Entries are ordered created_at ASC, so the last one is the most recent.
    const updateLog = logs[logs.length - 1]!;
    expect(updateLog.action).toBe('update');
    const newVal = updateLog.new_value as Record<string, unknown>;
    expect(newVal.fullName).toBe('After Update');
  });

  itDb('archive generates an audit log entry with action=archive', async () => {
    const { createClient, archiveClient } = await import('@/lib/services/clients.service');
    const meta = makeAuditMeta({ portId });

    const client = await createClient(portId, makeCreateClientInput({ fullName: 'Audit Archive Client' }), meta);

    await archiveClient(client.id, portId, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, client.id, 'archive');
    expect(logs.length).toBeGreaterThanOrEqual(1);
    expect(logs[0]!.action).toBe('archive');
  });

  itDb('restore generates an audit log entry with action=restore', async () => {
    const { createClient, archiveClient, restoreClient } = await import('@/lib/services/clients.service');
    const meta = makeAuditMeta({ portId });

    const client = await createClient(portId, makeCreateClientInput({ fullName: 'Audit Restore Client' }), meta);

    // Restore only makes sense after an archive, so do both in sequence.
    await archiveClient(client.id, portId, meta);
    await restoreClient(client.id, portId, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, client.id, 'restore');
    expect(logs.length).toBeGreaterThanOrEqual(1);
    expect(logs[0]!.action).toBe('restore');
  });
});
|
||||
|
||||
// ─── Interest Audit Tests ─────────────────────────────────────────────────────
|
||||
|
||||
// Same CRUD→audit assertions as the client suite, but for interests; all
// interests hang off one client seeded in beforeAll.
describe('CRUD Audit — Interests', () => {
  let portId: string;
  let clientId: string;

  // NOTE(review): vi.mock() is hoisted to module top by vitest — these repeat
  // the mocks already declared in the Clients suite.
  vi.mock('@/lib/socket/server', () => ({ emitToRoom: vi.fn() }));
  vi.mock('@/lib/queue', () => ({
    getQueue: () => ({ add: vi.fn().mockResolvedValue(undefined) }),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    portId = await seedPort();

    // One shared parent client for all interests in this suite.
    const { createClient } = await import('@/lib/services/clients.service');
    const client = await createClient(portId, makeCreateClientInput({ fullName: 'Interest Audit Client' }), makeAuditMeta({ portId }));
    clientId = client.id;
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPort(portId);
  });

  itDb('create generates audit log with action=create', async () => {
    const { createInterest } = await import('@/lib/services/interests.service');
    const meta = makeAuditMeta({ portId });

    const interest = await createInterest(portId, makeCreateInterestInput({ clientId }), meta);

    // Brief pause — audit write is presumably asynchronous; confirm.
    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, interest.id, 'create');
    expect(logs.length).toBeGreaterThanOrEqual(1);

    const log = logs[0]!;
    expect(log.entity_type).toBe('interest');
    const newVal = log.new_value as Record<string, unknown>;
    expect(newVal.pipelineStage).toBe('open');
  });

  itDb('update generates audit log with action=update', async () => {
    const { createInterest, updateInterest } = await import('@/lib/services/interests.service');
    const meta = makeAuditMeta({ portId });

    const interest = await createInterest(portId, { ...makeCreateInterestInput({ clientId }), notes: 'initial' }, meta);

    await updateInterest(interest.id, portId, { notes: 'updated notes' }, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, interest.id, 'update');
    expect(logs.length).toBeGreaterThanOrEqual(1);
  });

  itDb('archive generates audit log with action=archive', async () => {
    const { createInterest, archiveInterest } = await import('@/lib/services/interests.service');
    const meta = makeAuditMeta({ portId });

    const interest = await createInterest(portId, makeCreateInterestInput({ clientId }), meta);

    await archiveInterest(interest.id, portId, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, interest.id, 'archive');
    expect(logs.length).toBeGreaterThanOrEqual(1);
    expect(logs[0]!.action).toBe('archive');
  });

  itDb('restore generates audit log with action=restore', async () => {
    const { createInterest, archiveInterest, restoreInterest } = await import('@/lib/services/interests.service');
    const meta = makeAuditMeta({ portId });

    const interest = await createInterest(portId, makeCreateInterestInput({ clientId }), meta);

    // Archive first so that restore is a valid transition.
    await archiveInterest(interest.id, portId, meta);
    await restoreInterest(interest.id, portId, meta);

    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, interest.id, 'restore');
    expect(logs.length).toBeGreaterThanOrEqual(1);
  });
});
|
||||
|
||||
// ─── Berth Audit Tests ────────────────────────────────────────────────────────
|
||||
|
||||
// Berth audit coverage: updates are audited, and port scoping is enforced
// (a berth cannot be updated through the wrong port id).
describe('CRUD Audit — Berths', () => {
  let portId: string;
  let berthId: string;

  // NOTE(review): vi.mock() is hoisted to module top by vitest — duplicated
  // from the other suites in this file.
  vi.mock('@/lib/socket/server', () => ({ emitToRoom: vi.fn() }));
  vi.mock('@/lib/queue', () => ({
    getQueue: () => ({ add: vi.fn().mockResolvedValue(undefined) }),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    portId = await seedPort();

    // Berths are seeded via raw SQL (no create service is exercised here).
    const postgres = (await import('postgres')).default;
    const sql = postgres(TEST_DB_URL, { max: 1 });
    berthId = crypto.randomUUID();
    await sql`
      INSERT INTO berths (id, port_id, mooring_number, status)
      VALUES (${berthId}, ${portId}, 'AUDIT-B1', 'available')
    `;
    await sql.end();
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPort(portId);
  });

  itDb('updateBerth generates audit log with action=update', async () => {
    const { updateBerth } = await import('@/lib/services/berths.service');
    const meta = makeAuditMeta({ portId });

    await updateBerth(berthId, portId, { area: 'North Pier', berthApproved: true }, meta);

    // Brief pause — audit write is presumably asynchronous; confirm.
    await new Promise((r) => setTimeout(r, 100));

    const logs = await getAuditEntries(portId, berthId, 'update');
    expect(logs.length).toBeGreaterThanOrEqual(1);
    expect(logs[0]!.entity_type).toBe('berth');
  });

  itDb('updateBerth on wrong portId throws NotFoundError', async () => {
    const { updateBerth } = await import('@/lib/services/berths.service');
    const { NotFoundError } = await import('@/lib/errors');
    // A random UUID that does not own the berth must be rejected.
    const wrongPortId = crypto.randomUUID();
    const meta = makeAuditMeta({ portId: wrongPortId });

    await expect(
      updateBerth(berthId, wrongPortId, { area: 'Should fail' }, meta),
    ).rejects.toThrow(NotFoundError);
  });
});
|
||||
313
tests/integration/custom-fields.test.ts
Normal file
313
tests/integration/custom-fields.test.ts
Normal file
@@ -0,0 +1,313 @@
|
||||
/**
|
||||
* Custom field integration tests.
|
||||
*
|
||||
* Verifies:
|
||||
* - Create a custom field definition (type: text)
|
||||
* - Attempt to update fieldType → ValidationError thrown
|
||||
* - Update fieldLabel → succeeds
|
||||
* - Set a value for an entity → value stored
|
||||
* - Get values for entity → returns value with definition
|
||||
* - Delete definition → values cascade deleted
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
|
||||
import { makeAuditMeta } from '../helpers/factories';
|
||||
|
||||
// Integration DB connection string; defaults to the local test instance.
const TEST_DB_URL =
  process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';

// Set by the probe in beforeAll; itDb() turns tests into no-ops when false.
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn('[custom-fields] Test database not available — skipping integration tests');
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPort(): Promise<string> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const portId = crypto.randomUUID();
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES (${portId}, 'Custom Fields Test Port', ${'cf-' + portId.slice(0, 8)}, 'AU', 'AUD', 'UTC')
|
||||
`;
|
||||
await sql.end();
|
||||
return portId;
|
||||
}
|
||||
|
||||
async function cleanupPort(portId: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ${portId}`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
// ─── Definitions Tests ────────────────────────────────────────────────────────
|
||||
|
||||
// Definition lifecycle: create, uniqueness conflict, immutable fieldType,
// and label updates.
describe('Custom Fields — Definitions', () => {
  let portId: string;
  const userId = crypto.randomUUID();

  // Audit logging is mocked out — this suite only exercises the custom-field
  // service itself. NOTE(review): vi.mock() is hoisted file-wide by vitest.
  vi.mock('@/lib/audit', () => ({
    createAuditLog: vi.fn().mockResolvedValue(undefined),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    portId = await seedPort();
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPort(portId);
  });

  itDb('creates a custom field definition', async () => {
    const { createDefinition } = await import('@/lib/services/custom-fields.service');
    const meta = makeAuditMeta({ portId, userId });

    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'client',
        fieldName: 'vessel_registration',
        fieldLabel: 'Vessel Registration',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    expect(def.id).toBeDefined();
    expect(def.portId).toBe(portId);
    expect(def.fieldName).toBe('vessel_registration');
    expect(def.fieldType).toBe('text');
  });

  itDb('creating duplicate fieldName for same entityType throws ConflictError', async () => {
    const { createDefinition } = await import('@/lib/services/custom-fields.service');
    const { ConflictError } = await import('@/lib/errors');
    const meta = makeAuditMeta({ portId, userId });

    // First definition succeeds…
    await createDefinition(
      portId,
      userId,
      {
        entityType: 'interest',
        fieldName: 'preferred_berth_area',
        fieldLabel: 'Preferred Berth Area',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    // …the second with the same (entityType, fieldName) must conflict.
    await expect(
      createDefinition(
        portId,
        userId,
        {
          entityType: 'interest',
          fieldName: 'preferred_berth_area',
          fieldLabel: 'Duplicate Label',
          fieldType: 'text',
          isRequired: false,
          sortOrder: 1,
        },
        meta,
      ),
    ).rejects.toThrow(ConflictError);
  });

  itDb('updateDefinition with fieldType property throws ValidationError', async () => {
    const { createDefinition, updateDefinition } = await import(
      '@/lib/services/custom-fields.service'
    );
    const { ValidationError } = await import('@/lib/errors');
    const meta = makeAuditMeta({ portId, userId });

    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'client',
        fieldName: 'immutable_type_field',
        fieldLabel: 'Immutable',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    // Cast to any to bypass TS — the service should guard against this at runtime
    await expect(
      updateDefinition(portId, def.id, userId, { fieldType: 'number' } as any, meta),
    ).rejects.toThrow(ValidationError);
  });

  itDb('updateDefinition can change fieldLabel without error', async () => {
    const { createDefinition, updateDefinition } = await import(
      '@/lib/services/custom-fields.service'
    );
    const meta = makeAuditMeta({ portId, userId });

    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'berth',
        fieldName: 'special_notes',
        fieldLabel: 'Notes',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    // Label is mutable; type must be untouched by the update.
    const updated = await updateDefinition(portId, def.id, userId, { fieldLabel: 'Special Notes' }, meta);
    expect(updated.fieldLabel).toBe('Special Notes');
    expect(updated.fieldType).toBe('text');
  });
});
|
||||
|
||||
// ─── Values Tests ─────────────────────────────────────────────────────────────
|
||||
|
||||
// Value storage: set/get round-trip, type validation, and cascade delete of
// values when their definition is removed.
describe('Custom Fields — Values', () => {
  let portId: string;
  const userId = crypto.randomUUID();
  // Shared synthetic entity id used by the first two tests.
  const entityId = crypto.randomUUID();

  // NOTE(review): vi.mock() is hoisted file-wide by vitest.
  vi.mock('@/lib/audit', () => ({
    createAuditLog: vi.fn().mockResolvedValue(undefined),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    portId = await seedPort();
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPort(portId);
  });

  itDb('setValues stores a text value and getValues returns it with definition', async () => {
    const { createDefinition, setValues, getValues } = await import(
      '@/lib/services/custom-fields.service'
    );
    const meta = makeAuditMeta({ portId, userId });

    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'client',
        fieldName: 'marina_membership',
        fieldLabel: 'Marina Membership',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    await setValues(entityId, portId, userId, [{ fieldId: def.id, value: 'GOLD-2024' }], meta);

    const result = await getValues(entityId, portId);
    const entry = result.find((r) => r.definition.id === def.id);

    expect(entry).toBeDefined();
    expect(entry!.value).not.toBeNull();
    // value is stored as jsonb — the raw stored value
    expect((entry!.value as Record<string, unknown>).value).toBe('GOLD-2024');
  });

  itDb('setValues with wrong type throws ValidationError', async () => {
    const { createDefinition, setValues } = await import('@/lib/services/custom-fields.service');
    const { ValidationError } = await import('@/lib/errors');
    const meta = makeAuditMeta({ portId, userId });

    // A 'number' field must reject a non-numeric string value.
    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'client',
        fieldName: 'year_joined',
        fieldLabel: 'Year Joined',
        fieldType: 'number',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    await expect(
      setValues(entityId, portId, userId, [{ fieldId: def.id, value: 'not-a-number' }], meta),
    ).rejects.toThrow(ValidationError);
  });

  itDb('deleteDefinition cascades to remove associated values', async () => {
    const { createDefinition, setValues, deleteDefinition, getValues } = await import(
      '@/lib/services/custom-fields.service'
    );
    const meta = makeAuditMeta({ portId, userId });

    // Fresh entity id so this test is isolated from the shared one above.
    const cascadeEntityId = crypto.randomUUID();

    const def = await createDefinition(
      portId,
      userId,
      {
        entityType: 'client',
        fieldName: 'cascade_test_field',
        fieldLabel: 'Cascade Test',
        fieldType: 'text',
        isRequired: false,
        sortOrder: 0,
      },
      meta,
    );

    await setValues(
      cascadeEntityId,
      portId,
      userId,
      [{ fieldId: def.id, value: 'will-be-deleted' }],
      meta,
    );

    // Verify the value exists
    const before = await getValues(cascadeEntityId, portId);
    expect(before.find((r) => r.definition.id === def.id)?.value).not.toBeNull();

    const result = await deleteDefinition(portId, def.id, userId, meta);
    expect(result.deletedValueCount).toBeGreaterThanOrEqual(1);

    // Definition should no longer appear in getValues results
    const after = await getValues(cascadeEntityId, portId);
    expect(after.find((r) => r.definition.id === def.id)).toBeUndefined();
  });
});
|
||||
249
tests/integration/notification-lifecycle.test.ts
Normal file
249
tests/integration/notification-lifecycle.test.ts
Normal file
@@ -0,0 +1,249 @@
|
||||
/**
|
||||
* Notification lifecycle integration tests.
|
||||
*
|
||||
* Verifies:
|
||||
* - createNotification() inserts a row and returns it
|
||||
* - Calling again with same dedupeKey within cooldown returns null (suppressed)
|
||||
* - Calling after cooldown expiry creates a new notification
|
||||
* - system_alert type bypasses preference check
|
||||
* - markRead → isRead becomes true
|
||||
* - markAllRead → all notifications for user become read
|
||||
* - getUnreadCount returns correct count
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
|
||||
// Integration DB connection string; defaults to the local test instance.
const TEST_DB_URL =
  process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';

// Set by the probe in beforeAll; itDb() turns tests into no-ops when false.
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn(
|
||||
'[notification-lifecycle] Test database not available — skipping integration tests',
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPortAndUser(): Promise<{ portId: string; userId: string }> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
|
||||
const portId = crypto.randomUUID();
|
||||
const userId = crypto.randomUUID();
|
||||
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES (${portId}, 'Notif Test Port', ${'notif-' + portId.slice(0, 8)}, 'AU', 'AUD', 'UTC')
|
||||
`;
|
||||
|
||||
await sql`
|
||||
INSERT INTO "user" (id, name, email, email_verified, created_at, updated_at)
|
||||
VALUES (${userId}, 'Notif User', ${'notif-' + userId.slice(0, 8) + '@test.local'}, true, NOW(), NOW())
|
||||
`;
|
||||
|
||||
await sql`
|
||||
INSERT INTO user_profiles (id, user_id, display_name, is_super_admin, is_active, preferences)
|
||||
VALUES (${crypto.randomUUID()}, ${userId}, 'Notif User', false, true, '{}')
|
||||
`;
|
||||
|
||||
await sql.end();
|
||||
return { portId, userId };
|
||||
}
|
||||
|
||||
async function cleanupPortAndUser(portId: string, userId: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ${portId}`;
|
||||
await sql`DELETE FROM user_profiles WHERE user_id = ${userId}`;
|
||||
await sql`DELETE FROM "user" WHERE id = ${userId}`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
// End-to-end notification behavior: insert, dedupe/cooldown suppression,
// preference bypass for system alerts, and read-state transitions.
describe('Notification Lifecycle', () => {
  let portId: string;
  let userId: string;

  // Mock socket and queue — these are tested in isolation here
  // NOTE(review): vi.mock() is hoisted to module top by vitest.
  vi.mock('@/lib/socket/server', () => ({ emitToRoom: vi.fn() }));
  vi.mock('@/lib/queue', () => ({
    getQueue: () => ({ add: vi.fn().mockResolvedValue(undefined) }),
  }));

  beforeAll(async () => {
    if (!dbAvailable) return;
    ({ portId, userId } = await seedPortAndUser());
  });

  afterAll(async () => {
    if (!dbAvailable) return;
    await cleanupPortAndUser(portId, userId);
  });

  itDb('createNotification inserts a row and returns it', async () => {
    const { createNotification } = await import('@/lib/services/notifications.service');

    const notif = await createNotification({
      portId,
      userId,
      type: 'interest_stage_changed',
      title: 'Test notification',
      description: 'A test',
      link: '/interests/123',
      entityType: 'interest',
      entityId: 'test-entity-1',
    });

    // A non-null return means the notification was actually inserted.
    expect(notif).not.toBeNull();
    expect(notif!.id).toBeDefined();
    expect(notif!.portId).toBe(portId);
    expect(notif!.userId).toBe(userId);
    expect(notif!.isRead).toBe(false);
    expect(notif!.title).toBe('Test notification');
  });

  itDb('duplicate dedupeKey within cooldown returns null (suppressed)', async () => {
    const { createNotification } = await import('@/lib/services/notifications.service');

    // Random component keeps this key unique across test runs.
    const dedupeKey = `interest:dedup-test-${crypto.randomUUID()}:stage:details_sent`;
    const params = {
      portId,
      userId,
      type: 'interest_stage_changed',
      title: 'Dedup test',
      dedupeKey,
      cooldownMs: 300_000,
    };

    const first = await createNotification(params);
    expect(first).not.toBeNull();

    // Second call inside the 5-minute cooldown must be suppressed.
    const second = await createNotification(params);
    expect(second).toBeNull();
  });

  itDb('dedupeKey with expired cooldown creates a new notification', async () => {
    const { createNotification } = await import('@/lib/services/notifications.service');

    const dedupeKey = `interest:expired-cooldown-${crypto.randomUUID()}:stage:open`;
    const params = {
      portId,
      userId,
      type: 'interest_stage_changed',
      title: 'Expired cooldown test',
      dedupeKey,
      // Zero cooldown: the second call is immediately past the window.
      cooldownMs: 0,
    };

    const first = await createNotification(params);
    expect(first).not.toBeNull();

    const second = await createNotification(params);
    expect(second).not.toBeNull();
    expect(second!.id).not.toBe(first!.id);
  });

  itDb('system_alert type bypasses preference check and is always inserted', async () => {
    const { createNotification } = await import('@/lib/services/notifications.service');
    const postgres = (await import('postgres')).default;
    const sql = postgres(TEST_DB_URL, { max: 1 });

    // Insert a preference that would block a non-system notification
    await sql`
      INSERT INTO user_notification_preferences
        (id, user_id, port_id, notification_type, in_app, email)
      VALUES (${crypto.randomUUID()}, ${userId}, ${portId}, 'blocked_type', false, false)
      ON CONFLICT DO NOTHING
    `;
    await sql.end();

    // system_alert MUST still be inserted regardless of any preference
    const notif = await createNotification({
      portId,
      userId,
      type: 'system_alert',
      title: 'System alert test',
    });

    expect(notif).not.toBeNull();
    expect(notif!.type).toBe('system_alert');
  });

  itDb('markRead sets isRead to true', async () => {
    const { createNotification, markRead } = await import('@/lib/services/notifications.service');
    const postgres = (await import('postgres')).default;

    const notif = await createNotification({
      portId,
      userId,
      type: 'system_alert',
      title: 'Mark-read test',
    });

    expect(notif).not.toBeNull();
    expect(notif!.isRead).toBe(false);

    await markRead(notif!.id, userId);

    // Check the persisted flag directly in the table, not the service's view.
    const sql = postgres(TEST_DB_URL, { max: 1 });
    const rows = await sql<Array<{ is_read: boolean }>>`
      SELECT is_read FROM notifications WHERE id = ${notif!.id}
    `;
    await sql.end();

    expect(rows[0]?.is_read).toBe(true);
  });

  itDb('markAllRead sets all unread notifications for the user to read', async () => {
    const { createNotification, markAllRead, getUnreadCount } = await import(
      '@/lib/services/notifications.service'
    );

    await createNotification({ portId, userId, type: 'system_alert', title: 'Unread 1' });
    await createNotification({ portId, userId, type: 'system_alert', title: 'Unread 2' });

    const before = await getUnreadCount(userId, portId);
    expect(before.count).toBeGreaterThan(0);

    await markAllRead(userId, portId);

    const after = await getUnreadCount(userId, portId);
    expect(after.count).toBe(0);
  });

  itDb('getUnreadCount returns accurate count', async () => {
    const { createNotification, getUnreadCount, markAllRead } = await import(
      '@/lib/services/notifications.service'
    );

    // Reset to a known baseline of zero unread before counting.
    await markAllRead(userId, portId);

    const baseline = await getUnreadCount(userId, portId);
    expect(baseline.count).toBe(0);

    await createNotification({ portId, userId, type: 'system_alert', title: 'Count test 1' });
    await createNotification({ portId, userId, type: 'system_alert', title: 'Count test 2' });

    const after = await getUnreadCount(userId, portId);
    expect(after.count).toBe(2);
  });
});
|
||||
252
tests/integration/permission-matrix.test.ts
Normal file
252
tests/integration/permission-matrix.test.ts
Normal file
@@ -0,0 +1,252 @@
|
||||
/**
|
||||
* Permission matrix tests.
|
||||
*
|
||||
* Tests the withPermission() guard logic directly using mock AuthContext values.
|
||||
* These tests do NOT require a database and run always.
|
||||
*
|
||||
* Verifies:
|
||||
* - super_admin bypasses all permission checks
|
||||
* - viewer can read but not write
|
||||
* - sales_agent can manage own clients/interests but not admin features
|
||||
* - sales_manager has elevated but non-admin access
|
||||
* - director has near-full access
|
||||
* - deepMerge correctly applies port-level overrides
|
||||
*/
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
|
||||
import { withPermission, deepMerge, type AuthContext } from '@/lib/api/helpers';
|
||||
import {
|
||||
makeFullPermissions,
|
||||
makeViewerPermissions,
|
||||
makeSalesAgentPermissions,
|
||||
makeSalesManagerPermissions,
|
||||
makeDirectorPermissions,
|
||||
} from '../helpers/factories';
|
||||
import type { RolePermissions } from '@/lib/db/schema/users';
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function makeCtx(overrides: Partial<AuthContext>): AuthContext {
|
||||
return {
|
||||
userId: 'user-1',
|
||||
portId: 'port-1',
|
||||
portSlug: 'test-port',
|
||||
isSuperAdmin: false,
|
||||
permissions: makeViewerPermissions(),
|
||||
user: { email: 'test@example.com', name: 'Test User' },
|
||||
ipAddress: '127.0.0.1',
|
||||
userAgent: 'vitest/1.0',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/** Minimal NextRequest for testing permission guards. */
|
||||
function makeRequest(): NextRequest {
|
||||
return new NextRequest('http://localhost/api/test', { method: 'GET' });
|
||||
}
|
||||
|
||||
/** Returns a handler that resolves to 200 OK. */
|
||||
function okHandler() {
|
||||
return vi.fn().mockResolvedValue(NextResponse.json({ ok: true }, { status: 200 }));
|
||||
}
|
||||
|
||||
/**
|
||||
* Invokes the withPermission guard and returns the response status.
|
||||
*/
|
||||
async function checkPermission(
|
||||
ctx: AuthContext,
|
||||
resource: keyof RolePermissions,
|
||||
action: string,
|
||||
): Promise<number> {
|
||||
const handler = okHandler();
|
||||
const guarded = withPermission(resource, action, handler);
|
||||
const response = await guarded(makeRequest(), ctx, {});
|
||||
return response.status;
|
||||
}
|
||||
|
||||
// ─── super_admin ──────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Permission Matrix — super_admin', () => {
|
||||
const ctx = makeCtx({ isSuperAdmin: true, permissions: null });
|
||||
|
||||
it('can access clients.create', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'create')).toBe(200);
|
||||
});
|
||||
|
||||
it('can access admin.manage_users', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_users')).toBe(200);
|
||||
});
|
||||
|
||||
it('can access admin.system_backup', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'system_backup')).toBe(200);
|
||||
});
|
||||
|
||||
it('can access invoices.delete', async () => {
|
||||
expect(await checkPermission(ctx, 'invoices', 'delete')).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── viewer ───────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Permission Matrix — viewer', () => {
|
||||
const ctx = makeCtx({ permissions: makeViewerPermissions() });
|
||||
|
||||
it('can view clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'view')).toBe(200);
|
||||
});
|
||||
|
||||
it('cannot create clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'create')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot update clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'edit')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot delete clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'delete')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot change interest stage', async () => {
|
||||
expect(await checkPermission(ctx, 'interests', 'change_stage')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot manage admin settings', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_settings')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot manage webhooks', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_webhooks')).toBe(403);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── sales_agent ─────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Permission Matrix — sales_agent', () => {
|
||||
const ctx = makeCtx({ permissions: makeSalesAgentPermissions() });
|
||||
|
||||
it('can view clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'view')).toBe(200);
|
||||
});
|
||||
|
||||
it('can create clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'create')).toBe(200);
|
||||
});
|
||||
|
||||
it('can edit clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'edit')).toBe(200);
|
||||
});
|
||||
|
||||
it('cannot delete clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'delete')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot merge clients', async () => {
|
||||
expect(await checkPermission(ctx, 'clients', 'merge')).toBe(403);
|
||||
});
|
||||
|
||||
it('can create interests', async () => {
|
||||
expect(await checkPermission(ctx, 'interests', 'create')).toBe(200);
|
||||
});
|
||||
|
||||
it('can change interest stage', async () => {
|
||||
expect(await checkPermission(ctx, 'interests', 'change_stage')).toBe(200);
|
||||
});
|
||||
|
||||
it('cannot manage admin users', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_users')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot manage webhooks', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_webhooks')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot configure email accounts', async () => {
|
||||
expect(await checkPermission(ctx, 'email', 'configure_account')).toBe(403);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── sales_manager ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Permission Matrix — sales_manager', () => {
|
||||
const ctx = makeCtx({ permissions: makeSalesManagerPermissions() });
|
||||
|
||||
it('can do everything with clients', async () => {
|
||||
for (const action of ['view', 'create', 'edit', 'delete', 'merge', 'export']) {
|
||||
expect(await checkPermission(ctx, 'clients', action)).toBe(200);
|
||||
}
|
||||
});
|
||||
|
||||
it('can view audit log', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'view_audit_log')).toBe(200);
|
||||
});
|
||||
|
||||
it('cannot manage webhooks', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_webhooks')).toBe(403);
|
||||
});
|
||||
|
||||
it('cannot manage system users', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_users')).toBe(403);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── director ─────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Permission Matrix — director', () => {
|
||||
const ctx = makeCtx({ permissions: makeDirectorPermissions() });
|
||||
|
||||
it('can manage webhooks', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_webhooks')).toBe(200);
|
||||
});
|
||||
|
||||
it('can manage users', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'manage_users')).toBe(200);
|
||||
});
|
||||
|
||||
it('cannot perform system_backup', async () => {
|
||||
expect(await checkPermission(ctx, 'admin', 'system_backup')).toBe(403);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── deepMerge ────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('deepMerge — permission override merging', () => {
|
||||
it('overrides a single leaf value', () => {
|
||||
const base = { clients: { view: true, create: false } };
|
||||
const override = { clients: { create: true } };
|
||||
const result = deepMerge(base, override) as typeof base;
|
||||
expect(result.clients.create).toBe(true);
|
||||
expect(result.clients.view).toBe(true);
|
||||
});
|
||||
|
||||
it('does not mutate the base object', () => {
|
||||
const base = { a: { b: false } };
|
||||
const override = { a: { b: true } };
|
||||
deepMerge(base, override);
|
||||
expect(base.a.b).toBe(false);
|
||||
});
|
||||
|
||||
it('merges nested objects without removing unrelated keys', () => {
|
||||
const base = { admin: { manage_users: false, view_audit_log: true } };
|
||||
const override = { admin: { manage_users: true } };
|
||||
const result = deepMerge(base, override) as typeof base;
|
||||
expect(result.admin.manage_users).toBe(true);
|
||||
expect(result.admin.view_audit_log).toBe(true);
|
||||
});
|
||||
|
||||
it('override with full-permission block gives full access', () => {
|
||||
const base = makeViewerPermissions() as Record<string, unknown>;
|
||||
const override = { clients: { create: true, edit: true, delete: true, merge: true, export: true } };
|
||||
const result = deepMerge(base, override) as RolePermissions;
|
||||
expect(result.clients.create).toBe(true);
|
||||
expect(result.clients.view).toBe(true); // preserved from base
|
||||
});
|
||||
|
||||
it('handles non-object values (arrays stay as-is)', () => {
|
||||
const base = { events: ['a', 'b'] };
|
||||
const override = { events: ['c'] };
|
||||
const result = deepMerge(base, override) as typeof base;
|
||||
expect(result.events).toEqual(['c']);
|
||||
});
|
||||
});
|
||||
206
tests/integration/pipeline-transitions.test.ts
Normal file
206
tests/integration/pipeline-transitions.test.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
/**
|
||||
* Pipeline transition integration tests.
|
||||
*
|
||||
* Verifies:
|
||||
* - An interest can advance through all 8 pipeline stages
|
||||
* - Each transition is logged in audit_logs with action='update'
|
||||
* - Backward transitions are permitted
|
||||
* - Milestone auto-population (BR-133)
|
||||
* - Socket event name is 'interest:stageChanged'
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
|
||||
import { PIPELINE_STAGES } from '@/lib/constants';
|
||||
import { makeAuditMeta, makeCreateClientInput, makeCreateInterestInput } from '../helpers/factories';
|
||||
|
||||
const TEST_DB_URL =
|
||||
process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';
|
||||
|
||||
// ─── DB Availability Check ────────────────────────────────────────────────────
|
||||
|
||||
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn('[pipeline-transitions] Test database not available — skipping integration tests');
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPort(): Promise<string> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const portId = crypto.randomUUID();
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES (${portId}, 'Pipeline Test Port', ${'pipeline-' + portId.slice(0, 8)}, 'AU', 'AUD', 'UTC')
|
||||
`;
|
||||
await sql.end();
|
||||
return portId;
|
||||
}
|
||||
|
||||
async function cleanupPort(portId: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ${portId}`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
async function getLatestAuditLog(portId: string, entityId: string): Promise<Record<string, unknown> | null> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const rows = await sql<Record<string, unknown>[]>`
|
||||
SELECT * FROM audit_logs
|
||||
WHERE port_id = ${portId} AND entity_id = ${entityId}
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
`;
|
||||
await sql.end();
|
||||
return rows[0] ?? null;
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Pipeline Transitions', () => {
|
||||
let portId: string;
|
||||
let interestId: string;
|
||||
|
||||
// Mock external side-effects so tests are self-contained
|
||||
vi.mock('@/lib/socket/server', () => ({ emitToRoom: vi.fn() }));
|
||||
vi.mock('@/lib/queue', () => ({
|
||||
getQueue: () => ({ add: vi.fn().mockResolvedValue(undefined) }),
|
||||
}));
|
||||
|
||||
beforeAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
|
||||
portId = await seedPort();
|
||||
|
||||
const { createClient } = await import('@/lib/services/clients.service');
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
const client = await createClient(portId, makeCreateClientInput({ fullName: 'Pipeline Test Client' }), meta);
|
||||
|
||||
const { createInterest } = await import('@/lib/services/interests.service');
|
||||
const interest = await createInterest(portId, makeCreateInterestInput({ clientId: client.id }), meta);
|
||||
interestId = interest.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
await cleanupPort(portId);
|
||||
});
|
||||
|
||||
itDb('advances through all 8 pipeline stages sequentially', async () => {
|
||||
const { changeInterestStage, getInterestById } = await import(
|
||||
'@/lib/services/interests.service'
|
||||
);
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
for (const stage of PIPELINE_STAGES) {
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: stage }, meta);
|
||||
|
||||
const updated = await getInterestById(interestId, portId);
|
||||
expect(updated.pipelineStage).toBe(stage);
|
||||
}
|
||||
});
|
||||
|
||||
itDb('each stage transition creates an audit log entry with action=update', async () => {
|
||||
const { changeInterestStage } = await import('@/lib/services/interests.service');
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'open' }, meta);
|
||||
|
||||
// Allow async audit log to flush
|
||||
await new Promise((r) => setTimeout(r, 100));
|
||||
|
||||
const log = await getLatestAuditLog(portId, interestId);
|
||||
expect(log).not.toBeNull();
|
||||
expect(log!.action).toBe('update');
|
||||
expect(log!.entity_type).toBe('interest');
|
||||
|
||||
const newValue = log!.new_value as Record<string, unknown>;
|
||||
expect(newValue.pipelineStage).toBe('open');
|
||||
});
|
||||
|
||||
itDb('backward transition: completed → open is permitted', async () => {
|
||||
const { changeInterestStage, getInterestById } = await import(
|
||||
'@/lib/services/interests.service'
|
||||
);
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'completed' }, meta);
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'open' }, meta);
|
||||
|
||||
const updated = await getInterestById(interestId, portId);
|
||||
expect(updated.pipelineStage).toBe('open');
|
||||
});
|
||||
|
||||
itDb('BR-133: advancing to signed_eoi_nda auto-populates dateEoiSigned', async () => {
|
||||
const { changeInterestStage, getInterestById } = await import(
|
||||
'@/lib/services/interests.service'
|
||||
);
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'signed_eoi_nda' }, meta);
|
||||
|
||||
const updated = await getInterestById(interestId, portId);
|
||||
expect(updated.dateEoiSigned).not.toBeNull();
|
||||
});
|
||||
|
||||
itDb('BR-133: advancing to contract auto-populates dateContractSigned', async () => {
|
||||
const { changeInterestStage, getInterestById } = await import(
|
||||
'@/lib/services/interests.service'
|
||||
);
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'contract' }, meta);
|
||||
|
||||
const updated = await getInterestById(interestId, portId);
|
||||
expect(updated.dateContractSigned).not.toBeNull();
|
||||
});
|
||||
|
||||
itDb('BR-133: advancing to deposit_10pct auto-populates dateDepositReceived', async () => {
|
||||
const { changeInterestStage, getInterestById } = await import(
|
||||
'@/lib/services/interests.service'
|
||||
);
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'deposit_10pct' }, meta);
|
||||
|
||||
const updated = await getInterestById(interestId, portId);
|
||||
expect(updated.dateDepositReceived).not.toBeNull();
|
||||
});
|
||||
|
||||
itDb('stage change emits interest:stageChanged socket event', async () => {
|
||||
const { emitToRoom } = await import('@/lib/socket/server');
|
||||
const { changeInterestStage } = await import('@/lib/services/interests.service');
|
||||
const meta = makeAuditMeta({ portId });
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
await changeInterestStage(interestId, portId, { pipelineStage: 'details_sent' }, meta);
|
||||
|
||||
expect(emitToRoom).toHaveBeenCalledWith(
|
||||
`port:${portId}`,
|
||||
'interest:stageChanged',
|
||||
expect.objectContaining({ interestId, newStage: 'details_sent' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
197
tests/integration/port-scoping.test.ts
Normal file
197
tests/integration/port-scoping.test.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
/**
|
||||
* Port-scoping integration tests (SECURITY-CRITICAL).
|
||||
*
|
||||
* Codex Addenda: Two-port testing — every entity must be invisible
|
||||
* when queried under a different portId.
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
|
||||
import { makeAuditMeta, makeCreateClientInput, makeCreateInterestInput } from '../helpers/factories';
|
||||
|
||||
const TEST_DB_URL =
|
||||
process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';
|
||||
|
||||
// ─── DB Availability Check ────────────────────────────────────────────────────
|
||||
|
||||
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn('[port-scoping] Test database not available — skipping integration tests');
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPorts(): Promise<{ portA: string; portB: string }> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
|
||||
const portA = crypto.randomUUID();
|
||||
const portB = crypto.randomUUID();
|
||||
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES
|
||||
(${portA}, 'Port Alpha', ${'alpha-' + portA.slice(0, 8)}, 'AU', 'AUD', 'UTC'),
|
||||
(${portB}, 'Port Beta', ${'beta-' + portB.slice(0, 8)}, 'NZ', 'NZD', 'UTC')
|
||||
`;
|
||||
|
||||
await sql.end();
|
||||
return { portA, portB };
|
||||
}
|
||||
|
||||
async function cleanupPorts(portA: string, portB: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ANY(${[portA, portB]})`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Port Scoping — Clients', () => {
|
||||
let portA: string;
|
||||
let portB: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
({ portA, portB } = await seedPorts());
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
await cleanupPorts(portA, portB);
|
||||
});
|
||||
|
||||
itDb('client created in Port A is invisible to Port B list', async () => {
|
||||
const { createClient, listClients } = await import('@/lib/services/clients.service');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
|
||||
const client = await createClient(portA, makeCreateClientInput({ fullName: 'Alice Scope' }), meta);
|
||||
|
||||
expect(client.portId).toBe(portA);
|
||||
|
||||
const result = await listClients(portB, {
|
||||
page: 1,
|
||||
limit: 50,
|
||||
sort: 'updatedAt',
|
||||
order: 'desc',
|
||||
includeArchived: false,
|
||||
});
|
||||
|
||||
const ids = (result.data as Array<{ id: string }>).map((c) => c.id);
|
||||
expect(ids).not.toContain(client.id);
|
||||
});
|
||||
|
||||
itDb('getClientById throws NotFoundError when portId does not match', async () => {
|
||||
const { createClient, getClientById } = await import('@/lib/services/clients.service');
|
||||
const { NotFoundError } = await import('@/lib/errors');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const client = await createClient(portA, makeCreateClientInput({ fullName: 'Bob Scope' }), meta);
|
||||
|
||||
await expect(getClientById(client.id, portB)).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
|
||||
itDb('updateClient on wrong port throws NotFoundError', async () => {
|
||||
const { createClient, updateClient } = await import('@/lib/services/clients.service');
|
||||
const { NotFoundError } = await import('@/lib/errors');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const client = await createClient(portA, makeCreateClientInput({ fullName: 'Carol Scope' }), meta);
|
||||
|
||||
await expect(
|
||||
updateClient(client.id, portB, { fullName: 'Hacked' }, meta),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
|
||||
itDb('archiveClient on wrong port throws NotFoundError', async () => {
|
||||
const { createClient, archiveClient } = await import('@/lib/services/clients.service');
|
||||
const { NotFoundError } = await import('@/lib/errors');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const client = await createClient(portA, makeCreateClientInput({ fullName: 'Dave Scope' }), meta);
|
||||
|
||||
await expect(archiveClient(client.id, portB, meta)).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Port Scoping — Interests', () => {
|
||||
let portA: string;
|
||||
let portB: string;
|
||||
let clientIdA: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
({ portA, portB } = await seedPorts());
|
||||
|
||||
const { createClient } = await import('@/lib/services/clients.service');
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const client = await createClient(portA, makeCreateClientInput({ fullName: 'Scope Test Client' }), meta);
|
||||
clientIdA = client.id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
await cleanupPorts(portA, portB);
|
||||
});
|
||||
|
||||
itDb('interest created in Port A is invisible to Port B list', async () => {
|
||||
const { createInterest, listInterests } = await import('@/lib/services/interests.service');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const interest = await createInterest(portA, makeCreateInterestInput({ clientId: clientIdA }), meta);
|
||||
|
||||
expect(interest.portId).toBe(portA);
|
||||
|
||||
const result = await listInterests(portB, {
|
||||
page: 1,
|
||||
limit: 50,
|
||||
sort: 'updatedAt',
|
||||
order: 'desc',
|
||||
includeArchived: false,
|
||||
});
|
||||
|
||||
const ids = (result.data as unknown as Array<{ id: string }>).map((i) => i.id);
|
||||
expect(ids).not.toContain(interest.id);
|
||||
});
|
||||
|
||||
itDb('getInterestById throws NotFoundError when portId does not match', async () => {
|
||||
const { createInterest, getInterestById } = await import('@/lib/services/interests.service');
|
||||
const { NotFoundError } = await import('@/lib/errors');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const interest = await createInterest(portA, makeCreateInterestInput({ clientId: clientIdA }), meta);
|
||||
|
||||
await expect(getInterestById(interest.id, portB)).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
|
||||
itDb('changeInterestStage on wrong port throws NotFoundError', async () => {
|
||||
const { createInterest, changeInterestStage } = await import('@/lib/services/interests.service');
|
||||
const { NotFoundError } = await import('@/lib/errors');
|
||||
|
||||
const meta = makeAuditMeta({ portId: portA });
|
||||
const interest = await createInterest(portA, makeCreateInterestInput({ clientId: clientIdA }), meta);
|
||||
|
||||
await expect(
|
||||
changeInterestStage(interest.id, portB, { pipelineStage: 'details_sent' }, meta),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
});
|
||||
250
tests/integration/webhook-delivery.test.ts
Normal file
250
tests/integration/webhook-delivery.test.ts
Normal file
@@ -0,0 +1,250 @@
|
||||
/**
|
||||
* Webhook delivery integration tests.
|
||||
*
|
||||
* Verifies:
|
||||
* - Create a webhook subscribed to ['client.created']
|
||||
* - dispatchWebhookEvent with 'client:created' creates a delivery record
|
||||
* - Event name is translated to dot-style ('client.created')
|
||||
* - A pending delivery record exists in webhook_deliveries
|
||||
* - BullMQ job is enqueued for each matching webhook
|
||||
*
|
||||
* Skips gracefully when TEST_DATABASE_URL is not reachable.
|
||||
*/
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
|
||||
import { makeAuditMeta } from '../helpers/factories';
|
||||
|
||||
const TEST_DB_URL =
|
||||
process.env.TEST_DATABASE_URL || 'postgresql://test:test@localhost:5433/portnimara_test';
|
||||
|
||||
let dbAvailable = false;
|
||||
|
||||
beforeAll(async () => {
|
||||
try {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1, idle_timeout: 3, connect_timeout: 3 });
|
||||
await sql`SELECT 1`;
|
||||
await sql.end();
|
||||
dbAvailable = true;
|
||||
} catch {
|
||||
console.warn('[webhook-delivery] Test database not available — skipping integration tests');
|
||||
}
|
||||
});
|
||||
|
||||
function itDb(name: string, fn: () => Promise<void>) {
|
||||
it(name, async () => {
|
||||
if (!dbAvailable) return;
|
||||
await fn();
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
async function seedPortAndUser(): Promise<{ portId: string; userId: string }> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
|
||||
const portId = crypto.randomUUID();
|
||||
const userId = crypto.randomUUID();
|
||||
|
||||
await sql`
|
||||
INSERT INTO ports (id, name, slug, country, currency, timezone)
|
||||
VALUES (${portId}, 'Webhook Test Port', ${'webhook-' + portId.slice(0, 8)}, 'AU', 'AUD', 'UTC')
|
||||
`;
|
||||
|
||||
await sql`
|
||||
INSERT INTO "user" (id, name, email, email_verified, created_at, updated_at)
|
||||
VALUES (${userId}, 'Webhook User', ${'webhook-' + userId.slice(0, 8) + '@test.local'}, true, NOW(), NOW())
|
||||
`;
|
||||
|
||||
await sql`
|
||||
INSERT INTO user_profiles (id, user_id, display_name, is_super_admin, is_active, preferences)
|
||||
VALUES (${crypto.randomUUID()}, ${userId}, 'Webhook User', false, true, '{}')
|
||||
`;
|
||||
|
||||
await sql.end();
|
||||
return { portId, userId };
|
||||
}
|
||||
|
||||
async function cleanupPortAndUser(portId: string, userId: string): Promise<void> {
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
await sql`DELETE FROM ports WHERE id = ${portId}`;
|
||||
await sql`DELETE FROM user_profiles WHERE user_id = ${userId}`;
|
||||
await sql`DELETE FROM "user" WHERE id = ${userId}`;
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('Webhook Delivery', () => {
|
||||
let portId: string;
|
||||
let userId: string;
|
||||
|
||||
const mockQueueAdd = vi.fn().mockResolvedValue({ id: 'mock-job' });
|
||||
|
||||
vi.mock('@/lib/queue', () => ({
|
||||
getQueue: () => ({ add: mockQueueAdd }),
|
||||
}));
|
||||
|
||||
vi.mock('@/lib/utils/encryption', () => ({
|
||||
encrypt: (v: string) => `enc:${v}`,
|
||||
decrypt: (v: string) => v.replace(/^enc:/, ''),
|
||||
}));
|
||||
|
||||
vi.mock('@/lib/audit', () => ({
|
||||
createAuditLog: vi.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
beforeAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
({ portId, userId } = await seedPortAndUser());
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!dbAvailable) return;
|
||||
await cleanupPortAndUser(portId, userId);
|
||||
});
|
||||
|
||||
itDb('createWebhook returns an id and plaintext secret', async () => {
|
||||
const { createWebhook } = await import('@/lib/services/webhooks.service');
|
||||
const meta = makeAuditMeta({ portId, userId });
|
||||
|
||||
const webhook = await createWebhook(
|
||||
portId,
|
||||
userId,
|
||||
{ name: 'Delivery Test Webhook', url: 'https://example.com/hooks', events: ['client.created'], isActive: true },
|
||||
meta,
|
||||
);
|
||||
|
||||
expect(webhook.id).toBeDefined();
|
||||
expect(webhook.portId).toBe(portId);
|
||||
expect(typeof webhook.secret).toBe('string');
|
||||
expect((webhook.secret as string).length).toBeGreaterThan(10);
|
||||
});
|
||||
|
||||
itDb('dispatchWebhookEvent creates a delivery record for client:created', async () => {
|
||||
const { createWebhook } = await import('@/lib/services/webhooks.service');
|
||||
const { dispatchWebhookEvent } = await import('@/lib/services/webhook-dispatch');
|
||||
const meta = makeAuditMeta({ portId, userId });
|
||||
|
||||
const webhook = await createWebhook(
|
||||
portId,
|
||||
userId,
|
||||
{ name: 'Dispatch Test Hook', url: 'https://example.com/dispatch', events: ['client.created'], isActive: true },
|
||||
meta,
|
||||
);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
await dispatchWebhookEvent(portId, 'client:created', { clientId: 'test-client-123' });
|
||||
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const rows = await sql<Array<{ event_type: string; status: string }>>`
|
||||
SELECT event_type, status
|
||||
FROM webhook_deliveries
|
||||
WHERE webhook_id = ${webhook.id}
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
`;
|
||||
await sql.end();
|
||||
|
||||
expect(rows.length).toBe(1);
|
||||
expect(rows[0]!.event_type).toBe('client.created');
|
||||
expect(rows[0]!.status).toBe('pending');
|
||||
});
|
||||
|
||||
itDb('INTERNAL_TO_WEBHOOK_MAP translates internal:camel to dot.style event names', async () => {
|
||||
const { INTERNAL_TO_WEBHOOK_MAP } = await import('@/lib/services/webhook-event-map');
|
||||
|
||||
expect(INTERNAL_TO_WEBHOOK_MAP['client:created']).toBe('client.created');
|
||||
expect(INTERNAL_TO_WEBHOOK_MAP['interest:stageChanged']).toBe('interest.stage_changed');
|
||||
expect(INTERNAL_TO_WEBHOOK_MAP['berth:statusChanged']).toBe('berth.status_changed');
|
||||
expect(INTERNAL_TO_WEBHOOK_MAP['invoice:paid']).toBe('invoice.paid');
|
||||
});
|
||||
|
||||
itDb('unmapped internal events do not create delivery records', async () => {
|
||||
const { createWebhook } = await import('@/lib/services/webhooks.service');
|
||||
const { dispatchWebhookEvent } = await import('@/lib/services/webhook-dispatch');
|
||||
const meta = makeAuditMeta({ portId, userId });
|
||||
|
||||
const webhook = await createWebhook(
|
||||
portId,
|
||||
userId,
|
||||
{ name: 'Unmapped Hook', url: 'https://example.com/unmapped', events: ['client.created'], isActive: true },
|
||||
meta,
|
||||
);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
await dispatchWebhookEvent(portId, 'not:a:real:event', { data: 'test' });
|
||||
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const rows = await sql<Array<{ count: string }>>`
|
||||
SELECT COUNT(*) as count
|
||||
FROM webhook_deliveries
|
||||
WHERE webhook_id = ${webhook.id}
|
||||
AND created_at > NOW() - INTERVAL '5 seconds'
|
||||
`;
|
||||
await sql.end();
|
||||
|
||||
expect(Number(rows[0]!.count)).toBe(0);
|
||||
});
|
||||
|
||||
itDb('inactive webhooks are not dispatched to', async () => {
|
||||
const { createWebhook } = await import('@/lib/services/webhooks.service');
|
||||
const { dispatchWebhookEvent } = await import('@/lib/services/webhook-dispatch');
|
||||
const meta = makeAuditMeta({ portId, userId });
|
||||
|
||||
const webhook = await createWebhook(
|
||||
portId,
|
||||
userId,
|
||||
{ name: 'Inactive Hook', url: 'https://example.com/inactive', events: ['client.created'], isActive: false },
|
||||
meta,
|
||||
);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
await dispatchWebhookEvent(portId, 'client:created', { clientId: 'xyz' });
|
||||
|
||||
const postgres = (await import('postgres')).default;
|
||||
const sql = postgres(TEST_DB_URL, { max: 1 });
|
||||
const rows = await sql<Array<{ count: string }>>`
|
||||
SELECT COUNT(*) as count
|
||||
FROM webhook_deliveries
|
||||
WHERE webhook_id = ${webhook.id}
|
||||
AND created_at > NOW() - INTERVAL '5 seconds'
|
||||
`;
|
||||
await sql.end();
|
||||
|
||||
expect(Number(rows[0]!.count)).toBe(0);
|
||||
});
|
||||
|
||||
itDb('BullMQ job is enqueued with correct event payload', async () => {
|
||||
const { createWebhook } = await import('@/lib/services/webhooks.service');
|
||||
const { dispatchWebhookEvent } = await import('@/lib/services/webhook-dispatch');
|
||||
const meta = makeAuditMeta({ portId, userId });
|
||||
|
||||
await createWebhook(
|
||||
portId,
|
||||
userId,
|
||||
{ name: 'Queue Test Hook', url: 'https://example.com/queue', events: ['client.updated'], isActive: true },
|
||||
meta,
|
||||
);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
await dispatchWebhookEvent(portId, 'client:updated', { clientId: 'q-test' });
|
||||
|
||||
expect(mockQueueAdd).toHaveBeenCalledWith(
|
||||
'deliver',
|
||||
expect.objectContaining({
|
||||
portId,
|
||||
event: 'client.updated',
|
||||
payload: expect.objectContaining({ clientId: 'q-test' }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user