feat(insights): Phase B schema + service skeletons
PR1 of Phase B per docs/superpowers/specs/2026-04-28-phase-b-insights-alerts-design.md. Lays the foundation that PRs 2-10 will fill in with behaviour. Schema (migration 0014): - alerts table with rule-engine fields (rule_id, severity, link, entity_type/id, fingerprint, fired/dismissed/acknowledged/resolved timestamps, jsonb metadata). Partial-unique fingerprint index keeps one open row per (port, rule, entity); separate indexes power severity-filtered and time-ordered queries. - analytics_snapshots (port_id, metric_id) -> jsonb cache + computedAt for the 15-min recurring refresh. - expenses: duplicate_of self-FK, dedup_scanned_at, ocr_status/raw/ confidence; partial index on (port, vendor, amount, date) where duplicate_of IS NULL drives the dedup heuristic. - audit_logs.search_text: GENERATED ALWAYS tsvector over action+entity_type+entity_id+user_id, GIN-indexed (drizzle can't model GENERATED ALWAYS in TS yet, so the migration appends manual ALTER + the GIN index). Service skeletons in src/lib/services/: - alerts.service.ts: fingerprintFor, reconcileAlertsForPort (upsert + auto-resolve), dismiss, acknowledge, listAlertsForPort. - alert-rules.ts: RULE_REGISTRY of 10 rule evaluators (currently no-op); PR2 fills in the bodies. - analytics.service.ts: readSnapshot/writeSnapshot with 15-min TTL + no-op compute* stubs for the four chart series; PR3 fills behavior. - expense-dedup.service.ts: scanForDuplicates + markBestDuplicate using the partial dedup index. PR8 wires the BullMQ trigger. - expense-ocr.service.ts: OcrResult/OcrLineItem types + ocrReceipt stub. PR9 wires Claude Vision (Haiku 4.5 + ephemeral system-prompt cache). - audit-search.service.ts: tsvector @@ plainto_tsquery + cursor pagination on (createdAt, id). PR10 wires the admin UI. tsc clean, lint clean, vitest 675/675 (one unrelated AES random-output flake passes solo). Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
56
src/lib/db/migrations/0014_black_banshee.sql
Normal file
56
src/lib/db/migrations/0014_black_banshee.sql
Normal file
@@ -0,0 +1,56 @@
|
||||
CREATE TABLE "alerts" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"port_id" text NOT NULL,
|
||||
"rule_id" text NOT NULL,
|
||||
"severity" text NOT NULL,
|
||||
"title" text NOT NULL,
|
||||
"body" text,
|
||||
"link" text NOT NULL,
|
||||
"entity_type" text,
|
||||
"entity_id" text,
|
||||
"fingerprint" text NOT NULL,
|
||||
"fired_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"dismissed_at" timestamp with time zone,
|
||||
"dismissed_by" text,
|
||||
"acknowledged_at" timestamp with time zone,
|
||||
"acknowledged_by" text,
|
||||
"resolved_at" timestamp with time zone,
|
||||
"metadata" jsonb DEFAULT '{}'::jsonb
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "analytics_snapshots" (
|
||||
"port_id" text NOT NULL,
|
||||
"metric_id" text NOT NULL,
|
||||
"computed_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"data" jsonb NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD COLUMN "duplicate_of" text;--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD COLUMN "dedup_scanned_at" timestamp with time zone;--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD COLUMN "ocr_status" text DEFAULT 'pending';--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD COLUMN "ocr_raw" jsonb;--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD COLUMN "ocr_confidence" numeric;--> statement-breakpoint
|
||||
ALTER TABLE "audit_logs" ADD COLUMN "search_text" "tsvector";--> statement-breakpoint
|
||||
ALTER TABLE "alerts" ADD CONSTRAINT "alerts_port_id_ports_id_fk" FOREIGN KEY ("port_id") REFERENCES "public"."ports"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "alerts" ADD CONSTRAINT "alerts_dismissed_by_user_id_fk" FOREIGN KEY ("dismissed_by") REFERENCES "public"."user"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "alerts" ADD CONSTRAINT "alerts_acknowledged_by_user_id_fk" FOREIGN KEY ("acknowledged_by") REFERENCES "public"."user"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "analytics_snapshots" ADD CONSTRAINT "analytics_snapshots_port_id_ports_id_fk" FOREIGN KEY ("port_id") REFERENCES "public"."ports"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "idx_alerts_fingerprint_open" ON "alerts" USING btree ("port_id","fingerprint") WHERE resolved_at IS NULL;--> statement-breakpoint
|
||||
CREATE INDEX "idx_alerts_port_fired" ON "alerts" USING btree ("port_id","fired_at");--> statement-breakpoint
|
||||
CREATE INDEX "idx_alerts_port_severity_open" ON "alerts" USING btree ("port_id","severity") WHERE resolved_at IS NULL AND dismissed_at IS NULL;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "idx_analytics_pk" ON "analytics_snapshots" USING btree ("port_id","metric_id");--> statement-breakpoint
|
||||
ALTER TABLE "expenses" ADD CONSTRAINT "expenses_duplicate_of_expenses_id_fk" FOREIGN KEY ("duplicate_of") REFERENCES "public"."expenses"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "idx_expenses_dedup" ON "expenses" USING btree ("port_id","establishment_name","amount","expense_date") WHERE duplicate_of IS NULL;--> statement-breakpoint
|
||||
-- audit_logs.search_text needs to be GENERATED ALWAYS (drizzle can't model that
|
||||
-- in TS yet); drop the empty column and re-add it as the generated form.
|
||||
ALTER TABLE "audit_logs" DROP COLUMN "search_text";--> statement-breakpoint
|
||||
ALTER TABLE "audit_logs" ADD COLUMN "search_text" tsvector
|
||||
GENERATED ALWAYS AS (
|
||||
to_tsvector('simple',
|
||||
coalesce("action", '') || ' ' ||
|
||||
coalesce("entity_type", '') || ' ' ||
|
||||
coalesce("entity_id", '') || ' ' ||
|
||||
coalesce("user_id", '')
|
||||
)
|
||||
) STORED;--> statement-breakpoint
|
||||
CREATE INDEX "idx_audit_search" ON "audit_logs" USING gin("search_text");
|
||||
9780
src/lib/db/migrations/meta/0014_snapshot.json
Normal file
9780
src/lib/db/migrations/meta/0014_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -99,6 +99,13 @@
|
||||
"when": 1777334766194,
|
||||
"tag": "0013_abnormal_thundra",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 14,
|
||||
"version": "7",
|
||||
"when": 1777379952283,
|
||||
"tag": "0014_black_banshee",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -8,7 +8,10 @@ import {
|
||||
index,
|
||||
uniqueIndex,
|
||||
primaryKey,
|
||||
jsonb,
|
||||
AnyPgColumn,
|
||||
} from 'drizzle-orm/pg-core';
|
||||
import { sql } from 'drizzle-orm';
|
||||
import { ports } from './ports';
|
||||
import { files } from './documents';
|
||||
|
||||
@@ -36,6 +39,19 @@ export const expenses = pgTable(
|
||||
paymentDate: date('payment_date'),
|
||||
paymentReference: text('payment_reference'),
|
||||
paymentNotes: text('payment_notes'),
|
||||
/** When set, this expense is flagged as a duplicate of another in the
|
||||
* same port. Self-referencing FK; the dedup service writes this. */
|
||||
duplicateOf: text('duplicate_of').references((): AnyPgColumn => expenses.id, {
|
||||
onDelete: 'set null',
|
||||
}),
|
||||
/** Last time the dedup heuristic ran against this row. */
|
||||
dedupScannedAt: timestamp('dedup_scanned_at', { withTimezone: true }),
|
||||
/** OCR pipeline state: 'pending'|'ok'|'failed'|'low_confidence'. */
|
||||
ocrStatus: text('ocr_status').default('pending'),
|
||||
/** Full Claude Vision response payload for audit/debug. */
|
||||
ocrRaw: jsonb('ocr_raw'),
|
||||
/** 0..1; values < 0.6 force the verify-mode UI. */
|
||||
ocrConfidence: numeric('ocr_confidence'),
|
||||
createdBy: text('created_by').notNull(),
|
||||
archivedAt: timestamp('archived_at', { withTimezone: true }),
|
||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
@@ -45,6 +61,10 @@ export const expenses = pgTable(
|
||||
index('idx_expenses_port').on(table.portId),
|
||||
index('idx_expenses_date').on(table.portId, table.expenseDate),
|
||||
index('idx_expenses_category').on(table.portId, table.category),
|
||||
// Powers the dedup heuristic lookup (port + vendor + amount + date window).
|
||||
index('idx_expenses_dedup')
|
||||
.on(table.portId, table.establishmentName, table.amount, table.expenseDate)
|
||||
.where(sql`duplicate_of IS NULL`),
|
||||
],
|
||||
);
|
||||
|
||||
|
||||
@@ -47,5 +47,8 @@ export * from './operations';
|
||||
// System
|
||||
export * from './system';
|
||||
|
||||
// Insights (Phase B): alerts, analytics_snapshots
|
||||
export * from './insights';
|
||||
|
||||
// Relations (must come last — references all tables)
|
||||
export * from './relations';
|
||||
|
||||
101
src/lib/db/schema/insights.ts
Normal file
101
src/lib/db/schema/insights.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
/**
|
||||
* Phase B — operational insight surfaces.
|
||||
*
|
||||
* - `alerts`: rule-engine-fired actionable cards. The fingerprint column
|
||||
* dedupes re-evaluations of the same condition; the partial unique
|
||||
* index keeps a single open row per `(port, fingerprint)` while
|
||||
* resolved/dismissed history accumulates.
|
||||
* - `analytics_snapshots`: cached aggregate JSON keyed by metric+range,
|
||||
* refreshed by a recurring job so dashboard hits don't recompute.
|
||||
*/
|
||||
|
||||
import { pgTable, text, timestamp, jsonb, index, uniqueIndex } from 'drizzle-orm/pg-core';
|
||||
import { sql } from 'drizzle-orm';
|
||||
|
||||
import { ports } from './ports';
|
||||
import { user } from './users';
|
||||
|
||||
export const alerts = pgTable(
|
||||
'alerts',
|
||||
{
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id, { onDelete: 'cascade' }),
|
||||
/** Stable rule identifier: 'reservation.no_agreement', 'interest.stale', ... */
|
||||
ruleId: text('rule_id').notNull(),
|
||||
/** 'info' | 'warning' | 'critical' */
|
||||
severity: text('severity').notNull(),
|
||||
title: text('title').notNull(),
|
||||
body: text('body'),
|
||||
/** Relative path the card deep-links to. */
|
||||
link: text('link').notNull(),
|
||||
/** Optional FK target: 'interest', 'reservation', 'document', 'expense', ... */
|
||||
entityType: text('entity_type'),
|
||||
entityId: text('entity_id'),
|
||||
/** Hash of (rule_id + entity_type + entity_id) — dedupes re-evaluations. */
|
||||
fingerprint: text('fingerprint').notNull(),
|
||||
firedAt: timestamp('fired_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
dismissedAt: timestamp('dismissed_at', { withTimezone: true }),
|
||||
dismissedBy: text('dismissed_by').references(() => user.id),
|
||||
/** "Someone is on it" — alert stays visible but stops nagging. */
|
||||
acknowledgedAt: timestamp('acknowledged_at', { withTimezone: true }),
|
||||
acknowledgedBy: text('acknowledged_by').references(() => user.id),
|
||||
/** Set by the engine when the underlying condition no longer fires. */
|
||||
resolvedAt: timestamp('resolved_at', { withTimezone: true }),
|
||||
/** Per-rule extras: days_stale, amount_at_risk, etc. */
|
||||
metadata: jsonb('metadata').default({}),
|
||||
},
|
||||
(table) => [
|
||||
// Only one open alert per (port, fingerprint) — re-evaluation upserts.
|
||||
uniqueIndex('idx_alerts_fingerprint_open')
|
||||
.on(table.portId, table.fingerprint)
|
||||
.where(sql`resolved_at IS NULL`),
|
||||
index('idx_alerts_port_fired').on(table.portId, table.firedAt),
|
||||
index('idx_alerts_port_severity_open')
|
||||
.on(table.portId, table.severity)
|
||||
.where(sql`resolved_at IS NULL AND dismissed_at IS NULL`),
|
||||
],
|
||||
);
|
||||
|
||||
export type Alert = typeof alerts.$inferSelect;
|
||||
export type NewAlert = typeof alerts.$inferInsert;
|
||||
|
||||
export const analyticsSnapshots = pgTable(
|
||||
'analytics_snapshots',
|
||||
{
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id, { onDelete: 'cascade' }),
|
||||
/** Composite key: e.g. 'pipeline_funnel.30d', 'occupancy_timeline.90d'. */
|
||||
metricId: text('metric_id').notNull(),
|
||||
computedAt: timestamp('computed_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
/** Pre-shaped chart data. */
|
||||
data: jsonb('data').notNull(),
|
||||
},
|
||||
(table) => [uniqueIndex('idx_analytics_pk').on(table.portId, table.metricId)],
|
||||
);
|
||||
|
||||
export type AnalyticsSnapshot = typeof analyticsSnapshots.$inferSelect;
|
||||
export type NewAnalyticsSnapshot = typeof analyticsSnapshots.$inferInsert;
|
||||
|
||||
/** Severity literal type for callers that want a typed enum. */
|
||||
export type AlertSeverity = 'info' | 'warning' | 'critical';
|
||||
|
||||
/** Rule IDs in the v1 catalog — keep in sync with `alert-rules.ts`. */
|
||||
export const ALERT_RULES = [
|
||||
'reservation.no_agreement',
|
||||
'interest.stale',
|
||||
'document.expiring_soon',
|
||||
'document.signer_overdue',
|
||||
'berth.under_offer_stalled',
|
||||
'expense.duplicate',
|
||||
'expense.unscanned',
|
||||
'interest.high_value_silent',
|
||||
'eoi.unsigned_long',
|
||||
'audit.suspicious_login',
|
||||
] as const;
|
||||
|
||||
export type AlertRuleId = (typeof ALERT_RULES)[number];
|
||||
@@ -8,14 +8,24 @@ import {
|
||||
jsonb,
|
||||
index,
|
||||
uniqueIndex,
|
||||
customType,
|
||||
} from 'drizzle-orm/pg-core';
|
||||
import { ports } from './ports';
|
||||
import { clients } from './clients';
|
||||
|
||||
// Drizzle doesn't ship a first-class tsvector type; declare a thin custom one.
|
||||
const tsvector = customType<{ data: string; driverData: string }>({
|
||||
dataType() {
|
||||
return 'tsvector';
|
||||
},
|
||||
});
|
||||
|
||||
export const auditLogs = pgTable(
|
||||
'audit_logs',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id').references(() => ports.id), // null for system-level events
|
||||
userId: text('user_id'), // null for system-generated events
|
||||
action: text('action').notNull(), // create, update, delete, archive, restore, merge, login, logout, revert
|
||||
@@ -31,6 +41,10 @@ export const auditLogs = pgTable(
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
revertOf: text('revert_of').references((): any => auditLogs.id),
|
||||
metadata: jsonb('metadata').default({}),
|
||||
/** Full-text search column. The migration redefines it as a GENERATED
 * ALWAYS ... STORED tsvector over action + entity_type + entity_id +
 * user_id (drizzle can't model generated tsvector columns in TS yet). */
|
||||
searchText: tsvector('search_text'),
|
||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
},
|
||||
(table) => [
|
||||
@@ -44,7 +58,9 @@ export const auditLogs = pgTable(
|
||||
export const tags = pgTable(
|
||||
'tags',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id),
|
||||
@@ -61,7 +77,9 @@ export const tags = pgTable(
|
||||
export const webhooks = pgTable(
|
||||
'webhooks',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id),
|
||||
@@ -80,7 +98,9 @@ export const webhooks = pgTable(
|
||||
export const webhookDeliveries = pgTable(
|
||||
'webhook_deliveries',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
webhookId: text('webhook_id')
|
||||
.notNull()
|
||||
.references(() => webhooks.id, { onDelete: 'cascade' }),
|
||||
@@ -115,7 +135,9 @@ export const systemSettings = pgTable(
|
||||
export const savedViews = pgTable(
|
||||
'saved_views',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id),
|
||||
@@ -136,7 +158,9 @@ export const savedViews = pgTable(
|
||||
export const scratchpadNotes = pgTable(
|
||||
'scratchpad_notes',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
userId: text('user_id').notNull(),
|
||||
content: text('content').notNull(),
|
||||
linkedClientId: text('linked_client_id').references(() => clients.id),
|
||||
@@ -150,7 +174,9 @@ export const scratchpadNotes = pgTable(
|
||||
export const userNotificationPreferences = pgTable(
|
||||
'user_notification_preferences',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
userId: text('user_id').notNull(),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
@@ -167,7 +193,9 @@ export const userNotificationPreferences = pgTable(
|
||||
export const currencyRates = pgTable(
|
||||
'currency_rates',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
baseCurrency: text('base_currency').notNull(),
|
||||
targetCurrency: text('target_currency').notNull(),
|
||||
rate: numeric('rate').notNull(),
|
||||
@@ -182,7 +210,9 @@ export const currencyRates = pgTable(
|
||||
export const customFieldDefinitions = pgTable(
|
||||
'custom_field_definitions',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
portId: text('port_id')
|
||||
.notNull()
|
||||
.references(() => ports.id),
|
||||
@@ -204,7 +234,9 @@ export const customFieldDefinitions = pgTable(
|
||||
export const customFieldValues = pgTable(
|
||||
'custom_field_values',
|
||||
{
|
||||
id: text('id').primaryKey().$defaultFn(() => crypto.randomUUID()),
|
||||
id: text('id')
|
||||
.primaryKey()
|
||||
.$defaultFn(() => crypto.randomUUID()),
|
||||
fieldId: text('field_id')
|
||||
.notNull()
|
||||
.references(() => customFieldDefinitions.id, { onDelete: 'cascade' }),
|
||||
|
||||
Reference in New Issue
Block a user