pn-new-crm/src/lib/services/analytics.service.ts
Matt Ciaccio ba5fb6db5e feat(sales): EOI queue route + invoice→deposit auto-advance + won/lost outcomes
Three independent strengthenings of the sales spine, each made possible to
do cleanly by the prior coherence sweep.

  1. EOI queue page

     - Sidebar entry under Documents → "EOI queue".
     - Route /[port]/documents/eoi renders DocumentsHub with the existing
       eoi_queue tab pre-selected (filtered to in-flight EOIs only).
     - .gitignore: tightened root-only `eoi/` ignore so the documents/eoi
       route is no longer silently excluded.
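
     A sketch of the .gitignore change (only the eoi/ rule itself is from
     this commit; the comments are illustrative):

       # before: matched any eoi/ directory at any depth, which also hid
       # the documents/eoi route directory from git
       eoi/
       # after: anchored to the repo root, so only the top-level eoi/ is ignored
       /eoi/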

  2. Invoice ↔ deposit link

     - invoices.interestId (FK, ON DELETE SET NULL) + invoices.kind
       ('general' | 'deposit'). Indexed on (port_id, interest_id); see the
       first sketch after this list.
     - createInvoiceSchema requires interestId when kind === 'deposit'
       (second sketch below); the service validates that the linked
       interest belongs to the same port before insert.
     - recordPayment auto-advances pipelineStage to deposit_10pct (via
       advanceStageIfBehind) when a paid invoice is kind=deposit and has
       an interestId. No-op if the interest is already further along
       (third sketch below).
     - "Create deposit invoice" link added to the Deposit milestone on the
       interest detail. Links to /invoices/new?interestId=…&kind=deposit;
       the form prefills the billing entity from the linked interest's
       client and shows a context banner.
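
     A minimal drizzle-orm sketch of the new columns, assuming pg-core
     helpers; everything outside these two column definitions is
     illustrative:

       kind: text('kind', { enum: ['general', 'deposit'] })
         .notNull()
         .default('general'),
       interestId: uuid('interest_id').references(() => interests.id, {
         onDelete: 'set null',
       }),
       // in the table's extra config (index name assumed):
       // index('invoices_port_id_interest_id_idx').on(t.portId, t.interestId)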
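
     And a sketch of the zod rule (field names other than kind and
     interestId are illustrative):

       const createInvoiceSchema = z
         .object({
           kind: z.enum(['general', 'deposit']).default('general'),
           interestId: z.string().uuid().optional(),
           // ...remaining invoice fields
         })
         .superRefine((val, ctx) => {
           if (val.kind === 'deposit' && !val.interestId) {
             ctx.addIssue({
               code: z.ZodIssueCode.custom,
               path: ['interestId'],
               message: 'interestId is required when kind is "deposit"',
             });
           }
         });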
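
     And the auto-advance guard inside recordPayment, sketched under the
     assumption that advanceStageIfBehind takes (portId, interestId,
     targetStage):

       if (invoice.kind === 'deposit' && invoice.interestId) {
         // advanceStageIfBehind only ever moves the stage forward, so this
         // is a no-op when the interest is already at or past deposit_10pct.
         await advanceStageIfBehind(invoice.portId, invoice.interestId, 'deposit_10pct');
       }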

  3. Won / lost terminal outcomes

     - interests.outcome ('won' | 'lost_other_marina' | 'lost_unqualified'
       | 'lost_no_response' | 'cancelled') + outcomeReason text +
       outcomeAt timestamp. Indexed on (port_id, outcome).
     - setInterestOutcome / clearInterestOutcome services + POST/DELETE
       /api/v1/interests/:id/outcome endpoints (gated by the change_stage
       permission). Setting an outcome moves the interest to `completed`
       in the same write; clearing reopens to `in_communication` (or a
       caller-specified stage). A sketch follows this list.
     - Mark Won / Mark Lost icon buttons on the interest detail header,
       plus an outcome badge that replaces the stage pill once a terminal
       outcome is set, plus a Reopen button.
     - Funnel + dashboard math updated to exclude lost/cancelled outcomes
       from active calculations (KPIs.activeInterests, pipelineValueUsd,
       getPipelineCounts, computePipelineFunnel, getRevenueForecast).
       The funnel now also returns a `lost` summary so callers can
       surface leakage without polluting conversion percentages.
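
     A minimal sketch of the outcome write (InterestOutcome stands for the
     union of outcome values above; transaction handling and any audit
     trail are assumed):

       export async function setInterestOutcome(
         portId: string,
         interestId: string,
         outcome: InterestOutcome,
         reason?: string,
       ): Promise<void> {
         await db
           .update(interests)
           .set({
             outcome,
             outcomeReason: reason ?? null,
             outcomeAt: new Date(),
             pipelineStage: 'completed', // terminal outcome closes out in the same write
           })
           .where(and(eq(interests.portId, portId), eq(interests.id, interestId)));
       }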

Schema changes shipped via 0019_lazy_vampiro.sql; applied to dev DB
manually via psql because drizzle-kit push hits a pre-existing zod
parsing issue on the companies index. Dev server may need a restart
to flush prepared-statement caches.

tsc clean. vitest 832/832 pass. ESLint clean on every file touched.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-02 00:01:33 +02:00

/**
 * Phase B analytics service. Reads pre-computed snapshots from
 * `analytics_snapshots`; recomputes on demand if older than `SNAPSHOT_TTL_MS`.
 * The recurring `analytics-refresh` BullMQ job (PR3) warms the table
 * every 15 minutes per port × per metric.
 */
import { and, eq, gte, isNull, sql } from 'drizzle-orm';

import { db } from '@/lib/db';
import { analyticsSnapshots } from '@/lib/db/schema/insights';
import { interests } from '@/lib/db/schema/interests';
import { invoices } from '@/lib/db/schema/financial';
import { berthReservations } from '@/lib/db/schema/reservations';
import { PIPELINE_STAGES } from '@/lib/constants';

export type DateRange = '7d' | '30d' | '90d' | 'today';

export type MetricBase =
  | 'pipeline_funnel'
  | 'occupancy_timeline'
  | 'revenue_breakdown'
  | 'lead_source_attribution';

export type MetricId = `${MetricBase}.${DateRange}`;

export const ALL_RANGES: readonly DateRange[] = ['today', '7d', '30d', '90d'] as const;

export const ALL_METRICS: readonly MetricBase[] = [
  'pipeline_funnel',
  'occupancy_timeline',
  'revenue_breakdown',
  'lead_source_attribution',
] as const;

export const SNAPSHOT_TTL_MS = 15 * 60 * 1000; // 15 minutes

// ─── Output shapes ────────────────────────────────────────────────────────────

export interface PipelineFunnelData {
  stages: Array<{ stage: string; count: number; conversionPct: number }>;
  /** Counts of terminal lost/cancelled outcomes in the range. Surfaces below
   * the funnel so users can see leakage without it polluting the conversion
   * math. Total = sum of these counts. */
  lost: { count: number; byOutcome: Record<string, number> };
}

export interface OccupancyTimelineData {
  points: Array<{ date: string; occupied: number; total: number; occupancyPct: number }>;
}

export interface RevenueBreakdownData {
  bars: Array<{ status: string; amount: number; currency: string }>;
}

export interface LeadSourceAttributionData {
  slices: Array<{ source: string; count: number }>;
}

export type SnapshotData =
  | PipelineFunnelData
  | OccupancyTimelineData
  | RevenueBreakdownData
  | LeadSourceAttributionData;

// ─── Cache layer ──────────────────────────────────────────────────────────────

export async function readSnapshot<T extends SnapshotData>(
  portId: string,
  metricId: MetricId,
): Promise<T | null> {
  const row = await db.query.analyticsSnapshots.findFirst({
    where: and(eq(analyticsSnapshots.portId, portId), eq(analyticsSnapshots.metricId, metricId)),
  });
  if (!row) return null;
  const age = Date.now() - row.computedAt.getTime();
  if (age > SNAPSHOT_TTL_MS) return null;
  return row.data as T;
}

export async function writeSnapshot(
  portId: string,
  metricId: MetricId,
  data: SnapshotData,
): Promise<void> {
  await db
    .insert(analyticsSnapshots)
    .values({ portId, metricId, data })
    .onConflictDoUpdate({
      target: [analyticsSnapshots.portId, analyticsSnapshots.metricId],
      set: { data, computedAt: new Date() },
    });
}

// ─── Range helpers ────────────────────────────────────────────────────────────

// Note: 'today' is treated as the trailing 24 hours, not the calendar day.
function rangeToCutoff(range: DateRange): Date {
  const now = Date.now();
  switch (range) {
    case 'today':
      return new Date(now - 1 * 86_400_000);
    case '7d':
      return new Date(now - 7 * 86_400_000);
    case '30d':
      return new Date(now - 30 * 86_400_000);
    case '90d':
      return new Date(now - 90 * 86_400_000);
  }
}

function rangeToDays(range: DateRange): number {
  switch (range) {
    case 'today':
      return 1;
    case '7d':
      return 7;
    case '30d':
      return 30;
    case '90d':
      return 90;
  }
}

// ─── Computations ─────────────────────────────────────────────────────────────

export async function computePipelineFunnel(
  portId: string,
  range: DateRange,
): Promise<PipelineFunnelData> {
  const cutoff = rangeToCutoff(range);
  // Stage counts EXCLUDE lost/cancelled outcomes — those never become
  // conversions, so polluting the funnel with them gives meaningless math.
  // Lost is reported separately in the `lost` block.
  const stageRows = await db
    .select({ stage: interests.pipelineStage, count: sql<number>`count(*)::int` })
    .from(interests)
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
        gte(interests.createdAt, cutoff),
        sql`(${interests.outcome} IS NULL OR ${interests.outcome} = 'won')`,
      ),
    )
    .groupBy(interests.pipelineStage);
  const counts = new Map(stageRows.map((r) => [r.stage, r.count]));
  // Top of the funnel is the count in the first stage ('open'); each
  // stage's conversion percentage is measured against it.
  const top = counts.get('open') ?? 0;
  const stages = PIPELINE_STAGES.map((stage) => {
    const count = counts.get(stage) ?? 0;
    const conversionPct = top === 0 ? 0 : Math.round((count / top) * 1000) / 10;
    return { stage, count, conversionPct };
  });
  // Lost / cancelled summary. Same date-range filter as the funnel.
  const lostRows = await db
    .select({ outcome: interests.outcome, count: sql<number>`count(*)::int` })
    .from(interests)
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
        gte(interests.createdAt, cutoff),
        sql`${interests.outcome} IS NOT NULL AND ${interests.outcome} != 'won'`,
      ),
    )
    .groupBy(interests.outcome);
  const byOutcome: Record<string, number> = {};
  let lostTotal = 0;
  for (const row of lostRows) {
    if (!row.outcome) continue;
    byOutcome[row.outcome] = row.count;
    lostTotal += row.count;
  }
  return { stages, lost: { count: lostTotal, byOutcome } };
}

export async function computeOccupancyTimeline(
  portId: string,
  range: DateRange,
): Promise<OccupancyTimelineData> {
  const days = rangeToDays(range);
  // Total berths per port (current count — assumes no churn).
  const totalRow = await db.execute<{ total: number }>(
    sql`SELECT count(*)::int AS total FROM berths WHERE port_id = ${portId}`,
  );
  const total = totalRow[0]?.total ?? 0;
  // For each day in range, count berths that have an active reservation
  // covering that day. A reservation is "covering" if start_date <= day
  // AND (end_date IS NULL OR end_date >= day).
  const points: OccupancyTimelineData['points'] = [];
  for (let i = days - 1; i >= 0; i--) {
    const day = new Date(Date.now() - i * 86_400_000);
    const dayStr = day.toISOString().slice(0, 10);
    const occRow = await db
      .select({ occupied: sql<number>`count(distinct ${berthReservations.berthId})::int` })
      .from(berthReservations)
      .where(
        and(
          eq(berthReservations.portId, portId),
          eq(berthReservations.status, 'active'),
          sql`${berthReservations.startDate} <= ${dayStr}::date`,
          sql`(${berthReservations.endDate} IS NULL OR ${berthReservations.endDate} >= ${dayStr}::date)`,
        ),
      );
    const occupied = occRow[0]?.occupied ?? 0;
    const occupancyPct = total === 0 ? 0 : Math.round((occupied / total) * 1000) / 10;
    points.push({ date: dayStr, occupied, total, occupancyPct });
  }
  return { points };
}

export async function computeRevenueBreakdown(
  portId: string,
  range: DateRange,
): Promise<RevenueBreakdownData> {
  const cutoff = rangeToCutoff(range);
  const rows = await db
    .select({
      status: invoices.status,
      currency: invoices.currency,
      amount: sql<string>`coalesce(sum(${invoices.total}), 0)::text`,
    })
    .from(invoices)
    .where(
      and(
        eq(invoices.portId, portId),
        isNull(invoices.archivedAt),
        gte(invoices.createdAt, cutoff),
      ),
    )
    .groupBy(invoices.status, invoices.currency);
  return {
    bars: rows.map((r) => ({
      status: r.status,
      currency: r.currency,
      amount: Number(r.amount),
    })),
  };
}

export async function computeLeadSourceAttribution(
  portId: string,
  range: DateRange,
): Promise<LeadSourceAttributionData> {
  const cutoff = rangeToCutoff(range);
  const rows = await db
    .select({ source: interests.source, count: sql<number>`count(*)::int` })
    .from(interests)
    .where(
      and(
        eq(interests.portId, portId),
        isNull(interests.archivedAt),
        gte(interests.createdAt, cutoff),
      ),
    )
    .groupBy(interests.source);
  return {
    slices: rows
      .map((r) => ({
        source: r.source ?? 'unspecified',
        count: r.count,
      }))
      .sort((a, b) => b.count - a.count),
  };
}

// ─── Public read API (cache → compute → write back) ──────────────────────────

export async function getPipelineFunnel(
  portId: string,
  range: DateRange,
): Promise<PipelineFunnelData> {
  const metricId = `pipeline_funnel.${range}` as const;
  const cached = await readSnapshot<PipelineFunnelData>(portId, metricId);
  if (cached) return cached;
  const fresh = await computePipelineFunnel(portId, range);
  await writeSnapshot(portId, metricId, fresh);
  return fresh;
}

export async function getOccupancyTimeline(
  portId: string,
  range: DateRange,
): Promise<OccupancyTimelineData> {
  const metricId = `occupancy_timeline.${range}` as const;
  const cached = await readSnapshot<OccupancyTimelineData>(portId, metricId);
  if (cached) return cached;
  const fresh = await computeOccupancyTimeline(portId, range);
  await writeSnapshot(portId, metricId, fresh);
  return fresh;
}

export async function getRevenueBreakdown(
  portId: string,
  range: DateRange,
): Promise<RevenueBreakdownData> {
  const metricId = `revenue_breakdown.${range}` as const;
  const cached = await readSnapshot<RevenueBreakdownData>(portId, metricId);
  if (cached) return cached;
  const fresh = await computeRevenueBreakdown(portId, range);
  await writeSnapshot(portId, metricId, fresh);
  return fresh;
}

export async function getLeadSourceAttribution(
  portId: string,
  range: DateRange,
): Promise<LeadSourceAttributionData> {
  const metricId = `lead_source_attribution.${range}` as const;
  const cached = await readSnapshot<LeadSourceAttributionData>(portId, metricId);
  if (cached) return cached;
  const fresh = await computeLeadSourceAttribution(portId, range);
  await writeSnapshot(portId, metricId, fresh);
  return fresh;
}

// ─── Cron entrypoint: warm every (port × metric × range) ────────────────────

export async function refreshSnapshotsForPort(portId: string): Promise<void> {
  for (const range of ALL_RANGES) {
    const [funnel, occupancy, revenue, leadSource] = await Promise.all([
      computePipelineFunnel(portId, range),
      computeOccupancyTimeline(portId, range),
      computeRevenueBreakdown(portId, range),
      computeLeadSourceAttribution(portId, range),
    ]);
    await Promise.all([
      writeSnapshot(portId, `pipeline_funnel.${range}`, funnel),
      writeSnapshot(portId, `occupancy_timeline.${range}`, occupancy),
      writeSnapshot(portId, `revenue_breakdown.${range}`, revenue),
      writeSnapshot(portId, `lead_source_attribution.${range}`, leadSource),
    ]);
  }
}
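
// ─── Usage note (illustrative, not part of this module) ─────────────────────
// The analytics-refresh job from the header comment is expected to call
// refreshSnapshotsForPort once per port. A minimal BullMQ worker sketch
// (queue name and job payload shape are assumptions):
//
//   import { Worker } from 'bullmq';
//   new Worker<{ portId: string }>('analytics-refresh', async (job) => {
//     await refreshSnapshotsForPort(job.data.portId);
//   });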