migration-apply: residential client + interest inserts now wrap in db.transaction so a partial failure can't leave an orphan client row without its interest (or vice versa). migration-transform: buildPlannedDocument returns null when there are no signers so the apply pass doesn't try to send a Documenso envelope without recipients. mapDocumentStatus gets an explicit "Awaiting Further Details" branch that no longer auto-promotes via stale sign-time fields. parseFlexibleDate handles ISO and DD-MM-YYYY inputs uniformly. backfill-legacy-lead-source: chunk UPDATE WHERE clause now isNull(source) on top of the inArray match, so a re-run can't overwrite a more accurate source written between batches. Adds 235 lines of vitest coverage on migration-transform. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
252 lines
9.5 KiB
TypeScript
252 lines
9.5 KiB
TypeScript
/**
|
|
* One-shot migration: legacy NocoDB Interests → new client/interest split.
|
|
*
|
|
* Usage:
|
|
*
|
|
* pnpm tsx scripts/migrate-from-nocodb.ts --dry-run
|
|
* Pulls the live NocoDB base, runs the transform + dedup pipeline,
|
|
* writes a report to .migration/<timestamp>/. NO database writes.
|
|
*
|
|
* pnpm tsx scripts/migrate-from-nocodb.ts --dry-run --port-slug port-nimara
|
|
* Same, but tags the planned writes with the named port (matters for
|
|
* the apply phase — every client/interest belongs to one port).
|
|
*
|
|
* pnpm tsx scripts/migrate-from-nocodb.ts --apply --port-slug port-nimara
|
|
* Re-fetches NocoDB, re-transforms, then writes the planned rows
|
|
* into the target port via the idempotent `migration_source_links`
|
|
* ledger. Re-runs are safe — already-imported source IDs are skipped.
|
|
* REQUIRES `EMAIL_REDIRECT_TO` to be set in env (safety net) unless
|
|
* `--unsafe-skip-redirect-check` is also passed.
|
|
*
|
|
* Design reference: docs/superpowers/specs/2026-05-03-dedup-and-migration-design.md §9.
|
|
*/
|
|
|
|
import 'dotenv/config';
|
|
import { randomUUID } from 'node:crypto';
|
|
import path from 'node:path';
|
|
import { fileURLToPath } from 'node:url';
|
|
|
|
import { eq } from 'drizzle-orm';
|
|
|
|
import { db } from '@/lib/db';
|
|
import { ports } from '@/lib/db/schema/ports';
|
|
import { applyPlan } from '@/lib/dedup/migration-apply';
|
|
import { fetchSnapshot, loadNocoDbConfig } from '@/lib/dedup/nocodb-source';
|
|
import { transformSnapshot } from '@/lib/dedup/migration-transform';
|
|
import { resolveReportPaths, writeReport } from '@/lib/dedup/migration-report';
|
|
|
|
/** Parsed command-line options for the migration script. */
interface CliArgs {
  // --dry-run: fetch + transform + write report only; no DB writes.
  dryRun: boolean;
  // --apply: actually insert the planned rows into the target port.
  apply: boolean;
  // --port-slug <slug>: port to attach imports to; null → first available port.
  portSlug: string | null;
  // --report <dir>: previously-generated report dir (only used by --apply).
  reportDir: string | null;
  // --unsafe-skip-redirect-check: bypass the EMAIL_REDIRECT_TO safety gate.
  unsafeSkipRedirectCheck: boolean;
}
|
|
|
|
function parseArgs(argv: string[]): CliArgs {
|
|
const args: CliArgs = {
|
|
dryRun: false,
|
|
apply: false,
|
|
portSlug: null,
|
|
reportDir: null,
|
|
unsafeSkipRedirectCheck: false,
|
|
};
|
|
for (let i = 0; i < argv.length; i += 1) {
|
|
const a = argv[i]!;
|
|
if (a === '--dry-run') args.dryRun = true;
|
|
else if (a === '--apply') args.apply = true;
|
|
else if (a === '--port-slug') args.portSlug = argv[++i] ?? null;
|
|
else if (a === '--report') args.reportDir = argv[++i] ?? null;
|
|
else if (a === '--unsafe-skip-redirect-check') args.unsafeSkipRedirectCheck = true;
|
|
else if (a === '-h' || a === '--help') {
|
|
printHelp();
|
|
process.exit(0);
|
|
} else {
|
|
console.error(`Unknown argument: ${a}`);
|
|
printHelp();
|
|
process.exit(1);
|
|
}
|
|
}
|
|
return args;
|
|
}
|
|
|
|
/**
 * Print CLI usage to stdout. Invoked for -h/--help and whenever argument
 * parsing or mode validation fails.
 */
function printHelp(): void {
  console.log(`Usage:
  pnpm tsx scripts/migrate-from-nocodb.ts --dry-run [--port-slug <slug>]
      Pulls NocoDB → transforms → writes report to .migration/<timestamp>/.
      No database writes.

  pnpm tsx scripts/migrate-from-nocodb.ts --apply --port-slug <slug>
      Re-fetches NocoDB, re-transforms, writes via migration_source_links
      ledger. Idempotent — safe to re-run. Requires EMAIL_REDIRECT_TO set
      (unless --unsafe-skip-redirect-check is also passed).

Flags:
  --dry-run                     Read NocoDB, write report only.
  --apply                       Actually write rows to the DB.
  --port-slug <slug>            Port slug to attach to all imported
                                entities. Defaults to the first
                                available port if omitted.
  --report <dir>                Path to a previously-generated report
                                dir (only used by --apply).
  --unsafe-skip-redirect-check  Skip the EMAIL_REDIRECT_TO precondition
                                check. Only use in production cutover.
  -h, --help                    Show this help.
`);
}
|
|
|
|
/**
|
|
* Resolve the target port: use the slug if provided, otherwise the first
|
|
* port found. Errors out cleanly if the slug doesn't match any port.
|
|
*/
|
|
async function resolvePort(slug: string | null): Promise<{ id: string; slug: string }> {
|
|
if (slug) {
|
|
const [p] = await db
|
|
.select({ id: ports.id, slug: ports.slug })
|
|
.from(ports)
|
|
.where(eq(ports.slug, slug))
|
|
.limit(1);
|
|
if (!p) {
|
|
console.error(`No port found with slug "${slug}".`);
|
|
process.exit(1);
|
|
}
|
|
return { id: p.id, slug: p.slug };
|
|
}
|
|
const [first] = await db.select({ id: ports.id, slug: ports.slug }).from(ports).limit(1);
|
|
if (!first) {
|
|
console.error('No ports exist in the target DB. Seed at least one port before applying.');
|
|
process.exit(1);
|
|
}
|
|
return { id: first.id, slug: first.slug };
|
|
}
|
|
|
|
/**
 * Script entry point.
 *
 * Shared path (both modes): load the NocoDB config, fetch a snapshot, run
 * the transform + dedup pipeline, and write the report. --dry-run stops
 * after the report; --apply additionally resolves the target port and
 * inserts the planned rows via applyPlan, then prints insert/skip counts.
 */
async function main(): Promise<void> {
  const args = parseArgs(process.argv.slice(2));

  // Exactly one mode is required; refuse to guess.
  if (!args.dryRun && !args.apply) {
    console.error('Must specify --dry-run or --apply');
    printHelp();
    process.exit(1);
  }

  // Safety gate: --apply must run with EMAIL_REDIRECT_TO set, unless the
  // operator explicitly opts out (production cutover).
  if (args.apply && !process.env.EMAIL_REDIRECT_TO && !args.unsafeSkipRedirectCheck) {
    console.error(
      '--apply requires EMAIL_REDIRECT_TO to be set in the environment as a safety net.',
    );
    console.error('See docs/operations/outbound-comms-safety.md for the rationale.');
    console.error(
      'If you are running the production cutover and have read that doc, add ' +
        '--unsafe-skip-redirect-check to override.',
    );
    // Exit code 2 distinguishes the safety-gate refusal from argument errors.
    process.exit(2);
  }

  // ── Fetch + transform (shared by dry-run and apply) ──────────────────────

  console.log('[migrate] Loading NocoDB config…');
  const config = loadNocoDbConfig();
  console.log(`[migrate] Source: ${config.url}`);

  console.log('[migrate] Fetching snapshot from NocoDB…');
  const start = Date.now();
  const snapshot = await fetchSnapshot(config);
  const elapsed = ((Date.now() - start) / 1000).toFixed(1);
  console.log(
    `[migrate] Snapshot fetched in ${elapsed}s — ${snapshot.interests.length} interests, ${snapshot.residentialInterests.length} residential, ${snapshot.berths.length} berths.`,
  );

  console.log('[migrate] Running transform + dedup pipeline…');
  const plan = transformSnapshot(snapshot);

  // Resolve output paths relative to the worktree root.
  const scriptDir = path.dirname(fileURLToPath(import.meta.url));
  const repoRoot = path.resolve(scriptDir, '..');
  const generatedAt = new Date().toISOString();
  const paths = resolveReportPaths(repoRoot);

  console.log(`[migrate] Writing report to ${paths.rootDir}…`);
  await writeReport(paths, plan, generatedAt);

  // ── Plan summary ─────────────────────────────────────────────────────────
  const s = plan.stats;
  console.log('');
  console.log('=== Migration Plan Summary ===');
  console.log(
    `  Input:  ${s.inputInterestRows} interests, ${s.inputResidentialRows} residential interests`,
  );
  console.log(`  Output: ${s.outputClients} clients, ${s.outputInterests} interests`);
  console.log(`          ${s.outputContacts} contacts, ${s.outputAddresses} addresses`);
  console.log(
    `          ${s.outputDocuments} EOI documents, ${s.outputDocumentSigners} signers`,
  );
  console.log(
    `          ${s.outputResidentialClients} residential clients (with default-stage interests)`,
  );
  console.log(
    `  Dedup:  ${s.autoLinkedClusters} auto-linked clusters, ${s.needsReviewPairs} pairs flagged for review`,
  );
  console.log(`  Quality: ${s.flaggedRows} rows flagged (see report.csv)`);
  console.log('');
  console.log(`  Full report: ${paths.summaryPath}`);

  // Dry-run ends here: the report is on disk, nothing touched the DB.
  if (args.dryRun) {
    console.log('');
    console.log('Dry-run complete. Re-run with --apply to write rows.');
    return;
  }

  // ── Apply path ───────────────────────────────────────────────────────────

  const port = await resolvePort(args.portSlug);
  // Fresh id tagging this apply run.
  const applyId = randomUUID();

  console.log('');
  console.log(`[migrate] Applying to port "${port.slug}" (id=${port.id})`);
  console.log(`[migrate] Apply id: ${applyId}`);
  console.log('[migrate] Inserting…');

  const applyStart = Date.now();
  const result = await applyPlan(plan, { port, applyId });
  const applyElapsed = ((Date.now() - applyStart) / 1000).toFixed(1);

  // "already linked" counts come from the migration_source_links ledger —
  // rows skipped on this run because a prior run imported them.
  console.log('');
  console.log('=== Apply Result ===');
  console.log(`  Time: ${applyElapsed}s`);
  console.log(
    `  Clients:   ${result.clientsInserted} inserted, ${result.clientsSkipped} already linked`,
  );
  console.log(`  Contacts:  ${result.contactsInserted} inserted`);
  console.log(`  Addresses: ${result.addressesInserted} inserted`);
  console.log(`  Yachts:    ${result.yachtsInserted} inserted`);
  console.log(
    `  Interests: ${result.interestsInserted} inserted, ${result.interestsSkipped} already linked`,
  );
  console.log(
    `  Documents: ${result.documentsInserted} inserted, ${result.documentsSkipped} already linked`,
  );
  console.log(`  Signers:   ${result.documentSignersInserted} inserted`);
  console.log(
    `  Res-Clt:   ${result.residentialClientsInserted} inserted, ${result.residentialClientsSkipped} already linked`,
  );
  console.log(`  Res-Int:   ${result.residentialInterestsInserted} inserted`);

  // Surface at most 20 warnings inline; the remainder is summarized by count.
  if (result.warnings.length > 0) {
    console.log('');
    console.log('Warnings:');
    for (const w of result.warnings.slice(0, 20)) {
      console.log(`  - ${w}`);
    }
    if (result.warnings.length > 20) {
      console.log(`  … ${result.warnings.length - 20} more`);
    }
  }
  console.log('');
}
|
|
|
|
// Top-level runner: any unhandled rejection is logged with context and
// converted into a non-zero exit code so operators/CI see the failure.
main().catch((err) => {
  console.error('[migrate] Fatal error:', err);
  process.exit(1);
});
|