Closes the second wave of HIGH-priority audit findings: * fetchWithTimeout helper (new src/lib/fetch-with-timeout.ts) wraps Documenso, OCR, currency, Umami, IMAP, etc. — a hung upstream can no longer pin a worker concurrency slot indefinitely. OpenAI client passes timeout: 30_000. ImapFlow gets socket / greeting / connection timeouts. * SIGTERM / SIGINT handler in src/server.ts drains in-flight HTTP, closes Socket.io, and disconnects Redis before exit; compose stop_grace_period bumped to 30s. Adds closeSocketServer() helper. * env.ts gains zod-validated PORT and MULTI_NODE_DEPLOYMENT, and filesystem.ts now reads from env (a typo can no longer silently disable the multi-node guard). * Per-port Documenso template + recipient IDs land in system_settings with env fallback (PortDocumensoConfig now exposes eoiTemplateId, clientRecipientId, developerRecipientId, approvalRecipientId). document-templates.ts uses the per-port config and threads portId into documensoGenerateFromTemplate(). * Migration 0042 wires the eleven HIGH-tier missing FK constraints (documents/files/interests/reminders/berth_waiting_list/ form_submissions) plus polymorphic CHECK round 2 (yacht_ownership_history.owner_type, document_sends.document_kind), invoices.billing_entity_id NOT EMPTY, and clients.merged_into self-FK. Drizzle schema columns updated to .references(...) where possible so the misleading "FK wired in relations.ts" comments are gone. Test status: 1168/1168 vitest, tsc clean. Refs: docs/audit-comprehensive-2026-05-05.md HIGH §§5,6,7,8,9,10 + MED §§14,15,16,18. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
417 lines
16 KiB
TypeScript
417 lines
16 KiB
TypeScript
/**
|
|
* Local filesystem backend. Stores files at `${root}/<key>` on disk and serves
|
|
* downloads via a CRM-internal proxy route (`/api/storage/[token]`) that
|
|
* verifies an HMAC token before streaming the bytes. Used for single-VPS
|
|
* deployments where running MinIO is overkill.
|
|
*
|
|
* §14.9a critical mitigations:
|
|
*
|
|
* - Storage keys are validated against `^[a-zA-Z0-9/_.-]+$`. Anything containing
|
|
* `..` or that resolves to an absolute path is rejected.
|
|
 * - The resolved path is checked with `path.resolve` against the resolved
 * storage root (realpath form) — lexical `..`/absolute escapes are blocked.
 * Note the per-key resolution is lexical only: symlinks created *inside*
 * the root are not themselves realpath'd (only the root is).
|
|
* - The storage root is created with mode `0o700` (owner only).
|
|
* - Refuses to start when `MULTI_NODE_DEPLOYMENT === 'true'` — multi-node
|
|
* deployments must use an S3-compatible store.
|
|
* - Proxy download URLs carry an HMAC-signed payload (key + expiry); the
|
|
* route refuses to stream a key whose token doesn't verify.
|
|
*/
|
|
|
|
import { createHash, createHmac, randomUUID, timingSafeEqual } from 'node:crypto';
|
|
import { createReadStream } from 'node:fs';
|
|
import * as fs from 'node:fs/promises';
|
|
import * as path from 'node:path';
|
|
import { Readable } from 'node:stream';
|
|
|
|
import { env } from '@/lib/env';
|
|
import { NotFoundError, ValidationError } from '@/lib/errors';
|
|
import { logger } from '@/lib/logger';
|
|
import { decrypt } from '@/lib/utils/encryption';
|
|
|
|
import type { PresignOpts, PutOpts, StorageBackend } from './index';
|
|
|
|
// ─── key validation ─────────────────────────────────────────────────────────
|
|
|
|
const VALID_KEY_RE = /^[a-zA-Z0-9/_.-]+$/;
|
|
|
|
/**
|
|
* Validate a storage key. Rejects:
|
|
* - empty / whitespace
|
|
* - characters outside `[a-zA-Z0-9/_.-]`
|
|
* - traversal segments (`..`, `/..`, `../`)
|
|
* - absolute paths (leading `/`)
|
|
* - segments starting with `.` (hidden files / dotfiles other than the
|
|
* intentional dot-extension at the end)
|
|
*
|
|
* Use this both at write time AND at read time — a key fed back from the DB
|
|
* could in theory have been tampered with at rest.
|
|
*/
|
|
export function validateStorageKey(key: string): void {
|
|
if (typeof key !== 'string' || key.length === 0) {
|
|
throw new ValidationError('Storage key must be a non-empty string');
|
|
}
|
|
if (key.length > 1024) {
|
|
throw new ValidationError('Storage key exceeds 1024 chars');
|
|
}
|
|
if (key.startsWith('/') || key.startsWith('\\')) {
|
|
throw new ValidationError('Storage key must not be absolute');
|
|
}
|
|
if (!VALID_KEY_RE.test(key)) {
|
|
throw new ValidationError('Storage key contains forbidden characters');
|
|
}
|
|
// Reject any traversal segment in any normalized form.
|
|
const segments = key.split('/');
|
|
for (const seg of segments) {
|
|
if (seg === '..' || seg === '.' || seg === '') {
|
|
throw new ValidationError('Storage key has empty or traversal segment');
|
|
}
|
|
}
|
|
}
|
|
|
|
// ─── HMAC token helpers ─────────────────────────────────────────────────────
|
|
|
|
/**
 * Token op binding. `'get'` tokens are issued by `presignDownload` and only
 * accepted by the proxy GET handler. `'put'` tokens are issued by
 * `presignUpload` and only accepted by the proxy PUT handler. Without this
 * binding a long-lived 24h download URL emailed to a customer could be
 * replayed against the PUT handler to overwrite the original storage object
 * (since both routes share an HMAC and key — the magic-byte check is also
 * skipped when `c` is unset).
 */
export type ProxyTokenOp = 'get' | 'put';

/**
 * JSON payload carried base64url-encoded inside a signed proxy token
 * (see `signProxyToken`). Field names are single letters to keep the
 * resulting URL short.
 */
interface ProxyTokenPayload {
  /** Storage key (must pass `validateStorageKey`). */
  k: string;
  /** Expiry in epoch SECONDS (verifier multiplies by 1000 vs Date.now()). */
  e: number;
  /** Random nonce (a UUID) so two URLs for the same (key, expiry) differ. */
  n: string;
  /**
   * Bound operation. Tokens minted before this field was added (legacy)
   * fail-closed: the proxy handlers require the field's exact value.
   */
  op: ProxyTokenOp;
  /** Optional download filename (set by `presignDownload` only). */
  f?: string;
  /** Optional content-type override. */
  c?: string;
}
|
|
|
|
function b64urlEncode(buf: Buffer): string {
|
|
return buf.toString('base64').replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
|
|
}
|
|
|
|
function b64urlDecode(s: string): Buffer {
|
|
const pad = s.length % 4 === 0 ? '' : '='.repeat(4 - (s.length % 4));
|
|
return Buffer.from(s.replace(/-/g, '+').replace(/_/g, '/') + pad, 'base64');
|
|
}
|
|
|
|
export function signProxyToken(payload: ProxyTokenPayload, secret: string): string {
|
|
const json = JSON.stringify(payload);
|
|
const body = b64urlEncode(Buffer.from(json, 'utf8'));
|
|
const sig = createHmac('sha256', secret).update(body).digest();
|
|
return `${body}.${b64urlEncode(sig)}`;
|
|
}
|
|
|
|
export function verifyProxyToken(
|
|
token: string,
|
|
secret: string,
|
|
/**
|
|
* Required: the operation the verifier is allowed to perform. The token
|
|
* must have been minted with the same `op`. Without this argument an
|
|
* upload token could be replayed as a download (and vice versa).
|
|
*/
|
|
expectedOp: ProxyTokenOp,
|
|
): { ok: true; payload: ProxyTokenPayload } | { ok: false; reason: string } {
|
|
if (typeof token !== 'string' || !token.includes('.')) {
|
|
return { ok: false, reason: 'malformed' };
|
|
}
|
|
const [body, sigB64] = token.split('.', 2);
|
|
if (!body || !sigB64) return { ok: false, reason: 'malformed' };
|
|
|
|
const expected = createHmac('sha256', secret).update(body).digest();
|
|
let provided: Buffer;
|
|
try {
|
|
provided = b64urlDecode(sigB64);
|
|
} catch {
|
|
return { ok: false, reason: 'malformed' };
|
|
}
|
|
if (provided.length !== expected.length) return { ok: false, reason: 'sig-mismatch' };
|
|
if (!timingSafeEqual(provided, expected)) return { ok: false, reason: 'sig-mismatch' };
|
|
|
|
let payload: ProxyTokenPayload;
|
|
try {
|
|
payload = JSON.parse(b64urlDecode(body).toString('utf8')) as ProxyTokenPayload;
|
|
} catch {
|
|
return { ok: false, reason: 'malformed-payload' };
|
|
}
|
|
|
|
// `Number.isFinite` catches NaN / ±Infinity that a tampered token could
|
|
// otherwise smuggle past the `< Date.now()` comparison (NaN compares
|
|
// false against any number, which would treat the token as eternally
|
|
// valid). Reject non-finite expiries outright.
|
|
if (!Number.isFinite(payload.e) || payload.e * 1000 < Date.now()) {
|
|
return { ok: false, reason: 'expired' };
|
|
}
|
|
try {
|
|
validateStorageKey(payload.k);
|
|
} catch {
|
|
return { ok: false, reason: 'invalid-key' };
|
|
}
|
|
// Op-binding: tokens minted before this field was added have no `op`
|
|
// and are now rejected. Fresh tokens must match `expectedOp` exactly.
|
|
if (payload.op !== expectedOp) {
|
|
return { ok: false, reason: 'op-mismatch' };
|
|
}
|
|
return { ok: true, payload };
|
|
}
|
|
|
|
// ─── backend ────────────────────────────────────────────────────────────────
|
|
|
|
/** Inputs for {@link FilesystemBackend.create}. */
interface FilesystemConfig {
  /** Storage root directory; a relative path is resolved against process.cwd(). */
  root: string;
  /** AES-GCM-encrypted HMAC secret. When absent, falls back to a derived secret. */
  proxyHmacSecretEncrypted: string | null;
}
|
|
|
|
export class FilesystemBackend implements StorageBackend {
  readonly name = 'filesystem' as const;

  // Realpath'd absolute storage root; resolveKey() prefix-checks every
  // target path against exactly this string.
  private rootResolved: string;
  // Plaintext HMAC secret shared by presign* and the proxy route.
  private hmacSecret: string;

  // Private on purpose: setup needs async filesystem work, so instances
  // are only built via the static create() factory below.
  private constructor(rootResolved: string, hmacSecret: string) {
    this.rootResolved = rootResolved;
    this.hmacSecret = hmacSecret;
  }

  /** Throws if multi-node mode is set or the root isn't writable. */
  static async create(cfg: FilesystemConfig): Promise<FilesystemBackend> {
    // Read from the zod-validated env, not raw process.env — a typo
    // (MULTI_NODE_DEPLOY=true, MULTINODE_DEPLOYMENT=true) used to silently
    // pass the string-equality check, leaving the multi-node guard
    // disabled. The schema in src/lib/env.ts now coerces the value and
    // rejects unknown shapes at boot.
    if (env.MULTI_NODE_DEPLOYMENT) {
      throw new Error(
        'FilesystemBackend cannot start when MULTI_NODE_DEPLOYMENT=true. ' +
          'Use an S3-compatible backend for multi-node deployments.',
      );
    }
    // `||` (not `??`) is deliberate here: an empty-string root also falls
    // back to ./storage.
    const rootInput = cfg.root || './storage';
    const rootAbs = path.isAbsolute(rootInput) ? rootInput : path.resolve(process.cwd(), rootInput);
    await fs.mkdir(rootAbs, { recursive: true, mode: 0o700 });
    // Defensive: re-chmod even if it already existed.
    await fs.chmod(rootAbs, 0o700).catch(() => undefined);
    // Use realpath so symlinked roots are flattened to their actual location;
    // we then compare every per-key resolution against this exact prefix.
    const rootResolved = await fs.realpath(rootAbs);

    const hmacSecret = resolveHmacSecret(cfg.proxyHmacSecretEncrypted);
    logger.info({ root: rootResolved }, 'FilesystemBackend ready');
    return new FilesystemBackend(rootResolved, hmacSecret);
  }

  /**
   * Resolve a (validated) storage key to an absolute path under the root.
   * Throws if the lexically-resolved path escapes the storage root.
   *
   * NOTE(review): `path.resolve` is purely lexical — it collapses `..`
   * but does NOT follow symlinks. Only the root itself is realpath'd (in
   * create()); a symlink created *inside* the root that points outside it
   * would pass this check. Confirm whether a per-key realpath is needed.
   */
  private resolveKey(key: string): string {
    validateStorageKey(key);
    const joined = path.join(this.rootResolved, key);
    const resolved = path.resolve(joined);
    if (resolved !== this.rootResolved && !resolved.startsWith(this.rootResolved + path.sep)) {
      throw new ValidationError('Storage key escapes storage root');
    }
    return resolved;
  }

  /**
   * Store an object at `key`. Stream bodies are fully buffered in memory
   * before the write, so peak RAM grows with upload size.
   *
   * @returns the key, byte size, and hex sha256 of the stored bytes.
   */
  async put(
    key: string,
    body: Buffer | NodeJS.ReadableStream,
    opts: PutOpts,
  ): Promise<{ key: string; sizeBytes: number; sha256: string }> {
    const target = this.resolveKey(key);
    await fs.mkdir(path.dirname(target), { recursive: true, mode: 0o700 });

    const buffer = Buffer.isBuffer(body) ? body : await streamToBuffer(body);
    // A caller-supplied sha256 is trusted as-is, not recomputed —
    // presumably verified upstream; confirm before relying on it.
    const sha256 = opts.sha256 ?? createHash('sha256').update(buffer).digest('hex');

    // Atomic write via temp + rename so partial writes don't leave half-files.
    const tmp = `${target}.${randomUUID()}.tmp`;
    await fs.writeFile(tmp, buffer, { mode: 0o600 });
    // rename(2) atomically replaces `target`; the temp file lives next to
    // the target so both are always on the same filesystem. (No realpath
    // happens here, contrary to what an earlier comment claimed.)
    await fs.rename(tmp, target);

    return { key, sizeBytes: buffer.length, sha256 };
  }

  /**
   * Open a read stream for `key`.
   *
   * @throws NotFoundError when the object is missing or not a regular
   *   file. (The `!stat.isFile()` throw inside the try is caught by the
   *   same catch and re-thrown via the final `throw err` — net effect
   *   unchanged.)
   */
  async get(key: string): Promise<NodeJS.ReadableStream> {
    const target = this.resolveKey(key);
    try {
      const stat = await fs.stat(target);
      if (!stat.isFile()) throw new NotFoundError(`Storage object ${key}`);
    } catch (err) {
      const code = (err as NodeJS.ErrnoException).code;
      if (code === 'ENOENT') throw new NotFoundError(`Storage object ${key}`);
      throw err;
    }
    // Small race window: if the file vanishes between stat and open, the
    // returned stream emits ENOENT to its consumer instead.
    return createReadStream(target);
  }

  /**
   * Stat `key` without opening it.
   *
   * @returns size plus an extension-sniffed content type, or null when the
   *   object is absent (or not a regular file).
   */
  async head(key: string): Promise<{ sizeBytes: number; contentType: string } | null> {
    const target = this.resolveKey(key);
    try {
      const stat = await fs.stat(target);
      if (!stat.isFile()) return null;
      // Filesystem doesn't track content-type. Caller should consult the DB
      // (or sniff via ext) — we return application/octet-stream as a default.
      return { sizeBytes: stat.size, contentType: extToContentType(target) };
    } catch (err) {
      const code = (err as NodeJS.ErrnoException).code;
      if (code === 'ENOENT') return null;
      throw err;
    }
  }

  /** Delete `key`. Idempotent: a missing object is treated as success. */
  async delete(key: string): Promise<void> {
    const target = this.resolveKey(key);
    try {
      await fs.unlink(target);
    } catch (err) {
      const code = (err as NodeJS.ErrnoException).code;
      if (code === 'ENOENT') return;
      throw err;
    }
  }

  /**
   * Filesystem mode never exposes a direct upload URL. The CRM-internal proxy
   * accepts uploads via the regular API surface (multipart POST to /api/v1/...
   * or PUT to /api/storage/[token]). We return a placeholder PUT URL pointing
   * at the proxy so the contract stays uniform.
   *
   * Note: unlike presignDownload, the returned URL is relative — uploads
   * happen from within the app, not from an email client.
   */
  async presignUpload(
    key: string,
    opts: PresignOpts,
  ): Promise<{ url: string; method: 'PUT' | 'POST' }> {
    validateStorageKey(key);
    // Default token lifetime: 15 minutes.
    const expiresAt = Math.floor(Date.now() / 1000) + (opts.expirySeconds ?? 900);
    const token = signProxyToken(
      { k: key, e: expiresAt, n: randomUUID(), op: 'put', c: opts.contentType },
      this.hmacSecret,
    );
    return { url: `/api/storage/${token}`, method: 'PUT' };
  }

  /**
   * Mint an HMAC-signed download URL for `key` (op-bound to 'get'), valid
   * for `opts.expirySeconds` (default 900s).
   */
  async presignDownload(key: string, opts: PresignOpts): Promise<{ url: string; expiresAt: Date }> {
    validateStorageKey(key);
    const expirySec = opts.expirySeconds ?? 900;
    const expiresAtSec = Math.floor(Date.now() / 1000) + expirySec;
    const token = signProxyToken(
      {
        k: key,
        e: expiresAtSec,
        n: randomUUID(),
        op: 'get',
        f: opts.filename,
        c: opts.contentType,
      },
      this.hmacSecret,
    );
    // ABSOLUTE URL: send-out emails interpolate this verbatim into the
    // recipient's inbox. A relative path is unreachable from a mail
    // client. APP_URL strips any trailing slash to keep the join clean.
    const origin = env.APP_URL.replace(/\/$/, '');
    return {
      url: `${origin}/api/storage/${token}`,
      expiresAt: new Date(expiresAtSec * 1000),
    };
  }

  /** Used by the proxy route — returns the validated absolute path. */
  resolveKeyForProxy(key: string): string {
    return this.resolveKey(key);
  }

  /** Used by the proxy route — same HMAC secret as presignDownload. */
  getHmacSecret(): string {
    return this.hmacSecret;
  }
}
|
|
|
|
// ─── helpers ────────────────────────────────────────────────────────────────
|
|
|
|
function resolveHmacSecret(encryptedSecret: string | null): string {
|
|
if (encryptedSecret) {
|
|
try {
|
|
return decrypt(encryptedSecret);
|
|
} catch (err) {
|
|
logger.error({ err }, 'Failed to decrypt storage_proxy_hmac_secret_encrypted');
|
|
}
|
|
}
|
|
// Production refuses to derive: an admin must have explicitly configured
|
|
// `storage_proxy_hmac_secret_encrypted` before flipping the storage
|
|
// backend to filesystem. Conflating this trust domain with the auth
|
|
// cookie HMAC (BETTER_AUTH_SECRET) is acceptable in dev for ergonomics
|
|
// but a deployment-time misconfig in prod.
|
|
if (process.env.NODE_ENV === 'production') {
|
|
throw new Error(
|
|
'FilesystemBackend: storage_proxy_hmac_secret_encrypted must be set in production. ' +
|
|
'Generate a random secret in admin > storage and persist it before flipping the backend.',
|
|
);
|
|
}
|
|
// Dev fallback: derive a stable per-process secret so the filesystem
|
|
// backend works without explicit configuration during local development.
|
|
const seed = process.env.BETTER_AUTH_SECRET ?? env.BETTER_AUTH_SECRET ?? 'storage-default';
|
|
const derived = createHash('sha256').update(`storage-proxy:${seed}`).digest('hex');
|
|
// Warn once at boot so two processes started with different
|
|
// `BETTER_AUTH_SECRET` values are observable: tokens minted by one
|
|
// wouldn't validate on the other otherwise — which surfaces as random
|
|
// 401s on file downloads in dev.
|
|
logger.warn(
|
|
{
|
|
hint:
|
|
'Storage proxy HMAC derived from BETTER_AUTH_SECRET. ' +
|
|
'Multi-process dev setups must share the same secret value.',
|
|
secretFingerprint: derived.slice(0, 8),
|
|
},
|
|
'FilesystemBackend: using DEV HMAC fallback (no storage_proxy_hmac_secret_encrypted set)',
|
|
);
|
|
return derived;
|
|
}
|
|
|
|
async function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
|
const chunks: Buffer[] = [];
|
|
for await (const chunk of stream as Readable) {
|
|
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as string));
|
|
}
|
|
return Buffer.concat(chunks);
|
|
}
|
|
|
|
function extToContentType(filename: string): string {
|
|
const ext = path.extname(filename).toLowerCase();
|
|
switch (ext) {
|
|
case '.pdf':
|
|
return 'application/pdf';
|
|
case '.png':
|
|
return 'image/png';
|
|
case '.jpg':
|
|
case '.jpeg':
|
|
return 'image/jpeg';
|
|
case '.json':
|
|
return 'application/json';
|
|
case '.txt':
|
|
return 'text/plain';
|
|
case '.csv':
|
|
return 'text/csv';
|
|
case '.zip':
|
|
return 'application/zip';
|
|
default:
|
|
return 'application/octet-stream';
|
|
}
|
|
}
|