fix(audit-final): pre-merge hardening + expense receipt UI
Final audit pass on feat/berth-recommender (3 parallel Opus agents) caught 5 critical and ~12 high-severity findings. All addressed in-branch; medium/low items deferred to docs/audit-final-deferred.md. Critical: - Add filesystem-backend PUT handler at /api/storage/[token] so presigned uploads stop 405-ing in filesystem mode (every browser-driven berth-PDF + brochure upload was broken). Same token-verify + replay protection as GET, plus magic-byte gate when c=application/pdf. - Forward req.signal into streamExpensePdf so an aborted 1000-receipt export no longer keeps grinding for minutes. - Strengthen Content-Disposition filename sanitization: \s matches CR/LF which would let documentName forge headers; restrict to [\w. -]+ and add filename* RFC 5987 fallback. - Lock public berths feed behind an explicit slug allowlist instead of ?portSlug= enumeration. - Reject cross-port interest_berths upserts (defense-in-depth on top of the recommender SQL port filter). High: - Recommender: width-only feasibility now caps length via L/W ratio so a 200ft berth doesn't surface for a 30ft beam request; total_interest_count filters out junction rows whose interest is in another port. - Mooring normalization follow-up migration (0034) catches un-hyphenated padded forms (A01) the original 0024 WHERE missed. - Send-out rate limit moved AFTER validation and scoped per-(port, user) so typos don't burn a slot and a multi-port rep can't be DoS'd by another tenant. - Default-brochure path now blocks an archived row from sneaking through the partial unique index. - NocoDB import --update-snapshot honoured under --dry-run so reps can refresh the seed JSON without committing DB writes. - PDF export: orderBy desc(expenseDate); apply isNull(archivedAt) when expenseIds are passed (was bypassed); flag rate-unavailable rows with an amber footer instead of silently treating them as 1:1; skip the USD->EUR chain when source already matches target. 
- expense-form-dialog: revokeObjectURL captures the URL in the closure instead of revoking the still-displayed one; reset upload state on close. - scan/page: handleClearReceipt resets in-flight scan/upload mutations; Save disabled while upload pending. - updateExpense re-asserts receipt-or-acknowledgement at the merged row so PATCH can't slip past the create-time refine. Plus the in-progress receipt upload UI for the expense form dialog (receipt picker + "I have no receipt" checkbox + warning banner) and a noReceiptAcknowledged flag on ExpenseRow for edit-mode hydration. Includes the canonical plan doc (referenced in CLAUDE.md), the handoff prompt, and a deferred-findings index for follow-up issues. 1163/1163 vitest passing. Typecheck clean. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -3,7 +3,7 @@
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { useParams, useRouter } from 'next/navigation';
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import { Camera, Loader2, ScanLine, Upload } from 'lucide-react';
|
||||
import { Camera, Loader2, ScanLine, Upload, X } from 'lucide-react';
|
||||
|
||||
import { useMobileChrome } from '@/components/layout/mobile/mobile-layout-provider';
|
||||
|
||||
@@ -30,6 +30,11 @@ interface ScanResult {
|
||||
confidence: number;
|
||||
}
|
||||
|
||||
interface UploadedFileMeta {
|
||||
id: string;
|
||||
filename: string;
|
||||
}
|
||||
|
||||
export default function ScanReceiptPage() {
|
||||
const params = useParams<{ portSlug: string }>();
|
||||
const router = useRouter();
|
||||
@@ -38,6 +43,13 @@ export default function ScanReceiptPage() {
|
||||
const cameraInputRef = useRef<HTMLInputElement>(null);
|
||||
const [scanResult, setScanResult] = useState<ScanResult | null>(null);
|
||||
const [previewUrl, setPreviewUrl] = useState<string | null>(null);
|
||||
// After OCR succeeds we also upload the receipt to /api/v1/files/upload
|
||||
// so the expense links to the actual image. The legacy scanner skipped
|
||||
// this step and saved expenses without their receipt — which silently
|
||||
// disqualified them from parent-company reimbursement (the warning the
|
||||
// PDF export now surfaces).
|
||||
const [uploadedFile, setUploadedFile] = useState<UploadedFileMeta | null>(null);
|
||||
const [pendingFile, setPendingFile] = useState<File | null>(null);
|
||||
|
||||
const { setChrome } = useMobileChrome();
|
||||
useEffect(() => {
|
||||
@@ -74,6 +86,29 @@ export default function ScanReceiptPage() {
|
||||
},
|
||||
});
|
||||
|
||||
// Uploads the receipt image to /api/v1/files/upload (category=receipt)
|
||||
// so the new expense row can link to it via receiptFileIds. Runs in
|
||||
// parallel with the OCR scan so the rep can keep editing fields while
|
||||
// the upload completes.
|
||||
const uploadMutation = useMutation({
|
||||
mutationFn: async (file: File): Promise<UploadedFileMeta> => {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
formData.append('category', 'receipt');
|
||||
const res = await fetch('/api/v1/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
credentials: 'include',
|
||||
});
|
||||
if (!res.ok) throw new Error('Receipt upload failed');
|
||||
const json = (await res.json()) as { data: { id: string; filename: string } };
|
||||
return { id: json.data.id, filename: json.data.filename };
|
||||
},
|
||||
onSuccess: (meta) => {
|
||||
setUploadedFile(meta);
|
||||
},
|
||||
});
|
||||
|
||||
const saveMutation = useMutation({
|
||||
mutationFn: () =>
|
||||
apiFetch('/api/v1/expenses', {
|
||||
@@ -85,6 +120,9 @@ export default function ScanReceiptPage() {
|
||||
category: category || undefined,
|
||||
expenseDate: date ? new Date(date) : new Date(),
|
||||
paymentStatus: 'unpaid',
|
||||
receiptFileIds: uploadedFile ? [uploadedFile.id] : undefined,
|
||||
// The scanner path always has a receipt (we wouldn't have OCR'd
|
||||
// it otherwise), so we never need the no-receipt flag here.
|
||||
},
|
||||
}),
|
||||
onSuccess: () => {
|
||||
@@ -95,12 +133,32 @@ export default function ScanReceiptPage() {
|
||||
function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
|
||||
const file = e.target.files?.[0];
|
||||
if (!file) return;
|
||||
|
||||
setPendingFile(file);
|
||||
const url = URL.createObjectURL(file);
|
||||
setPreviewUrl(url);
|
||||
// Kick off OCR scan + storage upload concurrently. The two are
|
||||
// independent server calls and the rep is staring at the preview
|
||||
// while both run.
|
||||
scanMutation.mutate(file);
|
||||
uploadMutation.mutate(file);
|
||||
}
|
||||
|
||||
function handleClearReceipt() {
|
||||
if (previewUrl) URL.revokeObjectURL(previewUrl);
|
||||
setPreviewUrl(null);
|
||||
setUploadedFile(null);
|
||||
setPendingFile(null);
|
||||
setScanResult(null);
|
||||
// Reset in-flight mutations so a late onSuccess doesn't repopulate
|
||||
// the form against an already-cleared UI (audit finding: stale
|
||||
// receipt could land on the next Save).
|
||||
scanMutation.reset();
|
||||
uploadMutation.reset();
|
||||
if (fileInputRef.current) fileInputRef.current.value = '';
|
||||
if (cameraInputRef.current) cameraInputRef.current.value = '';
|
||||
}
|
||||
void pendingFile;
|
||||
|
||||
return (
|
||||
<div className="max-w-2xl mx-auto space-y-6">
|
||||
<div className="hidden sm:block">
|
||||
@@ -119,18 +177,45 @@ export default function ScanReceiptPage() {
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{previewUrl ? (
|
||||
<div
|
||||
className="border-2 border-dashed rounded-lg p-4 text-center cursor-pointer hover:bg-muted/50 transition-colors"
|
||||
onClick={() => fileInputRef.current?.click()}
|
||||
>
|
||||
<img
|
||||
src={previewUrl}
|
||||
alt="Receipt preview"
|
||||
className="max-h-64 mx-auto rounded object-contain"
|
||||
/>
|
||||
<div className="space-y-2">
|
||||
<div className="relative border-2 border-dashed rounded-lg p-4 text-center bg-muted/20">
|
||||
<img
|
||||
src={previewUrl}
|
||||
alt="Receipt preview"
|
||||
className="max-h-64 mx-auto rounded object-contain"
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleClearReceipt}
|
||||
aria-label="Remove receipt"
|
||||
className="absolute top-2 right-2 rounded-full bg-background/80 hover:bg-background border p-1.5 shadow-sm"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex flex-wrap items-center gap-2 text-xs text-muted-foreground">
|
||||
{uploadMutation.isPending && (
|
||||
<span className="inline-flex items-center gap-1">
|
||||
<Loader2 className="h-3 w-3 animate-spin" /> Uploading receipt…
|
||||
</span>
|
||||
)}
|
||||
{uploadedFile && (
|
||||
<span className="text-emerald-600">
|
||||
Receipt uploaded ({uploadedFile.filename})
|
||||
</span>
|
||||
)}
|
||||
{uploadMutation.isError && (
|
||||
<span className="text-destructive">
|
||||
Receipt upload failed — save will still create the expense without an image.
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className="grid gap-2 sm:grid-cols-2">
|
||||
{/* Camera button — available on mobile devices that surface the
|
||||
built-in capture flow when an `image/*` input has the
|
||||
`capture` attribute. Hidden on desktop where it's a no-op. */}
|
||||
<Button
|
||||
type="button"
|
||||
size="lg"
|
||||
@@ -140,6 +225,8 @@ export default function ScanReceiptPage() {
|
||||
<Camera className="mr-2 h-5 w-5" />
|
||||
Take photo
|
||||
</Button>
|
||||
{/* File picker — works on every platform. Phrased so the copy
|
||||
fits both mobile (library/files) and desktop (drag and drop). */}
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
@@ -148,18 +235,30 @@ export default function ScanReceiptPage() {
|
||||
onClick={() => fileInputRef.current?.click()}
|
||||
>
|
||||
<Upload className="mr-2 h-5 w-5" />
|
||||
<span className="sm:hidden">Choose from library</span>
|
||||
<span className="hidden sm:inline">Click to upload or drag and drop</span>
|
||||
<span className="sm:hidden">Choose from device</span>
|
||||
<span className="hidden sm:inline">Choose from device or drag and drop</span>
|
||||
</Button>
|
||||
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
||||
JPEG, PNG, WebP up to 10MB
|
||||
JPEG, PNG, HEIC, WebP up to 10 MB
|
||||
</p>
|
||||
<p className="text-xs text-muted-foreground sm:col-span-2 text-center">
|
||||
Have many receipts?{' '}
|
||||
<a
|
||||
href={`/${params.portSlug}/expenses/bulk-upload`}
|
||||
className="text-primary hover:underline"
|
||||
>
|
||||
Bulk upload →
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
{/* `image/*` is the broadest accept — includes HEIC on iOS,
|
||||
JPEG/PNG/WebP everywhere. The capture attribute on the second
|
||||
input invokes the native camera flow on mobile. */}
|
||||
<input
|
||||
ref={fileInputRef}
|
||||
type="file"
|
||||
accept="image/*"
|
||||
accept="image/*,application/pdf"
|
||||
className="hidden"
|
||||
onChange={handleFileChange}
|
||||
/>
|
||||
@@ -264,10 +363,20 @@ export default function ScanReceiptPage() {
|
||||
</Button>
|
||||
<Button
|
||||
onClick={() => saveMutation.mutate()}
|
||||
disabled={saveMutation.isPending || !amount}
|
||||
disabled={
|
||||
saveMutation.isPending ||
|
||||
!amount ||
|
||||
// Block save while the receipt upload is still in flight —
|
||||
// otherwise the rep can hit Save before the storage round
|
||||
// trip finishes and the expense lands without `receiptFileIds`,
|
||||
// silently re-creating the legacy receipt-loss bug.
|
||||
uploadMutation.isPending
|
||||
}
|
||||
>
|
||||
{saveMutation.isPending && <Loader2 className="mr-2 h-4 w-4 animate-spin" />}
|
||||
Save as Expense
|
||||
{(saveMutation.isPending || uploadMutation.isPending) && (
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
)}
|
||||
{uploadMutation.isPending ? 'Uploading…' : 'Save as Expense'}
|
||||
</Button>
|
||||
</div>
|
||||
</CardContent>
|
||||
|
||||
@@ -16,6 +16,10 @@ import { toPublicBerth } from '@/lib/services/public-berths';
|
||||
* ("A1", "B12") - Phase 0 normalized the entire CRM dataset.
|
||||
*/
|
||||
|
||||
// Hard-coded allowlist for the public read-only feed. Adding a port here
|
||||
// is a deliberate decision (not silent enumeration via ?portSlug=), so a
|
||||
// future private tenant can't be exposed by accident.
|
||||
const PUBLIC_PORT_SLUGS = new Set(['port-nimara']);
|
||||
const DEFAULT_PUBLIC_PORT_SLUG = 'port-nimara';
|
||||
const RESPONSE_HEADERS = {
|
||||
'cache-control': 'public, s-maxage=300, stale-while-revalidate=60',
|
||||
@@ -30,7 +34,14 @@ export async function GET(
|
||||
): Promise<Response> {
|
||||
const { mooringNumber } = await ctx.params;
|
||||
const url = new URL(request.url);
|
||||
const portSlug = url.searchParams.get('portSlug') ?? DEFAULT_PUBLIC_PORT_SLUG;
|
||||
const requestedSlug = url.searchParams.get('portSlug') ?? DEFAULT_PUBLIC_PORT_SLUG;
|
||||
if (!PUBLIC_PORT_SLUGS.has(requestedSlug)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'port is not part of the public berths feed', portSlug: requestedSlug },
|
||||
{ status: 404, headers: { 'cache-control': 'no-store' } },
|
||||
);
|
||||
}
|
||||
const portSlug = requestedSlug;
|
||||
|
||||
// Reject obviously malformed mooring numbers up front so cache poisoning
|
||||
// / random-URL probing returns 400 rather than 404 (saves a DB hit).
|
||||
|
||||
@@ -25,6 +25,10 @@ import { toPublicBerth, type PublicBerth } from '@/lib/services/public-berths';
|
||||
* them up.
|
||||
*/
|
||||
|
||||
// Hard-coded allowlist for the public read-only feed. Adding a port here
|
||||
// is a deliberate decision (not silent enumeration via ?portSlug=), so a
|
||||
// future private tenant can't be exposed by accident.
|
||||
const PUBLIC_PORT_SLUGS = new Set(['port-nimara']);
|
||||
const DEFAULT_PUBLIC_PORT_SLUG = 'port-nimara';
|
||||
|
||||
const RESPONSE_HEADERS = {
|
||||
@@ -45,7 +49,14 @@ interface ListResponse {
|
||||
|
||||
export async function GET(request: Request): Promise<Response> {
|
||||
const url = new URL(request.url);
|
||||
const portSlug = url.searchParams.get('portSlug') ?? DEFAULT_PUBLIC_PORT_SLUG;
|
||||
const requestedSlug = url.searchParams.get('portSlug') ?? DEFAULT_PUBLIC_PORT_SLUG;
|
||||
if (!PUBLIC_PORT_SLUGS.has(requestedSlug)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'port is not part of the public berths feed', portSlug: requestedSlug },
|
||||
{ status: 404, headers: { 'cache-control': 'no-store' } },
|
||||
);
|
||||
}
|
||||
const portSlug = requestedSlug;
|
||||
|
||||
const [port] = await db
|
||||
.select({ id: ports.id })
|
||||
|
||||
@@ -20,10 +20,12 @@ import { Readable } from 'node:stream';
|
||||
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
import { MAX_FILE_SIZE } from '@/lib/constants/file-validation';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { redis } from '@/lib/redis';
|
||||
import { FilesystemBackend, getStorageBackend } from '@/lib/storage';
|
||||
import { verifyProxyToken } from '@/lib/storage/filesystem';
|
||||
import { isPdfMagic } from '@/lib/services/berth-pdf-parser';
|
||||
|
||||
export const runtime = 'nodejs';
|
||||
export const dynamic = 'force-dynamic';
|
||||
@@ -115,3 +117,120 @@ export async function GET(
|
||||
|
||||
return new NextResponse(webStream, { status: 200, headers });
|
||||
}
|
||||
|
||||
/**
|
||||
* Filesystem-backend upload proxy. The presigned URL minted by
|
||||
* `FilesystemBackend.presignUpload` points here. Without this handler the
|
||||
* browser-driven berth-PDF / brochure uploads would 405 in filesystem
|
||||
* deployments — the entire pluggable-storage abstraction relied on the
|
||||
* GET-only counterpart for downloads.
|
||||
*
|
||||
* Same token-verify + single-use replay protection as GET, plus:
|
||||
* - Hard size cap (rejects oversized bodies before any disk I/O).
|
||||
* - Magic-byte check when the issuer declared content-type=application/pdf
|
||||
* (matches the §14.6 §6c/§7c invariant: every upload path verifies
|
||||
* bytes server-side, not just at the client).
|
||||
*/
|
||||
export async function PUT(
  req: NextRequest,
  // Next.js 15 dynamic-route params arrive as a Promise; `token` is the
  // opaque presigned-upload token minted by FilesystemBackend.presignUpload.
  ctx: { params: Promise<{ token: string }> },
): Promise<NextResponse> {
  const { token } = await ctx.params;

  // This route only exists for filesystem deployments; in S3-style modes
  // the presigned URL points at the object store, never here. Answer 404
  // (not 405) so the route is indistinguishable from absent.
  const backend = await getStorageBackend();
  if (!(backend instanceof FilesystemBackend)) {
    return NextResponse.json(
      { error: 'Storage proxy is only available in filesystem mode' },
      { status: 404 },
    );
  }

  // HMAC-verify the token (signature + expiry). `payload` carries at least
  // `k` (storage key), `e` (expiry epoch seconds) and optional `c`
  // (declared content type) — see verifyProxyToken.
  const result = verifyProxyToken(token, backend.getHmacSecret());
  if (!result.ok) {
    logger.warn({ reason: result.reason }, 'Storage proxy upload token rejected');
    return NextResponse.json({ error: 'Invalid or expired token' }, { status: 403 });
  }
  const { payload } = result;

  // Separate replay namespace from GET so a token can validly serve one
  // upload AND one download (the issuer only mints the second), but a
  // PUT cannot be replayed against itself.
  //
  // NOTE(review): the replay key is burned here, BEFORE the body is read
  // or written — a failed upload (network abort, magic-byte reject)
  // consumes the token and the client must mint a fresh one. Presumably
  // intentional strict single-use; confirm the upload client retries by
  // re-presigning rather than re-PUTting.
  const replayKey = `storage:proxy:put:${token.split('.')[0]}`;
  // TTL tracks the token's remaining lifetime (+60s clock-skew slack),
  // clamped to the [floor, ceiling] bounds defined alongside the GET path.
  const remainingSeconds = Math.max(
    REPLAY_TTL_FLOOR_SECONDS,
    Math.min(REPLAY_TTL_CEILING_SECONDS, payload.e - Math.floor(Date.now() / 1000) + 60),
  );
  // SET NX = atomic first-use claim; anything but 'OK' means already used.
  const setOk = await redis.set(replayKey, '1', 'EX', remainingSeconds, 'NX');
  if (setOk !== 'OK') {
    logger.warn({ key: payload.k }, 'Storage proxy upload token replay rejected');
    return NextResponse.json({ error: 'Token already used' }, { status: 403 });
  }

  // Pre-flight size check via Content-Length so a malicious caller can't
  // exhaust disk by streaming hundreds of MB before we look at the body.
  // (Advisory only — the header can lie, so the streamed cap below is the
  // authoritative enforcement.)
  const contentLengthHeader = req.headers.get('content-length');
  const contentLength = contentLengthHeader ? Number(contentLengthHeader) : NaN;
  if (Number.isFinite(contentLength) && contentLength > MAX_FILE_SIZE) {
    return NextResponse.json(
      { error: `File exceeds ${MAX_FILE_SIZE} byte cap (Content-Length: ${contentLength})` },
      { status: 413 },
    );
  }

  if (!req.body) {
    return NextResponse.json({ error: 'Empty body' }, { status: 400 });
  }

  // Read the body into a buffer with a hard cap. Filesystem deployments are
  // small-tenant (single-node only — see FilesystemBackend boot guard) so
  // 50 MB ceiling fits comfortably in heap; no streaming needed.
  let buffer: Buffer;
  try {
    const chunks: Buffer[] = [];
    let total = 0;
    const reader = req.body.getReader();
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      total += value.byteLength;
      // Authoritative size enforcement: abort the stream the moment the
      // running total crosses the cap, regardless of Content-Length.
      if (total > MAX_FILE_SIZE) {
        try {
          await reader.cancel();
        } catch {
          /* ignore */
        }
        return NextResponse.json(
          { error: `File exceeds ${MAX_FILE_SIZE} byte cap` },
          { status: 413 },
        );
      }
      chunks.push(Buffer.from(value));
    }
    buffer = Buffer.concat(chunks);
  } catch (err) {
    // Client disconnects / malformed chunked bodies land here; warn (not
    // error) since this is routinely caller-induced.
    logger.warn({ err, key: payload.k }, 'Storage proxy upload read failed');
    return NextResponse.json({ error: 'Upload read failed' }, { status: 400 });
  }

  // Magic-byte gate: when the token was minted with `c=application/pdf`
  // (the only consumer today — berth PDFs + brochures), refuse anything
  // that isn't actually a PDF. Mirrors the post-upload check in
  // berth-pdf.service.ts so the two paths behave identically.
  if (payload.c === 'application/pdf' && !isPdfMagic(buffer)) {
    return NextResponse.json(
      { error: 'Uploaded file failed PDF magic-byte check (does not start with %PDF-).' },
      { status: 400 },
    );
  }

  // Persist under the issuer-chosen key; content type falls back to the
  // generic octet-stream when the token didn't declare one.
  try {
    await backend.put(payload.k, buffer, {
      contentType: payload.c ?? 'application/octet-stream',
    });
  } catch (err) {
    logger.error({ err, key: payload.k }, 'Storage proxy upload write failed');
    return NextResponse.json({ error: 'Upload write failed' }, { status: 500 });
  }

  // Echo key + byte count so the caller can confirm what landed on disk.
  return NextResponse.json({ ok: true, key: payload.k, sizeBytes: buffer.length }, { status: 200 });
}
|
||||
|
||||
@@ -49,18 +49,25 @@ export const POST = withAuth(
|
||||
}
|
||||
: undefined,
|
||||
options: input.options,
|
||||
// Forward the request abort signal so the streaming PDF builder
|
||||
// stops fetching/resizing receipts the moment the client disconnects
|
||||
// (otherwise an aborted 1000-receipt export keeps the worker busy
|
||||
// for minutes after the user navigated away — see audit finding 2).
|
||||
signal: req.signal,
|
||||
});
|
||||
|
||||
// NextResponse extends Response; passing a ReadableStream as the
|
||||
// body keeps the streaming semantics. The wrapper's RouteHandler
|
||||
// type expects NextResponse so we use it explicitly.
|
||||
// Content-Disposition filename hardening: the validator caps length
|
||||
// but `\s` matches CR/LF, which would let an attacker forge response
|
||||
// headers. Strip everything that isn't word/space/dot/dash, AND set
|
||||
// the RFC 5987 `filename*` so a UTF-8 body still survives.
|
||||
const safeFilename = suggestedFilename.replace(/[^\w. \-]+/g, '_');
|
||||
const disposition = `attachment; filename="${safeFilename}"; filename*=UTF-8''${encodeURIComponent(suggestedFilename)}`;
|
||||
|
||||
return new NextResponse(stream, {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/pdf',
|
||||
'Content-Disposition': `attachment; filename="${suggestedFilename}"`,
|
||||
// The PDF is generated on the fly per-request and includes
|
||||
// potentially-sensitive expense data; never cache.
|
||||
'Content-Disposition': disposition,
|
||||
'Cache-Control': 'private, no-store, max-age=0',
|
||||
'X-Content-Type-Options': 'nosniff',
|
||||
},
|
||||
|
||||
Reference in New Issue
Block a user