import { requireAuth } from '~/server/utils/auth';
import { uploadFile, getMinioClient } from '~/server/utils/minio';
import formidable from 'formidable';
import { promises as fs } from 'fs';
import mime from 'mime-types';

export default defineEventHandler(async (event) => {
  // Check authentication (x-tag header OR Keycloak session)
  await requireAuth(event);

  try {
    // Get the current path and bucket from query params
    const query = getQuery(event);
    const currentPath = (query.path as string) || '';
    const bucket = (query.bucket as string) || 'client-portal'; // Default bucket

    console.log('[Upload] Request received for bucket:', bucket, 'path:', currentPath);

    // Parse multipart form data
    const form = formidable({
      maxFileSize: 50 * 1024 * 1024, // 50MB limit
      keepExtensions: true,
    });

    const [fields, files] = await form.parse(event.node.req);

    // Handle multiple files
    const uploadedFiles = Array.isArray(files.file) ? files.file : [files.file];
    const results = [];

    for (const uploadedFile of uploadedFiles) {
      if (!uploadedFile) continue;

      // Read file buffer
      const fileBuffer = await fs.readFile(uploadedFile.filepath);

      // Generate unique filename to prevent collisions
      const timestamp = Date.now();
      const sanitizedName = uploadedFile.originalFilename?.replace(/[^a-zA-Z0-9.-]/g, '_') || 'file';
      const fileName = `${timestamp}-${sanitizedName}`;

      // Construct full path including current folder
      // Ensure currentPath ends with / if it's a folder
      let normalizedPath = currentPath;
      if (normalizedPath && !normalizedPath.endsWith('/')) {
        normalizedPath += '/';
      }
      const fullPath = normalizedPath ? `${normalizedPath}${fileName}` : fileName;

      // Get content type
      const contentType = mime.lookup(uploadedFile.originalFilename || '') || 'application/octet-stream';

      // Upload to MinIO - handle different buckets
      if (bucket === 'client-portal') {
        await uploadFile(fullPath, fileBuffer, contentType);
      } else {
        // For other buckets, use the MinIO client directly
        const client = getMinioClient();

        // Ensure bucket exists; bucketExists resolves to a boolean rather than
        // throwing for a missing bucket, so check the result explicitly
        const exists = await client.bucketExists(bucket);
        if (!exists) {
          console.log(`[Upload] Bucket ${bucket} doesn't exist, creating it...`);
          await client.makeBucket(bucket, 'us-east-1');
        }

        await client.putObject(bucket, fullPath, fileBuffer, fileBuffer.length, {
          'Content-Type': contentType,
        });
      }

      // Clean up temp file
      await fs.unlink(uploadedFile.filepath);

      results.push({
        fileName: fullPath,
        path: fullPath,
        originalName: uploadedFile.originalFilename,
        size: uploadedFile.size,
        contentType,
        bucket,
      });

      // Log audit event
      await logAuditEvent(event, 'upload', fullPath, uploadedFile.size);
    }

    // Return the first file's info for single-file uploads (backward compatibility)
    if (results.length === 1) {
      return {
        success: true,
        path: results[0].path,
        fileName: results[0].fileName,
        files: results,
        message: 'File uploaded successfully',
      };
    }

    return {
      success: true,
      files: results,
      message: `${results.length} file(s) uploaded successfully`,
    };
  } catch (error: any) {
    console.error('Failed to upload file:', error);
    throw createError({
      statusCode: 500,
      statusMessage: error.message || 'Failed to upload file',
    });
  }
});

// Audit logging helper
async function logAuditEvent(event: any, action: string, filePath: string, fileSize?: number) {
  try {
    const user = event.context.user || { email: 'anonymous' };
    const auditLog = {
      user_email: user.email,
      action,
      file_path: filePath,
      file_size: fileSize,
      timestamp: new Date().toISOString(),
      ip_address: getClientIP(event),
      success: true,
    };

    // You can store this in your database or logging system
    console.log('Audit log:', auditLog);
  } catch (error) {
    console.error('Failed to log audit event:', error);
  }
}

function getClientIP(event: any): string {
  const forwarded = event.node.req.headers['x-forwarded-for'];
  if (forwarded) {
    // x-forwarded-for may be a string or string[], and may hold a comma-separated
    // chain of proxies; the first entry is the original client
    return (Array.isArray(forwarded) ? forwarded[0] : forwarded).split(',')[0].trim();
  }
  return event.node.req.socket?.remoteAddress || 'unknown';
}
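
/*
 * Illustrative client-side sketch (not part of this module). The route path
 * "/api/upload" below is an assumption — use whatever path this handler is
 * actually mounted at under server/api/. Files go in the multipart "file"
 * field; the target bucket and folder are passed as query params, matching
 * the query.bucket / query.path reads above.
 *
 *   const body = new FormData();
 *   for (const file of selectedFiles) body.append('file', file);
 *   const res = await $fetch('/api/upload?bucket=client-portal&path=reports/2024', {
 *     method: 'POST',
 *     body,
 *   });
 *   console.log(res.files); // [{ fileName, path, originalName, size, contentType, bucket }]
 */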