import { supabaseAdmin } from './supabase';
import { env as publicEnv } from '$env/dynamic/public';
import {
  S3Client,
  PutObjectCommand,
  GetObjectCommand,
  DeleteObjectCommand,
  DeleteObjectsCommand,
  ListObjectsV2Command,
  HeadBucketCommand
} from '@aws-sdk/client-s3';
import { getSignedUrl as getS3SignedUrl } from '@aws-sdk/s3-request-presigner';

export type StorageBucket = 'documents' | 'avatars' | 'event-images';

/**
 * Generate a browser-accessible public URL for a file in Supabase Storage.
 * Uses PUBLIC_SUPABASE_URL instead of the internal Docker URL so the link
 * works from the user's browser.
 */
function getBrowserAccessibleUrl(bucket: StorageBucket, path: string): string {
  return `${publicEnv.PUBLIC_SUPABASE_URL}/storage/v1/object/public/${bucket}/${path}`;
}
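
// Example (illustrative; the PUBLIC_SUPABASE_URL value is hypothetical):
//   getBrowserAccessibleUrl('avatars', 'member-1/avatar.png')
//   // => 'https://supabase.example.com/storage/v1/object/public/avatars/member-1/avatar.png'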

export interface UploadResult {
  success: boolean;
  path?: string;
  publicUrl?: string;
  localUrl?: string;
  s3Url?: string;
  error?: string;
}

export interface S3Config {
  endpoint: string;
  bucket: string;
  accessKey: string;
  secretKey: string;
  region: string;
  useSSL: boolean;
  forcePathStyle: boolean;
  enabled: boolean;
}

let s3ClientCache: S3Client | null = null;
let s3ConfigCache: S3Config | null = null;
let s3ConfigCacheTime = 0;
// Config object the cached client was built from, so a refreshed config
// triggers a client rebuild (reference equality suffices: getS3Config
// returns the same object until its cache expires).
let s3ClientConfig: S3Config | null = null;
const S3_CONFIG_CACHE_TTL = 60000; // 1 minute cache

/**
 * Get the S3 configuration from the app_settings table (cached for one minute).
 */
export async function getS3Config(): Promise<S3Config | null> {
  // Serve from cache while it is fresh
  if (s3ConfigCache && Date.now() - s3ConfigCacheTime < S3_CONFIG_CACHE_TTL) {
    return s3ConfigCache;
  }

  const { data: settings } = await supabaseAdmin
    .from('app_settings')
    .select('setting_key, setting_value')
    .eq('category', 'storage');

  if (!settings || settings.length === 0) {
    return null;
  }

  const config: Record<string, any> = {};
  for (const s of settings) {
    let value = s.setting_value;
    if (typeof value === 'string') {
      // Remove surrounding quotes left over from JSON-stringified values
      value = value.replace(/^"|"$/g, '');
    }
    config[s.setting_key] = value;
  }

  // S3 must be enabled (boolean true or string 'true') and fully configured
  const isEnabled = config.s3_enabled === true || config.s3_enabled === 'true';
  if (!isEnabled || !config.s3_endpoint || !config.s3_access_key || !config.s3_secret_key) {
    console.log('S3 config check failed:', {
      isEnabled,
      hasEndpoint: !!config.s3_endpoint,
      hasAccessKey: !!config.s3_access_key,
      hasSecretKey: !!config.s3_secret_key
    });
    return null;
  }

  s3ConfigCache = {
    endpoint: config.s3_endpoint,
    bucket: config.s3_bucket || 'monacousa-documents',
    accessKey: config.s3_access_key,
    secretKey: config.s3_secret_key,
    region: config.s3_region || 'us-east-1',
    useSSL: config.s3_use_ssl === true || config.s3_use_ssl === 'true',
    forcePathStyle:
      config.s3_force_path_style === true ||
      config.s3_force_path_style === 'true' ||
      config.s3_force_path_style === undefined,
    enabled: true
  };
  s3ConfigCacheTime = Date.now();

  return s3ConfigCache;
}

/**
 * Get or create the shared S3 client.
 */
export async function getS3Client(): Promise<S3Client | null> {
  const config = await getS3Config();
  if (!config) {
    return null;
  }

  // Reuse the cached client only if it was built from the current config
  if (s3ClientCache && s3ClientConfig === config) {
    return s3ClientCache;
  }

  s3ClientCache = new S3Client({
    endpoint: config.endpoint,
    region: config.region,
    credentials: {
      accessKeyId: config.accessKey,
      secretAccessKey: config.secretKey
    },
    forcePathStyle: config.forcePathStyle
  });
  s3ClientConfig = config;

  return s3ClientCache;
}

/**
 * Clear the cached S3 client and config (call whenever storage settings change).
 */
export function clearS3ClientCache(): void {
  s3ClientCache = null;
  s3ConfigCache = null;
  s3ConfigCacheTime = 0;
  s3ClientConfig = null;
}
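
// Usage sketch (hypothetical admin settings handler; `saveStorageSettings`
// is assumed, not part of this module):
//   await saveStorageSettings(newValues);
//   clearS3ClientCache(); // next storage call re-reads app_settings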

/**
 * Test the S3 connection by issuing a HeadBucket request.
 */
export async function testS3Connection(): Promise<{ success: boolean; error?: string }> {
  const config = await getS3Config();
  if (!config) {
    return { success: false, error: 'S3 not configured. Please configure and enable S3 storage settings first.' };
  }

  const client = await getS3Client();
  if (!client) {
    return { success: false, error: 'Failed to create S3 client' };
  }

  try {
    await client.send(new HeadBucketCommand({ Bucket: config.bucket }));
    return { success: true };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 connection test error:', error);
    return { success: false, error: `S3 connection failed: ${errorMessage}` };
  }
}
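
// Usage sketch (e.g. behind a hypothetical admin "Test connection" endpoint):
//   const { success, error } = await testS3Connection();
//   return json(success ? { ok: true } : { ok: false, message: error });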

/**
 * Check whether S3 storage is enabled.
 */
export async function isS3Enabled(): Promise<boolean> {
  const config = await getS3Config();
  return config !== null && config.enabled;
}

/**
 * Build the S3 object key, prefixing the logical bucket name for organization.
 */
function getS3Key(bucket: StorageBucket, path: string): string {
  return `${bucket}/${path}`;
}
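
// Example: getS3Key('documents', 'reports/2024.pdf') => 'documents/reports/2024.pdf'
// (all logical buckets share the single physical S3 bucket from config)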

/**
 * Upload a file to S3.
 */
async function uploadToS3(
  bucket: StorageBucket,
  path: string,
  file: File | ArrayBuffer | Buffer,
  options?: {
    contentType?: string;
  }
): Promise<UploadResult> {
  const config = await getS3Config();
  const client = await getS3Client();

  if (!config || !client) {
    return { success: false, error: 'S3 not configured' };
  }

  try {
    const key = getS3Key(bucket, path);
    let body: Buffer;

    if (file instanceof ArrayBuffer) {
      body = Buffer.from(file);
    } else if (Buffer.isBuffer(file)) {
      body = file;
    } else {
      // It's a File object
      body = Buffer.from(await file.arrayBuffer());
    }

    await client.send(
      new PutObjectCommand({
        Bucket: config.bucket,
        Key: key,
        Body: body,
        ContentType: options?.contentType
      })
    );

    // Construct the public URL. Path-style reuses the endpoint as-is (it is
    // expected to include the scheme); virtual-hosted style rebuilds the host.
    const protocol = config.useSSL ? 'https' : 'http';
    let publicUrl: string;
    if (config.forcePathStyle) {
      publicUrl = `${config.endpoint}/${config.bucket}/${key}`;
    } else {
      publicUrl = `${protocol}://${config.bucket}.${new URL(config.endpoint).host}/${key}`;
    }

    return {
      success: true,
      path: key,
      publicUrl
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 upload error:', error);
    return { success: false, error: errorMessage };
  }
}

/**
 * Generate a presigned GET URL for an object in S3.
 */
async function getS3PresignedUrl(
  bucket: StorageBucket,
  path: string,
  expiresIn: number = 3600
): Promise<{ url: string | null; error: string | null }> {
  const config = await getS3Config();
  const client = await getS3Client();

  if (!config || !client) {
    return { url: null, error: 'S3 not configured' };
  }

  try {
    const key = getS3Key(bucket, path);
    const command = new GetObjectCommand({
      Bucket: config.bucket,
      Key: key
    });

    const url = await getS3SignedUrl(client, command, { expiresIn });
    return { url, error: null };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 signed URL error:', error);
    return { url: null, error: errorMessage };
  }
}

/**
 * Delete a file from S3.
 */
async function deleteFromS3(
  bucket: StorageBucket,
  path: string
): Promise<{ success: boolean; error?: string }> {
  const config = await getS3Config();
  const client = await getS3Client();

  if (!config || !client) {
    return { success: false, error: 'S3 not configured' };
  }

  try {
    const key = getS3Key(bucket, path);
    await client.send(
      new DeleteObjectCommand({
        Bucket: config.bucket,
        Key: key
      })
    );
    return { success: true };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 delete error:', error);
    return { success: false, error: errorMessage };
  }
}

/**
 * Delete multiple files from S3 in a single request.
 */
async function deleteMultipleFromS3(
  bucket: StorageBucket,
  paths: string[]
): Promise<{ success: boolean; error?: string }> {
  const config = await getS3Config();
  const client = await getS3Client();

  if (!config || !client) {
    return { success: false, error: 'S3 not configured' };
  }

  try {
    const objects = paths.map((p) => ({ Key: getS3Key(bucket, p) }));
    await client.send(
      new DeleteObjectsCommand({
        Bucket: config.bucket,
        Delete: { Objects: objects }
      })
    );
    return { success: true };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 delete multiple error:', error);
    return { success: false, error: errorMessage };
  }
}

/**
 * List files from S3 under the bucket prefix.
 */
async function listFilesFromS3(
  bucket: StorageBucket,
  folder?: string,
  options?: {
    limit?: number;
  }
): Promise<{ files: any[]; error: string | null }> {
  const config = await getS3Config();
  const client = await getS3Client();

  if (!config || !client) {
    return { files: [], error: 'S3 not configured' };
  }

  try {
    const prefix = folder ? `${bucket}/${folder}/` : `${bucket}/`;
    const response = await client.send(
      new ListObjectsV2Command({
        Bucket: config.bucket,
        Prefix: prefix,
        MaxKeys: options?.limit || 100
      })
    );

    const files = (response.Contents || []).map((obj) => ({
      name: obj.Key?.replace(prefix, '') || '',
      size: obj.Size,
      updated_at: obj.LastModified?.toISOString(),
      created_at: obj.LastModified?.toISOString()
    }));

    return { files, error: null };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('S3 list error:', error);
    return { files: [], error: errorMessage };
  }
}

// ===========================================
// PUBLIC API - Uses S3 or Supabase based on settings
// ===========================================

/**
 * Upload a file to storage (S3 when enabled, otherwise Supabase Storage).
 */
export async function uploadFile(
  bucket: StorageBucket,
  path: string,
  file: File | ArrayBuffer,
  options?: {
    contentType?: string;
    cacheControl?: string;
    upsert?: boolean;
  }
): Promise<UploadResult> {
  if (await isS3Enabled()) {
    return uploadToS3(bucket, path, file, options);
  }

  // Fall back to Supabase Storage
  try {
    const { data, error } = await supabaseAdmin.storage.from(bucket).upload(path, file, {
      contentType: options?.contentType,
      cacheControl: options?.cacheControl || '3600',
      upsert: options?.upsert || false
    });

    if (error) {
      console.error('Storage upload error:', error);
      return { success: false, error: error.message };
    }

    // Generate a browser-accessible public URL (not the internal Docker URL)
    const publicUrl = getBrowserAccessibleUrl(bucket, path);

    return {
      success: true,
      path: data.path,
      publicUrl
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    console.error('Storage upload exception:', error);
    return { success: false, error: errorMessage };
  }
}
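
// Usage sketch (file name and folder are illustrative):
//   const result = await uploadFile('documents', `uploads/${generateUniqueFilename(file.name)}`, file, {
//     contentType: file.type
//   });
//   if (!result.success) console.error(result.error);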

/**
 * Get the public URL for a file in storage.
 */
export async function getPublicUrl(bucket: StorageBucket, path: string): Promise<string> {
  if (await isS3Enabled()) {
    const config = await getS3Config();
    if (config) {
      const key = getS3Key(bucket, path);
      if (config.forcePathStyle) {
        return `${config.endpoint}/${config.bucket}/${key}`;
      }
      const protocol = config.useSSL ? 'https' : 'http';
      return `${protocol}://${config.bucket}.${new URL(config.endpoint).host}/${key}`;
    }
  }

  // Fall back to Supabase Storage - use the browser-accessible URL
  return getBrowserAccessibleUrl(bucket, path);
}
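
// Example URL shapes (endpoint and bucket values are hypothetical):
//   S3 path-style:     https://s3.example.com/monacousa-documents/avatars/member-1/avatar.png
//   Supabase fallback: ${PUBLIC_SUPABASE_URL}/storage/v1/object/public/avatars/member-1/avatar.png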

/**
 * Get a signed URL for private file access (S3 presigned URL or Supabase
 * signed URL, depending on settings).
 */
export async function getSignedUrl(
  bucket: StorageBucket,
  path: string,
  expiresIn: number = 3600
): Promise<{ url: string | null; error: string | null }> {
  if (await isS3Enabled()) {
    return getS3PresignedUrl(bucket, path, expiresIn);
  }

  // Fall back to Supabase Storage
  const { data, error } = await supabaseAdmin.storage
    .from(bucket)
    .createSignedUrl(path, expiresIn);

  if (error) {
    return { url: null, error: error.message };
  }

  return { url: data.signedUrl, error: null };
}
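
// Usage sketch (15-minute expiry; the path is illustrative):
//   const { url, error } = await getSignedUrl('documents', 'private/contract.pdf', 900);
//   if (url) redirect(302, url); // e.g. in a SvelteKit endpoint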

/**
 * Delete a file from storage.
 */
export async function deleteFile(
  bucket: StorageBucket,
  path: string
): Promise<{ success: boolean; error?: string }> {
  if (await isS3Enabled()) {
    return deleteFromS3(bucket, path);
  }

  // Fall back to Supabase Storage
  const { error } = await supabaseAdmin.storage.from(bucket).remove([path]);

  if (error) {
    console.error('Storage delete error:', error);
    return { success: false, error: error.message };
  }

  return { success: true };
}

/**
 * Delete multiple files from storage.
 */
export async function deleteFiles(
  bucket: StorageBucket,
  paths: string[]
): Promise<{ success: boolean; error?: string }> {
  if (await isS3Enabled()) {
    return deleteMultipleFromS3(bucket, paths);
  }

  // Fall back to Supabase Storage
  const { error } = await supabaseAdmin.storage.from(bucket).remove(paths);

  if (error) {
    console.error('Storage delete error:', error);
    return { success: false, error: error.message };
  }

  return { success: true };
}

/**
 * List files in a bucket/folder.
 * Note: offset and sortBy apply only to the Supabase backend; the S3 backend
 * supports limit only.
 */
export async function listFiles(
  bucket: StorageBucket,
  folder?: string,
  options?: {
    limit?: number;
    offset?: number;
    sortBy?: { column: string; order: 'asc' | 'desc' };
  }
): Promise<{ files: any[]; error: string | null }> {
  if (await isS3Enabled()) {
    return listFilesFromS3(bucket, folder, options);
  }

  // Fall back to Supabase Storage
  const { data, error } = await supabaseAdmin.storage.from(bucket).list(folder || '', {
    limit: options?.limit || 100,
    offset: options?.offset || 0,
    sortBy: options?.sortBy || { column: 'created_at', order: 'desc' }
  });

  if (error) {
    return { files: [], error: error.message };
  }

  return { files: data || [], error: null };
}
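
// Usage sketch:
//   const { files, error } = await listFiles('documents', 'uploads', { limit: 25 });
//   for (const f of files) console.log(f.name, f.size);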

/**
 * Generate a unique, filesystem-safe filename with a timestamp prefix.
 */
export function generateUniqueFilename(originalName: string): string {
  const timestamp = Date.now();
  const randomStr = Math.random().toString(36).substring(2, 8);
  const safeName = originalName.replace(/[^a-zA-Z0-9.-]/g, '_').substring(0, 50);
  // Split on the last dot so multi-dot names keep their base intact and
  // names without an extension don't have their name duplicated as one
  const dotIndex = safeName.lastIndexOf('.');
  const ext = dotIndex > 0 ? safeName.slice(dotIndex + 1) : '';
  const nameWithoutExt = dotIndex > 0 ? safeName.slice(0, dotIndex) : safeName;
  return ext
    ? `${timestamp}-${randomStr}-${nameWithoutExt}.${ext}`
    : `${timestamp}-${randomStr}-${nameWithoutExt}`;
}
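
// Example (timestamp and random segment vary per call):
//   generateUniqueFilename('Q1 report.pdf') // => '1714070000000-a1b2c3-Q1_report.pdf'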

/**
 * Upload an avatar image for a member.
 * Returns both S3 and local URLs for storage flexibility.
 */
export async function uploadAvatar(
  memberId: string,
  file: File,
  // Currently unused; kept for API compatibility with callers that pass a
  // user-scoped client
  userSupabase?: ReturnType<typeof import('@supabase/supabase-js').createClient>
): Promise<UploadResult> {
  // Validate file type
  const allowedTypes = ['image/jpeg', 'image/png', 'image/webp', 'image/gif'];
  if (!allowedTypes.includes(file.type)) {
    return { success: false, error: 'Invalid image type. Allowed: JPEG, PNG, WebP, GIF' };
  }

  // Validate file size (max 5MB)
  const maxSize = 5 * 1024 * 1024;
  if (file.size > maxSize) {
    return { success: false, error: 'Image size must be less than 5MB' };
  }

  // Generate path - memberId must match auth.uid() for RLS
  const ext = file.name.split('.').pop() || 'jpg';
  const path = `${memberId}/avatar.${ext}`;

  const arrayBuffer = await file.arrayBuffer();
  const s3Enabled = await isS3Enabled();

  const result: UploadResult = {
    success: false,
    path
  };

  // Upload to S3 if enabled
  if (s3Enabled) {
    const s3Result = await uploadToS3('avatars', path, arrayBuffer, {
      contentType: file.type
    });

    if (!s3Result.success) {
      return s3Result;
    }

    result.s3Url = s3Result.publicUrl;
    result.publicUrl = s3Result.publicUrl;
    result.success = true;
  }

  // Always upload to Supabase Storage as well (for fallback)
  try {
    // First try to delete any existing avatar (ignore errors)
    await supabaseAdmin.storage.from('avatars').remove([path]);

    const { error } = await supabaseAdmin.storage.from('avatars').upload(path, arrayBuffer, {
      contentType: file.type,
      cacheControl: '3600',
      upsert: true
    });

    if (error) {
      // If S3 succeeded, this is okay - just log
      if (result.success) {
        console.warn('Local storage upload failed (S3 succeeded):', error);
      } else {
        console.error('Avatar upload error:', error);
        return { success: false, error: error.message };
      }
    } else {
      // Generate a browser-accessible public URL (not the internal Docker URL)
      result.localUrl = getBrowserAccessibleUrl('avatars', path);

      // If S3 is not enabled, the local URL is the public URL
      if (!s3Enabled) {
        result.publicUrl = result.localUrl;
        result.success = true;
      }
    }
  } catch (error) {
    // If S3 succeeded, this is okay
    if (!result.success) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      console.error('Avatar upload exception:', error);
      return { success: false, error: errorMessage };
    }
  }

  return result;
}
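
// Usage sketch (member id is illustrative):
//   const result = await uploadAvatar('member-123', file);
//   // persist result.s3Url / result.localUrl on the member row as appropriate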

/**
 * Delete a member's avatar from ALL storage backends.
 * Always attempts to delete from both S3 and Supabase Storage.
 */
export async function deleteAvatar(
  memberId: string,
  avatarPath?: string
): Promise<{ success: boolean; error?: string }> {
  // If we have a specific path, use it; otherwise try common extensions
  let paths: string[];
  if (avatarPath) {
    paths = [avatarPath];
  } else {
    const extensions = ['jpg', 'jpeg', 'png', 'webp', 'gif'];
    paths = extensions.map((ext) => `${memberId}/avatar.${ext}`);
  }

  // Always try S3 (the avatar may have been uploaded while S3 was enabled)
  try {
    const s3Config = await getS3Config();
    if (s3Config) {
      const result = await deleteMultipleFromS3('avatars', paths);
      if (!result.success && result.error) {
        console.warn('S3 avatar delete warning:', result.error);
      }
    }
  } catch (error) {
    console.warn('S3 avatar delete error (non-critical):', error);
  }

  // Always try Supabase Storage
  try {
    await supabaseAdmin.storage.from('avatars').remove(paths);
  } catch (error) {
    console.warn('Local storage avatar delete error (non-critical):', error);
  }

  return { success: true };
}

/**
 * Get the appropriate avatar URL based on current storage settings.
 * Useful for picking the right URL when the storage setting is toggled.
 */
export async function getActiveAvatarUrl(member: {
  avatar_url_s3?: string | null;
  avatar_url_local?: string | null;
  avatar_url?: string | null;
}): Promise<string | null> {
  if (await isS3Enabled()) {
    return member.avatar_url_s3 || member.avatar_url || null;
  }
  return member.avatar_url_local || member.avatar_url || null;
}
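
// Example: with S3 enabled, a member row like
//   { avatar_url_s3: 'https://s3.example.com/...', avatar_url_local: 'https://...' }
// resolves to avatar_url_s3; with S3 disabled, to avatar_url_local.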

/**
 * Upload a document to storage.
 * Returns both S3 and local URLs for storage flexibility (same pattern as avatars).
 */
export async function uploadDocument(
  file: File,
  options?: {
    folder?: string;
  }
): Promise<UploadResult> {
  // Validate file type
  const allowedTypes = [
    'application/pdf',
    'application/msword',
    'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
    'application/vnd.ms-excel',
    'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    'application/vnd.ms-powerpoint',
    'application/vnd.openxmlformats-officedocument.presentationml.presentation',
    'text/plain',
    'text/csv',
    'application/json',
    'image/jpeg',
    'image/png',
    'image/webp',
    'image/gif'
  ];

  if (!allowedTypes.includes(file.type)) {
    return {
      success: false,
      error:
        'File type not allowed. Supported: PDF, DOC, DOCX, XLS, XLSX, PPT, PPTX, TXT, CSV, JSON, JPG, PNG, WebP, GIF'
    };
  }

  // Validate file size (max 50MB)
  const maxSize = 50 * 1024 * 1024;
  if (file.size > maxSize) {
    return { success: false, error: 'File size must be less than 50MB' };
  }

  // Generate a unique storage path
  const timestamp = Date.now();
  const randomStr = Math.random().toString(36).substring(2, 8);
  const safeName = file.name.replace(/[^a-zA-Z0-9.-]/g, '_').substring(0, 50);
  const path = options?.folder
    ? `${options.folder}/${timestamp}-${randomStr}-${safeName}`
    : `${timestamp}-${randomStr}-${safeName}`;

  const arrayBuffer = await file.arrayBuffer();
  const s3Enabled = await isS3Enabled();

  const result: UploadResult = {
    success: false,
    path
  };

  // Upload to S3 if enabled
  if (s3Enabled) {
    const s3Result = await uploadToS3('documents', path, arrayBuffer, {
      contentType: file.type
    });

    if (!s3Result.success) {
      return s3Result;
    }

    result.s3Url = s3Result.publicUrl;
    result.publicUrl = s3Result.publicUrl;
    result.success = true;
  }

  // Always upload to Supabase Storage as well (for fallback)
  try {
    const { error } = await supabaseAdmin.storage.from('documents').upload(path, arrayBuffer, {
      contentType: file.type,
      cacheControl: '3600',
      upsert: false
    });

    if (error) {
      // If S3 succeeded, this is okay - just log
      if (result.success) {
        console.warn('Local storage upload failed (S3 succeeded):', error);
      } else {
        console.error('Document upload error:', error);
        return { success: false, error: error.message };
      }
    } else {
      // Generate a browser-accessible public URL (not the internal Docker URL)
      result.localUrl = getBrowserAccessibleUrl('documents', path);

      // If S3 is not enabled, the local URL is the public URL
      if (!s3Enabled) {
        result.publicUrl = result.localUrl;
        result.success = true;
      }
    }
  } catch (error) {
    // If S3 succeeded, this is okay
    if (!result.success) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      console.error('Document upload exception:', error);
      return { success: false, error: errorMessage };
    }
  }

  return result;
}
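
// Usage sketch (folder name is illustrative):
//   const result = await uploadDocument(file, { folder: 'board-minutes' });
//   if (result.success) {
//     // store result.path plus result.s3Url / result.localUrl in the documents table
//   }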

/**
 * Delete a document from ALL storage backends.
 * Always attempts to delete from both S3 and Supabase Storage.
 */
export async function deleteDocument(
  storagePath: string
): Promise<{ success: boolean; error?: string }> {
  // Always try S3 (the file may have been uploaded while S3 was enabled)
  try {
    const s3Config = await getS3Config();
    if (s3Config) {
      const result = await deleteFromS3('documents', storagePath);
      if (!result.success && result.error) {
        console.warn('S3 document delete warning:', result.error);
      }
    }
  } catch (error) {
    console.warn('S3 document delete error (non-critical):', error);
  }

  // Always try Supabase Storage
  try {
    await supabaseAdmin.storage.from('documents').remove([storagePath]);
  } catch (error) {
    console.warn('Local storage document delete error (non-critical):', error);
  }

  return { success: true };
}

/**
 * Get the appropriate document URL based on current storage settings.
 * Useful for picking the right URL when the storage setting is toggled.
 */
export async function getActiveDocumentUrl(document: {
  file_url_s3?: string | null;
  file_url_local?: string | null;
  file_path?: string | null;
}): Promise<string | null> {
  if (await isS3Enabled()) {
    return document.file_url_s3 || document.file_path || null;
  }
  return document.file_url_local || document.file_path || null;
}

/**
 * Upload an event cover image.
 * Note: unlike avatars and documents, this delegates to uploadFile and so
 * writes to a single backend (S3 when enabled, otherwise Supabase Storage).
 */
export async function uploadEventImage(eventId: string, file: File): Promise<UploadResult> {
  // Validate file type
  const allowedTypes = ['image/jpeg', 'image/png', 'image/webp'];
  if (!allowedTypes.includes(file.type)) {
    return { success: false, error: 'Invalid image type. Allowed: JPEG, PNG, WebP' };
  }

  // Validate file size (max 10MB)
  const maxSize = 10 * 1024 * 1024;
  if (file.size > maxSize) {
    return { success: false, error: 'Image size must be less than 10MB' };
  }

  // Generate path
  const ext = file.name.split('.').pop() || 'jpg';
  const path = `${eventId}/cover.${ext}`;

  const arrayBuffer = await file.arrayBuffer();

  // Upload with upsert to replace an existing cover
  return uploadFile('event-images', path, arrayBuffer, {
    contentType: file.type,
    cacheControl: '3600',
    upsert: true
  });
}
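
// Usage sketch (event id is illustrative):
//   const result = await uploadEventImage('event-42', file);
//   if (result.success) {
//     // store result.publicUrl on the event row
//   }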