email server updates

parent be935e2ba6
commit f111f76a3b
@@ -5,15 +5,13 @@
        Email History
        <v-spacer />
        <v-btn
          :icon="mobile ? undefined : 'mdi-refresh'"
          :size="mobile ? 'default' : 'small'"
          icon="mdi-refresh"
          :size="mobile ? 'small' : 'small'"
          variant="text"
          @click="loadEmails"
          :loading="loading"
          :class="mobile ? 'rounded-circle' : ''"
        >
          <v-icon v-if="!loading">mdi-refresh</v-icon>
          <v-tooltip v-if="mobile" activator="parent" location="bottom">
          <v-tooltip activator="parent" location="bottom">
            Refresh emails
          </v-tooltip>
        </v-btn>
@@ -95,6 +93,34 @@
              Show less
            </div>
          </div>

          <!-- Attachments Section -->
          <div v-if="email.attachments && email.attachments.length > 0" class="mt-3">
            <v-divider class="mb-2" />
            <div class="text-caption text-grey mb-2">
              <v-icon size="small" class="mr-1">mdi-paperclip</v-icon>
              {{ email.attachments.length }} Attachment{{ email.attachments.length > 1 ? 's' : '' }}
            </div>
            <div class="d-flex flex-wrap gap-2">
              <v-chip
                v-for="(attachment, index) in email.attachments"
                :key="index"
                size="small"
                variant="outlined"
                :prepend-icon="getAttachmentIcon(attachment.contentType)"
                @click="downloadAttachment(attachment)"
                :disabled="!!attachment.error"
              >
                <span class="text-truncate" style="max-width: 150px">
                  {{ attachment.filename }}
                </span>
                <span class="text-caption ml-1">({{ formatFileSize(attachment.size) }})</span>
                <v-tooltip v-if="attachment.error" activator="parent">
                  {{ attachment.error }}
                </v-tooltip>
              </v-chip>
            </div>
          </div>
        </v-card-text>
        <v-card-actions>
          <v-spacer />
@@ -136,6 +162,16 @@ interface EmailMessage {
  timestamp: string;
  direction: 'sent' | 'received';
  threadId?: string;
  attachments?: Array<{
    id?: string;
    filename: string;
    originalName?: string;
    contentType: string;
    size: number;
    path?: string;
    bucket?: string;
    error?: string;
  }>;
}

interface EmailThread {
@@ -269,6 +305,57 @@ const reloadEmails = () => {
  loadEmails();
};

// Get icon for attachment based on content type
const getAttachmentIcon = (contentType: string) => {
  if (!contentType) return 'mdi-file';

  if (contentType.startsWith('image/')) return 'mdi-file-image';
  if (contentType.startsWith('video/')) return 'mdi-file-video';
  if (contentType.startsWith('audio/')) return 'mdi-file-music';
  if (contentType.includes('pdf')) return 'mdi-file-pdf-box';
  if (contentType.includes('word') || contentType.includes('document')) return 'mdi-file-word';
  if (contentType.includes('sheet') || contentType.includes('excel')) return 'mdi-file-excel';
  if (contentType.includes('powerpoint') || contentType.includes('presentation')) return 'mdi-file-powerpoint';
  if (contentType.includes('zip') || contentType.includes('compressed')) return 'mdi-folder-zip';

  return 'mdi-file';
};

// Format file size for display
const formatFileSize = (bytes: number) => {
  if (!bytes || bytes === 0) return '0 B';

  const units = ['B', 'KB', 'MB', 'GB'];
  const i = Math.floor(Math.log(bytes) / Math.log(1024));

  return `${(bytes / Math.pow(1024, i)).toFixed(1)} ${units[i]}`;
};

// Download attachment
const downloadAttachment = async (attachment: any) => {
  if (!attachment.path || !attachment.bucket) {
    toast.error('Attachment information is missing');
    return;
  }

  try {
    // Use the proxy download endpoint
    const downloadUrl = `/api/files/proxy-download?bucket=${attachment.bucket}&fileName=${encodeURIComponent(attachment.path)}`;

    // Create a temporary link and trigger download
    const link = document.createElement('a');
    link.href = downloadUrl;
    link.download = attachment.originalName || attachment.filename;
    link.target = '_blank';
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
  } catch (error) {
    console.error('Failed to download attachment:', error);
    toast.error('Failed to download attachment');
  }
};

// Load emails on mount
onMounted(() => {
  loadEmails();
@@ -0,0 +1,268 @@
# Comprehensive Implementation Plan for Portal Fixes

## Overview
This document outlines the implementation plan for all requested fixes and improvements to the client portal system. Each section includes the problem, the solution approach, and implementation details.

## 1. Hide Berth Recommendations

### Problem
Berth recommendations are shown across all instances and need to be hidden.

### Solution
- Remove or hide all UI elements related to berth recommendations
- Clean up related API calls
- Update the InterestDetailsModal and other components

### Implementation
```vue
<!-- Remove from InterestDetailsModal.vue -->
<!-- Remove the berth recommendations autocomplete field -->
<!-- Remove link-berth-recommendations-to-interest API calls -->
```

## 2. EOI Fixes

### 2.1 Hide Debug Console

#### Problem
The debug console is visible in production.

#### Solution
- Remove all console.log statements from EOI-related components
- Add an environment check for debug output

### 2.2 Slider Bubble Positioning

#### Problem
The slider bubble does not fit entirely within the slider bar.

#### Solution
- Adjust the slider component's CSS so the bubble stays within bounds
- Calculate the bubble position based on the slider width

### 2.3 EOI Deletion Requires Multiple Clicks

#### Problem
EOI deletion fails unless the button is clicked multiple times.

#### Solution
- Add proper loading states and disable the button during deletion
- Implement proper error handling and retry logic
- Use operation locks to prevent concurrent operations (see the sketch below)
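A minimal sketch of the operation-lock idea for a Vue 3 component; `deleteEoi` is a placeholder for the existing delete API call, and `deleting` would be bound to the delete button's `:loading` and `:disabled` props:

```typescript
import { ref } from 'vue';

// Component-level lock: true while a deletion is in flight.
const deleting = ref(false);

// Guarded handler: clicks while `deleting` is true are ignored, so rapid clicks
// can no longer start concurrent delete requests. `deleteEoi` stands in for the
// existing API call.
async function handleDeleteEoi(
  interestId: string,
  deleteEoi: (id: string) => Promise<void>
): Promise<void> {
  if (deleting.value) return; // lock already held
  deleting.value = true;
  try {
    await deleteEoi(interestId);
  } finally {
    deleting.value = false; // release the lock even if the request failed
  }
}
```
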
### 2.4 Clean Database on EOI Deletion

#### Problem
Signature links and ID numbers remain in NocoDB after deletion.

#### Solution
```typescript
// When deleting an EOI:
// 1. Delete from Documenso
// 2. Clear all EOI-related fields in NocoDB:
//    - EOI Document
//    - EOI Client Link
//    - EOI Oscar Link
//    - EOI David Link
//    - EOI ID
//    - EOI Time Created
//    - EOI Time Sent
//    - EOI Status
// 3. Reset Sales Process Level if needed
```

### 2.5 EOI Regeneration Confirmation

#### Problem
There is no warning when regenerating an EOI.

#### Solution
- Add a confirmation dialog with a warning message
- Remind the user to verify all information
- Delete old EOI data before regenerating (see the sketch below)
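A minimal sketch of the confirmation guard; `window.confirm` stands in for a proper Vuetify dialog, and the two callbacks are hypothetical hooks into the existing EOI logic:

```typescript
// Ask for confirmation, then clear the old EOI data before generating a new one.
export async function regenerateEoiWithConfirmation(
  interestId: string,
  deleteEoiData: (id: string) => Promise<void>, // hypothetical: clears the old EOI fields
  generateEoi: (id: string) => Promise<void>    // hypothetical: creates the new EOI
): Promise<void> {
  const confirmed = window.confirm(
    'Regenerating the EOI will delete the existing document and signature links. ' +
    'Please verify that all information is correct before continuing.'
  );
  if (!confirmed) return;

  await deleteEoiData(interestId);
  await generateEoi(interestId);
}
```
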
## 3. Email Refresh Button Styling

### Problem
The refresh button needs to be round and moved away from the compose button.

### Solution
```vue
<v-btn
  icon="mdi-refresh"
  size="small"
  variant="text"
  class="ml-2 rounded-circle"
  @click="loadEmails"
  :loading="loading"
>
```

## 4. Session Management

### Problem
Email credentials persist incorrectly between sessions.

### Solution
- Clear sessionStorage on page load/unload
- Implement session validation on each API call
- Add session expiry timestamps
- Force re-authentication on page reload (see the sketch below)
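A minimal sketch of the client-side piece; the storage key and the one-hour expiry are assumptions:

```typescript
const SESSION_KEY = 'email-session';    // assumed storage key
const SESSION_TTL_MS = 60 * 60 * 1000;  // assumed 1-hour expiry

// Call on page load: discards credentials left over from a previous visit,
// which forces re-authentication after a reload.
export function clearEmailSession(): void {
  sessionStorage.removeItem(SESSION_KEY);
}

// Store a freshly authenticated session together with an expiry timestamp.
export function storeEmailSession(sessionId: string): void {
  sessionStorage.setItem(
    SESSION_KEY,
    JSON.stringify({ sessionId, expiresAt: Date.now() + SESSION_TTL_MS })
  );
}

// Check before each API call: returns the session id only while it is still valid.
export function getValidEmailSession(): string | null {
  const raw = sessionStorage.getItem(SESSION_KEY);
  if (!raw) return null;
  const { sessionId, expiresAt } = JSON.parse(raw) as { sessionId: string; expiresAt: number };
  if (Date.now() > expiresAt) {
    clearEmailSession();
    return null;
  }
  return sessionId;
}
```
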
## 5. Phone Input Country Issue

### Problem
Selecting USA switches the input to American Samoa.

### Solution
- Fix the country-code sorting in the PhoneInput component
- Ensure US (United States) appears before AS (American Samoa)
- Set an explicit country code for the USA (+1); see the sketch below
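A minimal sketch of the sorting fix, assuming the component keeps a flat country list; the `Country` shape is illustrative:

```typescript
interface Country {
  iso: string;      // e.g. 'US', 'AS'
  name: string;     // e.g. 'United States'
  dialCode: string; // e.g. '+1'
}

// Several countries share +1, so put the preferred ISO codes first; a lookup by
// dial code then resolves to 'US' instead of 'AS' (American Samoa).
const PREFERRED_ISO = ['US'];

export function sortCountries(countries: Country[]): Country[] {
  return [...countries].sort((a, b) => {
    const ap = PREFERRED_ISO.indexOf(a.iso);
    const bp = PREFERRED_ISO.indexOf(b.iso);
    if (ap !== bp) return (ap === -1 ? Infinity : ap) - (bp === -1 ? Infinity : bp);
    return a.name.localeCompare(b.name);
  });
}

// Resolving a dial code now picks the first (preferred) match.
export const countryForDialCode = (countries: Country[], dialCode: string) =>
  sortCountries(countries).find(c => c.dialCode === dialCode);
```
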
## 6. Email Attachments

### Problem
Attachments are not displayed or downloadable.

### Solution (Already Implemented)
- Added attachment display UI in EmailThreadView
- Created helper functions for icon display and file size formatting
- Implemented download functionality via the proxy endpoint

## 7. IMAP Connection Reliability

### Problem
IMAP requires multiple connection attempts.

### Solution (Already Implemented)
- Created an email-sync service with exponential backoff retry
- Implemented MinIO-based caching for offline access
- Added connection pooling with health checks

## 8. Berth Assignment Concurrency

### Problem
Multiple berth selections in quick succession cause errors.

### Solution
- Implement operation locking for berth assignments
- Add debouncing to prevent rapid clicks
- Queue berth operations sequentially (see the sketch below)
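A minimal sketch of sequential queuing; the queue itself is generic, and the assignment call named in the usage comment is a placeholder:

```typescript
// Serializes async operations: each one starts only after the previous finished,
// no matter how quickly the user clicks.
let berthQueue: Promise<unknown> = Promise.resolve();

export function enqueueBerthOperation<T>(op: () => Promise<T>): Promise<T> {
  const result = berthQueue.then(op, op);     // run after the previous op, success or failure
  berthQueue = result.catch(() => undefined); // keep the chain alive on errors
  return result;
}

// Usage: wrap the existing assignment call (placeholder name) so clicks are queued.
// const assign = (berthId: string) =>
//   enqueueBerthOperation(() => assignBerthToInterest(berthId));
```
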
## 9. 502 Gateway Errors

### Problem
Frequent 502 errors require multiple retries.

### Root Causes
1. Connection pool exhaustion
2. Database connection limits
3. Memory/process issues
4. No retry logic in API calls

### Solutions
1. **Connection Pool Management**
   - Implement proper connection pooling for IMAP
   - Add connection recycling and health checks
   - Set maximum connection limits

2. **API Retry Logic** (see the sketch after this list)
   - Add exponential backoff retry to all API calls
   - Implement a circuit breaker pattern
   - Cache responses where appropriate

3. **Resource Optimization**
   - Reduce concurrent database connections
   - Implement request queuing
   - Add memory monitoring
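A minimal sketch of the client-side retry wrapper, mirroring the exponential backoff the email-sync service already uses on the server; it treats 502/503/504 as retryable and is written against the native `fetch` API:

```typescript
export async function fetchWithRetry(
  input: RequestInfo | URL,
  init: RequestInit = {},
  maxRetries = 3
): Promise<Response> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      const response = await fetch(input, init);
      // Gateway errors are retryable; all other statuses are returned to the caller.
      if ([502, 503, 504].includes(response.status)) {
        throw new Error(`Gateway error ${response.status}`);
      }
      return response;
    } catch (error) {
      lastError = error;
      if (attempt === maxRetries) break;
      const waitMs = Math.pow(2, attempt) * 1000; // 1s, 2s, 4s, ...
      await new Promise(resolve => setTimeout(resolve, waitMs));
    }
  }
  throw lastError;
}
```
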
## 10. Performance Improvements

### Email System
- **MinIO-First Architecture**: Load cached emails instantly
- **Background Sync**: Update emails asynchronously
- **Incremental Updates**: Only fetch new emails
- **Connection Reuse**: Pool IMAP connections

### API Optimization
- **Request Batching**: Combine multiple API calls
- **Response Caching**: Cache frequently accessed data
- **Lazy Loading**: Load data only when needed
- **Debouncing**: Prevent excessive API calls (see the sketch below)
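A minimal debounce helper as a sketch; the search endpoint in the usage comment is a placeholder:

```typescript
// Collapses bursts of calls (typing, rapid clicks) into a single call made after
// the caller has been idle for `waitMs`.
export function debounce<A extends unknown[]>(
  fn: (...args: A) => void,
  waitMs = 300
): (...args: A) => void {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A) => {
    if (timer !== undefined) clearTimeout(timer);
    timer = setTimeout(() => fn(...args), waitMs);
  };
}

// Usage: only hit the API once the user stops typing (placeholder endpoint).
// const searchClients = debounce((query: string) => {
//   void fetch(`/api/clients?query=${encodeURIComponent(query)}`);
// }, 300);
```
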
### Database Optimization
- **Connection Pooling**: Reuse database connections
- **Query Optimization**: Optimize NocoDB queries
- **Batch Operations**: Update multiple records at once

## Implementation Priority

### Phase 1: Critical Fixes (Immediate)
1. EOI deletion and database cleanup
2. Session management fixes
3. 502 error mitigation

### Phase 2: User Experience (This Week)
1. Hide berth recommendations
2. Fix phone input country selection
3. Email attachment display
4. UI improvements (buttons, sliders)

### Phase 3: Performance (Next Week)
1. Complete the MinIO email implementation
2. API optimization
3. Connection pooling improvements

## Testing Requirements

### Unit Tests
- EOI deletion flow
- Email sync service
- Session management

### Integration Tests
- IMAP connection with retry
- MinIO caching
- Concurrent operations

### Performance Tests
- API response times
- Connection pool limits
- Memory usage under load

## Monitoring

### Metrics to Track
- 502 error frequency
- API response times
- IMAP connection success rate
- Memory/CPU usage
- Active connection count

### Alerts
- 502 errors above threshold
- Connection pool exhaustion
- High memory usage
- Failed email syncs

## Additional Recommendations

1. **Error Handling**
   - Implement a global error handler
   - User-friendly error messages
   - Automatic error reporting

2. **Logging**
   - Structured logging for debugging
   - Request/response logging
   - Performance metrics logging

3. **Caching Strategy**
   - Redis for session management
   - MinIO for email caching
   - In-memory cache for frequent queries

4. **Architecture Improvements**
   - Message queue for async operations
   - WebSocket for real-time updates
   - Service worker for offline support
@@ -0,0 +1,223 @@
import { getCredentialsFromSession, decryptCredentials } from '~/server/utils/encryption';
import { getCachedEmails, syncEmailsWithRetry, getSyncMetadata } from '~/server/utils/email-sync';

interface EmailMessage {
  id: string;
  from: string;
  to: string | string[];
  subject: string;
  body: string;
  html?: string;
  timestamp: string;
  direction: 'sent' | 'received';
  threadId?: string;
  attachments?: any[];
}

interface EmailThread {
  id: string;
  subject: string;
  emailCount: number;
  latestTimestamp: string;
  emails: EmailMessage[];
}

export default defineEventHandler(async (event) => {
  const xTagHeader = getRequestHeader(event, "x-tag");

  if (!xTagHeader || (xTagHeader !== "094ut234" && xTagHeader !== "pjnvü1230")) {
    throw createError({ statusCode: 401, statusMessage: "unauthenticated" });
  }

  try {
    const body = await readBody(event);
    const { clientEmail, interestId, sessionId } = body;

    if (!clientEmail || !sessionId || !interestId) {
      throw createError({
        statusCode: 400,
        statusMessage: "Client email, interestId and sessionId are required"
      });
    }

    // Get encrypted credentials from session
    const encryptedCredentials = getCredentialsFromSession(sessionId);
    if (!encryptedCredentials) {
      return {
        success: true,
        emails: [],
        threads: []
      };
    }

    // Decrypt credentials
    let userEmail: string;
    let password: string;

    try {
      const decrypted = decryptCredentials(encryptedCredentials);
      userEmail = decrypted.email;
      password = decrypted.password;
    } catch (decryptError) {
      console.error('[Email V2] Failed to decrypt credentials:', decryptError);
      return {
        success: true,
        emails: [],
        threads: []
      };
    }

    // First, get emails from MinIO cache (instant response)
    const cachedEmails = await getCachedEmails(interestId);

    // Get sync metadata
    const metadata = await getSyncMetadata(interestId);

    // Trigger background sync if not currently syncing and last sync was over 5 minutes ago
    const fiveMinutesAgo = new Date(Date.now() - 5 * 60 * 1000);
    const lastSync = new Date(metadata.lastSyncTime);

    if (metadata.syncStatus !== 'syncing' && lastSync < fiveMinutesAgo) {
      // Fire and forget - don't wait for sync to complete
      syncEmailsWithRetry(sessionId, userEmail, clientEmail, interestId).catch(err => {
        console.error('[Email V2] Background sync failed:', err);
      });
    }

    // Process cached emails
    const emails: EmailMessage[] = cachedEmails.map(email => ({
      id: email.id || email.messageId || `${Date.now()}-${Math.random()}`,
      from: email.from || '',
      to: email.to || '',
      subject: email.subject || '',
      body: email.body || email.text || '',
      html: email.html,
      timestamp: email.timestamp || new Date().toISOString(),
      direction: email.direction || (email.from?.toLowerCase().includes(userEmail.toLowerCase()) ? 'sent' : 'received'),
      threadId: email.threadId,
      attachments: email.attachments
    }));

    // Filter to only include emails involving the client
    const filteredEmails = emails.filter(email => {
      const fromEmail = email.from.toLowerCase();
      const toEmails = Array.isArray(email.to) ? email.to.join(' ').toLowerCase() : email.to.toLowerCase();

      return fromEmail.includes(clientEmail.toLowerCase()) ||
        toEmails.includes(clientEmail.toLowerCase()) ||
        fromEmail.includes(userEmail.toLowerCase()) ||
        toEmails.includes(userEmail.toLowerCase());
    });

    // Sort by timestamp (newest first)
    filteredEmails.sort((a, b) =>
      new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime()
    );

    // Group into threads
    const threads = groupIntoThreads(filteredEmails);

    return {
      success: true,
      emails: filteredEmails,
      threads: threads,
      syncStatus: metadata.syncStatus,
      lastSync: metadata.lastSyncTime,
      totalEmails: metadata.totalEmails
    };
  } catch (error) {
    console.error('[Email V2] Failed to fetch email thread:', error);
    if (error instanceof Error) {
      throw createError({
        statusCode: 500,
        statusMessage: `Failed to fetch emails: ${error.message}`
      });
    } else {
      throw createError({
        statusCode: 500,
        statusMessage: "An unexpected error occurred",
      });
    }
  }
});

// Group emails into threads based on subject and references
function groupIntoThreads(emails: EmailMessage[]): EmailThread[] {
  const threads = new Map<string, EmailMessage[]>();
  const emailById = new Map<string, EmailMessage>();

  // First pass: index all emails by ID
  emails.forEach(email => {
    emailById.set(email.id, email);
  });

  // Second pass: group emails into threads
  emails.forEach(email => {
    // Normalize subject by removing Re:, Fwd:, etc.
    const normalizedSubject = email.subject
      .replace(/^(Re:|Fwd:|Fw:|RE:|FW:|FWD:)\s*/gi, '')
      .replace(/\s+/g, ' ')
      .trim()
      .toLowerCase();

    // Check if this email belongs to an existing thread
    let threadFound = false;

    // First, check if it has a threadId (in-reply-to header)
    if (email.threadId) {
      // Look for the parent email
      const parentEmail = emailById.get(email.threadId);
      if (parentEmail) {
        // Find which thread the parent belongs to
        for (const [threadId, threadEmails] of threads.entries()) {
          if (threadEmails.some(e => e.id === parentEmail.id)) {
            threadEmails.push(email);
            threadFound = true;
            break;
          }
        }
      }
    }

    // If not found by threadId, try to match by subject
    if (!threadFound) {
      for (const [threadId, threadEmails] of threads.entries()) {
        const threadSubject = threadEmails[0].subject
          .replace(/^(Re:|Fwd:|Fw:|RE:|FW:|FWD:)\s*/gi, '')
          .replace(/\s+/g, ' ')
          .trim()
          .toLowerCase();

        if (threadSubject === normalizedSubject) {
          threadEmails.push(email);
          threadFound = true;
          break;
        }
      }
    }

    // If still not found, create a new thread
    if (!threadFound) {
      threads.set(email.id, [email]);
    }
  });

  // Convert to array format and sort emails within each thread
  return Array.from(threads.entries())
    .map(([threadId, threadEmails]) => {
      // Sort emails within thread by timestamp (oldest first for chronological order)
      threadEmails.sort((a, b) =>
        new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
      );

      return {
        id: threadId,
        subject: threadEmails[0].subject,
        emailCount: threadEmails.length,
        latestTimestamp: threadEmails[threadEmails.length - 1].timestamp,
        emails: threadEmails
      };
    })
    // Sort threads by latest activity (newest first)
    .sort((a, b) => new Date(b.latestTimestamp).getTime() - new Date(a.latestTimestamp).getTime());
}
@@ -0,0 +1,371 @@
import { getMinioClient } from './minio';
import { getIMAPPool } from './imap-pool';
import type { ParsedMail } from 'mailparser';
import { simpleParser } from 'mailparser';

// Email sync service for MinIO-based email management
export interface EmailSyncMetadata {
  lastSyncTime: string;
  totalEmails: number;
  lastError?: string;
  syncStatus: 'idle' | 'syncing' | 'error';
}

export interface EmailThreadIndex {
  threads: Array<{
    id: string;
    subject: string;
    participants: string[];
    emailCount: number;
    lastActivity: string;
    hasAttachments: boolean;
  }>;
  lastUpdated: string;
}

// Get or create sync metadata
export async function getSyncMetadata(interestId: string): Promise<EmailSyncMetadata> {
  const client = getMinioClient();
  const objectName = `interest-${interestId}/metadata.json`;

  try {
    const stream = await client.getObject('client-emails', objectName);
    let data = '';

    await new Promise((resolve, reject) => {
      stream.on('data', (chunk) => { data += chunk; });
      stream.on('end', resolve);
      stream.on('error', reject);
    });

    return JSON.parse(data);
  } catch (error: any) {
    // If not found, create default metadata
    if (error.code === 'NoSuchKey') {
      const defaultMetadata: EmailSyncMetadata = {
        lastSyncTime: new Date(0).toISOString(), // Start from beginning
        totalEmails: 0,
        syncStatus: 'idle'
      };

      await saveSyncMetadata(interestId, defaultMetadata);
      return defaultMetadata;
    }

    throw error;
  }
}

// Save sync metadata
export async function saveSyncMetadata(interestId: string, metadata: EmailSyncMetadata): Promise<void> {
  const client = getMinioClient();
  const objectName = `interest-${interestId}/metadata.json`;
  const buffer = Buffer.from(JSON.stringify(metadata, null, 2));

  await client.putObject('client-emails', objectName, buffer, buffer.length, {
    'Content-Type': 'application/json'
  });
}

// Get thread index
export async function getThreadIndex(interestId: string): Promise<EmailThreadIndex> {
  const client = getMinioClient();
  const objectName = `interest-${interestId}/threads/index.json`;

  try {
    const stream = await client.getObject('client-emails', objectName);
    let data = '';

    await new Promise((resolve, reject) => {
      stream.on('data', (chunk) => { data += chunk; });
      stream.on('end', resolve);
      stream.on('error', reject);
    });

    return JSON.parse(data);
  } catch (error: any) {
    if (error.code === 'NoSuchKey') {
      return {
        threads: [],
        lastUpdated: new Date().toISOString()
      };
    }
    throw error;
  }
}

// Save thread index
export async function saveThreadIndex(interestId: string, index: EmailThreadIndex): Promise<void> {
  const client = getMinioClient();
  const objectName = `interest-${interestId}/threads/index.json`;
  const buffer = Buffer.from(JSON.stringify(index, null, 2));

  await client.putObject('client-emails', objectName, buffer, buffer.length, {
    'Content-Type': 'application/json'
  });
}

// Sync emails with exponential backoff retry
export async function syncEmailsWithRetry(
  sessionId: string,
  userEmail: string,
  clientEmail: string,
  interestId: string,
  maxRetries: number = 3
): Promise<void> {
  let retryCount = 0;
  let lastError: Error | null = null;

  while (retryCount <= maxRetries) {
    try {
      await syncEmails(sessionId, userEmail, clientEmail, interestId);
      return; // Success
    } catch (error: any) {
      lastError = error;
      retryCount++;

      if (retryCount > maxRetries) {
        throw error;
      }

      // Exponential backoff: 1s, 2s, 4s, 8s
      const waitTime = Math.pow(2, retryCount - 1) * 1000;
      console.log(`[EmailSync] Retry ${retryCount}/${maxRetries} after ${waitTime}ms`);

      await new Promise(resolve => setTimeout(resolve, waitTime));
    }
  }

  throw lastError || new Error('Failed to sync emails after retries');
}

// Main sync function
async function syncEmails(
  sessionId: string,
  userEmail: string,
  clientEmail: string,
  interestId: string
): Promise<void> {
  const metadata = await getSyncMetadata(interestId);

  // Update status to syncing
  metadata.syncStatus = 'syncing';
  await saveSyncMetadata(interestId, metadata);

  try {
    const pool = getIMAPPool();
    const imap = await pool.getConnection(sessionId);

    // Fetch emails newer than last sync
    const lastSyncDate = new Date(metadata.lastSyncTime);
    const newEmails = await fetchNewEmails(imap, userEmail, clientEmail, lastSyncDate);

    if (newEmails.length > 0) {
      // Save new emails to MinIO
      for (const email of newEmails) {
        const emailId = email.messageId || `${Date.now()}-${Math.random()}`;
        const objectName = `interest-${interestId}/emails/${emailId}.json`;
        const buffer = Buffer.from(JSON.stringify(email, null, 2));

        const client = getMinioClient();
        await client.putObject('client-emails', objectName, buffer, buffer.length, {
          'Content-Type': 'application/json'
        });
      }

      // Update thread index
      await updateThreadIndex(interestId, newEmails);

      // Update metadata
      metadata.lastSyncTime = new Date().toISOString();
      metadata.totalEmails += newEmails.length;
      metadata.syncStatus = 'idle';
      await saveSyncMetadata(interestId, metadata);
    }
  } catch (error: any) {
    // Update metadata with error
    metadata.syncStatus = 'error';
    metadata.lastError = error.message;
    await saveSyncMetadata(interestId, metadata);

    throw error;
  }
}

// Fetch new emails from IMAP
async function fetchNewEmails(
  imap: any,
  userEmail: string,
  clientEmail: string,
  since: Date
): Promise<any[]> {
  return new Promise((resolve, reject) => {
    const emails: any[] = [];

    imap.openBox('INBOX', true, (err: any) => {
      if (err) {
        reject(err);
        return;
      }

      // Search for emails newer than last sync
      const searchCriteria = [
        ['SINCE', since.toISOString().split('T')[0]],
        ['OR',
          ['FROM', clientEmail],
          ['TO', clientEmail]
        ]
      ];

      imap.search(searchCriteria, (err: any, results: number[]) => {
        if (err) {
          reject(err);
          return;
        }

        if (results.length === 0) {
          resolve([]);
          return;
        }

        const fetch = imap.fetch(results, {
          bodies: '',
          struct: true,
          envelope: true
        });

        fetch.on('message', (msg: any) => {
          msg.on('body', (stream: any) => {
            simpleParser(stream, (err: any, parsed: ParsedMail) => {
              if (!err && parsed) {
                // Handle from/to addresses which can be single or array
                const fromText = Array.isArray(parsed.from)
                  ? parsed.from.map(addr => addr.text).join(', ')
                  : parsed.from?.text || '';

                const toText = Array.isArray(parsed.to)
                  ? parsed.to.map(addr => addr.text).join(', ')
                  : parsed.to?.text || '';

                emails.push({
                  id: parsed.messageId,
                  from: fromText,
                  to: toText,
                  subject: parsed.subject,
                  body: parsed.text,
                  html: parsed.html,
                  timestamp: parsed.date?.toISOString(),
                  attachments: parsed.attachments?.map(att => ({
                    filename: att.filename,
                    contentType: att.contentType,
                    size: att.size
                  }))
                });
              }
            });
          });
        });

        fetch.once('end', () => {
          resolve(emails);
        });

        fetch.once('error', (err: any) => {
          reject(err);
        });
      });
    });
  });
}

// Update thread index with new emails
async function updateThreadIndex(interestId: string, newEmails: any[]): Promise<void> {
  const index = await getThreadIndex(interestId);

  // Group emails by thread (simplified - by subject)
  for (const email of newEmails) {
    const normalizedSubject = email.subject
      ?.replace(/^(Re:|Fwd:|Fw:|RE:|FW:|FWD:)\s*/gi, '')
      .trim() || 'No Subject';

    let thread = index.threads.find(t =>
      t.subject.replace(/^(Re:|Fwd:|Fw:|RE:|FW:|FWD:)\s*/gi, '').trim() === normalizedSubject
    );

    if (!thread) {
      thread = {
        id: `thread-${Date.now()}-${Math.random()}`,
        subject: email.subject || 'No Subject',
        participants: [],
        emailCount: 0,
        lastActivity: email.timestamp,
        hasAttachments: false
      };
      index.threads.push(thread);
    }

    // Update thread
    thread.emailCount++;
    thread.lastActivity = email.timestamp;
    if (email.attachments?.length > 0) {
      thread.hasAttachments = true;
    }

    // Add participants
    const from = email.from?.match(/<(.+)>/)?.[1] || email.from;
    if (from && !thread.participants.includes(from)) {
      thread.participants.push(from);
    }
  }

  // Sort threads by last activity
  index.threads.sort((a, b) =>
    new Date(b.lastActivity).getTime() - new Date(a.lastActivity).getTime()
  );

  index.lastUpdated = new Date().toISOString();
  await saveThreadIndex(interestId, index);
}

// Get emails from MinIO cache
export async function getCachedEmails(interestId: string): Promise<any[]> {
  const client = getMinioClient();
  const emails: any[] = [];

  try {
    const stream = client.listObjectsV2('client-emails', `interest-${interestId}/emails/`, true);
    const files: any[] = [];

    await new Promise((resolve, reject) => {
      stream.on('data', (obj) => {
        if (obj && obj.name && obj.name.endsWith('.json')) {
          files.push(obj.name);
        }
      });
      stream.on('error', reject);
      stream.on('end', resolve);
    });

    // Load each email
    for (const fileName of files) {
      try {
        const objStream = await client.getObject('client-emails', fileName);
        let data = '';

        await new Promise((resolve, reject) => {
          objStream.on('data', (chunk) => { data += chunk; });
          objStream.on('end', resolve);
          objStream.on('error', reject);
        });

        emails.push(JSON.parse(data));
      } catch (error) {
        console.error(`Failed to load email ${fileName}:`, error);
      }
    }
  } catch (error) {
    console.error('Failed to list cached emails:', error);
  }

  return emails;
}