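// Resilient HTTP client: per-request retry with exponential backoff combined
// with one circuit breaker per downstream service.

/** Options controlling retry behaviour for a single request. */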
interface RetryOptions {
  maxRetries?: number;
  initialDelay?: number;
  maxDelay?: number;
  backoffMultiplier?: number;
  timeout?: number;
  retryableStatuses?: number[];
  retryableErrors?: string[];
}

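/** Options controlling when a circuit opens and how it recovers. */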
interface CircuitBreakerOptions {
  failureThreshold?: number;
  resetTimeout?: number;
  halfOpenRequests?: number;
}

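/**
 * Circuit states: CLOSED passes requests through, OPEN rejects them
 * immediately, HALF_OPEN admits a limited number of probe requests.
 */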
enum CircuitState {
  CLOSED = 'CLOSED',
  OPEN = 'OPEN',
  HALF_OPEN = 'HALF_OPEN'
}

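/**
 * Minimal circuit breaker. Once `failureThreshold` failures have been
 * recorded the circuit opens; after `resetTimeout` it moves to HALF_OPEN and
 * admits up to `halfOpenRequests` probes, closing again on the first success.
 */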
class CircuitBreaker {
  private state: CircuitState = CircuitState.CLOSED;
  private failures: number = 0;
  private lastFailureTime: number = 0;
  private halfOpenRequests: number = 0;

  constructor(private options: CircuitBreakerOptions = {}) {
    this.options = {
      failureThreshold: 10,
      resetTimeout: 30000, // 30 seconds
      halfOpenRequests: 3,
      ...options
    };
  }

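  /**
   * Runs `fn` through the breaker: rejects immediately when the circuit is
   * OPEN (or HALF_OPEN with no probe slots left), otherwise records the
   * outcome and updates the circuit state.
   */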
  async execute<T>(fn: () => Promise<T>): Promise<T> {
    // Check circuit state
    if (this.state === CircuitState.OPEN) {
      const timeSinceLastFailure = Date.now() - this.lastFailureTime;
      if (timeSinceLastFailure > this.options.resetTimeout!) {
        this.state = CircuitState.HALF_OPEN;
        this.halfOpenRequests = 0;
      } else {
        throw new Error('Circuit breaker is OPEN');
      }
    }

    if (this.state === CircuitState.HALF_OPEN) {
      if (this.halfOpenRequests >= this.options.halfOpenRequests!) {
        throw new Error('Circuit breaker is HALF_OPEN - max requests reached');
      }
      this.halfOpenRequests++;
    }

    try {
      const result = await fn();

      // Success - reset failures
      if (this.state === CircuitState.HALF_OPEN) {
        this.state = CircuitState.CLOSED;
        this.failures = 0;
      }

      return result;
    } catch (error) {
      this.failures++;
      this.lastFailureTime = Date.now();

      if (this.failures >= this.options.failureThreshold!) {
        this.state = CircuitState.OPEN;
        console.error(`[CircuitBreaker] Opening circuit after ${this.failures} failures`);
      }

      throw error;
    }
  }

  getState(): { state: CircuitState; failures: number } {
    return {
      state: this.state,
      failures: this.failures
    };
  }

  reset(): void {
    this.state = CircuitState.CLOSED;
    this.failures = 0;
    this.halfOpenRequests = 0;
  }
}

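/**
 * HTTP client that wraps `fetch` with retries (exponential backoff, per-attempt
 * timeout) and one circuit breaker per target service, keyed by hostname
 * unless a `serviceName` is supplied.
 */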
export class ResilientHttpClient {
  private circuitBreakers: Map<string, CircuitBreaker> = new Map();

  constructor(
    private defaultRetryOptions: RetryOptions = {},
    private defaultCircuitBreakerOptions: CircuitBreakerOptions = {}
  ) {
    this.defaultRetryOptions = {
      maxRetries: 3,
      initialDelay: 1000,
      maxDelay: 16000,
      backoffMultiplier: 2,
      timeout: 30000,
      retryableStatuses: [408, 429, 500, 502, 503, 504],
      retryableErrors: ['ECONNREFUSED', 'ETIMEDOUT', 'ENOTFOUND', 'ECONNRESET'],
      ...defaultRetryOptions
    };
  }

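  /** Lazily creates and caches one breaker per service name. */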
  private getCircuitBreaker(serviceName: string): CircuitBreaker {
    if (!this.circuitBreakers.has(serviceName)) {
      this.circuitBreakers.set(
        serviceName,
        new CircuitBreaker(this.defaultCircuitBreakerOptions)
      );
    }
    return this.circuitBreakers.get(serviceName)!;
  }

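  /**
   * Fetches `url` with retry and circuit-breaker protection. The breaker is
   * selected by `options.serviceName`, falling back to the URL's hostname.
   */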
  async fetchWithRetry(
    url: string,
    options: RequestInit & { serviceName?: string } = {},
    retryOptions: RetryOptions = {}
  ): Promise<Response> {
    const mergedRetryOptions = { ...this.defaultRetryOptions, ...retryOptions };
    const serviceName = options.serviceName || new URL(url).hostname;
    const circuitBreaker = this.getCircuitBreaker(serviceName);

    return circuitBreaker.execute(async () => {
      return this.executeWithRetry(url, options, mergedRetryOptions);
    });
  }

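  /**
   * Performs the request, retrying on retryable statuses and errors with
   * exponential backoff; each attempt is bounded by `timeout` via an
   * AbortController.
   */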
  private async executeWithRetry(
    url: string,
    options: RequestInit,
    retryOptions: RetryOptions
  ): Promise<Response> {
    let lastError: Error | null = null;
    let delay = retryOptions.initialDelay!;

    for (let attempt = 0; attempt <= retryOptions.maxRetries!; attempt++) {
      // Add a per-attempt timeout to the request
      const controller = new AbortController();
      const timeoutId = setTimeout(
        () => controller.abort(),
        retryOptions.timeout!
      );

      try {
        const response = await fetch(url, {
          ...options,
          signal: controller.signal
        });

        // Check if response is retryable
        if (
          !response.ok &&
          retryOptions.retryableStatuses!.includes(response.status) &&
          attempt < retryOptions.maxRetries!
        ) {
          console.warn(
            `[ResilientHttp] Retryable status ${response.status} for ${url}, attempt ${attempt + 1}/${retryOptions.maxRetries}`
          );
          await this.delay(delay);
          delay = Math.min(delay * retryOptions.backoffMultiplier!, retryOptions.maxDelay!);
          continue;
        }

        return response;
      } catch (error: any) {
        lastError = error;

        // Check if error is retryable
        const isRetryable = retryOptions.retryableErrors!.some(
          errType => error.message?.includes(errType) || error.code === errType
        );

        if (isRetryable && attempt < retryOptions.maxRetries!) {
          console.warn(
            `[ResilientHttp] Retryable error for ${url}: ${error.message}, attempt ${attempt + 1}/${retryOptions.maxRetries}`
          );
          await this.delay(delay);
          delay = Math.min(delay * retryOptions.backoffMultiplier!, retryOptions.maxDelay!);
          continue;
        }

        throw error;
      } finally {
        // Clear the timeout on every path (success, retry, or error) so a
        // stale timer cannot fire after the attempt has settled.
        clearTimeout(timeoutId);
      }
    }

    throw lastError || new Error(`Failed after ${retryOptions.maxRetries} retries`);
  }

  private delay(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

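  /** Snapshot of every breaker's current state and failure count, keyed by service. */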
  getCircuitBreakerStatus(): { [serviceName: string]: { state: string; failures: number } } {
    const status: { [serviceName: string]: { state: string; failures: number } } = {};

    this.circuitBreakers.forEach((breaker, serviceName) => {
      status[serviceName] = breaker.getState();
    });

    return status;
  }
}

// Create a singleton instance
export const resilientHttp = new ResilientHttpClient(
  {
    maxRetries: 3,
    initialDelay: 1000,
    maxDelay: 8000,
    backoffMultiplier: 2,
    timeout: 30000
  },
  {
    failureThreshold: 10,
    resetTimeout: 30000,
    halfOpenRequests: 3
  }
);

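// Example usage (illustrative only; the URL and service name below are
// placeholders, not part of this module):
//
//   const response = await resilientHttp.fetchWithRetry(
//     'https://api.example.com/v1/items',
//     { method: 'GET', serviceName: 'items-api' },
//     { maxRetries: 2 }
//   );
//   if (!response.ok) {
//     console.warn('Request failed with status', response.status);
//   }
//   console.log(resilientHttp.getCircuitBreakerStatus());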