talk2me/static/js/src/requestQueue.ts
Adolfo Delorenzo 17e0f2f03d Add connection retry logic to handle network interruptions gracefully
- Implement ConnectionManager with exponential backoff retry strategy
- Add automatic connection monitoring and health checks
- Update RequestQueueManager to integrate with connection state
- Create ConnectionUI component for visual connection status
- Queue requests during offline periods and process when online
- Add comprehensive error handling for network-related failures
- Create detailed documentation for connection retry features
- Support manual retry and automatic recovery

Features:
- Real-time connection status indicator
- Offline banner with retry button
- Request queue visualization
- Priority-based request processing
- Configurable retry parameters

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-06-03 00:00:03 -06:00


// Request queue and throttling manager
import { ConnectionManager, ConnectionState } from './connectionManager';

export interface QueuedRequest {
  id: string;
  type: 'transcribe' | 'translate' | 'tts';
  request: () => Promise<any>;
  resolve: (value: any) => void;
  reject: (reason?: any) => void;
  retryCount: number;
  priority: number;
  timestamp: number;
}
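/**
 * Singleton that queues transcription, translation and TTS requests,
 * enforces per-second and per-minute rate limits, and coordinates with
 * ConnectionManager so work is paused while offline and retried with
 * exponential backoff once the connection recovers.
 */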
export class RequestQueueManager {
  private static instance: RequestQueueManager;
  private queue: QueuedRequest[] = [];
  private activeRequests: Map<string, QueuedRequest> = new Map();
  private maxConcurrent = 2; // Maximum concurrent requests
  private maxRetries = 3;
  private retryDelay = 1000; // Base retry delay in ms
  private isProcessing = false;
  private connectionManager: ConnectionManager;
  private isPaused = false;

  // Rate limiting
  private requestHistory: number[] = [];
  private maxRequestsPerMinute = 30;
  private maxRequestsPerSecond = 2;

  private constructor() {
    this.connectionManager = ConnectionManager.getInstance();

    // Subscribe to connection state changes
    this.connectionManager.subscribe('request-queue', (state: ConnectionState) => {
      this.handleConnectionStateChange(state);
    });

    // Start processing queue
    this.startProcessing();
  }

  static getInstance(): RequestQueueManager {
    if (!RequestQueueManager.instance) {
      RequestQueueManager.instance = new RequestQueueManager();
    }
    return RequestQueueManager.instance;
  }

  // Add request to queue
  async enqueue<T>(
    type: 'transcribe' | 'translate' | 'tts',
    request: () => Promise<T>,
    priority: number = 5
  ): Promise<T> {
    // Check rate limits
    if (!this.checkRateLimits()) {
      throw new Error('Rate limit exceeded. Please slow down.');
    }

    return new Promise((resolve, reject) => {
      const id = this.generateId();
      const queuedRequest: QueuedRequest = {
        id,
        type,
        request,
        resolve,
        reject,
        retryCount: 0,
        priority,
        timestamp: Date.now()
      };

      // Add to queue based on priority
      this.addToQueue(queuedRequest);

      // Log queue status
      console.log(`Request queued: ${type}, Queue size: ${this.queue.length}, Active: ${this.activeRequests.size}`);
    });
  }

  private addToQueue(request: QueuedRequest): void {
    // Insert based on priority (higher priority first)
    const insertIndex = this.queue.findIndex(item => item.priority < request.priority);
    if (insertIndex === -1) {
      this.queue.push(request);
    } else {
      this.queue.splice(insertIndex, 0, request);
    }
  }

  private checkRateLimits(): boolean {
    const now = Date.now();

    // Clean old entries
    this.requestHistory = this.requestHistory.filter(
      time => now - time < 60000 // Keep last minute
    );

    // Check per-second limit
    const lastSecond = this.requestHistory.filter(
      time => now - time < 1000
    ).length;
    if (lastSecond >= this.maxRequestsPerSecond) {
      console.warn('Per-second rate limit reached');
      return false;
    }

    // Check per-minute limit
    if (this.requestHistory.length >= this.maxRequestsPerMinute) {
      console.warn('Per-minute rate limit reached');
      return false;
    }

    // Record this request
    this.requestHistory.push(now);
    return true;
  }
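  // Example: with maxRequestsPerSecond = 2, a third enqueue() call within the
  // same second is rejected immediately, and with maxRequestsPerMinute = 30 the
  // 31st call inside a rolling 60-second window is rejected as well.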
  private async startProcessing(): Promise<void> {
    if (this.isProcessing) return;
    this.isProcessing = true;

    while (true) {
      await this.processQueue();
      await this.delay(100); // Check queue every 100ms
    }
  }

  private async processQueue(): Promise<void> {
    // Check if we're paused or can't process more requests
    if (this.isPaused || this.activeRequests.size >= this.maxConcurrent || this.queue.length === 0) {
      return;
    }

    // Check if we're online
    if (!this.connectionManager.isOnline()) {
      console.log('Queue processing paused - offline');
      return;
    }

    // Get next request
    const request = this.queue.shift();
    if (!request) return;

    // Mark as active
    this.activeRequests.set(request.id, request);

    try {
      // Execute request with connection manager retry logic
      const result = await this.connectionManager.retryRequest(
        request.request,
        {
          retries: this.maxRetries - request.retryCount,
          delay: this.calculateRetryDelay(request.retryCount + 1),
          onRetry: (attempt, error) => {
            console.log(`Retry ${attempt} for ${request.type}: ${error.message}`);
          }
        }
      );

      request.resolve(result);
      console.log(`Request completed: ${request.type}`);
    } catch (error) {
      console.error(`Request failed after retries: ${request.type}`, error);

      // Check if it's a connection error and we should queue for later
      if (this.isConnectionError(error) && request.retryCount < this.maxRetries) {
        request.retryCount++;
        console.log(`Re-queuing ${request.type} due to connection error`);
        // Re-queue with higher priority
        request.priority = Math.min(request.priority + 1, 10); // Bump priority by one, capped at 10
        this.addToQueue(request);
      } else {
        // Non-recoverable error or max retries reached
        request.reject(error);
      }
    } finally {
      // Remove from active
      this.activeRequests.delete(request.id);
    }
  }
  // Note: the decision of whether to retry is handled by ConnectionManager;
  // this class only computes the backoff delay passed to it.
  private calculateRetryDelay(retryCount: number): number {
    // Exponential backoff with jitter
    const baseDelay = this.retryDelay * Math.pow(2, retryCount - 1);
    const jitter = Math.random() * 0.3 * baseDelay; // 30% jitter
    return Math.min(baseDelay + jitter, 30000); // Max 30 seconds
  }
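  // With the default retryDelay of 1000 ms this yields roughly 1s, 2s, 4s, 8s, ...
  // per attempt (plus up to 30% random jitter), capped at 30 seconds.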
  private generateId(): string {
    // slice() replaces the deprecated substr(); same 9-character random suffix
    return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  }
  private delay(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  // Get queue status
  getStatus(): {
    queueLength: number;
    activeRequests: number;
    requestsPerMinute: number;
  } {
    const now = Date.now();
    const recentRequests = this.requestHistory.filter(
      time => now - time < 60000
    ).length;

    return {
      queueLength: this.queue.length,
      activeRequests: this.activeRequests.size,
      requestsPerMinute: recentRequests
    };
  }

  // Clear queue (for emergency use)
  clearQueue(): void {
    this.queue.forEach(request => {
      request.reject(new Error('Queue cleared'));
    });
    this.queue = [];
  }

  // Clear stuck requests (requests older than 60 seconds)
  clearStuckRequests(): void {
    const now = Date.now();
    const stuckThreshold = 60000; // 60 seconds

    // Clear stuck active requests
    this.activeRequests.forEach((request, id) => {
      if (now - request.timestamp > stuckThreshold) {
        console.warn(`Clearing stuck active request: ${request.type}`);
        request.reject(new Error('Request timeout - cleared by recovery'));
        this.activeRequests.delete(id);
      }
    });

    // Clear old queued requests
    this.queue = this.queue.filter(request => {
      if (now - request.timestamp > stuckThreshold) {
        console.warn(`Clearing stuck queued request: ${request.type}`);
        request.reject(new Error('Request timeout - cleared by recovery'));
        return false;
      }
      return true;
    });
  }

  // Update settings
  updateSettings(settings: {
    maxConcurrent?: number;
    maxRequestsPerMinute?: number;
    maxRequestsPerSecond?: number;
  }): void {
    if (settings.maxConcurrent !== undefined) {
      this.maxConcurrent = settings.maxConcurrent;
    }
    if (settings.maxRequestsPerMinute !== undefined) {
      this.maxRequestsPerMinute = settings.maxRequestsPerMinute;
    }
    if (settings.maxRequestsPerSecond !== undefined) {
      this.maxRequestsPerSecond = settings.maxRequestsPerSecond;
    }
  }

  // Handle connection state changes
  private handleConnectionStateChange(state: ConnectionState): void {
    console.log(`Connection state changed: ${state.status}`);

    if (state.status === 'offline' || state.status === 'error') {
      // Pause processing when offline
      this.isPaused = true;

      // Notify queued requests about offline status
      if (this.queue.length > 0) {
        console.log(`${this.queue.length} requests queued while offline`);
      }
    } else if (state.status === 'online') {
      // Resume processing when back online
      this.isPaused = false;
      console.log('Connection restored, resuming queue processing');

      // Process any queued requests
      if (this.queue.length > 0) {
        console.log(`Processing ${this.queue.length} queued requests`);
      }
    }
  }

  // Check if error is connection-related
  private isConnectionError(error: any): boolean {
    const errorMessage = error.message?.toLowerCase() || '';
    const connectionErrors = [
      'network',
      'fetch',
      'connection',
      'timeout',
      'offline',
      'cors'
    ];
    return connectionErrors.some(e => errorMessage.includes(e));
  }

  // Pause queue processing
  pause(): void {
    this.isPaused = true;
    console.log('Request queue paused');
  }

  // Resume queue processing
  resume(): void {
    this.isPaused = false;
    console.log('Request queue resumed');
  }

  // Get number of queued requests by type
  getQueuedByType(): { transcribe: number; translate: number; tts: number } {
    const counts = { transcribe: 0, translate: 0, tts: 0 };
    this.queue.forEach(request => {
      counts[request.type]++;
    });
    return counts;
  }
}
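
// Example usage (illustrative sketch; the endpoint and payload below are
// placeholders, not part of this module):
//
//   const queue = RequestQueueManager.getInstance();
//
//   // Optionally tune throughput limits, e.g. from a settings panel
//   queue.updateSettings({ maxConcurrent: 3, maxRequestsPerMinute: 60 });
//
//   // Inside an async function: enqueue a translation with elevated priority.
//   // The returned promise settles once the request (and any retries) succeeds
//   // or is finally rejected.
//   const translation = await queue.enqueue(
//     'translate',
//     () => fetch('/api/translate', { method: 'POST', body: payload }).then(r => r.json()),
//     8
//   );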