@onvo-ai/react

The React SDK for the Onvo AI dashboard builder

interface QueueItem<T> {
  fn: () => Promise<T>;
  resolve: (value: T) => void;
  reject: (error: any) => void;
}

export class RequestQueue {
  private queue: QueueItem<any>[] = [];
  private running = 0;
  private maxConcurrent: number;

  constructor(maxConcurrent: number = 3) {
    this.maxConcurrent = maxConcurrent;
  }

  /**
   * Add a promise-returning function to the queue
   * @param fn Function that returns a Promise
   * @returns Promise that resolves with the result of the function
   */
  add<T>(fn: () => Promise<T>): Promise<T> {
    return new Promise((resolve, reject) => {
      this.queue.push({ fn, resolve, reject });
      this.processNext();
    });
  }

  /**
   * Clear all pending requests from the queue
   * Running requests will continue to completion
   */
  flush(): void {
    // Reject all pending requests
    this.queue.forEach((item) => {
      item.reject(new Error('Queue flushed'));
    });
    this.queue = [];
  }

  /**
   * Get the current queue size (pending requests)
   */
  get size(): number {
    return this.queue.length;
  }

  /**
   * Get the number of currently running requests
   */
  get activeCount(): number {
    return this.running;
  }

  /**
   * Update the maximum concurrent requests limit
   */
  setMaxConcurrent(max: number): void {
    this.maxConcurrent = max;
    // Process any queued items if we increased the limit
    while (this.running < this.maxConcurrent && this.queue.length > 0) {
      this.processNext();
    }
  }

  private async processNext(): Promise<void> {
    if (this.running >= this.maxConcurrent || this.queue.length === 0) {
      return;
    }

    const item = this.queue.shift()!;
    this.running++;

    try {
      const result = await item.fn();
      item.resolve(result);
    } catch (error) {
      item.reject(error);
    } finally {
      this.running--;
      // Process next item in queue
      this.processNext();
    }
  }
}
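
For reference, a minimal usage sketch of the queue. The endpoint paths and the concurrency limit of 2 are illustrative assumptions, not part of the @onvo-ai/react API; the class itself only cares that add() receives a function returning a Promise.

// Minimal usage sketch (assumed endpoints, assumed limit of 2).
const queue = new RequestQueue(2);

const results = Promise.all([
  queue.add(() => fetch('/api/widgets/1').then((r) => r.json())),
  queue.add(() => fetch('/api/widgets/2').then((r) => r.json())),
  // The third request stays queued until one of the first two finishes.
  queue.add(() => fetch('/api/widgets/3').then((r) => r.json())),
]);

console.log(queue.size, queue.activeCount); // pending vs. in-flight counts

// Cancel anything still waiting; the in-flight requests finish normally.
queue.flush();

results.catch(() => {
  // Flushed items reject with Error('Queue flushed'), so handle rejection here.
});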