
@catbee/utils


A modular, production-grade utility toolkit for Node.js and TypeScript, designed for robust, scalable applications (including Express-based services). All utilities are tree-shakable and can be imported independently.
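
For example, a service can pull in just the helpers it needs. The snippet below is a minimal sketch: the bare package specifier is an assumption (this page does not show the package's export map), and CommonJS `require` is used to match the compiled file shown below.

// Hypothetical import; adjust the specifier to the package's actual exports.
const { retry, withTimeout, createTaskQueue } = require("@catbee/utils");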

async.utils.js (424 lines, 14.4 kB)
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.sleep = sleep; exports.debounce = debounce; exports.throttle = throttle; exports.retry = retry; exports.withTimeout = withTimeout; exports.runInBatches = runInBatches; exports.singletonAsync = singletonAsync; exports.settleAll = settleAll; exports.createTaskQueue = createTaskQueue; exports.runInSeries = runInSeries; exports.memoizeAsync = memoizeAsync; exports.abortable = abortable; exports.createDeferred = createDeferred; exports.waterfall = waterfall; exports.rateLimit = rateLimit; /** * Delays execution for a specified number of milliseconds. * * @param {number} ms - The number of milliseconds to sleep. * @returns {Promise<void>} A Promise that resolves after the given time. */ function sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } /** * Creates a debounced version of a function that delays its execution. * Provides `.cancel()` and `.flush()` methods. * * @template T * @param {T} fn - The function to debounce. * @param {number} delay - Delay in milliseconds. * @returns {T & { cancel: () => void; flush: () => void }} A debounced function. */ function debounce(fn, delay) { let timer = null; let pendingArgs = null; function debounced(...args) { pendingArgs = args; if (timer) clearTimeout(timer); timer = setTimeout(() => { fn(...pendingArgs); timer = null; }, delay); } debounced.cancel = () => { if (timer) clearTimeout(timer); timer = null; pendingArgs = null; }; debounced.flush = () => { if (timer) { clearTimeout(timer); fn(...pendingArgs); timer = null; pendingArgs = null; } }; return debounced; } /** * Creates a throttled version of a function that limits its execution rate. * Allows control over leading/trailing invocation. * * @template T * @param {T} fn - The function to throttle. * @param {number} limit - Minimum time between calls in milliseconds. * @param {{ leading?: boolean, trailing?: boolean }} [opts] - Options for leading/trailing edge throttling. * @returns {(...args: Parameters<T>) => void} A throttled function. */ function throttle(fn, limit, opts = { leading: true, trailing: false, }) { let lastCall = 0; let timer = null; let savedArgs = null; return function (...args) { const now = Date.now(); const { leading = true, trailing = false } = opts; if (!lastCall && !leading) lastCall = now; const remaining = limit - (now - lastCall); if (remaining <= 0) { if (timer) { clearTimeout(timer); timer = null; } lastCall = now; fn(...args); } else if (trailing) { savedArgs = args; if (!timer) { timer = setTimeout(() => { lastCall = leading ? Date.now() : 0; timer = null; if (savedArgs) fn(...savedArgs); }, remaining); } } }; } /** * Retries an asynchronous function a given number of times with optional delay/backoff. * * @template T * @param {() => Promise<T>} fn - The async function to retry. * @param {number} [retries=3] - Number of retry attempts. * @param {number} [delay=500] - Delay in milliseconds between retries. * @param {boolean} [backoff=false] - Use exponential backoff between attempts. * @param {(error: unknown, attempt: number) => void} [onRetry] - Callback for each retry attempt. * @returns {Promise<T>} The result of the async function if successful. * @throws {*} The last encountered error if all retries fail. */ async function retry(fn, retries = 3, delay = 500, backoff = false, onRetry) { for (let i = 0; i < retries; i++) { try { return await fn(); } catch (e) { if (i === retries - 1) throw e; if (onRetry) onRetry(e, i + 1); await sleep(backoff ? 
delay * Math.pow(2, i) : delay); } } throw new Error("Retry failed"); // should never reach here } /** * Wraps a promise and rejects it if it doesn't resolve within the specified timeout. * * @template T * @param {Promise<T>} promise - The original promise. * @param {number} ms - Timeout in milliseconds. * @param {string} [message="Operation timed out"] - Optional timeout message. * @returns {Promise<T>} A promise that resolves or rejects within the timeout. */ function withTimeout(promise, ms, message = "Operation timed out") { return new Promise((resolve, reject) => { const timeoutHandle = setTimeout(() => reject(new Error(message)), ms); promise .then((result) => { clearTimeout(timeoutHandle); resolve(result); }) .catch((err) => { clearTimeout(timeoutHandle); reject(err); }); }); } /** * Executes async tasks in true batches. * Each batch runs in parallel, but batches run sequentially. * All tasks in a batch start at the same time, next batch waits for full completion. * NOTE: For more granular concurrency, use a "queue" or "pooled" approach. * * @template T * @param {Array<() => Promise<T>>} tasks - An array of functions that return Promises. * @param {number} limit - Number of tasks to run in parallel per batch. * @returns {Promise<T[]>} A promise that resolves to an array of resolved values. */ async function runInBatches(tasks, limit) { const results = []; for (let i = 0; i < tasks.length; i += limit) { const batch = tasks.slice(i, i + limit); const batchResults = await Promise.all(batch.map((fn) => fn())); results.push(...batchResults); } return results; } /** * Wraps a function and ensures it is only called once at a time. * Calls made while one is in progress will wait for the same Promise. * Optionally, new calls can be dropped while in progress (drop=true). * * @template TArgs * @template TResult * @param {(...args: TArgs) => Promise<TResult>} fn - The async function to wrap. * @param {boolean} [drop=false] - If true, new calls while one is pending are rejected. * @returns {(...args: TArgs) => Promise<TResult>} A wrapped function with singleton behavior. */ function singletonAsync(fn, drop = false) { let promise = null; return async (...args) => { if (!promise) { promise = fn(...args).finally(() => { promise = null; }); } else if (drop) { return Promise.reject(new Error("Busy: function already running")); } return promise; }; } /** * Resolves a list of async tasks in parallel, returning both resolved and rejected results. * * @template T * @param {Array<() => Promise<T>>} tasks - Array of promise-returning functions. * @returns {Promise<PromiseSettledResult<T>[]>} Results including status and value/reason. */ async function settleAll(tasks) { return Promise.allSettled(tasks.map((task) => task())); } /** * A simple task queue that executes async tasks with a concurrency limit. * Exposes pause, resume, and queue length getters. * * @param {number} limit - Maximum number of concurrent tasks. * @returns {function & { pause: () => void, resume: () => void, length: number, isPaused: boolean }} * Enqueue function plus queue controls. */ function createTaskQueue(limit) { const queue = []; let activeCount = 0; let paused = false; const state = { /** * Pause task processing. */ pause() { paused = true; }, /** * Resume task processing. */ resume() { paused = false; next(); }, /** * The current length of the queue. * @type {number} */ get length() { return queue.length; }, /** * Whether the queue is currently paused. 
* @type {boolean} */ get isPaused() { return paused; }, }; const next = () => { if (paused || queue.length === 0 || activeCount >= limit) return; const task = queue.shift(); activeCount++; task().finally(() => { activeCount--; next(); }); }; /** * Enqueues a new async task to the queue. * * @template T * @param {() => Promise<T>} taskFn - The async task function. * @returns {Promise<T>} Promise resolving when task completes. */ const enqueue = async function (taskFn) { return new Promise((resolve, reject) => { queue.push(async () => { try { const result = await taskFn(); resolve(result); } catch (err) { reject(err); } }); next(); }); }; enqueue.pause = state.pause; enqueue.resume = state.resume; Object.defineProperty(enqueue, "length", { get: () => queue.length, }); Object.defineProperty(enqueue, "isPaused", { get: () => paused, }); return enqueue; } /** * Executes async functions sequentially and collects results. * Useful when order matters or tasks depend on each other. * * @template T * @param {Array<() => Promise<T>>} tasks - Array of promise-returning functions. * @returns {Promise<T[]>} Array of resolved values. */ async function runInSeries(tasks) { const results = []; for (const task of tasks) { results.push(await task()); } return results; } /** * Memoizes an async function, caching results for repeated calls with identical arguments. * Optional TTL (time-to-live) for cached entries. * * @template T Function return type * @template Args Function arguments types * @param {(...args: Args) => Promise<T>} fn - The async function to memoize * @param {object} [options] - Memoization options * @param {number} [options.ttl] - Cache TTL in milliseconds (optional) * @param {(args: Args) => string} [options.keyFn] - Custom key generator function * @returns {(...args: Args) => Promise<T>} Memoized function */ function memoizeAsync(fn, options = {}) { const cache = new Map(); const { ttl, keyFn = JSON.stringify } = options; return async function (...args) { const key = keyFn(args); const cached = cache.get(key); if (cached && (!ttl || Date.now() < cached.expires)) { return cached.value; } const result = await fn(...args); cache.set(key, { value: result, expires: ttl ? Date.now() + ttl : Infinity, }); return result; }; } /** * Creates an abortable version of a promise that can be cancelled using an AbortController. * * @template T * @param {Promise<T>} promise - The promise to make abortable * @param {AbortSignal} signal - AbortSignal from AbortController * @param {any} [abortValue] - Value to use when rejecting on abort * @returns {Promise<T>} Promise that rejects if the signal is aborted */ function abortable(promise, signal, abortValue = new Error("Operation aborted")) { if (signal.aborted) { return Promise.reject(abortValue); } return Promise.race([ promise, new Promise((_, reject) => { const abort = () => reject(abortValue); signal.addEventListener("abort", abort, { once: true }); promise.finally(() => signal.removeEventListener("abort", abort)); }), ]); } /** * Creates a promise with external resolve/reject functions. * Useful for creating promises that can be resolved or rejected from outside. * * @template T * @returns {[Promise<T>, (value: T | PromiseLike<T>) => void, (reason?: any) => void]} * Tuple of [promise, resolve, reject] */ function createDeferred() { let resolve; let reject; const promise = new Promise((res, rej) => { resolve = res; reject = rej; }); return [promise, resolve, reject]; } /** * Chains a series of async functions, passing the result of each to the next. 
* Similar to function composition but for async functions. * * @template T * @param {Array<(input: any) => Promise<any>>} fns - Array of async functions to compose * @returns {(input: any) => Promise<T>} Composed function */ function waterfall(fns) { return async (initialValue) => { return fns.reduce(async (acc, fn) => fn(await acc), Promise.resolve(initialValue)); }; } /** * Creates a rate limiter that ensures functions aren't called more than * a specified number of times per interval. * * @template T * @param {(...args: any[]) => Promise<T>} fn - Function to rate limit * @param {number} maxCalls - Maximum calls allowed per interval * @param {number} interval - Time interval in milliseconds * @returns {(...args: any[]) => Promise<T>} Rate limited function */ function rateLimit(fn, maxCalls, interval) { const calls = []; return async function (...args) { const now = Date.now(); calls.splice(0, calls.length, ...calls.filter((time) => now - time < interval)); if (calls.length >= maxCalls) { const oldestCall = calls[0]; const delay = interval - (now - oldestCall); await sleep(Math.max(1, delay)); // Remove potentially stale entries after sleep const currentTime = Date.now(); calls.splice(0, calls.length, ...calls.filter((time) => currentTime - time < interval)); // If still at limit after sleep, wait for another cycle if (calls.length >= maxCalls) { const nextDelay = interval - (currentTime - calls[0]); await sleep(Math.max(1, nextDelay)); calls.splice(0, calls.length, ...calls.filter((time) => Date.now() - time < interval)); } } calls.push(Date.now()); return fn(...args); }; } //# sourceMappingURL=async.utils.js.map
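
The utilities above compose well because each one simply takes and returns Promises. The following usage sketch is illustrative only: the import specifier and the `fetchUser` helper are assumptions made for this example (they are not part of the package), and a global `fetch` is assumed to be available (Node.js 18+).

// Illustrative usage sketch; not part of the package source.
// The import specifier is an assumption -- adjust it to the package's real exports.
const { retry, withTimeout, memoizeAsync, createTaskQueue } = require("@catbee/utils");

// Hypothetical helper used only for this example (requires global fetch, Node.js 18+).
async function fetchUser(id) {
  const res = await fetch(`https://example.com/users/${id}`);
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  return res.json();
}

// Up to 3 attempts with exponential backoff (500 ms, then 1 s between attempts),
// with the whole operation capped at 5 seconds.
const getUser = (id) =>
  withTimeout(
    retry(() => fetchUser(id), 3, 500, true, (err, attempt) => {
      console.warn(`attempt ${attempt} failed`, err);
    }),
    5000,
    "fetchUser timed out"
  );

// Cache results per argument list for 60 seconds.
const getUserCached = memoizeAsync(getUser, { ttl: 60000 });

// Run many lookups with at most 5 in flight at any time.
const enqueue = createTaskQueue(5);
const ids = [1, 2, 3, 4, 5, 6, 7, 8];
Promise.all(ids.map((id) => enqueue(() => getUserCached(id))))
  .then((users) => console.log(`loaded ${users.length} users`))
  .catch((err) => console.error("lookup failed", err));

Because every helper wraps a plain Promise, they can be layered in whatever order a given endpoint needs, for instance retrying inside an overall timeout (as above) versus timing out each individual attempt.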