# @mrboombastic/mailerlite-api-v2-node
Version:
[MailerLite API v2](https://developers.mailerlite.com/docs/getting-started-with-mailerlite-api) [Node.js](https://nodejs.org/en/) SDK. It is mostly a thin wrapper on [axios](https://github.com/axios/axios) that provides [authentication](https://developers.mailerlite.com/docs/#authentication).
79 lines (78 loc) • 2.79 kB
TypeScript
import type { RateLimitHeaders, RateLimitError } from "./types/index.js";
/**
* Utility functions for working with MailerLite API rate limits
*/
/**
 * Utility functions for working with MailerLite API rate limits.
 *
 * All members are static and take previously extracted rate limit
 * header data ({@link RateLimitHeaders}); the class declares no
 * instance state. NOTE(review): this is a declaration file — the
 * behavioral details below are inferred from names and signatures and
 * should be confirmed against the implementation.
 */
export declare class RateLimitUtils {
/**
 * Type guard: check if an error is a rate limit error.
 * When it returns `true`, `error` is narrowed to {@link RateLimitError}.
 */
static isRateLimitError(error: any): error is RateLimitError;
/**
 * Extract rate limit information from an error.
 * The signature allows `null`, presumably when the error carries no
 * usable header data — confirm against the implementation.
 */
static getRateLimitInfo(error: RateLimitError): RateLimitHeaders | null;
/**
 * Calculate how long to wait before making another request.
 * Returns a number; units (ms vs s) are not visible here — TODO confirm.
 */
static getWaitTime(headers: RateLimitHeaders): number;
/**
 * Check if we should pause before making more requests.
 * `threshold` is optional; its default and units (remaining-request
 * count vs fraction) live in the implementation — TODO confirm.
 */
static shouldPause(headers: RateLimitHeaders, threshold?: number): boolean;
/**
 * Get a human-readable description of rate limit status (for logs/UI).
 */
static getRateLimitStatus(headers: RateLimitHeaders): string;
/**
 * Create a delay that respects rate limits: the returned promise
 * presumably resolves once it is safe to issue the next request.
 */
static waitForRateLimit(headers: RateLimitHeaders): Promise<void>;
/**
 * Calculate the percentage of the rate limit already used.
 */
static getRateLimitUsagePercent(headers: RateLimitHeaders): number;
/**
 * Estimate requests per second based on current usage.
 */
static estimateRequestsPerSecond(headers: RateLimitHeaders): number;
/**
 * Create a rate-limit-aware delay for batch operations, scaled by how
 * many operations are still pending.
 */
static calculateBatchDelay(headers: RateLimitHeaders, remainingOperations: number): number;
/**
 * Format rate limit headers into a plain object suitable for logging.
 */
static formatRateLimitHeaders(headers: RateLimitHeaders): Record<string, any>;
/**
 * Check if the rate limit would be exceeded by issuing
 * `additionalRequests` more requests in the current window.
 */
static willExceedRateLimit(headers: RateLimitHeaders, additionalRequests: number): boolean;
/**
 * Get an optimal batch size based on current rate limit status,
 * optionally capped at `maxBatchSize` (default defined in the
 * implementation — TODO confirm).
 */
static getOptimalBatchSize(headers: RateLimitHeaders, maxBatchSize?: number): number;
}
/**
 * Decorator function that adds automatic rate limit handling to an
 * async function.
 *
 * NOTE(review): declaration only — presumably the wrapper retries on
 * rate limit errors up to `maxRetries` times; confirm against the
 * implementation.
 *
 * @param fn - Async function to wrap; arguments and result type are
 *   forwarded unchanged.
 * @param maxRetries - Optional retry cap; the default lives in the
 *   implementation.
 * @returns A function with the same call signature as `fn`.
 */
export declare function withRateLimit<T extends any[], R>(fn: (...args: T) => Promise<R>, maxRetries?: number): (...args: T) => Promise<R>;
/**
* Rate limit aware batch processor
*/
/**
 * Rate-limit-aware batch processor: applies an async `processor` to a
 * list of `items`, collecting per-item results and failures instead of
 * aborting on the first error (inferred from the return shape of
 * `processAll` — confirm against the implementation).
 *
 * @typeParam T - Type of the input items.
 * @typeParam R - Type produced by `processor` for each item.
 */
export declare class RateLimitBatchProcessor<T, R> {
// Items to be processed, captured at construction time.
private items;
// Per-item async worker, captured at construction time.
private processor;
// Accumulated successful results.
private results;
// Accumulated failures, paired with the item that caused each one.
private errors;
constructor(items: T[], processor: (item: T) => Promise<R>);
/**
 * Process all items with automatic rate limit handling.
 *
 * @param onProgress - Optional callback invoked as items complete, with
 *   the completed count, the total, and (when available) the latest
 *   rate limit headers.
 * @param batchSize - Optional batch size; default defined in the
 *   implementation — TODO confirm.
 * @returns Successful results plus the list of items that failed, each
 *   with its error. The error is typed `any` at this boundary; callers
 *   should narrow before use.
 */
processAll(onProgress?: (completed: number, total: number, rateLimitInfo?: RateLimitHeaders) => void, batchSize?: number): Promise<{
results: R[];
errors: Array<{
item: T;
error: any;
}>;
}>;
}