/**
 * @pulzar/core — Redis-backed dead-letter queue (DLQ).
 *
 * Part of a next-generation Node.js framework for ultra-fast web applications
 * with zero-reflection DI, GraphQL, WebSockets, events, and edge runtime support.
 */
import { EventError, } from "../types";
import { logger } from "../../utils/logger";
export class RedisDLQ {
  /** ioredis client instance; assigned in connect(). */
  redis;
  /** Resolved configuration with defaults applied. */
  config;
  /** True between a successful connect() and disconnect(). */
  connected = false;

  /**
   * Create a Redis-backed dead-letter queue.
   *
   * Numeric options use `??` (not `||`) so an explicit `0`
   * (e.g. `db: 0`, `baseRetryDelay: 0`) is honored instead of
   * silently falling back to the default.
   *
   * @param {object} [config] - Connection and queue-behavior overrides.
   */
  constructor(config = {}) {
    this.config = {
      host: config.host || "localhost",
      port: config.port || 6379,
      password: config.password || "",
      db: config.db ?? 0,
      keyPrefix: config.keyPrefix || "pulzar:dlq",
      maxRetriesPerEntry: config.maxRetriesPerEntry ?? 5,
      defaultTTL: config.defaultTTL ?? 7 * 24 * 60 * 60, // 7 days, in seconds (Redis EXPIRE)
      maxSize: config.maxSize ?? 10000,
      retryStrategy: config.retryStrategy || "exponential",
      baseRetryDelay: config.baseRetryDelay ?? 1000, // milliseconds
    };
  }

  /**
   * Initialize the Redis connection. No-op if already connected.
   * @throws {EventError} REDIS_NOT_INSTALLED if ioredis is missing,
   *                      CONNECTION_FAILED on any connection error.
   */
  async connect() {
    if (this.connected) {
      return;
    }
    try {
      const Redis = await this.importRedis();
      if (!Redis) {
        throw new EventError("Redis package not installed. Run: npm install ioredis", "REDIS_NOT_INSTALLED");
      }
      this.redis = new Redis({
        host: this.config.host,
        port: this.config.port,
        password: this.config.password || undefined, // empty string -> no AUTH
        db: this.config.db,
        // NOTE(review): `retryDelayOnFailover` is not a documented ioredis option — verify intent.
        retryDelayOnFailover: 100,
        maxRetriesPerRequest: 3,
        lazyConnect: true, // connect explicitly below so failures surface here
      });
      await this.redis.connect();
      this.connected = true;
      logger.info("Redis DLQ connected", {
        host: this.config.host,
        port: this.config.port,
        db: this.config.db,
      });
    } catch (error) {
      logger.error("Failed to connect to Redis DLQ", { error });
      throw new EventError(`Redis DLQ connection failed: ${error.message}`, "CONNECTION_FAILED", undefined, error);
    }
  }

  /**
   * Gracefully disconnect from Redis. No-op if not connected.
   */
  async disconnect() {
    if (this.redis && this.connected) {
      await this.redis.quit();
      this.connected = false;
      logger.info("Redis DLQ disconnected");
    }
  }

  /**
   * Add a failed event to the DLQ. Evicts the oldest entry when the
   * queue is at `maxSize`.
   *
   * @param {object} event - The failed event (must have an `id`).
   * @param {Error} error - The failure that sent the event here.
   * @param {string} originalSubject - Subject the event was published on.
   * @throws {EventError} NOT_CONNECTED or DLQ_ADD_FAILED.
   */
  async add(event, error, originalSubject) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      // Enforce the size cap before inserting.
      const currentSize = await this.redis.llen(this.getQueueKey());
      if (currentSize >= this.config.maxSize) {
        // Drop the oldest entry (head of the list) to make space.
        await this.redis.lpop(this.getQueueKey());
        logger.warn("DLQ size limit reached, removed oldest entry", {
          maxSize: this.config.maxSize,
        });
      }
      const dlqEntry = {
        id: this.generateId(),
        event,
        error: {
          message: error.message,
          stack: error.stack,
          code: error.code,
        },
        originalSubject,
        attempts: 0,
        createdAt: new Date(),
        lastAttempt: new Date(),
        nextRetry: this.calculateNextRetry(0),
        status: "pending",
      };
      // Entry is stored twice: a JSON copy in the queue list (for ordering /
      // pagination) and a hash keyed by id (for direct lookup, with TTL).
      const pipeline = this.redis.pipeline();
      pipeline.rpush(this.getQueueKey(), JSON.stringify(dlqEntry));
      pipeline.hset(this.getEntryKey(dlqEntry.id), this.serializeEntry(dlqEntry));
      pipeline.expire(this.getEntryKey(dlqEntry.id), this.config.defaultTTL);
      pipeline.hincrby(this.getStatsKey(), "total", 1);
      pipeline.hincrby(this.getStatsKey(), "pending", 1);
      await pipeline.exec();
      logger.debug("Added event to DLQ", {
        eventId: event.id,
        dlqEntryId: dlqEntry.id,
        originalSubject,
        error: error.message,
      });
    } catch (redisError) {
      logger.error("Failed to add event to DLQ", {
        eventId: event.id,
        error: redisError,
      });
      throw new EventError(`DLQ add failed: ${redisError.message}`, "DLQ_ADD_FAILED", event, redisError);
    }
  }

  /**
   * List DLQ entries with pagination, oldest first.
   *
   * Date fields are revived to `Date` objects so entries have the same
   * shape as those returned by `get()` (JSON round-tripping turns them
   * into ISO strings otherwise, which broke `matchesFilter`'s maxAge check).
   *
   * @param {number} [limit=50]
   * @param {number} [offset=0]
   * @returns {Promise<object[]>}
   * @throws {EventError} NOT_CONNECTED or DLQ_LIST_FAILED.
   */
  async list(limit = 50, offset = 0) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const entries = await this.redis.lrange(this.getQueueKey(), offset, offset + limit - 1);
      return entries.map((entry) => this.reviveQueueEntry(JSON.parse(entry)));
    } catch (error) {
      logger.error("Failed to list DLQ entries", { error });
      throw new EventError(`DLQ list failed: ${error.message}`, "DLQ_LIST_FAILED", undefined, error);
    }
  }

  /**
   * Restore `Date` fields on an entry parsed from queue JSON.
   * @param {object} entry - Entry whose date fields may be ISO strings.
   * @returns {object} The same entry, mutated in place.
   */
  reviveQueueEntry(entry) {
    entry.createdAt = new Date(entry.createdAt);
    entry.lastAttempt = new Date(entry.lastAttempt);
    entry.nextRetry = entry.nextRetry ? new Date(entry.nextRetry) : undefined;
    return entry;
  }

  /**
   * Get a specific DLQ entry by id.
   * @param {string} id
   * @returns {Promise<object|null>} The entry, or null if missing/expired.
   * @throws {EventError} NOT_CONNECTED or DLQ_GET_FAILED.
   */
  async get(id) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const entry = await this.redis.hgetall(this.getEntryKey(id));
      // HGETALL returns {} for a missing key.
      if (!entry || Object.keys(entry).length === 0) {
        return null;
      }
      return this.deserializeEntry(entry);
    } catch (error) {
      logger.error("Failed to get DLQ entry", { id, error });
      throw new EventError(`DLQ get failed: ${error.message}`, "DLQ_GET_FAILED", undefined, error);
    }
  }

  /**
   * Mark a specific DLQ entry for retry, bumping its attempt counter.
   *
   * NOTE(review): only the hash copy is updated; the JSON copy in the
   * queue list keeps stale attempts/status — confirm whether that is intended.
   *
   * @param {string} id
   * @throws {EventError} NOT_CONNECTED, ENTRY_NOT_FOUND, MAX_RETRIES_REACHED.
   */
  async retry(id) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const entry = await this.get(id);
      if (!entry) {
        throw new EventError(`DLQ entry not found: ${id}`, "ENTRY_NOT_FOUND");
      }
      if (entry.attempts >= this.config.maxRetriesPerEntry) {
        throw new EventError(`Max retry attempts reached for entry: ${id}`, "MAX_RETRIES_REACHED");
      }
      entry.attempts++;
      entry.lastAttempt = new Date();
      entry.nextRetry = this.calculateNextRetry(entry.attempts);
      entry.status = "retrying";
      const pipeline = this.redis.pipeline();
      pipeline.hset(this.getEntryKey(id), this.serializeEntry(entry));
      // Move one unit of the stats from "pending" to "retrying".
      pipeline.hincrby(this.getStatsKey(), "retrying", 1);
      pipeline.hincrby(this.getStatsKey(), "pending", -1);
      await pipeline.exec();
      logger.info("DLQ entry marked for retry", {
        id,
        attempts: entry.attempts,
        nextRetry: entry.nextRetry,
      });
      // TODO: Trigger actual retry mechanism
      // This would typically involve republishing the event
    } catch (error) {
      logger.error("Failed to retry DLQ entry", { id, error });
      throw error;
    }
  }

  /**
   * Retry all entries matching an optional filter. Per-entry failures are
   * logged and skipped so one bad entry does not abort the batch.
   *
   * @param {object} [filter] - See matchesFilter() for supported keys.
   * @returns {Promise<number>} Count of entries successfully marked for retry.
   * @throws {EventError} NOT_CONNECTED or DLQ_RETRY_ALL_FAILED.
   */
  async retryAll(filter) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const entries = await this.list(1000); // Get first 1000 entries
      let retryCount = 0;
      for (const entry of entries) {
        if (!this.matchesFilter(entry, filter)) {
          continue;
        }
        try {
          await this.retry(entry.id);
          retryCount++;
        } catch (error) {
          logger.warn("Failed to retry DLQ entry", {
            id: entry.id,
            error: error.message,
          });
        }
      }
      logger.info("Batch retry completed", {
        totalRetried: retryCount,
        filter,
      });
      return retryCount;
    } catch (error) {
      logger.error("Failed to retry all DLQ entries", { error });
      throw new EventError(`DLQ retry all failed: ${error.message}`, "DLQ_RETRY_ALL_FAILED", undefined, error);
    }
  }

  /**
   * Remove a DLQ entry (queue copy, hash copy, and stats). No-op if the
   * entry is already gone.
   *
   * @param {string} id
   * @throws {EventError} NOT_CONNECTED or DLQ_REMOVE_FAILED.
   */
  async remove(id) {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const entry = await this.get(id);
      if (!entry) {
        return; // Already removed
      }
      const pipeline = this.redis.pipeline();
      // Removing from the middle of a Redis list requires a linear scan.
      // Note: This is inefficient for large queues; in production, consider
      // a different data structure (e.g. a sorted set keyed by id).
      const queueEntries = await this.redis.lrange(this.getQueueKey(), 0, -1);
      const entryIndex = queueEntries.findIndex((e) => JSON.parse(e).id === id);
      if (entryIndex >= 0) {
        // LREM matches by value, so overwrite the slot with a unique
        // placeholder and remove that.
        const placeholder = `__REMOVE_${Date.now()}__`;
        pipeline.lset(this.getQueueKey(), entryIndex, placeholder);
        pipeline.lrem(this.getQueueKey(), 1, placeholder);
      }
      pipeline.del(this.getEntryKey(id));
      pipeline.hincrby(this.getStatsKey(), "total", -1);
      pipeline.hincrby(this.getStatsKey(), entry.status, -1);
      await pipeline.exec();
      logger.debug("Removed DLQ entry", { id });
    } catch (error) {
      logger.error("Failed to remove DLQ entry", { id, error });
      throw new EventError(`DLQ remove failed: ${error.message}`, "DLQ_REMOVE_FAILED", undefined, error);
    }
  }

  /**
   * Clear all DLQ entries: queue list, every per-entry hash, and stats.
   * @throws {EventError} NOT_CONNECTED or DLQ_CLEAR_FAILED.
   */
  async clear() {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const pipeline = this.redis.pipeline();
      // Collect entry ids first so the per-entry hashes can be deleted too.
      const queueEntries = await this.redis.lrange(this.getQueueKey(), 0, -1);
      const entryIds = queueEntries.map((e) => JSON.parse(e).id);
      pipeline.del(this.getQueueKey());
      for (const id of entryIds) {
        pipeline.del(this.getEntryKey(id));
      }
      pipeline.del(this.getStatsKey());
      await pipeline.exec();
      logger.info("Cleared all DLQ entries", { count: entryIds.length });
    } catch (error) {
      logger.error("Failed to clear DLQ", { error });
      throw new EventError(`DLQ clear failed: ${error.message}`, "DLQ_CLEAR_FAILED", undefined, error);
    }
  }

  /**
   * Get DLQ statistics (counters plus timestamp of the oldest entry).
   * @returns {Promise<{total:number,pending:number,retrying:number,failed:number,abandoned:number,oldestEntry:Date|undefined}>}
   * @throws {EventError} NOT_CONNECTED or DLQ_STATS_FAILED.
   */
  async getStats() {
    if (!this.connected) {
      throw new EventError("Redis DLQ not connected", "NOT_CONNECTED");
    }
    try {
      const stats = await this.redis.hgetall(this.getStatsKey());
      // Oldest entry sits at the head of the queue list.
      let oldestEntry;
      const firstEntry = await this.redis.lindex(this.getQueueKey(), 0);
      if (firstEntry) {
        const parsed = JSON.parse(firstEntry);
        oldestEntry = new Date(parsed.createdAt);
      }
      return {
        total: Number.parseInt(stats.total || "0", 10),
        pending: Number.parseInt(stats.pending || "0", 10),
        retrying: Number.parseInt(stats.retrying || "0", 10),
        failed: Number.parseInt(stats.failed || "0", 10),
        abandoned: Number.parseInt(stats.abandoned || "0", 10),
        oldestEntry,
      };
    } catch (error) {
      logger.error("Failed to get DLQ stats", { error });
      throw new EventError(`DLQ stats failed: ${error.message}`, "DLQ_STATS_FAILED", undefined, error);
    }
  }

  /**
   * Calculate the next retry time from the configured strategy, capped
   * at 24 hours.
   *
   * @param {number} attempts - Number of attempts already made.
   * @returns {Date}
   */
  calculateNextRetry(attempts) {
    let delayMs;
    switch (this.config.retryStrategy) {
      case "exponential":
        delayMs = this.config.baseRetryDelay * Math.pow(2, attempts);
        break;
      case "linear":
        delayMs = this.config.baseRetryDelay * (attempts + 1);
        break;
      case "fixed":
      default:
        delayMs = this.config.baseRetryDelay;
        break;
    }
    // Cap at 24 hours.
    delayMs = Math.min(delayMs, 24 * 60 * 60 * 1000);
    return new Date(Date.now() + delayMs);
  }

  /**
   * Check whether an entry matches a filter. All provided criteria must
   * match; a missing/empty filter matches everything.
   *
   * @param {object} entry
   * @param {{subject?:string,error?:string,status?:string,maxAge?:number}} [filter]
   *        `maxAge` is in seconds.
   * @returns {boolean}
   */
  matchesFilter(entry, filter) {
    if (!filter) {
      return true;
    }
    if (filter.subject && !entry.originalSubject.includes(filter.subject)) {
      return false;
    }
    if (filter.error && !entry.error.message.includes(filter.error)) {
      return false;
    }
    if (filter.status && entry.status !== filter.status) {
      return false;
    }
    if (filter.maxAge) {
      // Coerce via new Date(): entries that came through JSON carry an
      // ISO string here, not a Date, and would otherwise throw on getTime().
      const ageMs = Date.now() - new Date(entry.createdAt).getTime();
      if (ageMs > filter.maxAge * 1000) {
        return false;
      }
    }
    return true;
  }

  /**
   * Serialize a DLQ entry into the flat string map Redis hashes require.
   * Inverse of deserializeEntry().
   * @param {object} entry
   * @returns {Record<string,string>}
   */
  serializeEntry(entry) {
    return {
      id: entry.id,
      event: JSON.stringify(entry.event),
      error: JSON.stringify(entry.error),
      originalSubject: entry.originalSubject,
      attempts: entry.attempts.toString(),
      createdAt: entry.createdAt.toISOString(),
      lastAttempt: entry.lastAttempt.toISOString(),
      nextRetry: entry.nextRetry?.toISOString() || "", // "" encodes "no retry scheduled"
      status: entry.status,
    };
  }

  /**
   * Deserialize a DLQ entry from a Redis hash. Inverse of serializeEntry().
   * @param {Record<string,string>} data
   * @returns {object}
   */
  deserializeEntry(data) {
    return {
      id: data.id || "",
      event: JSON.parse(data.event || "{}"),
      error: JSON.parse(data.error || "{}"),
      originalSubject: data.originalSubject || "",
      attempts: Number.parseInt(data.attempts || "0", 10),
      createdAt: new Date(data.createdAt || Date.now()),
      lastAttempt: new Date(data.lastAttempt || Date.now()),
      nextRetry: data.nextRetry ? new Date(data.nextRetry) : undefined,
      status: data.status || "pending",
    };
  }

  /** @returns {string} Redis key of the ordered queue list. */
  getQueueKey() {
    return `${this.config.keyPrefix}:queue`;
  }

  /** @returns {string} Redis key of the per-entry hash for `id`. */
  getEntryKey(id) {
    return `${this.config.keyPrefix}:entry:${id}`;
  }

  /** @returns {string} Redis key of the stats hash. */
  getStatsKey() {
    return `${this.config.keyPrefix}:stats`;
  }

  /**
   * Dynamically import ioredis so the dependency stays optional.
   * @returns {Promise<Function|null>} The Redis constructor, or null if not installed.
   */
  async importRedis() {
    try {
      return (await import("ioredis")).default;
    } catch (error) {
      logger.warn("Redis package not available", { error });
      return null;
    }
  }

  /**
   * Generate a unique-enough DLQ entry id (timestamp + random suffix).
   * @returns {string}
   */
  generateId() {
    return `dlq-${Date.now()}-${Math.random().toString(36).slice(2)}`;
  }
}
export default RedisDLQ;
//# sourceMappingURL=redis-dlq.js.map