@cleotasks/core
Version:
A distributed task queue system for Node.js, inspired by Celery and BullMQ
1,243 lines (1,215 loc) β’ 138 kB
JavaScript
/******/ (() => { // webpackBootstrap
/******/ "use strict";
/******/ var __webpack_modules__ = ({
/***/ 824:
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.redisConnection = exports.RedisConnection = exports.RedisInstance = void 0;
const ioredis_1 = __importDefault(__webpack_require__(657));
const logger_1 = __webpack_require__(904);
// Named Redis connection slots (compiled TypeScript string-enum shape).
var RedisInstance;
(function (E) {
    E.DEFAULT = "default";
    E.QUEUE = "queue";
    E.WORKER = "worker";
    E.CACHE = "cache";
})(RedisInstance || (exports.RedisInstance = RedisInstance = {}));
class RedisConnection {
    // instanceId -> live ioredis client.
    connections = new Map();
    /**
     * Create and register the Redis client for `instanceId`.
     * Config values arrive as strings (REDIS_PORT, REDIS_DB, REDIS_TLS).
     * Returns the new client.
     */
    initializeInstance(instanceId, config) {
        logger_1.logger.info("File: redis.ts π, Line: 15, Function: initializeInstance; Initializing Redis connection", { instanceId });
        // Bug fix: a repeat call for the same instanceId previously overwrote the
        // map entry, leaking the old socket. Disconnect it before replacing.
        const existing = this.connections.get(instanceId);
        if (existing) {
            existing.disconnect();
        }
        const redisConfig = {
            host: config.REDIS_HOST,
            port: parseInt(config.REDIS_PORT, 10),
            password: config.REDIS_PASSWORD,
            tls: config.REDIS_TLS === 'true' ? {} : undefined,
            db: config.REDIS_DB ? parseInt(config.REDIS_DB, 10) : 0,
            // Required for blocking queue commands (BullMQ-style workers).
            maxRetriesPerRequest: null,
        };
        const connection = new ioredis_1.default(redisConfig);
        // Attach the error handler BEFORE storing/announcing the connection so
        // early connect-time errors are not emitted as unhandled.
        connection.on('error', (error) => {
            // Enhanced error handling for authentication issues
            if (error.message && error.message.includes('NOAUTH')) {
                logger_1.logger.error("File: redis.ts π, Line: 30, Function: initializeInstance; Redis authentication required - please provide correct password", {
                    error: 'NOAUTH Authentication required',
                    instanceId,
                    hint: 'Make sure to configure Redis password in cleo.configure({ redis: { password: "your-redis-password" } })'
                });
            }
            else if (error.message && error.message.includes('WRONGPASS')) {
                logger_1.logger.error("File: redis.ts π, Line: 30, Function: initializeInstance; Redis authentication failed - incorrect password", {
                    error: 'WRONGPASS Invalid password',
                    instanceId,
                    hint: 'Check your Redis password configuration'
                });
            }
            else {
                logger_1.logger.error("File: redis.ts β, Line: 30, Function: initializeInstance; Redis connection error", { error, instanceId });
            }
        });
        this.connections.set(instanceId, connection);
        logger_1.logger.info("File: redis.ts π, Line: 39, Function: initializeInstance; Redis connection initialized", { instanceId });
        return connection;
    }
    /** ioredis options for an instance, or undefined when not initialized. */
    getConfig(instanceId = RedisInstance.DEFAULT) {
        return this.connections.get(instanceId)?.options;
    }
    /** Fetch an initialized client; throws if initializeInstance was never called. */
    getInstance(instanceId = RedisInstance.DEFAULT) {
        const connection = this.connections.get(instanceId);
        if (!connection) {
            throw new Error(`Redis connection not initialized for instance: ${instanceId}`);
        }
        return connection;
    }
}
exports.RedisConnection = RedisConnection;
exports.redisConnection = new RedisConnection();
/***/ }),
/***/ 166:
/***/ ((__unused_webpack_module, exports) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HEARTBEAT_TTL_MS = exports.HEARTBEAT_TTL = exports.HEARTBEAT_EXPIRE = exports.HEARTBEAT_INTERVAL = exports.HEARTBEAT_KEY = exports.TASK_HISTORY_EXPIRE = exports.TASK_HISTORY_KEY = exports.WORKER_KEY = exports.WORKERS_SET_KEY = exports.QUEUE_WORKERS_PREFIX = exports.QUEUE_CONFIG_PREFIX = exports.QUEUE_META_PREFIX = exports.QUEUES_SET_KEY = void 0;
// Redis key names/prefixes shared across the queue system.
// Set holding every known queue name.
exports.QUEUES_SET_KEY = "cleo:queues:all";
// Per-queue metadata / configuration / worker-set keys (append queue name).
exports.QUEUE_META_PREFIX = "cleo:queue:meta:";
exports.QUEUE_CONFIG_PREFIX = "cleo:queue:config:";
exports.QUEUE_WORKERS_PREFIX = "cleo:queue:workers:";
// Set of all registered workers, and the per-worker key prefix.
exports.WORKERS_SET_KEY = "cleo:workers:all";
exports.WORKER_KEY = "cleo:worker";
// Per-task history list prefix and its expiry (seconds).
exports.TASK_HISTORY_KEY = "cleo:task:history:";
exports.TASK_HISTORY_EXPIRE = 60 * 60 * 24 * 30; // 30 days
// Worker heartbeat key prefix and timings. INTERVAL is in milliseconds;
// EXPIRE/TTL are in seconds (TTL_MS converts TTL to milliseconds).
exports.HEARTBEAT_KEY = "cleo:heartbeat:";
exports.HEARTBEAT_INTERVAL = 10000; // 10 seconds
exports.HEARTBEAT_EXPIRE = 60 * 60 * 24 * 30; // 30 days
exports.HEARTBEAT_TTL = 60 * 60 * 24 * 30; // 30 days
exports.HEARTBEAT_TTL_MS = exports.HEARTBEAT_TTL * 1000;
/***/ }),
/***/ 770:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MonkeyCapture = MonkeyCapture;
const redis_1 = __webpack_require__(824);
const constants_1 = __webpack_require__(166);
/**
 * Wrap a task handler so its console.log calls and fetch requests are mirrored
 * into a per-task Redis log list while it runs.
 *
 * Expects the wrapped handler to be invoked as fn(job, instanceId, ...):
 * args[0] must expose `processedBy` and `id`; args[1] is the Redis instance id.
 * console.log / global.fetch are patched for the duration of the call and
 * always restored afterwards.
 */
function MonkeyCapture(fn) {
    return async (...args) => {
        const originalConsoleLog = console.log;
        const originalFetch = global.fetch;
        // job is a arg and instanceId also
        const job = args[0];
        const instanceId = args[1];
        const redis = redis_1.redisConnection.getInstance(instanceId);
        const taskHistoryKey = `${constants_1.WORKER_KEY}:${job.processedBy}:task:${job.id}:logs`;
        // Fire-and-forget persistence: log capture must not block or fail the task.
        const record = (entry) => {
            redis.lpush(taskHistoryKey, JSON.stringify({
                timestamp: new Date().toISOString(),
                ...entry,
            })).catch(() => { });
        };
        // Best-effort static scrape of the handler's source: named inner
        // functions and declared variables.
        record({
            level: "info",
            message: "internalFunctions",
            functionArgs: fn.toString().match(/function\s+(\w+)\s*\(/g),
        });
        record({
            level: "info",
            message: "internalVariables",
            functionArgs: fn.toString().match(/(var|const|let)\s+(\w+)\s*=/g),
        });
        console.log = function (...logArgs) {
            record({
                level: "info",
                message: logArgs.join(" "),
                functionArgs: args, // Log the parameters passed to the function
            });
            originalConsoleLog.apply(console, logArgs);
        };
        // logs fetch as well
        global.fetch = async (input, init) => {
            record({
                level: "info",
                message: "fetch",
                functionArgs: JSON.stringify([input, init]),
            });
            return originalFetch(input, init);
        };
        try {
            // Bug fix: invoke fn AFTER patching. The original called fn first,
            // so logs emitted before the first await were never captured.
            const result = fn(...args);
            return result instanceof Promise ? await result : result;
        }
        finally {
            console.log = originalConsoleLog;
            // Bug fix: global.fetch was never restored, leaving the hook (and a
            // closure over this task's state) installed after the task finished.
            global.fetch = originalFetch;
        }
    };
}
/***/ }),
/***/ 914:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.initializeTaskDecorator = initializeTaskDecorator;
exports.task = task;
const logger_1 = __webpack_require__(904);
const enums_1 = __webpack_require__(952);
const index_1 = __webpack_require__(344);
const utils_1 = __webpack_require__(238);
let cleoInstance = null;
/**
 * Stores the Cleo instance used by the `task` decorator. Must be called before
 * any @task decorator evaluates (the decorator throws otherwise).
 */
function initializeTaskDecorator(instance) {
    cleoInstance = instance;
}
/**
 * Method decorator factory: registers the decorated method as a task handler
 * on the queue's worker, then replaces the method with a dispatcher that
 * enqueues a task (via QueueManager.addTask) instead of running inline.
 *
 * options: { queue?, group?, timeout?, maxRetries?, retryDelay?, ... } —
 * merged into the per-invocation task options.
 */
function task(options = {}) {
    return function (target, propertyKey, descriptor) {
        const originalMethod = descriptor.value;
        const methodName = String(propertyKey);
        // Decorators evaluate at class-definition time; Cleo must be configured first.
        if (!cleoInstance) {
            throw new Error("Task decorator used before Cleo initialization");
        }
        const queueName = options.queue || "default";
        const queueManager = cleoInstance.getQueueManager();
        // Get or create queue
        let queue = queueManager.getQueue(queueName);
        if (!queue) {
            logger_1.logger.warn("π₯ Task Decorator: Creating new queue", {
                file: "task.ts",
                function: methodName,
                queueName,
            });
            queue = queueManager.createQueue(queueName, {
                connection: index_1.redisConnection.getInstance("default"),
            });
        }
        // Get or initialize worker
        const worker = queueManager.getWorker(queueName);
        if (!worker) {
            throw new Error(`No worker found for queue ${queueName}`);
        }
        // Register task handler — the worker invokes this when a task with
        // this method name is pulled from the queue.
        worker.registerTask(methodName, async function (...args) {
            logger_1.logger.debug("π― Task Decorator: Executing task", {
                file: "task.ts",
                function: methodName,
                args,
            });
            return originalMethod.apply(this, args);
        });
        logger_1.logger.info("π― Task Decorator: Task registered", {
            file: "task.ts",
            function: methodName,
            taskState: enums_1.TaskState.WAITING,
            group: options.group,
        });
        // Replace original method with the enqueueing dispatcher.
        descriptor.value = async function (...args) {
            const startTime = Date.now();
            let taskId;
            let timeoutId;
            let isSettled = false;
            try {
                // Per-call options: unique id, defaults for timeout/retries.
                const taskOptions = {
                    ...options,
                    id: `${methodName}-${(0, utils_1.generateUUID)()}`,
                    timeout: options.timeout || 30000,
                    maxRetries: options.maxRetries || 3,
                    retryDelay: options.retryDelay || 3000,
                };
                taskId = taskOptions.id;
                const task = await queueManager.addTask(methodName, {
                    args,
                    context: this,
                }, taskOptions);
                if (taskOptions.group) {
                    // Grouped task: also register with the TaskGroup and wait for
                    // completion/failure events (or timeout / abort).
                    const group = await queueManager.getGroup(taskOptions.group);
                    await group.addTask(methodName, taskOptions, {
                        args,
                        context: this,
                    });
                    return new Promise((resolve, reject) => {
                        timeoutId = setTimeout(() => {
                            if (!isSettled) {
                                isSettled = true;
                                cleanup();
                                reject(new Error("Task processing timeout"));
                            }
                        }, taskOptions.timeout);
                        // NOTE(review): offTaskEvent(event) appears to remove ALL
                        // listeners for that event, not just this task's — confirm
                        // QueueManager semantics before relying on concurrent calls.
                        const cleanup = () => {
                            if (taskId) {
                                queueManager.offTaskEvent(enums_1.ObserverEvent.TASK_COMPLETED);
                                queueManager.offTaskEvent(enums_1.ObserverEvent.TASK_FAILED);
                            }
                            clearTimeout(timeoutId);
                        };
                        // Let TaskGroup handle the completion
                        queueManager.onTaskEvent(enums_1.ObserverEvent.TASK_COMPLETED, (completedTaskId, status, data) => {
                            if (!isSettled && completedTaskId === taskId) {
                                isSettled = true;
                                cleanup();
                                resolve(data?.result);
                            }
                        });
                        // Let TaskGroup handle the failure
                        queueManager.onTaskEvent(enums_1.ObserverEvent.TASK_FAILED, (failedTaskId, status, data) => {
                            if (!isSettled && failedTaskId === taskId) {
                                isSettled = true;
                                cleanup();
                                reject(data?.error || new Error("Task failed"));
                            }
                        });
                        // Handle cancellation through TaskGroup when the first
                        // argument is an AbortSignal.
                        if (typeof AbortSignal !== "undefined" &&
                            args[0] instanceof AbortSignal) {
                            const signal = args[0];
                            // NOTE(review): this early-abort path rejects without
                            // setting isSettled, so the timeout can still fire later
                            // (cleanup() has cleared it, but confirm) — review.
                            if (signal.aborted) {
                                cleanup();
                                group.stopProcessing().catch(logger_1.logger.error);
                                reject(new Error("Task was cancelled"));
                                return;
                            }
                            signal.addEventListener("abort", async () => {
                                if (!isSettled) {
                                    isSettled = true;
                                    cleanup();
                                    await group.stopProcessing();
                                    reject(new Error("Task was cancelled"));
                                }
                            }, { once: true });
                        }
                    });
                }
                // NOTE(review): non-group path returns task.result immediately
                // after addTask — presumably addTask resolves with a populated
                // result; confirm, otherwise this returns undefined.
                return task.result;
            }
            catch (error) {
                const executionTime = Date.now() - startTime;
                logger_1.logger.error("β Task Decorator: Task execution failed", {
                    file: "task.ts",
                    function: methodName,
                    taskId,
                    error,
                    executionTime,
                    group: options.group,
                });
                if (taskId) {
                    queueManager.offTaskEvent(enums_1.ObserverEvent.TASK_COMPLETED);
                    queueManager.offTaskEvent(enums_1.ObserverEvent.TASK_FAILED);
                }
                clearTimeout(timeoutId);
                throw error;
            }
        };
        // Preserve the original method name for stack traces / introspection.
        Object.defineProperty(descriptor.value, "name", {
            value: methodName,
            configurable: true,
        });
        return Object.assign(descriptor, {
            configurable: true,
            enumerable: true,
            writable: true,
        });
    };
}
/***/ }),
/***/ 73:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.TaskGroup = void 0;
const enums_1 = __webpack_require__(952);
const logger_1 = __webpack_require__(904);
const retryWithBackoff_1 = __webpack_require__(250);
const groupLock_1 = __webpack_require__(510);
class TaskGroup {
// ioredis client used for all group state.
redis;
// Redis set of task ids belonging to this group.
groupKey;
// Redis hash: taskId -> TaskStatus.
stateKey;
// Redis set of task ids currently being processed.
processingKey;
// Redis sorted set ordering tasks for selection (score mixes priority/weight/time).
processingOrderKey;
// Wired up later via connect(); null until then.
queueManager = null;
worker = null;
// Effective group configuration (defaults merged in constructor).
config;
// Sorted set of rate-limit hit timestamps.
rateLimitKey;
lockKey;
// Distributed lock helper and this process's unique holder id.
groupLock;
lockHolder;
// Background processing loop state.
isProcessing = false;
processingInterval = null;
constructor(redis, config) {
this.redis = redis;
this.config = {
concurrency: 1,
maxConcurrency: 10,
priority: 0,
strategy: enums_1.GroupProcessingStrategy.FIFO,
retryDelay: 3000,
retryLimit: 3,
timeout: 300000,
...config,
};
const { name } = this.config;
this.groupKey = `group:${name}:tasks`;
this.stateKey = `group:${name}:state`;
this.processingKey = `group:${name}:processing`;
this.processingOrderKey = `group:${name}:order`;
this.rateLimitKey = `group:${name}:rateLimit`;
this.lockKey = `group:${name}:lock`;
// Initialize GroupLock and create unique lock holder ID
this.groupLock = new groupLock_1.GroupLock(this.redis);
this.lockHolder = `${process.pid}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
logger_1.logger.info("π₯ TaskGroup: initialized", {
file: "taskGroup.ts",
line: 20,
function: "constructor",
groupName: name,
config: this.config,
lockHolder: this.lockHolder,
});
}
/**
* Connect this group to a QueueManager and Worker
*/
connect(queueManager, worker) {
this.queueManager = queueManager;
this.worker = worker;
logger_1.logger.info("π TaskGroup: connected to QueueManager and Worker", {
file: "taskGroup.ts",
line: 45,
function: "connect",
});
}
async acquireLock(timeout = 5000) {
try {
const acquired = await this.groupLock.acquireLock(this.config.name, this.lockHolder, timeout);
if (acquired) {
logger_1.logger.debug("π TaskGroup: Lock acquired", {
file: "taskGroup.ts",
function: "acquireLock",
groupName: this.config.name,
lockHolder: this.lockHolder,
timeout,
});
}
else {
logger_1.logger.debug("β³ TaskGroup: Failed to acquire lock", {
file: "taskGroup.ts",
function: "acquireLock",
groupName: this.config.name,
lockHolder: this.lockHolder,
timeout,
});
}
return acquired;
}
catch (error) {
logger_1.logger.error("β TaskGroup: Error acquiring lock", {
file: "taskGroup.ts",
function: "acquireLock",
groupName: this.config.name,
lockHolder: this.lockHolder,
error,
});
return false;
}
}
async releaseLock() {
try {
await this.groupLock.releaseLock(this.config.name, this.lockHolder);
logger_1.logger.debug("π TaskGroup: Lock released", {
file: "taskGroup.ts",
function: "releaseLock",
groupName: this.config.name,
lockHolder: this.lockHolder,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: Error releasing lock", {
file: "taskGroup.ts",
function: "releaseLock",
groupName: this.config.name,
lockHolder: this.lockHolder,
error,
});
throw error;
}
}
async checkRateLimit() {
if (!this.config.rateLimit)
return true;
const { max, duration } = this.config.rateLimit;
const now = Date.now();
const windowStart = now - duration;
// Remove old entries
await this.redis.zremrangebyscore(this.rateLimitKey, "-inf", windowStart);
// Count current entries
const count = await this.redis.zcard(this.rateLimitKey);
return count < max;
}
async trackRateLimit() {
if (!this.config.rateLimit)
return;
const now = Date.now();
await this.redis.zadd(this.rateLimitKey, now.toString(), now.toString());
}
async addTask(methodName, taskOptions, taskData) {
try {
const { id: taskId, queue: queueName, weight = 1 } = taskOptions;
if (!taskId || !queueName) {
throw new Error("Task ID and queue name are required");
}
// Check rate limit and acquire lock
if (!(await this.checkRateLimit())) {
throw new Error(`Rate limit exceeded for group ${this.config.name}`);
}
if (!(await this.acquireLock())) {
throw new Error("Failed to acquire lock for task addition");
}
try {
// Just add to group structures
await this.redis.sadd(this.groupKey, taskId);
const timestamp = Date.now();
const priorityScore = (this.config.priority || 0) * 1000000000000;
const weightScore = weight * 10000000000;
const orderScore = priorityScore + weightScore + timestamp;
await this.redis.zadd(this.processingOrderKey, orderScore.toString(), taskId);
await this.redis.hset(this.stateKey, taskId, enums_1.TaskStatus.WAITING);
// Store task details for later queue addition
await this.redis.hset(`${this.groupKey}:tasks:${taskId}`, {
method: methodName,
data: JSON.stringify(taskData),
options: JSON.stringify(taskOptions),
});
await this.updateStats();
}
finally {
await this.releaseLock();
}
}
catch (error) {
logger_1.logger.error("β TaskGroup: Failed to add task", {
file: "taskGroup.ts",
function: "addTask",
taskId: taskOptions.id,
error,
});
throw error;
}
}
async getAllTasks() {
// get all tasks from redis
const tasks = await this.redis.hgetall(this.stateKey);
const taskIds = Object.keys(tasks);
const result = [];
for (const taskId of taskIds) {
const options = await this.redis.hget(`${this.groupKey}:options`, taskId);
const taskData = await this.redis.hget(`${this.groupKey}:data`, taskId);
const taskMethod = await this.redis.hget(`${this.groupKey}:method`, taskId);
result.push([
taskId,
JSON.parse(options)["queue"],
JSON.parse(taskData),
taskMethod,
]);
}
return result;
}
async updateStats() {
try {
const tasks = await this.redis.hgetall(this.stateKey);
const stats = {
total: Object.keys(tasks).length,
active: 0,
completed: 0,
failed: 0,
paused: 0,
};
Object.values(tasks).forEach((status) => {
switch (status) {
case enums_1.TaskStatus.ACTIVE:
stats.active++;
break;
case enums_1.TaskStatus.COMPLETED:
stats.completed++;
break;
case enums_1.TaskStatus.FAILED:
stats.failed++;
break;
case enums_1.TaskStatus.PAUSED:
stats.paused++;
break;
}
});
// Store stats in Redis
await this.redis.hmset(`${this.groupKey}:stats`, stats);
logger_1.logger.debug("π TaskGroup: stats updated", {
file: "taskGroup.ts",
line: 150,
function: "updateStats",
stats,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to update stats", {
file: "taskGroup.ts",
line: 159,
function: "updateStats",
error,
});
}
}
async removeTask(taskId) {
try {
await Promise.all([
this.redis.srem(this.groupKey, taskId),
this.redis.hdel(this.stateKey, taskId),
this.redis.zrem(this.processingOrderKey, taskId),
this.redis.srem(this.processingKey, taskId),
]);
logger_1.logger.debug("β TaskGroup: task removed", {
file: "taskGroup.ts",
line: 54,
function: "removeTask",
taskId,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to remove task", {
file: "taskGroup.ts",
line: 61,
function: "removeTask",
taskId,
error,
});
throw error;
}
}
async getTasks() {
try {
return await this.redis.smembers(this.groupKey);
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to get tasks", {
file: "taskGroup.ts",
line: 74,
function: "getTasks",
error,
});
throw error;
}
}
async getTaskOptionsAndData(taskId) {
const options = await this.redis.hget(`${this.groupKey}:options`, taskId);
const data = await this.redis.hget(`${this.groupKey}:data`, taskId);
const method = await this.redis.hget(`${this.groupKey}:method`, taskId);
return options && data
? { options: JSON.parse(options), data: JSON.parse(data), method }
: null;
}
async getTasksWithDetails() {
try {
if (!this.queueManager) {
throw new Error("TaskGroup not connected to QueueManager");
}
const taskIds = await this.getTasks();
const tasks = await Promise.all(taskIds.map((id) => this.queueManager.getTask(id)));
return tasks.filter((task) => task !== null);
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to get tasks with details", {
file: "taskGroup.ts",
line: 74,
function: "getTasksWithDetails",
error,
});
throw error;
}
}
async getTaskStatus(taskId) {
try {
const status = await this.redis.hget(this.stateKey, taskId);
return status;
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to get task status", {
file: "taskGroup.ts",
line: 87,
function: "getTaskStatus",
taskId,
error,
});
throw error;
}
}
async updateTaskStatus(taskId, status) {
try {
const exists = await this.redis.sismember(this.groupKey, taskId);
if (!exists) {
throw new Error(`Task ${taskId} not found in group`);
}
await this.redis.hset(this.stateKey, taskId, status);
// Update task state in QueueManager if connected
if (this.queueManager) {
const task = await this.queueManager.getTask(taskId);
if (task) {
task.state = this.mapTaskStatusToState(status);
await this.queueManager.updateTask(task);
}
}
// Update group stats
await this.updateStats();
logger_1.logger.debug("π TaskGroup: task status updated", {
file: "taskGroup.ts",
line: 104,
function: "updateTaskStatus",
taskId,
status,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to update task status", {
file: "taskGroup.ts",
line: 112,
function: "updateTaskStatus",
taskId,
status,
error,
});
throw error;
}
}
mapTaskStatusToState(status) {
switch (status) {
case enums_1.TaskStatus.ACTIVE:
return enums_1.TaskState.ACTIVE;
case enums_1.TaskStatus.COMPLETED:
return enums_1.TaskState.COMPLETED;
case enums_1.TaskStatus.FAILED:
return enums_1.TaskState.FAILED;
case enums_1.TaskStatus.WAITING:
return enums_1.TaskState.WAITING;
case enums_1.TaskStatus.DELAYED:
return enums_1.TaskState.DELAYED;
default:
return enums_1.TaskState.UNKNOWN;
}
}
async getStats() {
try {
// Try to get cached stats first
const cachedStats = await this.redis.hgetall(`${this.groupKey}:stats`);
if (Object.keys(cachedStats).length > 0) {
return {
total: parseInt(cachedStats.total) || 0,
active: parseInt(cachedStats.active) || 0,
completed: parseInt(cachedStats.completed) || 0,
failed: parseInt(cachedStats.failed) || 0,
paused: parseInt(cachedStats.paused) || 0,
};
}
// If no cached stats, calculate them
await this.updateStats();
return this.getStats();
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to get stats", {
file: "taskGroup.ts",
line: 159,
function: "getStats",
error,
});
throw error;
}
}
/**
 * Atomically claim the next task to process according to the configured
 * strategy, using WATCH/MULTI optimistic concurrency (retried with backoff
 * on contention). Returns [taskId, queueName, data, method, options] or null
 * when nothing is claimable.
 */
async getNextTask() {
    return (0, retryWithBackoff_1.retryWithBackoff)(async () => {
        const multi = this.redis.multi();
        try {
            // WATCH makes the later EXEC fail if another worker touches the order set.
            await this.redis.watch(this.processingOrderKey);
            // Check concurrency limits
            const processing = await this.redis.scard(this.processingKey);
            if (processing >= (this.config.maxConcurrency || 10)) {
                await this.redis.unwatch();
                return null;
            }
            // Get tasks based on strategy
            let tasks = [];
            switch (this.config.strategy) {
                case enums_1.GroupProcessingStrategy.LIFO:
                    // Highest score = most recently added.
                    tasks = await this.redis.zrange(this.processingOrderKey, -1, -1);
                    break;
                case enums_1.GroupProcessingStrategy.PRIORITY:
                    // Highest composite score first (priority dominates the score).
                    tasks = await this.redis.zrevrange(this.processingOrderKey, 0, 0);
                    break;
                case enums_1.GroupProcessingStrategy.ROUND_ROBIN:
                    // Get all tasks and their scores
                    const allTasks = await this.redis.zrange(this.processingOrderKey, 0, -1, 'WITHSCORES');
                    if (allTasks.length === 0) {
                        tasks = [];
                    }
                    else {
                        // Find the task with the oldest processing time
                        // (WITHSCORES returns a flat [member, score, ...] array).
                        let oldestTime = Infinity;
                        let selectedTask = null;
                        for (let i = 0; i < allTasks.length; i += 2) {
                            const taskId = allTasks[i];
                            const score = parseInt(allTasks[i + 1]);
                            if (score < oldestTime) {
                                oldestTime = score;
                                selectedTask = taskId;
                            }
                        }
                        if (selectedTask) {
                            tasks = [selectedTask];
                            // Update the processing time for the selected task
                            // so other members get a turn before it repeats.
                            await this.redis.zadd(this.processingOrderKey, Date.now().toString(), selectedTask);
                        }
                    }
                    break;
                case enums_1.GroupProcessingStrategy.FIFO:
                default:
                    // Lowest score = oldest arrival.
                    tasks = await this.redis.zrange(this.processingOrderKey, 0, 0);
            }
            if (tasks.length === 0) {
                await this.redis.unwatch();
                return null;
            }
            // Claim the task: move it from the order set into the processing set
            // and record when processing started (used by recoverStuckTasks).
            const nextTask = tasks[0];
            const now = Date.now();
            multi.zrem(this.processingOrderKey, nextTask);
            multi.sadd(this.processingKey, nextTask);
            multi.hset(`${this.groupKey}:processing_start`, nextTask, now.toString());
            const results = await multi.exec();
            // EXEC returns null when the WATCHed key changed: throw so
            // retryWithBackoff re-runs the whole claim attempt.
            if (!results) {
                logger_1.logger.debug("β οΈ TaskGroup: Concurrent modification detected", {
                    file: "taskGroup.ts",
                    function: "getNextTask",
                });
                throw new Error("Concurrent modification detected");
            }
            // NOTE(review): if details are missing here the task has already been
            // moved into the processing set but null is returned — confirm whether
            // such tasks are reclaimed (recoverStuckTasks would eventually fail them).
            const taskOptions = await this.getTaskOptionsAndData(nextTask);
            if (!taskOptions)
                return null;
            const { options } = taskOptions;
            logger_1.logger.debug("π― TaskGroup: Selected next task", {
                file: "taskGroup.ts",
                function: "getNextTask",
                taskId: nextTask,
                weight: options.weight || 1,
                strategy: this.config.strategy,
            });
            return [
                nextTask,
                options.queue,
                taskOptions.data,
                taskOptions.method,
                options,
            ];
        }
        catch (error) {
            logger_1.logger.error("β TaskGroup: Failed to get next task", {
                file: "taskGroup.ts",
                function: "getNextTask",
                error,
            });
            throw error;
        }
    }, 3, 100);
}
/**
 * Claim one task (respecting concurrency + rate limit), mark it ACTIVE, and
 * hand it to the QueueManager's queue for actual execution. No-op when at
 * capacity, rate-limited, or when no task is claimable.
 */
async processNextTask() {
    if (!this.queueManager || !this.worker) {
        throw new Error("TaskGroup not connected to QueueManager and Worker");
    }
    try {
        // Check if we can process more tasks
        const processing = await this.redis.scard(this.processingKey);
        if (processing >= (this.config.concurrency || 1)) {
            return;
        }
        // Check rate limit
        if (!(await this.checkRateLimit())) {
            return;
        }
        const nextTask = await this.getNextTask();
        if (!nextTask)
            return;
        const [nextTaskId, queueName, taskData, taskMethod, taskOptions] = nextTask;
        // Update task status to processing
        await this.updateTaskStatus(nextTaskId, enums_1.TaskStatus.ACTIVE);
        // Track rate limit
        await this.trackRateLimit();
        // Add task to queue with group configuration
        // (task-level options win; group config fills the gaps).
        const enhancedOptions = {
            ...taskOptions,
            timeout: taskOptions.timeout || this.config.timeout,
            maxRetries: taskOptions.maxRetries || this.config.retryLimit,
            retryDelay: taskOptions.retryDelay || this.config.retryDelay,
        };
        const task = {
            id: nextTaskId,
            name: taskMethod,
            data: taskData,
            options: enhancedOptions,
            state: enums_1.TaskState.ACTIVE,
            retryCount: 0,
            createdAt: new Date(),
            updatedAt: new Date(),
        };
        // Instead of processing directly, ensure the task is in the queue
        await this.queueManager.ensureTaskInQueue(task, queueName);
        logger_1.logger.debug("π TaskGroup: Processing task", {
            file: "taskGroup.ts",
            function: "processNextTask",
            taskId: nextTaskId,
            queueName,
            strategy: this.config.strategy,
            concurrency: await this.redis.scard(this.processingKey),
        });
    }
    catch (error) {
        logger_1.logger.error("β TaskGroup: failed to process next task", {
            file: "taskGroup.ts",
            line: 190,
            function: "processNextTask",
            error,
        });
        throw error;
    }
}
async startProcessing() {
if (!this.queueManager || !this.worker) {
throw new Error("TaskGroup not connected to QueueManager and Worker");
}
if (this.isProcessing)
return;
this.isProcessing = true;
this.processingInterval = setInterval(() => {
this.processNextBatch().catch(error => {
logger_1.logger.error("β TaskGroup: Error in processing interval", {
file: "taskGroup.ts",
function: "startProcessing",
error
});
});
}, 1000);
await this.processNextBatch();
}
async stopProcessing() {
this.isProcessing = false;
if (this.processingInterval) {
clearInterval(this.processingInterval);
this.processingInterval = null;
}
}
async processNextBatch() {
if (!this.isProcessing)
return;
try {
const promises = [];
const concurrency = this.config.concurrency || 1;
// Process up to concurrency limit
for (let i = 0; i < concurrency; i++) {
promises.push(this.processNextTask());
}
await Promise.all(promises);
logger_1.logger.debug("β
TaskGroup: Processed batch", {
file: "taskGroup.ts",
function: "processNextBatch",
groupName: this.config.name,
concurrency
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: Failed to process batch", {
file: "taskGroup.ts",
function: "processNextBatch",
error
});
}
}
async pauseAll() {
try {
const taskIds = await this.getTasks();
await Promise.all(taskIds.map((taskId) => this.updateTaskStatus(taskId, enums_1.TaskStatus.PAUSED)));
logger_1.logger.info("βΈοΈ TaskGroup: all tasks paused", {
file: "taskGroup.ts",
line: 173,
function: "pauseAll",
tasksCount: taskIds.length,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to pause all tasks", {
file: "taskGroup.ts",
line: 180,
function: "pauseAll",
error,
});
throw error;
}
}
async resumeAll() {
try {
const taskIds = await this.getTasks();
await Promise.all(taskIds.map((taskId) => this.updateTaskStatus(taskId, enums_1.TaskStatus.ACTIVE)));
logger_1.logger.info("βΆοΈ TaskGroup: all tasks resumed", {
file: "taskGroup.ts",
line: 194,
function: "resumeAll",
tasksCount: taskIds.length,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: failed to resume all tasks", {
file: "taskGroup.ts",
line: 201,
function: "resumeAll",
error,
});
throw error;
}
}
async completeTask(taskId) {
if (!(await this.acquireLock())) {
throw new Error("Failed to acquire lock for task completion");
}
try {
// Update task status
await this.updateTaskStatus(taskId, enums_1.TaskStatus.COMPLETED);
// Remove from processing order and group
await this.redis.zrem(this.processingOrderKey, taskId);
await this.redis.srem(this.groupKey, taskId);
// Clean up task data
await this.redis.hdel(`${this.groupKey}:options`, taskId);
await this.redis.hdel(`${this.groupKey}:data`, taskId);
await this.redis.hdel(`${this.groupKey}:method`, taskId);
// Update stats
await this.updateStats();
logger_1.logger.debug("β
TaskGroup: Task completed", {
file: "taskGroup.ts",
function: "completeTask",
taskId,
group: this.config.name,
});
}
finally {
await this.releaseLock();
}
}
/**
 * Handle a task failure: requeue it with an incremented retry count while
 * under the retry limit, otherwise mark it FAILED and forward it to the
 * dead-letter queue when one is configured.
 */
async failTask(taskId, error) {
    try {
        const taskOptions = await this.getTaskOptionsAndData(taskId);
        if (!taskOptions) {
            throw new Error(`Task ${taskId} not found`);
        }
        // HINCRBY is atomic, so concurrent failures count correctly.
        const retryCount = await this.redis.hincrby(`${this.groupKey}:retries`, taskId, 1);
        if (retryCount <= (this.config.retryLimit || 3)) {
            // Retry the task: release its processing slot and put it back in
            // the order set scored by "now" (so it goes to the back under FIFO).
            await Promise.all([
                this.redis.srem(this.processingKey, taskId),
                this.redis.hdel(`${this.groupKey}:processing_start`, taskId),
                this.redis.zadd(this.processingOrderKey, Date.now().toString(), taskId),
            ]);
            logger_1.logger.info("π TaskGroup: Retrying failed task", {
                file: "taskGroup.ts",
                function: "failTask",
                taskId,
                retryCount,
                maxRetries: this.config.retryLimit,
            });
            // Wait for retry delay
            // NOTE(review): this blocks the caller for the whole delay and then
            // triggers a generic pickup (not necessarily this task) — review.
            await new Promise((resolve) => setTimeout(resolve, this.config.retryDelay || 3000));
            // Process the task again
            await this.processNextTask();
        }
        else {
            // Mark as failed after max retries
            await Promise.all([
                this.redis.srem(this.processingKey, taskId),
                this.redis.hdel(`${this.groupKey}:processing_start`, taskId),
                this.updateTaskStatus(taskId, enums_1.TaskStatus.FAILED),
            ]);
            // Move to dead letter queue if available
            if (this.queueManager?.deadLetterQueue) {
                const { options } = taskOptions;
                await this.queueManager.deadLetterQueue.addFailedTask(taskOptions.data, error, options.queue);
            }
            logger_1.logger.error("β TaskGroup: Task failed permanently", {
                file: "taskGroup.ts",
                function: "failTask",
                taskId,
                error,
                retries: retryCount,
            });
        }
    }
    catch (error) {
        // NOTE: this catch binding shadows the `error` parameter; only the
        // internal failure is logged/rethrown here.
        logger_1.logger.error("β TaskGroup: Error handling task failure", {
            file: "taskGroup.ts",
            function: "failTask",
            taskId,
            error,
        });
        throw error;
    }
}
async recoverStuckTasks(maxProcessingTime = 300000) {
try {
const processingTasks = await this.redis.smembers(this.processingKey);
const now = Date.now();
for (const taskId of processingTasks) {
const startTime = await this.redis.hget(`${this.groupKey}:processing_start`, taskId);
if (!startTime)
continue;
const processingDuration = now - parseInt(startTime);
if (processingDuration > (this.config.timeout || maxProcessingTime)) {
// Get task details
const taskOptions = await this.getTaskOptionsAndData(taskId);
if (!taskOptions)
continue;
logger_1.logger.warn("β οΈ TaskGroup: Found stuck task", {
file: "taskGroup.ts",
function: "recoverStuckTasks",
taskId,
processingDuration,
timeout: this.config.timeout,
});
// Handle as failure
await this.failTask(taskId, new Error(`Task timed out after ${processingDuration}ms`));
}
}
}
catch (error) {
logger_1.logger.error("β TaskGroup: Failed to recover stuck tasks", {
file: "taskGroup.ts",
function: "recoverStuckTasks",
error,
});
}
}
async cleanup() {
try {
// Stop processing if running
if (this.isProcessing) {
await this.stopProcessing();
}
// Release any held locks
try {
await this.releaseLock();
}
catch (error) {
// Lock might not be held, which is fine
logger_1.logger.debug("π TaskGroup: Lock release during cleanup", {
file: "taskGroup.ts",
function: "cleanup",
groupName: this.config.name,
lockHolder: this.lockHolder,
note: "Lock might not have been held",
});
}
// Get all keys related to this group
const keys = await this.redis.keys(`group:${this.config.name}:*`);
if (keys.length > 0) {
await this.redis.del(...keys);
}
logger_1.logger.info("π§Ή TaskGroup: Cleaned up group data", {
file: "taskGroup.ts",
function: "cleanup",
groupName: this.config.name,
keysRemoved: keys.length,
lockHolder: this.lockHolder,
});
}
catch (error) {
logger_1.logger.error("β TaskGroup: Failed to cleanup group", {
file: "taskGroup.ts",
function: "cleanup",
error,
});
throw error;
}
}
async hasAvailableTasks() {
const taskIds = await this.redis.smembers(this.groupKey);
const processing = await this.redis.smembers(this.processingKey);
return taskIds.length > processing.length;
}
async updateConfig(updates) {
this.config = {
...this.config,
...updates,
};
logger_1.logger.debug("βοΈ TaskGroup: Configuration updated", {
file: "taskGroup.ts",
function: "updateConfig",
groupName: this.config.name,
updates,
});
}
}
exports.TaskGroup = TaskGroup;
/***/ }),
/***/ 344:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.TaskHistoryService = exports.Worker = exports.WorkerState = exports.GroupProcessingStrategy = exports.GroupOperation = exports.ObserverEvent = exports.LogLevel = exports.TaskPriority = exports.TaskStatus = exports.TaskState = exports.TaskObserver = exports.TaskGroup = exports.QueueManager = exports.task = exports.redisConnection = exports.Cleo = void 0;
const task_1 = __webpack_require__(914);
Object.defineProperty(exports, "task", ({ enumerable: true, get: function () { return task_1.task; } }));
const queueManager_1 = __webpack_require__(444);
const logger_1 = __webpack_require__(904);
const redis_1 = __webpack_require__(824);
Object.defineProperty(exports, "redisConnection", ({ enumerable: true, get: function () { return redis_1.redisConnection; } }));
const task_2 = __webpack_require__(914);
const workerManager_1 = __webpack_require__(889);
// Create a Cleo class to manage configuration
class Cleo {
static instances = new Map();
queueManager = null;
isConfigured = false;
instanceId;
task = task_1.task;
constructor(instanceId = redis_1.RedisInstance.DEFAULT) {
this.instanceId = instanceId;
}
static getInstance(instanceId = redis_1.RedisInstance.DEFAULT) {
if (!Cleo.instances.has(instanceId)) {
logger_1.logger.info("File: index.ts π, Line: 22, Function: getInstance; Creating new Cleo instance", { instanceId });
Cleo.instances.set(instanceId, new Cleo(instanceId));
}
return Cleo.instances.get(instanceId);
}
configure(config) {
try {
logger_1.logger.info("File: index.ts βοΈ, Line: 36, Function: configure; Configuring Cleo instance", {
instanceId: this.instanceId,
redisHost: config.redis.host,
redisPort: config.redis.port,
});
if (!config.redis.host || !config.redis.port) {
throw new Error("Redis host and port are required");
}
// Store instance-specific Redis configuration
const redisConfig = {
REDIS_HOST: config.redis.host,
REDIS_PORT: config.redis.port.toString(),
REDIS_PASSWORD: config.redis.password,
REDIS_TLS: config.redis.tls ? "true" : undefined,
REDIS_DB: config.redis.db?.toString(),
INSTANCE_ID: this.instanceId,
};
logger_1.logger.info("File: index.ts π, Line: 43, Function: configure; Redis configuration", { redisConfig });
// Initialize Redis connection for this instance
redis_1.redisConnection.initializeInstance(this.instanceId, redisConfig);
this.queueManager = new queueManager_1.QueueManager("default", this.instanceId, {}, config.worker);
// Initialize task decorator with this instance
(0, task_2.initializeTaskDecorator)(this);
this.isConfigured = true;
logger_1.logger.info("File: index.ts β
, Line: 56, Function: configure; Cleo configuration complete", { instanceId: this.instanceId });
}
catch (error) {
logger_1.logger.error("File: index.ts β, Line: 58, Function: configure; Co