power-queues
High-performance Redis Streams queue for Node.js with Lua-powered bulk XADD, idempotent workers, heartbeat locks, stuck-task recovery, retries, DLQ, and distributed processing.
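A minimal usage sketch, assuming PowerRedis wires up the Redis connection in its constructor (the class name and the stream/group values below are illustrative, not part of the API):

// usage-sketch.mjs (hypothetical example)
import { PowerQueues } from "power-queues";

class EmailQueue extends PowerQueues {
  stream = "emails";  // overrides the "stream" default
  group = "senders";  // overrides the "group" default
  async onExecute(id, payload, createdAt, job, key, attempt) {
    // Process one task; throwing here triggers retries and, past
    // workerMaxRetries, a move to the "emails:dlq" stream.
    console.log("sending", id, payload, "attempt", attempt);
  }
}

const q = new EmailQueue();
await q.loadScripts(true); // optional warm-up; runScript lazy-loads otherwise
await q.addTasks("emails", [{ payload: { to: "a@example.com" } }]);
await q.runQueue(); // blocks in the consumer loop until aborted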
// src/PowerQueues.ts
import { PowerRedis } from "power-redis";
import {
isObjFilled,
isObj,
isArrFilled,
isArr,
isStrFilled,
isNumNZ,
jsonDecode,
wait
} from "full-utils";
import { v4 as uuid } from "uuid";
// src/scripts.ts
var XAddBulk = `
local UNPACK = table and table.unpack or unpack
local stream = KEYS[1]
local maxlen = tonumber(ARGV[1])
local approxFlag = tonumber(ARGV[2]) == 1
local n = tonumber(ARGV[3])
local exactFlag = tonumber(ARGV[4]) == 1
local nomkstream = tonumber(ARGV[5]) == 1
local trimLimit = tonumber(ARGV[6])
local minidWindowMs = tonumber(ARGV[7]) or 0
local minidExact = tonumber(ARGV[8]) == 1
local idx = 9
local out = {}
local common_opts = {}
local co_len = 0
if nomkstream then
co_len = co_len + 1; common_opts[co_len] = 'NOMKSTREAM'
end
if minidWindowMs > 0 then
local tm = redis.call('TIME')
local now_ms = (tonumber(tm[1]) * 1000) + math.floor(tonumber(tm[2]) / 1000)
local cutoff_ms = now_ms - minidWindowMs
if cutoff_ms < 0 then cutoff_ms = 0 end
local cutoff_id = tostring(cutoff_ms) .. '-0'
co_len = co_len + 1; common_opts[co_len] = 'MINID'
co_len = co_len + 1; common_opts[co_len] = (minidExact and '=' or '~')
co_len = co_len + 1; common_opts[co_len] = cutoff_id
if trimLimit and trimLimit > 0 then
co_len = co_len + 1; common_opts[co_len] = 'LIMIT'
co_len = co_len + 1; common_opts[co_len] = trimLimit
end
elseif maxlen and maxlen > 0 then
co_len = co_len + 1; common_opts[co_len] = 'MAXLEN'
if exactFlag then
co_len = co_len + 1; common_opts[co_len] = '='
elseif approxFlag then
co_len = co_len + 1; common_opts[co_len] = '~'
end
co_len = co_len + 1; common_opts[co_len] = maxlen
if trimLimit and trimLimit > 0 then
co_len = co_len + 1; common_opts[co_len] = 'LIMIT'
co_len = co_len + 1; common_opts[co_len] = trimLimit
end
end
for e = 1, n do
local id = ARGV[idx]; idx = idx + 1
local num_pairs = tonumber(ARGV[idx]); idx = idx + 1
local a = {}
local a_len = 0
for i = 1, co_len do a_len = a_len + 1; a[a_len] = common_opts[i] end
a_len = a_len + 1; a[a_len] = id
for j = 1, (num_pairs * 2) do
a_len = a_len + 1; a[a_len] = ARGV[idx]; idx = idx + 1
end
local addedId = redis.call('XADD', stream, UNPACK(a))
out[#out+1] = addedId or ''
end
return out
`;
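// ARGV wire format for XAddBulk (built by payloadBatch below): 8 header args
// [maxlen, approx, n, exact, nomkstream, trimLimit, minidWindowMs, minidExact]
// followed, per entry, by [id, numPairs, field1, value1, ...]. For example,
// two single-field entries with no trimming:
//   ['0','1','2','0','0','0','0','0', '*','1','k','v', '*','1','k2','v2']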
var Approve = `
local stream = KEYS[1]
local group = ARGV[1]
local delFlag = tonumber(ARGV[2]) == 1
local acked = 0
local nids = #ARGV - 2
if nids > 0 then
acked = tonumber(redis.call('XACK', stream, group, unpack(ARGV, 3))) or 0
if delFlag and nids > 0 then
local ok, deln = pcall(redis.call, 'XDEL', stream, unpack(ARGV, 3))
if not ok then
deln = 0
for i = 3, #ARGV do
deln = deln + (tonumber(redis.call('XDEL', stream, ARGV[i])) or 0)
end
end
end
end
return acked
`;
var IdempotencyAllow = `
local doneKey = KEYS[1]
local lockKey = KEYS[2]
local startKey = KEYS[3]
if redis.call('EXISTS', doneKey) == 1 then
return 1
end
local ttl = tonumber(ARGV[1]) or 0
if ttl <= 0 then return 0 end
local ok = redis.call('SET', lockKey, ARGV[2], 'NX', 'PX', ttl)
if ok then
if startKey and startKey ~= '' then
redis.call('SET', startKey, 1, 'PX', ttl)
end
return 2
else
return 0
end
`;
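// IdempotencyAllow return codes: 1 = task already completed (ack and skip),
// 2 = lock acquired (safe to execute), 0 = contended (lock held elsewhere)
// or a non-positive lock TTL.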
var IdempotencyStart = `
local lockKey = KEYS[1]
local startKey = KEYS[2]
if redis.call('GET', lockKey) == ARGV[1] then
local ttl = tonumber(ARGV[2]) or 0
if ttl > 0 then
redis.call('SET', startKey, 1, 'PX', ttl)
redis.call('PEXPIRE', lockKey, ttl)
else
redis.call('SET', startKey, 1)
end
return 1
end
return 0
`;
var IdempotencyDone = `
local doneKey = KEYS[1]
local lockKey = KEYS[2]
local startKey = KEYS[3]
redis.call('SET', doneKey, 1)
local ttlMs = tonumber(ARGV[1]) or 0
if ttlMs > 0 then
redis.call('PEXPIRE', doneKey, ttlMs)
end
if redis.call('GET', lockKey) == ARGV[2] then
redis.call('DEL', lockKey)
if startKey and startKey ~= '' then
redis.call('DEL', startKey)
end
end
return 1
`;
var IdempotencyFree = `
local lockKey = KEYS[1]
local startKey = KEYS[2]
if redis.call('GET', lockKey) == ARGV[1] then
redis.call('DEL', lockKey)
if startKey and startKey ~= '' then redis.call('DEL', startKey) end
return 1
end
return 0
`;
var SelectStuck = `
local stream = KEYS[1]
local group = ARGV[1]
local consumer = ARGV[2]
local pendingIdleMs = tonumber(ARGV[3])
local count = tonumber(ARGV[4]) or 0
if count < 1 then count = 1 end
local timeBudgetMs = tonumber(ARGV[5]) or 15
local t0 = redis.call('TIME')
local start_ms = (tonumber(t0[1]) * 1000) + math.floor(tonumber(t0[2]) / 1000)
local results = {}
local collected = 0
local start_id = '0-0'
local iters = 0
local max_iters = math.max(16, math.ceil(count / 100))
local function time_exceeded()
local t1 = redis.call('TIME')
local now_ms = (tonumber(t1[1]) * 1000) + math.floor(tonumber(t1[2]) / 1000)
return (now_ms - start_ms) >= timeBudgetMs
end
while (collected < count) and (iters < max_iters) do
local to_claim = count - collected
if to_claim < 1 then break end
local claim = redis.call('XAUTOCLAIM', stream, group, consumer, pendingIdleMs, start_id, 'COUNT', to_claim)
iters = iters + 1
local bucket = nil
if claim then
bucket = claim[2]
end
if bucket and #bucket > 0 then
for i = 1, #bucket do
results[#results+1] = bucket[i]
end
collected = #results
end
local next_id = claim and claim[1] or start_id
if next_id == start_id then
local s, seq = string.match(start_id, '^(%d+)%-(%d+)$')
if s and seq then
start_id = s .. '-' .. tostring(tonumber(seq) + 1)
else
start_id = '0-1'
end
else
start_id = next_id
end
if time_exceeded() then
break
end
end
local left = count - collected
if left > 0 then
local xr = redis.call('XREADGROUP', 'GROUP', group, consumer, 'COUNT', left, 'STREAMS', stream, '>')
if xr and xr[1] and xr[1][2] then
local entries = xr[1][2]
for i = 1, #entries do
results[#results+1] = entries[i]
end
end
end
return results
`;
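// SelectStuck first reclaims entries pending longer than pendingIdleMs via
// cursor-based XAUTOCLAIM (bounded by count, max_iters, and a wall-clock time
// budget), then tops the batch up with fresh entries via XREADGROUP '>'.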
// src/PowerQueues.ts
var PowerQueues = class extends PowerRedis {
constructor() {
super(...arguments);
this.abort = new AbortController();
this.scripts = {};
this.addingBatchTasksCount = 800;
this.addingBatchKeysLimit = 1e4;
this.workerExecuteLockTimeoutMs = 18e4;
this.workerCacheTaskTimeoutMs = 6e4;
this.approveBatchTasksCount = 2e3;
this.removeOnExecuted = false;
this.executeBatchAtOnce = false;
this.executeJobStatus = false;
this.executeJobStatusTtlMs = 3e5;
this.consumerHost = "host";
this.stream = "stream";
this.group = "group";
this.workerBatchTasksCount = 200;
this.recoveryStuckTasksTimeoutMs = 6e4;
this.workerLoopIntervalMs = 5e3;
this.workerSelectionTimeoutMs = 80;
this.workerMaxRetries = 1;
this.workerClearAttemptsTimeoutMs = 864e5;
}
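// Lifecycle hooks: intentionally empty defaults, meant to be overridden by
// subclasses (onExecute does the actual work; the rest observe queue events).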
async onSelected(data) {
return data;
}
async onExecute(id, payload, createdAt, job, key, attempt) {
}
async onReady(data) {
}
async onSuccess(id, payload, createdAt, job, key) {
}
async onBatchError(err, tasks) {
}
async onError(err, id, payload, createdAt, job, key) {
}
async onRetry(err, id, payload, createdAt, job, key, attempts) {
}
async runQueue() {
await this.createGroup("0-0");
await this.consumerLoop();
}
async consumerLoop() {
const signal = this.signal();
while (!signal?.aborted) {
try {
const tasks = await this.select();
if (!isArrFilled(tasks)) {
await wait(600);
continue;
}
const tasksP = await this.onSelected(tasks);
const ids = await this.execute(isArrFilled(tasksP) ? tasksP : tasks);
if (isArrFilled(ids)) {
await this.approve(ids);
}
} catch (err) {
await this.batchError(err);
await wait(600);
}
}
}
async addTasks(queueName, data, opts = {}) {
if (!isArrFilled(data)) {
throw new Error("Tasks is not filled.");
}
if (!isStrFilled(queueName)) {
throw new Error("Queue name is required.");
}
const job = uuid();
const batches = this.buildBatches(data, job, opts.idem);
const result = new Array(data.length);
const promises = [];
let cursor = 0;
for (const batch of batches) {
const start = cursor;
const end = start + batch.length;
cursor = end;
promises.push(async () => {
const partIds = await this.xaddBatch(queueName, ...this.payloadBatch(batch, opts));
for (let k = 0; k < partIds.length; k++) {
result[start + k] = partIds[k];
}
});
}
const runners = Array.from({ length: promises.length }, async () => {
while (promises.length) {
const promise = promises.shift();
if (promise) {
await promise();
}
}
});
if (opts.status) {
await this.redis.set(`${queueName}:${job}:total`, data.length);
await this.redis.pexpire(`${queueName}:${job}:total`, opts.statusTimeoutMs || 3e5);
}
await Promise.all(runners);
return result;
}
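// Usage sketch (illustrative values; "q" is a PowerQueues instance): tasks
// carry either an object "payload" or a pre-flattened "flat" field/value list.
//   await q.addTasks("emails", [
//     { payload: { to: "a@example.com" } },
//     { flat: ["to", "b@example.com"] },
//   ], { idem: true, maxlen: 100000, status: true });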
async loadScripts(full = false) {
const scripts = full ? [
["XAddBulk", XAddBulk],
["Approve", Approve],
["IdempotencyAllow", IdempotencyAllow],
["IdempotencyStart", IdempotencyStart],
["IdempotencyDone", IdempotencyDone],
["IdempotencyFree", IdempotencyFree],
["SelectStuck", SelectStuck]
] : [
["XAddBulk", XAddBulk]
];
for (const [name, code] of scripts) {
await this.loadScript(this.saveScript(name, code));
}
}
async loadScript(code) {
for (let i = 0; i < 3; i++) {
try {
return await this.redis.script("LOAD", code);
} catch (e) {
if (i === 2) {
throw e;
}
await new Promise((r) => setTimeout(r, 10 + Math.floor(Math.random() * 40)));
}
}
throw new Error("Load lua script failed.");
}
saveScript(name, codeBody) {
if (!isStrFilled(codeBody)) {
throw new Error("Script body is empty.");
}
this.scripts[name] = { codeBody };
return codeBody;
}
async runScript(name, keys, args, defaultCode) {
if (!this.scripts[name]) {
if (!isStrFilled(defaultCode)) {
throw new Error(`Undefined script "${name}". Save it before executing.`);
}
this.saveScript(name, defaultCode);
}
if (!this.scripts[name].codeReady) {
this.scripts[name].codeReady = await this.loadScript(this.scripts[name].codeBody);
}
try {
return await this.redis.evalsha(this.scripts[name].codeReady, keys.length, ...keys, ...args);
} catch (err) {
if (String(err?.message || "").includes("NOSCRIPT")) {
this.scripts[name].codeReady = await this.loadScript(this.scripts[name].codeBody);
return await this.redis.evalsha(this.scripts[name].codeReady, keys.length, ...keys, ...args);
}
throw err;
}
}
async xaddBatch(queueName, ...batches) {
return await this.runScript("XAddBulk", [queueName], batches, XAddBulk);
}
payloadBatch(data, opts) {
const maxlen = Math.max(0, Math.floor(opts?.maxlen ?? 0));
const approx = opts?.exact ? 0 : opts?.approx !== false ? 1 : 0;
const exact = opts?.exact ? 1 : 0;
const nomkstream = opts?.nomkstream ? 1 : 0;
const trimLimit = Math.max(0, Math.floor(opts?.trimLimit ?? 0));
const minidWindowMs = Math.max(0, Math.floor(opts?.minidWindowMs ?? 0));
const minidExact = opts?.minidExact ? 1 : 0;
const argv = [
String(maxlen),
String(approx),
String(data.length),
String(exact),
String(nomkstream),
String(trimLimit),
String(minidWindowMs),
String(minidExact)
];
for (const item of data) {
const entry = item;
const id = entry.id ?? "*";
let flat;
if ("flat" in entry && isArrFilled(entry.flat)) {
flat = entry.flat;
if (flat.length % 2 !== 0) {
throw new Error('Property "flat" must contain an even number of items (field/value pairs).');
}
} else if ("payload" in entry && isObjFilled(entry.payload)) {
flat = [];
for (const [k, v] of Object.entries(entry.payload)) {
flat.push(k, v);
}
} else {
throw new Error('Task must have "payload" or "flat".');
}
const pairs = flat.length / 2;
if (!isNumNZ(pairs)) {
throw new Error('Task "flat" must contain at least one field/value pair.');
}
argv.push(String(id));
argv.push(String(pairs));
for (const token of flat) {
argv.push(token == null ? "" : isStrFilled(token) ? token : String(token));
}
}
return argv;
}
buildBatches(tasks, job, idem) {
const batches = [];
let batch = [], realKeysLength = 0;
for (let task of tasks) {
const createdAt = task?.createdAt || Date.now();
let entry = task;
if (isObj(entry.payload)) {
entry = {
...entry,
payload: {
payload: JSON.stringify(entry.payload),
createdAt,
job
}
};
if (idem) {
entry.payload["idemKey"] = entry?.idemKey || uuid();
}
} else if (Array.isArray(entry.flat)) {
entry.flat.push("createdAt");
entry.flat.push(String(createdAt));
entry.flat.push("job");
entry.flat.push(job);
if (idem) {
entry.flat.push("idemKey");
entry.flat.push(entry?.idemKey || uuid());
}
}
const reqKeysLength = this.keysLength(entry);
if (batch.length && (batch.length >= this.addingBatchTasksCount || realKeysLength + reqKeysLength > this.addingBatchKeysLimit)) {
batches.push(batch);
batch = [];
realKeysLength = 0;
}
batch.push(entry);
realKeysLength += reqKeysLength;
}
if (batch.length) {
batches.push(batch);
}
return batches;
}
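// Batches are cut whenever adding a task would exceed addingBatchTasksCount
// entries or addingBatchKeysLimit flattened tokens, keeping each EVALSHA
// argument list bounded.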
keysLength(task) {
if ("flat" in task && Array.isArray(task.flat) && task.flat.length) {
return 2 + task.flat.length;
}
if ("payload" in task && isObj(task.payload)) {
return 2 + Object.keys(task.payload).length * 2;
}
return 2 + Object.keys(task).length * 2;
}
attemptsKey(id) {
const safeStream = this.stream.replace(/[^\w:\-]/g, "_");
const safeId = id.replace(/[^\w:\-]/g, "_");
return `q:${safeStream}:attempts:${safeId}`;
}
async incrAttempts(id) {
try {
const key = this.attemptsKey(id);
const attempts = await this.redis.incr(key);
await this.redis.pexpire(key, this.workerClearAttemptsTimeoutMs);
return attempts;
} catch (err) {
}
return 0;
}
async getAttempts(id) {
const key = this.attemptsKey(id);
const v = await this.redis.get(key);
return Number(v || 0);
}
async clearAttempts(id) {
const key = this.attemptsKey(id);
try {
await this.redis.del(key);
} catch (e) {
}
}
async success(id, payload, createdAt, job, key) {
if (this.executeJobStatus) {
const prefix = `${this.stream}:${job}:`;
await this.incr(`${prefix}ok`, this.executeJobStatusTtlMs);
await this.incr(`${prefix}ready`, this.executeJobStatusTtlMs);
}
await this.onSuccess(id, payload, createdAt, job, key);
}
async batchError(err, tasks) {
await this.onBatchError(err, tasks);
}
async error(err, id, payload, createdAt, job, key, attempt) {
if (this.executeJobStatus && attempt >= this.workerMaxRetries) {
const prefix = `${this.stream}:${job}:`;
await this.incr(`${prefix}err`, this.executeJobStatusTtlMs);
await this.incr(`${prefix}ready`, this.executeJobStatusTtlMs);
}
await this.onError(err, id, payload, createdAt, job, key);
}
async attempt(err, id, payload, createdAt, job, key, attempt) {
await this.onRetry(err, id, payload, createdAt, job, key, attempt);
}
async execute(tasks) {
const result = [];
let contended = 0, promises = [];
for (const [id, payload, createdAt, job, idemKey] of tasks) {
if (this.executeBatchAtOnce) {
promises.push((async () => {
const r = await this.executeProcess(id, payload, createdAt, job, idemKey);
if (r.id) {
result.push(id);
} else if (r.contended) {
contended++;
}
})());
} else {
const r = await this.executeProcess(id, payload, createdAt, job, idemKey);
if (r.id) {
result.push(id);
} else if (r.contended) {
contended++;
}
}
}
try {
if (this.executeBatchAtOnce && promises.length > 0) {
await Promise.all(promises);
}
await this.onReady(tasks);
if (!isArrFilled(result) && contended > (tasks.length >> 1)) {
await this.waitAbortable(15 + Math.floor(Math.random() * 35) + Math.min(250, 15 * contended + Math.floor(Math.random() * 40)));
}
} catch (err) {
await this.batchError(err, tasks);
}
return result;
}
async executeProcess(id, payload, createdAt, job, key) {
if (key) {
return await this.idempotency(id, payload, createdAt, job, key);
} else {
try {
await this.onExecute(id, payload, createdAt, job, key, await this.getAttempts(id));
await this.success(id, payload, createdAt, job, key);
return { id };
} catch (err) {
const attempt = await this.incrAttempts(id);
await this.attempt(err, id, payload, createdAt, job, key, attempt);
await this.error(err, id, payload, createdAt, job, key, attempt);
if (attempt >= this.workerMaxRetries) {
await this.addTasks(`${this.stream}:dlq`, [{
payload: {
...payload,
error: String(err?.message || err),
createdAt,
job,
id,
attempt
}
}]);
await this.clearAttempts(id);
return { id };
}
}
}
return {};
}
async approve(ids) {
if (!isArrFilled(ids)) {
return 0;
}
const approveBatchTasksCount = Math.max(500, Math.min(4e3, this.approveBatchTasksCount));
let total = 0, i = 0;
while (i < ids.length) {
const room = Math.min(approveBatchTasksCount, ids.length - i);
const part = ids.slice(i, i + room);
const approved = await this.runScript("Approve", [this.stream], [this.group, this.removeOnExecuted ? "1" : "0", ...part], Approve);
total += Number(approved || 0);
i += room;
}
return total;
}
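// Idempotent execution flow (Lua scripts above): Allow acquires the lock or
// reports done/contended; Start marks the task started and refreshes the lock;
// onExecute then runs under a heartbeat that keeps extending the lock; Done
// persists the done marker and releases, while Free releases without marking
// done so another worker may retry.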
async idempotency(id, payload, createdAt, job, key) {
const keys = this.idempotencyKeys(key);
const allow = await this.idempotencyAllow(keys);
if (allow === 1) {
return { id };
} else if (allow === 0) {
let ttl = -2;
try {
ttl = await this.redis.pttl(keys.startKey);
} catch (err) {
}
await this.waitAbortable(ttl);
return { contended: true };
}
if (!await this.idempotencyStart(keys)) {
return { contended: true };
}
const heartbeat = this.heartbeat(keys) || (() => {
});
try {
await this.onExecute(id, payload, createdAt, job, key, await this.getAttempts(id));
await this.idempotencyDone(keys);
await this.success(id, payload, createdAt, job, key);
return { id };
} catch (err) {
const attempt = await this.incrAttempts(id);
try {
await this.attempt(err, id, payload, createdAt, job, key, attempt);
await this.error(err, id, payload, createdAt, job, key, attempt);
if (attempt >= this.workerMaxRetries) {
await this.addTasks(`${this.stream}:dlq`, [{
payload: {
...payload,
error: String(err?.message || err),
createdAt,
job,
id,
attempt
}
}]);
await this.clearAttempts(id);
await this.idempotencyFree(keys);
return { id };
}
await this.idempotencyFree(keys);
} catch (err2) {
}
} finally {
heartbeat();
}
return {};
}
idempotencyKeys(key) {
const prefix = `q:${this.stream.replace(/[^\w:\-]/g, "_")}:`;
const keyP = key.replace(/[^\w:\-]/g, "_");
const doneKey = `${prefix}done:${keyP}`;
const lockKey = `${prefix}lock:${keyP}`;
const startKey = `${prefix}start:${keyP}`;
const token = `${this.consumer()}:${Date.now().toString(36)}:${Math.random().toString(36).slice(2)}`;
return {
prefix,
doneKey,
lockKey,
startKey,
token
};
}
async idempotencyAllow(keys) {
const res = await this.runScript("IdempotencyAllow", [keys.doneKey, keys.lockKey, keys.startKey], [String(this.workerExecuteLockTimeoutMs), keys.token], IdempotencyAllow);
return Number(res || 0);
}
async idempotencyStart(keys) {
const res = await this.runScript("IdempotencyStart", [keys.lockKey, keys.startKey], [keys.token, String(this.workerExecuteLockTimeoutMs)], IdempotencyStart);
return Number(res || 0) === 1;
}
async idempotencyDone(keys) {
await this.runScript("IdempotencyDone", [keys.doneKey, keys.lockKey, keys.startKey], [String(this.workerCacheTaskTimeoutMs), keys.token], IdempotencyDone);
}
async idempotencyFree(keys) {
await this.runScript("IdempotencyFree", [keys.lockKey, keys.startKey], [keys.token], IdempotencyFree);
}
async createGroup(from = "$") {
try {
await this.redis.xgroup("CREATE", this.stream, this.group, from, "MKSTREAM");
} catch (err) {
const msg = String(err?.message || "");
if (!msg.includes("BUSYGROUP")) {
throw err;
}
}
}
async select() {
let entries = await this.selectStuck();
if (!isArrFilled(entries)) {
entries = await this.selectFresh();
}
return this.normalizeEntries(entries);
}
async selectStuck() {
try {
const res = await this.runScript("SelectStuck", [this.stream], [this.group, this.consumer(), String(this.recoveryStuckTasksTimeoutMs), String(this.workerBatchTasksCount), String(this.workerSelectionTimeoutMs)], SelectStuck);
return isArr(res) ? res : [];
} catch (err) {
if (String(err?.message || "").includes("NOGROUP")) {
await this.createGroup();
}
}
return [];
}
async selectFresh() {
let entries = [];
try {
const res = await this.redis.xreadgroup(
"GROUP",
this.group,
this.consumer(),
"BLOCK",
Math.max(2, this.workerLoopIntervalMs | 0),
"COUNT",
this.workerBatchTasksCount,
"STREAMS",
this.stream,
">"
);
entries = res?.[0]?.[1] ?? [];
if (!isArrFilled(entries)) {
return [];
}
} catch (err) {
if (String(err?.message || "").includes("NOGROUP")) {
await this.createGroup();
}
}
return entries;
}
async waitAbortable(ttl) {
return new Promise((resolve) => {
const signal = this.signal();
if (signal?.aborted) {
return resolve();
}
let delay;
if (ttl > 0) {
const base = Math.max(25, Math.min(ttl, 5e3));
const jitter = Math.floor(Math.min(base, 200) * Math.random());
delay = base + jitter;
} else {
delay = 5 + Math.floor(Math.random() * 15);
}
const t = setTimeout(() => {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve();
}, delay);
t.unref?.();
function onAbort() {
clearTimeout(t);
resolve();
}
signal?.addEventListener?.("abort", onAbort, { once: true });
});
}
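// Heartbeat: while a task executes, the lock and start keys are re-extended
// roughly every lockTimeout/4 ms; after repeated pexpire failures the loop
// stops so the lock can expire and SelectStuck on a peer can reclaim the task.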
async sendHeartbeat(keys) {
try {
const r1 = await this.redis.pexpire(keys.lockKey, this.workerExecuteLockTimeoutMs);
const r2 = await this.redis.pexpire(keys.startKey, this.workerExecuteLockTimeoutMs);
const ok1 = Number(r1 || 0) === 1;
const ok2 = Number(r2 || 0) === 1;
return ok1 || ok2;
} catch {
return false;
}
}
heartbeat(keys) {
if (this.workerExecuteLockTimeoutMs <= 0) {
return;
}
const workerHeartbeatTimeoutMs = Math.max(1e3, Math.floor(Math.max(5e3, this.workerExecuteLockTimeoutMs | 0) / 4));
let timer;
let alive = true;
let hbFails = 0;
const stop = () => {
alive = false;
if (timer) {
clearTimeout(timer);
}
};
const signal = this.signal();
const onAbort = () => stop();
signal?.addEventListener?.("abort", onAbort, { once: true });
const tick = async () => {
if (!alive) {
return;
}
try {
const ok = await this.sendHeartbeat(keys);
hbFails = ok ? 0 : hbFails + 1;
if (hbFails >= 3) {
throw new Error("Heartbeat lost.");
}
} catch {
hbFails++;
if (hbFails >= 6) {
stop();
return;
}
}
timer = setTimeout(tick, workerHeartbeatTimeoutMs);
timer.unref?.();
};
timer = setTimeout(tick, workerHeartbeatTimeoutMs);
timer.unref?.();
return () => {
signal?.removeEventListener?.("abort", onAbort);
stop();
};
}
normalizeEntries(raw) {
if (!Array.isArray(raw)) {
return [];
}
return Array.from(raw || []).map((e) => {
const id = Buffer.isBuffer(e?.[0]) ? e[0].toString() : e?.[0];
const kvRaw = e?.[1] ?? [];
const kv = isArr(kvRaw) ? kvRaw.map((x) => Buffer.isBuffer(x) ? x.toString() : x) : [];
return [id, kv];
}).filter(([id, kv]) => isStrFilled(id) && isArr(kv) && (kv.length & 1) === 0).map(([id, kv]) => {
const { idemKey = "", job, createdAt, payload } = this.values(kv);
return [id, this.payload(payload), createdAt, job, idemKey];
});
}
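// Normalizes raw stream replies [[id, [f1, v1, f2, v2, ...]], ...] (strings or
// Buffers) into [id, payload, createdAt, job, idemKey] tuples for execute().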
values(value) {
const result = {};
for (let i = 0; i < value.length; i += 2) {
result[value[i]] = value[i + 1];
}
return result;
}
payload(data) {
try {
return jsonDecode(data);
} catch (err) {
}
return data;
}
signal() {
return this.abort.signal;
}
consumer() {
return `${String(this.consumerHost || "host")}:${process.pid}`;
}
};
export {
PowerQueues
};