simple-in-memory-queue

A simple in-memory queue, for Node.js and the browser, with consumers for common use cases.
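
For orientation, here is a minimal sketch of how the base queue might be used, based only on the queue API visible in the file below (push, pop, peek, and on.push.subscribe). The root-level exports of createQueue and QueueOrder are an assumption, since this file only requires them from the package's internal modules.

// a minimal sketch, not the package's documented API: it assumes the package root
// re-exports createQueue and QueueOrder, which the file below requires from internal modules
const { createQueue, QueueOrder } = require('simple-in-memory-queue');

// create a first-in-first-out queue, as the file below does internally
const queue = createQueue({ order: QueueOrder.FIRST_IN_FIRST_OUT });

// subscribe to pushes, so items are consumed as soon as they are queued
queue.on.push.subscribe({
  consumer: async () => {
    const [oldest] = queue.pop(); // pop returns an array of items; take the oldest
    if (oldest) console.log('consumed', oldest);
  },
});

// push an item; the subscribed consumer picks it up
queue.push({ hello: 'world' });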

"use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.createQueueWithResilientRemoteConsumer = void 0; const simple_in_memory_cache_1 = require("simple-in-memory-cache"); const with_simple_caching_1 = require("with-simple-caching"); const constants_1 = require("../../domain/constants"); const createQueue_1 = require("../queue/createQueue"); const getMseNow = () => new Date().getTime(); /** * creates a queue with a consumer which consumes each item one at a time with a remote calls, resiliently * * features * - is event driven: processes items one at a time as soon as they are queued, w/ a `maxConcurrency` * - is resilient: retries the items up to `retryThreshold` times in a row, removing it from queue after threshold exceeded * - calls "onFailureAttempt" method when an item's failure count is below the `retryThreshold` * - calls "onFailurePermanent" method when an item's failure count exceeds the `retryThreshold` * - is intelligent: pauses consuming items if more than `pauseThreshold` errors in a row on different items * - calls "onPause" method when the `pauseThreshold` is exceeded * - is efficient: supports concurrency and non-blocking processing of items while others are delayed * - allows consuming items in parallel * - allows consumption of items while other items that have had a failure are delayed in the background * * usecases * - make calls against a remote api for each item * - automatically retry items to recover from intermittent networking errors * - intelligently pause processing if all items are failing */ const createQueueWithResilientRemoteConsumer = ({ consumer, threshold, delay, on, }) => { // create the queue const queueSource = (0, createQueue_1.createQueue)({ order: constants_1.QueueOrder.FIRST_IN_FIRST_OUT }); // create a queue to track the delayed messages const queueDelayed = (0, createQueue_1.createQueue)({ order: constants_1.QueueOrder.FIRST_IN_FIRST_OUT }); // define a method to find the next item to consume from the queues const getNextItemToConsume = () => { // see if any item in the delay queue is ready to process const oldestDelayedItem = queueDelayed.peek()[0]; // peek to not remove it from queue yet, if we cant process it const delayedItemReadyToConsume = oldestDelayedItem && oldestDelayedItem.delayedUntilMse < getMseNow(); if (delayedItemReadyToConsume) { queueDelayed.pop(); // remove it from the queue, since we're going to process it now return oldestDelayedItem; } // otherwise, get the next item from the queue const oldestQueuedItem = queueSource.pop()[0]; // pop to remove it from the queue, since we're going to process it now; if there's a failure, it will be pushed to the delayed queue if (oldestQueuedItem) return { item: oldestQueuedItem, failedAttempts: 0 }; // otherwise, return null - no item to process return null; }; // start tracking the sequential failures we've experienced let 
sequentialFailures = []; // start tracking whether consumption is paused let isConsumptionPaused = false; // define the method with which we will ensure we are consuming items resiliently and intelligently const consume = (0, with_simple_caching_1.withSimpleCaching)(() => __awaiter(void 0, void 0, void 0, function* () { var _a, _b, _c; // check that we are not paused if (isConsumptionPaused) return; // if consumption is paused, we can exit here // lookup the next item to consume const itemWithMetadata = getNextItemToConsume(); if (!itemWithMetadata) return; // if no more items, we can exit here const { item, failedAttempts } = itemWithMetadata; // try to consume the item const thisAttemptNumber = failedAttempts + 1; try { // run the consumer yield consumer({ item }); // if it was successful, clear the sequential failures sequentialFailures = []; } catch (error) { // mark this failure in the set of sequential failures sequentialFailures.push({ item, error }); // if this failure took us over the sequential failure pause threshold, pause consumption if (sequentialFailures.length >= threshold.pause) { isConsumptionPaused = true; (_a = on === null || on === void 0 ? void 0 : on.pause) === null || _a === void 0 ? void 0 : _a.call(on, { failures: sequentialFailures }); } // report that it has failed (_b = on === null || on === void 0 ? void 0 : on.failureAttempt) === null || _b === void 0 ? void 0 : _b.call(on, { item, attempt: thisAttemptNumber, error }); // if we can retry it, queue it for retry const canRetry = thisAttemptNumber < threshold.retry; if (canRetry) { // add it to the delayed queue queueDelayed.push({ item, failedAttempts: thisAttemptNumber, delayedUntilMse: getMseNow() + delay.retry, }); } // if we can't retry it, report permanent failure if (!canRetry) { // dont add it to any queues, since we're not going to try to process it again // call the hook, to notify subscribers (_c = on === null || on === void 0 ? void 0 : on.failurePermanent) === null || _c === void 0 ? void 0 : _c.call(on, { item, error }); } } // now that this item has been consumed, try and consume more if possible return consume(); }), { cache: (0, simple_in_memory_cache_1.createCache)({ defaultSecondsUntilExpiration: 0, // expire the promise as soon as it resolves -> dont allow duplicate invocations }), }); // subscribe to the source queue, ensure the consumer is running, find or creating the promise each time an item is pushed queueSource.on.push.subscribe({ consumer: () => __awaiter(void 0, void 0, void 0, function* () { // if no visibility delay, invoke the consumer immediately if (!delay.visibility) return consume(); // otherwise, invoke the consumer after the visibility delay setTimeout(() => consume(), delay.visibility); }), }); // subscribe to the delay queue, ensuring a consumer will be invoked after each delay expires queueDelayed.on.push.subscribe({ consumer: ({ items }) => __awaiter(void 0, void 0, void 0, function* () { items.forEach((item) => { const millisecondsUntilReady = item.delayedUntilMse - getMseNow(); if (process.env.NODE_ENV === 'test') console.log({ millisecondsUntilReady }); // for some reason, jest never invokes the timeout unless we log here 🙃 // TODO: find a way to eliminate this setTimeout(() => consume(), millisecondsUntilReady); }); }), }); // return the source queue return queueSource; }; exports.createQueueWithResilientRemoteConsumer = createQueueWithResilientRemoteConsumer; //# sourceMappingURL=createQueueWithResilientRemoteConsumer.js.map
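
Below is a usage sketch for the factory defined above. The option shape (consumer, threshold.retry, threshold.pause, delay.retry, delay.visibility, and the on.failureAttempt / on.failurePermanent / on.pause hooks) is taken directly from this file; the root-level import path and the remote call are assumptions for illustration.

// a usage sketch, assuming the package root re-exports createQueueWithResilientRemoteConsumer
const { createQueueWithResilientRemoteConsumer } = require('simple-in-memory-queue');

// callSomeRemoteApi is a hypothetical remote call, standing in for your own integration
const callSomeRemoteApi = async (item) => { /* e.g., an http request per item */ };

const queue = createQueueWithResilientRemoteConsumer({
  // the remote call to make for each item; invoked with `{ item }`, as seen above
  consumer: async ({ item }) => {
    await callSomeRemoteApi(item);
  },
  // allow up to 3 attempts per item; pause consumption after 5 failures in a row
  threshold: { retry: 3, pause: 5 },
  // wait 1000 ms before retrying a failed item; no visibility delay before the first attempt
  delay: { retry: 1000, visibility: 0 },
  // hooks invoked on retryable failures, permanent failures, and pauses
  on: {
    failureAttempt: ({ item, attempt, error }) => console.warn('attempt failed', { item, attempt, error }),
    failurePermanent: ({ item, error }) => console.error('gave up on item', { item, error }),
    pause: ({ failures }) => console.error('consumption paused', { failures }),
  },
});

// push items onto the returned source queue; the resilient consumer processes them as they arrive
queue.push({ id: 1 });

As the `defaultSecondsUntilExpiration: 0` comment in the file indicates, the consume loop caches its in-flight promise, so a burst of pushes reuses a single invocation chain instead of starting duplicate consumers.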