// UNPKG — @twilio-labs/serverless-api
//
// Version: (not captured in this extract)
// 109 lines (108 loc) 4.96 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LogsStream = void 0;
const stream_1 = require("stream");
const logs_1 = require("../api/logs");
/**
 * Object-mode Readable stream that polls the Twilio Serverless logs API and
 * pushes each previously-unseen log entry downstream.
 *
 * In `tail` mode it polls on an interval until destroyed; otherwise it fetches
 * a single page of logs and ends the stream (`push(null)`).
 */
class LogsStream extends stream_1.Readable {
    /**
     * @param {string} environmentSid - Twilio environment SID to read logs from.
     * @param {string} serviceSid - Twilio Serverless service SID.
     * @param {object} client - API client passed through to `listOnePageLogResources`.
     * @param {object} config - Options: `tail`, `pollingFrequency` (ms, default 1000),
     *   `logCacheSize` (default 1000), `filterByFunction`, `limit`.
     */
    constructor(environmentSid, serviceSid, client, config) {
        super({ objectMode: true });
        this.environmentSid = environmentSid;
        this.serviceSid = serviceSid;
        this.client = client;
        this.config = config;
        this._interval = undefined;
        // SIDs already emitted — used to de-duplicate across polls.
        this._viewedSids = new Set();
        // Bounded cache of { sid, dateCreated } used to evict old SIDs.
        this._viewedLogs = [];
        this._pollingFrequency = config.pollingFrequency || 1000;
        this._pollingCacheSize = config.logCacheSize || 1000;
    }
    /**
     * Update the polling frequency; restarts the active tail interval, if any.
     * @param {number} frequency - New polling interval in milliseconds.
     */
    set pollingFrequency(frequency) {
        this._pollingFrequency = frequency;
        if (this.config.tail && this._interval) {
            clearInterval(this._interval);
            this._interval = setInterval(() => {
                this._poll();
            }, this._pollingFrequency);
        }
    }
    /**
     * Fetch one page of logs, emit unseen entries (oldest first), and refresh
     * the bounded dedup cache. Errors destroy the stream.
     */
    async _poll() {
        try {
            const logs = await (0, logs_1.listOnePageLogResources)(this.environmentSid, this.serviceSid, this.client, {
                functionSid: this.config.filterByFunction,
                pageSize: this.config.limit,
            });
            // API returns newest-first; reverse so consumers see chronological order.
            logs
                .filter((log) => !this._viewedSids.has(log.sid))
                .reverse()
                .forEach((log) => {
                this.push(log);
            });
            // The logs endpoint is not reliably returning logs in the same order.
            // Therefore we need to keep a set of all previously seen log entries.
            // In order to avoid memory leaks we cap the total size of logs at 1000
            // (or the set pollingCacheSize).
            //
            // We store an array of the logs' SIDs and created dates. When a new
            // page of logs arrives, we find the unique logs, sort by date created
            // (newest to oldest) and chop off the end of the array (the oldest
            // logs), leaving the most recent logs in memory. We then turn that
            // into a set of SIDs to check against next time.
            //
            // Uniqueness is established by stringifying the SID and date together.
            // NOTE(review): new logs use the raw `date_created` field — presumably
            // an ISO-8601 string like `dateCreated.toISOString()`; verify against
            // the API response shape.
            const viewedLogsSet = new Set([
                ...this._viewedLogs.map((log) => `${log.sid}-${log.dateCreated.toISOString()}`),
                ...logs.map((log) => `${log.sid}-${log.date_created}`),
            ]);
            // Split each entry back into SID and date, sort most-to-least recent,
            // and drop anything beyond the polling cache size.
            this._viewedLogs = [...viewedLogsSet]
                .map((logString) => {
                // BUG FIX: ISO dates contain "-", so `split('-')` used to hand
                // `new Date()` only the year (e.g. "2021"), truncating every
                // timestamp to Jan 1st and corrupting the eviction order.
                // Split only on the FIRST "-" (SIDs contain no dashes).
                const separatorIndex = logString.indexOf('-');
                const sid = logString.slice(0, separatorIndex);
                const dateCreated = new Date(logString.slice(separatorIndex + 1));
                return { sid, dateCreated };
            })
                .sort((a, b) => b.dateCreated.valueOf() - a.dateCreated.valueOf())
                .slice(0, this._pollingCacheSize);
            // Finally, a set of just SIDs to compare against on the next poll.
            this._viewedSids = new Set(this._viewedLogs.map((log) => log.sid));
            if (!this.config.tail) {
                // One-shot mode: signal end-of-stream after a single page.
                this.push(null);
            }
        }
        catch (err) {
            if (err instanceof Error) {
                this.destroy(err);
            }
        }
    }
    /**
     * Readable hook: start the tail interval on first read, or run a single
     * poll in one-shot mode.
     */
    _read() {
        if (this.config.tail) {
            if (!this._interval) {
                this._interval = setInterval(() => {
                    this._poll();
                }, this._pollingFrequency);
            }
        }
        else {
            this._poll();
        }
    }
    /**
     * Readable hook: stop polling and complete teardown.
     *
     * BUG FIX: the stream contract requires `_destroy(err, callback)` to invoke
     * `callback(err)`; the previous override took no parameters and never
     * called it, so `destroy()` never finished. The callback is guarded so any
     * existing direct no-argument calls remain safe.
     * @param {Error|null} [err] - Error that triggered destruction, if any.
     * @param {Function} [callback] - Completion callback supplied by the stream machinery.
     */
    _destroy(err, callback) {
        if (this._interval) {
            clearInterval(this._interval);
            this._interval = undefined;
        }
        if (typeof callback === 'function') {
            callback(err);
        }
    }
}
exports.LogsStream = LogsStream;