// @netlify/content-engine — compiled CommonJS output (ledger-reader.js)
// Version: unspecified • 119 lines • 5.2 kB • JavaScript
// TypeScript interop helper: normalizes a required module so that
// `.default` access works whether the module is a transpiled ES module
// (already carries `__esModule`) or a plain CommonJS export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LedgerReader = void 0;
const got_1 = __importDefault(require("got"));
const fastq_1 = __importDefault(require("fastq"));
const stream_1 = __importDefault(require("stream"));
const util_1 = __importDefault(require("util"));
const Parser_1 = require("stream-json/jsonl/Parser");
const cache_lmdb_1 = __importDefault(require("../../utils/cache-lmdb"));
// Promisified stream.pipeline: lets `read()` await stream completion and
// surface any stream error as a rejected promise.
const pipeline = util_1.default.promisify(stream_1.default.pipeline);
// Streams "ledger" actions (JSON Lines) from a remote content-engine server,
// dispatches each parsed action to handler callbacks, and persists the last
// synchronized block id in an LMDB-backed cache so a later `read()` with the
// same cacheId resumes where the previous one stopped.
class LedgerReader {
// Ledger identifier; used in the cache name and the request URL path.
resourceId;
// Base URL of the server the ledger is streamed from.
serverUrl;
// Optional callback invoked once per parsed action (if it is a function).
handleAction;
// LMDB-backed cache storing the resume offset per cacheId.
cache;
// @param input.resourceId - ledger id.
// @param input.serverUrl - ledger server base URL.
// @param input.handleAction - optional per-action callback.
// @param input.directory - directory for the LMDB cache files.
constructor(input) {
this.resourceId = input.resourceId;
this.serverUrl = input.serverUrl;
this.handleAction = input.handleAction;
this.cache = new cache_lmdb_1.default({
name: `ledger-reader-cache-${this.resourceId}`,
encoding: `json`,
directory: input.directory,
}).init();
}
/**
 * Streams actions from the server, starting at the block id cached for
 * `cacheId` (or "0" on first run), and dispatches each action to
 * `this.handleAction` and/or `onAction`.
 *
 * @param cacheId - scopes the resume-offset cache key and the request path.
 * @param endBlockVersionId - optional upper bound; omitted means "latest".
 * @param headers - extra HTTP headers forwarded to the server.
 * @param configurationId - path segment of the request URL.
 * @param onAction - optional per-action callback, in addition to handleAction.
 * @returns total number of dispatched actions.
 * @throws the JSONL parse error if decoding fails, or an Error if the stream
 *   closed without an END_SOURCING action.
 */
async read({ cacheId, endBlockVersionId, headers, configurationId, onAction, }) {
const previousBlockIdCacheKey = `PREVIOUS_BLOCK_ID_${cacheId}`;
// Resume from the last persisted offset; "0" means start from the beginning.
const startBlockVersionId = (await this.cache.get(previousBlockIdCacheKey)) || "0";
console.info(`[content-engine] synchronizing data from ${this.serverUrl} for ledger id ${this.resourceId} with cache id ${cacheId} from block id ${startBlockVersionId} to block id ${endBlockVersionId || "latest"}`);
const httpStream = await this.createReadStream({
cacheId,
configurationId,
endBlockVersionId,
startBlockVersionId,
headers,
});
// Resolves once the server's response headers have been inspected and — when
// the server reported a new offset — that offset has been persisted to the
// cache. Awaited at the end of read() so the resume point is durable before
// returning. Note: if the cache.set promise rejects, this promise never
// settles (NOTE(review): potential hang — confirm cache.set cannot reject).
const endHttpStreamPromise = new Promise((resolve) => {
httpStream.on(`response`, (res) => {
const newPreviousBlockId = res.headers[`x-consumer-offset`] ||
// TODO: update gql cloud server to return this header instead (for consistency)
res.headers[`x-block-id`];
if (newPreviousBlockId) {
console.info(`[content-engine] synchronized data up until version ${newPreviousBlockId}`);
this.cache
.set(previousBlockIdCacheKey, newPreviousBlockId)
.then(() => resolve(null));
}
else {
// Server reported no offset header: nothing to persist.
resolve(null);
}
});
});
let dispatchCount = 0;
// Tracks the most recent action so we can verify END_SOURCING after the
// stream closes.
let lastEmittedAction;
// Serial (concurrency = 1) fastq queue: dispatches each parsed JSONL entry
// (`entry.value` is the parsed action) to the configured callbacks.
const handleActionQueue = (0, fastq_1.default)((entry, cb) => {
lastEmittedAction = entry.value;
if (typeof this.handleAction === `function`) {
this.handleAction(entry.value);
}
if (typeof onAction === `function`) {
onAction(entry.value);
}
// Do not block task queue of the event loop for too long:
// every 100th dispatch defers its completion callback to the next
// event-loop turn via setImmediate.
if (dispatchCount++ % 100 === 0) {
setImmediate(() => {
cb(null);
});
}
else {
cb(null);
}
}, 1);
// JSON Lines parser: each emitted `data` chunk is pushed onto the queue.
const decode = (0, Parser_1.parser)();
decode.on(`data`, (data) => handleActionQueue.push(data));
let decodeError = null;
decode.on(`error`, (e) => {
// Stop dispatching pending actions and remember the parse error so it can
// be rethrown after the pipeline settles.
handleActionQueue.kill();
decodeError = e;
});
// Drive the HTTP stream through the JSONL parser to completion; rejects on
// stream errors.
await pipeline(httpStream, decode);
if (decodeError) {
throw decodeError;
}
// The stream has ended but queued actions may still be in flight; wait for
// the queue to drain before finishing.
if (!handleActionQueue.idle()) {
await new Promise((res) => {
handleActionQueue.drain = () => res(null);
});
}
handleActionQueue.kill();
// Ensure the resume offset has been persisted (see endHttpStreamPromise).
await endHttpStreamPromise;
// A well-formed ledger stream terminates with an END_SOURCING action; its
// absence means the stream was cut short.
if (lastEmittedAction &&
lastEmittedAction?.payload?.event !== `END_SOURCING`) {
throw new Error(`ledger stream closed but didn't receive an END_SOURCING action`);
}
// NOTE(review): the "- 2" presumably excludes two bookkeeping actions
// (e.g. start/end sourcing markers) from the logged total — confirm.
console.info(`[content-engine] Total actions synced from ledger: ${dispatchCount > 0 ? dispatchCount - 2 : dispatchCount}`);
return dispatchCount;
}
/**
 * Opens a got HTTP stream to
 * `{serverUrl}/{resourceId}/{configurationId}/{cacheId}`, sending the resume
 * offset as `x-consumer-offset` and, when given, the upper bound as
 * `x-end-offset`.
 *
 * @returns the got request stream (response is consumed by read()).
 */
async createReadStream({ configurationId, cacheId, startBlockVersionId, endBlockVersionId, headers, }) {
const stream = got_1.default.stream(`${this.serverUrl}${this.serverUrl.endsWith("/") ? "" : "/"}${this.resourceId}/${configurationId}/${cacheId}`, {
headers: {
...(headers ? headers : {}),
"x-consumer-offset": startBlockVersionId,
...(endBlockVersionId ? { "x-end-offset": endBlockVersionId } : {}),
// TODO: move away from adding these to the url as the path, use these headers instead
// "x-start-block": startBlockVersionId || "0",
// "x-end-block": endBlockVersionId,
// "x-ledger-id": this.ledgerId,
// "x-configuration-id": configurationId,
// "x-cache-id": cacheId,
},
});
return stream;
}
}
// CommonJS export of the class.
exports.LedgerReader = LedgerReader;
//# sourceMappingURL=ledger-reader.js.map
;