@mtdt.temp/browser-core: readBytesFromStream.js

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.readBytesFromStream = readBytesFromStream; const monitor_1 = require("./monitor"); const functionUtils_1 = require("./utils/functionUtils"); /** * Read bytes from a ReadableStream until at least `limit` bytes have been read (or until the end of * the stream). The callback is invoked with the at most `limit` bytes, and indicates that the limit * has been exceeded if more bytes were available. */ function readBytesFromStream(stream, callback, options) { const reader = stream.getReader(); const chunks = []; let readBytesCount = 0; readMore(); function readMore() { reader.read().then((0, monitor_1.monitor)((result) => { if (result.done) { onDone(); return; } if (options.collectStreamBody) { chunks.push(result.value); } readBytesCount += result.value.length; if (readBytesCount > options.bytesLimit) { onDone(); } else { readMore(); } }), (0, monitor_1.monitor)((error) => callback(error))); } function onDone() { reader.cancel().catch( // we don't care if cancel fails, but we still need to catch the error to avoid reporting it // as an unhandled rejection functionUtils_1.noop); let bytes; let limitExceeded; if (options.collectStreamBody) { let completeBuffer; if (chunks.length === 1) { // optimization: if the response is small enough to fit in a single buffer (provided by the browser), just // use it directly. completeBuffer = chunks[0]; } else { // else, we need to copy buffers into a larger buffer to concatenate them. completeBuffer = new Uint8Array(readBytesCount); let offset = 0; chunks.forEach((chunk) => { completeBuffer.set(chunk, offset); offset += chunk.length; }); } bytes = completeBuffer.slice(0, options.bytesLimit); limitExceeded = completeBuffer.length > options.bytesLimit; } callback(undefined, bytes, limitExceeded); } } //# sourceMappingURL=readBytesFromStream.js.map