// nerdbank-streams — multiplexing of streams (compiled output: Utilities.js, 154 lines, 5.88 kB, JavaScript)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeAsync = writeAsync;
exports.writeSubstream = writeSubstream;
exports.readSubstream = readSubstream;
exports.getBufferFrom = getBufferFrom;
exports.throwIfDisposed = throwIfDisposed;
exports.requireInteger = requireInteger;
exports.removeFromQueue = removeFromQueue;
const stream_1 = require("stream");
const Deferred_1 = require("./Deferred");
/**
 * Writes a chunk to a stream, resolving once the write callback fires.
 * @param stream The writable stream to write to.
 * @param chunk The data to write.
 * @returns A promise that resolves on success or rejects with the error
 *   reported by the stream's write callback.
 */
async function writeAsync(stream, chunk) {
    return new Promise((resolve, reject) => {
        stream.write(chunk, (err) => (err ? reject(err) : resolve()));
    });
}
/**
 * Wraps a stream in a Writable that length-prefixes every chunk,
 * so the substream can later be delimited when read back out.
 * Each chunk is preceded by a 4-byte big-endian length header;
 * finishing the writable emits a zero-length header as a terminator.
 * @param stream The underlying stream to write the framed data to.
 * @returns A Writable that frames everything written to it.
 */
function writeSubstream(stream) {
    return new stream_1.Writable({
        async write(chunk, _, callback) {
            try {
                // 4-byte big-endian length prefix for this chunk.
                const headerView = new DataView(new ArrayBuffer(4));
                headerView.setUint32(0, chunk.length, false);
                await writeAsync(stream, Buffer.from(headerView.buffer));
                await writeAsync(stream, chunk);
                callback();
            } catch (error) {
                // Report the failure through the Writable machinery.
                callback(error);
            }
        },
        final(callback) {
            // A zero-length header marks the end of the substream.
            stream.write(new Uint8Array(4), callback);
        },
    });
}
/**
 * Wraps a stream in a Readable that reads one length-prefixed substream
 * (as produced by `writeSubstream`) and ends at the zero-length terminator.
 * @param stream The underlying stream carrying the framed data.
 * @returns A Readable that yields the substream's payload bytes.
 */
function readSubstream(stream) {
    return new stream_1.Readable({
        async read(_) {
            try {
                // Each chunk is preceded by its length as a 4-byte big-endian integer.
                const lenBuffer = await getBufferFrom(stream, 4);
                const dv = new DataView(lenBuffer.buffer, lenBuffer.byteOffset, lenBuffer.length);
                const chunkSize = dv.getUint32(0, false);
                if (chunkSize === 0) {
                    // Zero-length header: the substream is complete.
                    this.push(null);
                    return;
                }
                // TODO: make this *stream* instead of read as an atomic chunk.
                const payload = await getBufferFrom(stream, chunkSize);
                this.push(payload);
            }
            catch (err) {
                // Surface failures (e.g. the underlying stream terminating mid-frame)
                // as a stream 'error' instead of an unhandled promise rejection.
                this.destroy(err);
            }
        },
    });
}
/**
 * Reads exactly `size` bytes from a readable stream, waiting for more data to arrive
 * as necessary.
 * @param readable The stream to read from.
 * @param size The number of bytes required. A size of 0 returns an empty buffer immediately.
 * @param allowEndOfStream When true, an early end-of-stream returns whatever bytes were
 *   read so far (or null if none were read) instead of throwing.
 * @param cancellationToken Optional token; `throwIfCancelled` and `racePromise` are called
 *   on it here — assumed shape, confirm against the token's declaration.
 * @returns The requested bytes; or null when `allowEndOfStream` is true and the stream
 *   ended before any bytes were available.
 * @throws When the stream ends before `size` bytes are read and `allowEndOfStream` is false.
 */
async function getBufferFrom(readable, size, allowEndOfStream = false, cancellationToken) {
    // Resolved by the 'end' listener below; checked via isCompleted on later iterations.
    const streamEnded = new Deferred_1.Deferred();
    if (size === 0) {
        return Buffer.from([]);
    }
    // Accumulator, allocated lazily only when the data arrives in multiple pieces.
    let readBuffer = null;
    // Next write offset into readBuffer (equals total bytes copied so far).
    let index = 0;
    while (size > 0) {
        // Down-leveled optional chaining: cancellationToken?.throwIfCancelled().
        cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.throwIfCancelled();
        let availableSize = readable.readableLength;
        if (!availableSize) {
            // Check the end of stream
            if (readable.readableEnded || streamEnded.isCompleted) {
                // stream is closed
                if (!allowEndOfStream) {
                    throw new Error('Stream terminated before required bytes were read.');
                }
                // Returns what has been read so far.
                if (readBuffer === null) {
                    return null;
                }
                // We need to trim the trailing space.
                return readBuffer.subarray(0, index);
            }
            // we retain this behavior when availableSize === false
            // to make existing unit tests happy (which assumes we will try to read stream when no data is ready.)
            availableSize = size;
        }
        else if (availableSize > size) {
            // Never consume more than the caller asked for.
            availableSize = size;
        }
        const newBuffer = readable.read(availableSize);
        if (newBuffer) {
            if (newBuffer.length < availableSize && !allowEndOfStream) {
                throw new Error('Stream terminated before required bytes were read.');
            }
            if (readBuffer === null) {
                if (availableSize === size || newBuffer.length < availableSize) {
                    // In the fast pass, we read the entire data once, and do not allocate an extra array.
                    return newBuffer;
                }
                // If we read partial data, we need to allocate a buffer to join all data together.
                readBuffer = Buffer.alloc(size);
            }
            // now append new data to the buffer
            newBuffer.copy(readBuffer, index);
            size -= newBuffer.length;
            index += newBuffer.length;
        }
        if (size > 0) {
            // Not enough data yet: wait until the stream is readable again, ends,
            // or the cancellation token (if any) fires — whichever happens first.
            const bytesAvailable = new Deferred_1.Deferred();
            const bytesAvailableCallback = bytesAvailable.resolve.bind(bytesAvailable);
            const streamEndedCallback = streamEnded.resolve.bind(streamEnded);
            readable.once('readable', bytesAvailableCallback);
            readable.once('end', streamEndedCallback);
            try {
                const endPromise = Promise.race([bytesAvailable.promise, streamEnded.promise]);
                await (cancellationToken ? cancellationToken.racePromise(endPromise) : endPromise);
            }
            finally {
                // Always detach listeners so repeated waits don't accumulate handlers.
                readable.removeListener('readable', bytesAvailableCallback);
                readable.removeListener('end', streamEndedCallback);
            }
        }
    }
    return readBuffer;
}
/**
 * Throws if the given object reports that it has already been disposed.
 * @param value Any object exposing an `isDisposed` flag.
 * @throws Error with message 'disposed' when `value.isDisposed` is truthy.
 */
function throwIfDisposed(value) {
    if (!value.isDisposed) {
        return;
    }
    throw new Error('disposed');
}
/**
 * Validates that a value is an integer representable in the given number of bytes.
 * @param parameterName The name to use in error messages.
 * @param value The value to validate.
 * @param serializedByteLength The number of bytes available to serialize the value.
 * @param signed Whether the serialized form is 'signed' (default) or unsigned.
 * @throws When the value is not an integer or falls outside the representable range.
 */
function requireInteger(parameterName, value, serializedByteLength, signed = 'signed') {
    if (!Number.isInteger(value)) {
        throw new Error(`${parameterName} must be an integer.`);
    }
    // Signed representations reserve one bit for the sign.
    const valueBits = signed === 'signed'
        ? serializedByteLength * 8 - 1
        : serializedByteLength * 8;
    const maxValue = Math.pow(2, valueBits) - 1;
    const minValue = signed === 'signed' ? -Math.pow(2, valueBits) : 0;
    if (value < minValue || value > maxValue) {
        throw new Error(`${parameterName} must be in the range ${minValue}-${maxValue}.`);
    }
}
/**
 * Removes the first occurrence of a value from a queue, if present.
 * @param value The item to remove.
 * @param queue The array to search; may be null or undefined, in which case this is a no-op.
 */
function removeFromQueue(value, queue) {
    if (!queue) {
        return;
    }
    const position = queue.indexOf(value);
    if (position !== -1) {
        queue.splice(position, 1);
    }
}
//# sourceMappingURL=Utilities.js.map