// mockttp-mvs — Mock HTTP server for testing HTTP clients and stubbing webservices
// (compiled JavaScript output; 95 lines • 3.92 kB)
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.splitBuffer = exports.streamToBuffer = exports.bufferToStream = exports.bufferThenStream = exports.asBuffer = void 0;
const stream = require("stream");
const util_1 = require("./util");
const MAX_BUFFER_SIZE = util_1.isNode
? require('buffer').constants.MAX_LENGTH
: Infinity;
// Normalizes any supported body input (Buffer, string, or byte array) into a
// Buffer. Buffers are returned as-is (no copy); strings are decoded as UTF-8.
const asBuffer = (input) => {
    if (Buffer.isBuffer(input)) return input;
    if (typeof input === "string") return Buffer.from(input, 'utf8');
    // Otherwise it's an array of bytes:
    return Buffer.from(input);
};
exports.asBuffer = asBuffer;
// Takes an in-progress buffer (a promise augmented with currentChunks and
// failedWith — see streamToBuffer) plus the live input stream, and returns a
// single stream that replays everything buffered so far and then continues
// with the live data.
const bufferThenStream = (buffer, inputStream) => {
    const result = new stream.PassThrough();
    // Replay all data buffered up to this point:
    result.write(Buffer.concat(buffer.currentChunks));
    if (buffer.failedWith) {
        // The buffering already failed: surface that failure asynchronously,
        // so consumers have a chance to attach their 'error' listeners first.
        setTimeout(() => result.emit('error', buffer.failedWith));
    } else {
        // Pass live data straight through as it arrives:
        inputStream.pipe(result);
        // Mirror any later input failure onto the output stream:
        inputStream.on('error', (err) => result.emit('error', err));
        // The buffer promise will also reject on failure; swallow that here to
        // avoid 'unhandled rejection' noise, since errors surface on the
        // stream instead.
        buffer.catch(() => { });
    }
    return result;
};
exports.bufferThenStream = bufferThenStream;
// Wraps a fully-materialized buffer in a readable stream that emits the
// buffer's contents and then ends.
const bufferToStream = (buffer) => {
    const wrapped = new stream.PassThrough();
    wrapped.end(buffer);
    return wrapped;
};
exports.bufferToStream = bufferToStream;
/**
 * Collects a readable stream into a promise for a single Buffer.
 *
 * The returned promise is augmented with two extra fields (consumed by
 * bufferThenStream above):
 * - currentChunks: the live array of chunks collected so far
 * - failedWith: set to the error if the stream errors or is aborted
 *
 * If the total data exceeds maxSize, all buffered data is dropped and the
 * promise resolves to an empty buffer — a deliberate alternative to crashing
 * the process on enormous bodies.
 *
 * Fix: the original assigned `bufferPromise.failedWith` from inside the
 * Promise executor, which runs synchronously before `const bufferPromise` is
 * initialized. For a stream that was already aborted at call time that threw
 * a TDZ ReferenceError instead of rejecting with 'Aborted'. We now create the
 * promise first and attach all handlers afterwards, so `bufferPromise` is
 * always initialized when they run.
 *
 * @param {stream.Readable} input - the stream to collect
 * @param {number} [maxSize] - maximum number of bytes to buffer
 * @returns {Promise<Buffer>} promise augmented with currentChunks/failedWith
 */
const streamToBuffer = (input, maxSize = MAX_BUFFER_SIZE) => {
    let chunks = [];
    let resolve;
    let reject;
    const bufferPromise = new Promise((res, rej) => {
        resolve = res;
        reject = rej;
    });
    function failWithAbortError() {
        bufferPromise.failedWith = new Error('Aborted');
        reject(bufferPromise.failedWith);
    }
    if (input.readableEnded) {
        // Stream already finished cleanly: resolve empty immediately.
        resolve(Buffer.from([]));
    } else if (input.readableAborted) {
        // Stream already aborted before we were called: fail immediately.
        failWithAbortError();
    } else {
        let currentSize = 0;
        input.on('data', (d) => {
            currentSize += d.length;
            // If we go over maxSize, drop the whole stream, so the buffer
            // resolves empty. MaxSize should be large, so this is rare,
            // and only happens as an alternative to crashing the process.
            if (currentSize > maxSize) {
                chunks = []; // Drop all the data so far
                return; // Don't save any more data
            }
            chunks.push(d);
        });
        input.once('end', () => resolve(Buffer.concat(chunks)));
        input.once('aborted', failWithAbortError);
        input.on('error', (e) => {
            // Keep the first recorded failure if one already exists:
            bufferPromise.failedWith = bufferPromise.failedWith || e;
            reject(e);
        });
    }
    // NOTE(review): after an overflow, `chunks` is reassigned, so
    // currentChunks keeps pointing at the pre-overflow array — presumably
    // intentional so live consumers still see the data; confirm with callers.
    bufferPromise.currentChunks = chunks;
    return bufferPromise;
};
exports.streamToBuffer = streamToBuffer;
// Splits a buffer into parts around each occurrence of `splitter`, without
// including the splitter itself. At most `maxParts` parts are produced: once
// one slot remains, the entire unsplit remainder becomes the final part.
// Note that a trailing splitter does not produce a trailing empty part.
function splitBuffer(input, splitter, maxParts = Infinity) {
    const parts = [];
    let rest = input;
    while (rest.length) {
        const matchIndex = rest.indexOf(splitter);
        const partEnd = matchIndex === -1 ? rest.length : matchIndex;
        parts.push(rest.slice(0, partEnd));
        rest = rest.slice(partEnd + splitter.length);
        if (parts.length === maxParts - 1) {
            // Only one slot left: take everything remaining as the last part.
            parts.push(rest);
            break;
        }
    }
    return parts;
}
exports.splitBuffer = splitBuffer;
//# sourceMappingURL=buffer-utils.js.map