import stream from 'node:stream';
import Defaults from '../globals/defaults.js';
export default {
/**
* Concatenate multiple readable streams into a single readable stream.
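   *
   * Usage sketch (the import name and file paths here are hypothetical):
   * @example
   * import fs from 'node:fs';
   * import streamPoly from './streamPoly.js'; // hypothetical path to this module
   *
   * const merged = streamPoly.concat(
   *   fs.createReadStream('header.bin'),
   *   fs.createReadStream('body.bin'),
   * );
   * merged.pipe(fs.createWriteStream('joined.bin'));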
*/
concat(...readables) {
if (readables.length === 1) {
// Don't incur the overhead of any passthroughs
return readables[0];
}
const out = new stream.PassThrough({ highWaterMark: Defaults.FILE_READING_CHUNK_SIZE });
let current = 0;
    let activeStream;
let destroyed = false;
/**
* Pipe the next input stream to the output stream.
*/
function pipeNext() {
if (destroyed) {
return;
}
if (current >= readables.length) {
out.end();
return;
}
activeStream = readables[current++];
activeStream.pipe(out, { end: false });
      // Surface input errors on the output stream
      activeStream.once('error', (err) => {
        out.emit('error', err);
      });
activeStream.once('end', pipeNext);
}
    // Propagate destruction of the output to the active and not-yet-piped input streams
out._destroy = (err, callback) => {
destroyed = true;
if (typeof activeStream?.destroy === 'function') {
activeStream.destroy(err ?? undefined);
}
for (let i = current; i < readables.length; i++) {
const readable = readables[i];
if (typeof readable.destroy === 'function') {
readable.destroy();
}
}
callback(err);
};
pipeNext();
return out;
},
/**
   * Pad a readable stream to a specified length by appending a fill string. If the input is
   * already at least `maxLength` bytes long, it is passed through unchanged (never truncated).
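   *
   * Usage sketch (import name and path are hypothetical):
   * @example
   * import stream from 'node:stream';
   * import streamPoly from './streamPoly.js'; // hypothetical path to this module
   *
   * // Emits 8 bytes total: 'abc' followed by five NUL bytes
   * const padded = streamPoly.padEnd(stream.Readable.from([Buffer.from('abc')]), 8, '\x00');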
*/
padEnd(readable, maxLength, fillString) {
const output = new stream.PassThrough({ highWaterMark: Defaults.FILE_READING_CHUNK_SIZE });
let readableBytesRead = 0;
    readable.on('data', (chunk) => {
      // Count input bytes so we know how much padding remains after 'end'
      readableBytesRead += chunk.length;
      output.write(chunk);
    });
readable.on('end', () => {
const remainingBytes = maxLength - readableBytesRead;
if (remainingBytes > 0) {
this.staticReadable(remainingBytes, fillString).pipe(output, { end: true });
}
else {
output.end();
}
});
readable.on('error', (err) => output.destroy(err));
return output;
},
/**
   * Split a readable stream into multiple readable streams, such that the original stream can be
   * read concurrently by multiple consumers. Every returned stream must be consumed; unread
   * chunks accumulate in each output's internal buffer.
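   *
   * Usage sketch (hashing one file with two algorithms at once; names are hypothetical):
   * @example
   * import crypto from 'node:crypto';
   * import fs from 'node:fs';
   * import streamPoly from './streamPoly.js'; // hypothetical path to this module
   *
   * const [md5Input, sha1Input] = streamPoly.split(fs.createReadStream('game.rom'), 2);
   * md5Input.pipe(crypto.createHash('md5'));
   * sha1Input.pipe(crypto.createHash('sha1'));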
*/
split(readable, count) {
if (count === 0) {
return [];
}
if (count === 1) {
// Don't incur the overhead of any passthroughs
return [readable];
}
const outputs = [];
for (let i = 0; i < count; i++) {
const output = new stream.PassThrough({ highWaterMark: Defaults.FILE_READING_CHUNK_SIZE });
      // Fan out every chunk, the end event, and any error to this output
      readable.on('data', output.write.bind(output));
      readable.on('end', output.end.bind(output));
      readable.on('error', output.destroy.bind(output));
outputs.push(output);
}
return outputs;
},
/**
* Generate a static readable stream that emits a fixed number of bytes filled with a specified
* string or number.
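   *
   * Usage sketch (import name and path are hypothetical):
   * @example
   * import streamPoly from './streamPoly.js'; // hypothetical path to this module
   *
   * // One mebibyte of 0xFF bytes, e.g. padding to grow a file to a fixed size
   * const filler = streamPoly.staticReadable(1024 * 1024, 0xff);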
*/
staticReadable(length, fillString) {
let bytesRemaining = length;
return new stream.Readable({
read(size) {
// Emit a chunk up to `size` bytes, or the remaining bytes if less
const chunkSize = Math.min(size, bytesRemaining);
const chunk = Buffer.alloc(chunkSize, fillString);
this.push(chunk);
bytesRemaining -= chunkSize;
if (bytesRemaining <= 0) {
// End the stream
// eslint-disable-next-line unicorn/no-null
this.push(null);
}
},
});
},
/**
* Return a new readable stream that has had the specified transforms applied to it.
* This differs from {@link stream.pipeline} in that it returns a readable stream, NOT a writable
* stream.
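   *
   * Usage sketch (decompressing a gzipped file; import name and path are hypothetical):
   * @example
   * import fs from 'node:fs';
   * import zlib from 'node:zlib';
   * import streamPoly from './streamPoly.js'; // hypothetical path to this module
   *
   * const decompressed = streamPoly.withTransforms(
   *   fs.createReadStream('dump.bin.gz'),
   *   zlib.createGunzip(),
   * );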
*/
withTransforms(readable, ...transforms) {
if (transforms.length === 0) {
// Don't incur the overhead of any passthroughs
return readable;
}
const output = new stream.PassThrough({ highWaterMark: Defaults.FILE_READING_CHUNK_SIZE });
    stream.pipeline(readable, ...transforms, output, (err) => {
      if (err) {
        output.destroy(err);
      }
    });
return output;
},
};