UNPKG

@chickenjdk/byteutils

Version:

Advanced tools for manipulating binary data in JavaScript

221 lines (220 loc) 12.8 kB
"use strict"; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var _writableStream_stream, _chunkingWritableStream_instances, _chunkingWritableStream_chunkSize, _chunkingWritableStream_stream, _chunkingWritableStream_buffer, _chunkingWritableStream_used, _chunkingWritableStream_flushFull; Object.defineProperty(exports, "__esModule", { value: true }); exports.chunkingWritableStreamLE = exports.chunkingWritableStream = exports.writableStreamLE = exports.writableStream = void 0; const common_1 = require("../common"); const writableBuffer_1 = require("../writableBuffer"); class writableStream extends writableBuffer_1.writableBufferBase { /** * Write binary encoded data to a stream. * Writes each write to the stream immeditly, no matter the size of the data. * For this reason, for high speed/bandwidth, it is recommended to use `chunkingWritableStream` to prevent memory issues with large writes and spamming the stream. * This is accomplished by writing data with predictably sized chunks, regardless of how small or large the writes are. * @param stream The stream to write to. */ constructor(stream) { super(); _writableStream_stream.set(this, void 0); __classPrivateFieldSet(this, _writableStream_stream, stream, "f"); } get stream() { return __classPrivateFieldGet(this, _writableStream_stream, "f"); } writeUint8Array(value) { return new Promise((resolve, reject) => { __classPrivateFieldGet(this, _writableStream_stream, "f").write(value, (err) => { if (err) { reject(err); } else { resolve(); } }); }); } writeUint8ArrayBackwards(value) { return this.writeUint8Array(value.slice(0).reverse()); } writeArray(value) { return this.writeUint8Array(Uint8Array.from(value)); } writeArrayBackwards(value) { // writeUint8ArrayBackwards clones the input to prevent its mutation but we already clone it via Uint8Array.from so we can just reverse it directly return this.writeUint8Array(Uint8Array.from(value).reverse()); } push(value) { return new Promise((resolve, reject) => { __classPrivateFieldGet(this, _writableStream_stream, "f").write(Uint8Array.of(value), (err) => { if (err) { reject(err); } else { resolve(); } }); }); } } exports.writableStream = writableStream; _writableStream_stream = new WeakMap(); /** * Little-endian version of writableStream * @remarks You can generate this class yourself with `addDefaultEndianness(cwritableStream, true)` or make a already created instance little endian via `instance.isLe = true` */ exports.writableStreamLE = (0, common_1.addDefaultEndianness)(writableStream, true); class chunkingWritableStream extends writableBuffer_1.writableBufferBase { /** * The stream we are writing to. * @returns TThe stream we are writing to. */ get stream() { return __classPrivateFieldGet(this, _chunkingWritableStream_stream, "f"); } /** * The size of the chunks to write to the stream. * If you need to change it, please use the `setChunkSize` method. * @returns The size of the chunks to write to the stream. * @default 2000 */ get chunkSize() { return __classPrivateFieldGet(this, _chunkingWritableStream_chunkSize, "f"); } /** * Change the chunk size of the stream. * This is async because it may need to flush the current buffer if the new chunk size is smaller than the current used size. * @param value The new chunk size to set. */ async setChunkSize(value) { __classPrivateFieldSet(this, _chunkingWritableStream_chunkSize, value, "f"); if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") < value) { // If the new chunk size is larger, we can just copy the old buffer to the new one const oldBuffer = __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f"); __classPrivateFieldSet(this, _chunkingWritableStream_buffer, new Uint8Array(value), "f"); __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").set(oldBuffer.subarray(0, __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")), 0); } else if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") > value) { // Not enough space in the buffer, so we need to flush it // This will do one last write in the old chunk size, but who cares? await this.flush(); __classPrivateFieldSet(this, _chunkingWritableStream_buffer, new Uint8Array(__classPrivateFieldGet(this, _chunkingWritableStream_chunkSize, "f")), "f"); } } /** * Write to the stream in predictable sized chunks. * This is useful for high speed/bandwidth writes to a stream, as it prevents memory issues with large writes and spamming the stream. * It accomplishes this by writing data with predictably sized chunks, regardless of how small or large the writes are. * If you need the data written immediately, you can use the `flush` method to write the current buffer to the stream. * If you need each write to be written immediately, use `writableStream` instead. * @param stream The stream to write to. * @param chunkSize The size of the chunks to write to the stream. */ constructor(stream, chunkSize = 2000) { super(); _chunkingWritableStream_instances.add(this); _chunkingWritableStream_chunkSize.set(this, void 0); _chunkingWritableStream_stream.set(this, void 0); // Always get the buffer's length to allow safely changing chunkSIze _chunkingWritableStream_buffer.set(this, void 0); _chunkingWritableStream_used.set(this, 0); __classPrivateFieldSet(this, _chunkingWritableStream_stream, stream, "f"); __classPrivateFieldSet(this, _chunkingWritableStream_chunkSize, chunkSize, "f"); __classPrivateFieldSet(this, _chunkingWritableStream_buffer, new Uint8Array(chunkSize), "f"); } /** * Flush the buffer to the stream. * If the buffer is empty, it resolves immediately. * If the buffer is not empty, it writes the used section of the buffer to the stream and resets the buffer. * This is useful for ensuring that all data is sent to the stream before closing it or performing other operations. * @returns A promise that resolves when the buffer is flushed. */ flush() { if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") === 0) { return Promise.resolve(); } return new Promise((resolve, reject) => { __classPrivateFieldGet(this, _chunkingWritableStream_stream, "f").write(__classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").subarray(0, __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")), (err) => { __classPrivateFieldSet(this, _chunkingWritableStream_used, 0, "f"); __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").fill(0); // Reset the buffer to zeroes if (err) { reject(err); } else { resolve(); } }); }); } async push(value) { var _a, _b; if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") === __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").length) { await __classPrivateFieldGet(this, _chunkingWritableStream_instances, "m", _chunkingWritableStream_flushFull).call(this); } __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f")[__classPrivateFieldSet(this, _chunkingWritableStream_used, (_b = __classPrivateFieldGet(this, _chunkingWritableStream_used, "f"), _a = _b++, _b), "f"), _a] = value; } async writeUint8Array(value) { let bytesLeft = value.length; let index = 0; while (bytesLeft > 0) { if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") === __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").length) { await __classPrivateFieldGet(this, _chunkingWritableStream_instances, "m", _chunkingWritableStream_flushFull).call(this); } const bytesToWrite = Math.min(bytesLeft, __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").length - __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")); __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").set(value.subarray(index, index + bytesToWrite), __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")); __classPrivateFieldSet(this, _chunkingWritableStream_used, __classPrivateFieldGet(this, _chunkingWritableStream_used, "f") + bytesToWrite, "f"); index += bytesToWrite; bytesLeft -= bytesToWrite; } } writeUint8ArrayBackwards(value) { // Don't mutate the origional value return this.writeUint8Array(value.slice(0).reverse()); } async writeArray(value) { let bytesLeft = value.length; let index = 0; while (bytesLeft > 0) { if (__classPrivateFieldGet(this, _chunkingWritableStream_used, "f") === __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").length) { await __classPrivateFieldGet(this, _chunkingWritableStream_instances, "m", _chunkingWritableStream_flushFull).call(this); } const bytesToWrite = Math.min(bytesLeft, __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").length - __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")); __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").set(value.slice(index, index + bytesToWrite), __classPrivateFieldGet(this, _chunkingWritableStream_used, "f")); __classPrivateFieldSet(this, _chunkingWritableStream_used, __classPrivateFieldGet(this, _chunkingWritableStream_used, "f") + bytesToWrite, "f"); index += bytesToWrite; bytesLeft -= bytesToWrite; } } writeArrayBackwards(value) { return this.writeArray(value.slice(0).reverse()); } } exports.chunkingWritableStream = chunkingWritableStream; _chunkingWritableStream_chunkSize = new WeakMap(), _chunkingWritableStream_stream = new WeakMap(), _chunkingWritableStream_buffer = new WeakMap(), _chunkingWritableStream_used = new WeakMap(), _chunkingWritableStream_instances = new WeakSet(), _chunkingWritableStream_flushFull = function _chunkingWritableStream_flushFull() { return new Promise((resolve, reject) => { __classPrivateFieldGet(this, _chunkingWritableStream_stream, "f").write(__classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f"), (err) => { __classPrivateFieldSet(this, _chunkingWritableStream_used, 0, "f"); __classPrivateFieldGet(this, _chunkingWritableStream_buffer, "f").fill(0); if (err) { reject(err); } else { resolve(); } }); }); }; /** * Little-endian version of chunkingWritableStream * @remarks You can generate this class yourself with `addDefaultEndianness(chunkingWritableStream, true)` or make a already created instance little endian via `instance.isLe = true` */ exports.chunkingWritableStreamLE = (0, common_1.addDefaultEndianness)(chunkingWritableStream, true);