@chickenjdk/byteutils
Version:
Advanced tools for manipulating binary data in JavaScript
74 lines (73 loc) • 3.78 kB
TypeScript
import { writableBufferBase } from "../writableBuffer";
import { Writable } from "stream";
export declare class writableStream extends writableBufferBase<true> {
#private;
/**
* Write binary encoded data to a stream.
* Writes each write to the stream immediately, no matter the size of the data.
* For this reason, for high speed/bandwidth, it is recommended to use `chunkingWritableStream` to prevent memory issues with large writes and spamming the stream.
* This is accomplished by writing data with predictably sized chunks, regardless of how small or large the writes are.
* @param stream The stream to write to.
*/
constructor(stream: Writable);
/**
* The underlying stream this instance writes to.
*/
get stream(): Writable;
/**
* Write the given bytes to the stream (per the class contract, each write goes to the stream immediately).
* @param value The bytes to write.
* @returns A promise that resolves when the write has been issued.
*/
writeUint8Array(value: Uint8Array): Promise<void>;
/**
* Write the given bytes to the stream.
* NOTE(review): presumably writes `value` in reverse byte order (endianness support) — confirm against the implementation.
* @param value The bytes to write.
*/
writeUint8ArrayBackwards(value: Uint8Array): Promise<void>;
/**
* Write an array of byte values to the stream.
* NOTE(review): assumes each element is a byte (0-255) — confirm against the implementation.
* @param value The byte values to write.
*/
writeArray(value: number[]): Promise<void>;
/**
* Write an array of byte values to the stream.
* NOTE(review): presumably writes `value` in reverse order (endianness support) — confirm against the implementation.
* @param value The byte values to write.
*/
writeArrayBackwards(value: number[]): Promise<void>;
/**
* Write a single byte value to the stream.
* @param value The byte value to write.
*/
push(value: number): Promise<void>;
}
/**
* Little-endian version of writableStream
* @remarks You can generate this class yourself with `addDefaultEndianness(writableStream, true)` or make an already created instance little endian via `instance.isLe = true`
*/
export declare const writableStreamLE: typeof writableStream;
export declare class chunkingWritableStream extends writableBufferBase<true> {
#private;
/**
* The stream we are writing to.
* @returns The stream we are writing to.
*/
get stream(): Writable;
/**
* The size of the chunks to write to the stream.
* If you need to change it, please use the `setChunkSize` method.
* @returns The size of the chunks to write to the stream.
* @default 2000
*/
get chunkSize(): number;
/**
* Change the chunk size of the stream.
* This is async because it may need to flush the current buffer if the new chunk size is smaller than the current used size.
* @param value The new chunk size to set.
*/
setChunkSize(value: number): Promise<void>;
/**
* Write to the stream in predictable sized chunks.
* This is useful for high speed/bandwidth writes to a stream, as it prevents memory issues with large writes and spamming the stream.
* It accomplishes this by writing data with predictably sized chunks, regardless of how small or large the writes are.
* If you need the data written immediately, you can use the `flush` method to write the current buffer to the stream.
* If you need each write to be written immediately, use `writableStream` instead.
* @param stream The stream to write to.
* @param chunkSize The size of the chunks to write to the stream.
*/
constructor(stream: Writable, chunkSize?: number);
/**
* Flush the buffer to the stream.
* If the buffer is empty, it resolves immediately.
* If the buffer is not empty, it writes the used section of the buffer to the stream and resets the buffer.
* This is useful for ensuring that all data is sent to the stream before closing it or performing other operations.
* @returns A promise that resolves when the buffer is flushed.
*/
flush(): Promise<void>;
/**
* Buffer a single byte value; per the class contract, data reaches the stream in `chunkSize` chunks (or on `flush`).
* @param value The byte value to write.
*/
push(value: number): Promise<void>;
/**
* Buffer the given bytes for chunked writing to the stream.
* @param value The bytes to write.
* @returns A promise that resolves when the bytes have been accepted (buffered or written).
*/
writeUint8Array(value: Uint8Array): Promise<void>;
/**
* Buffer the given bytes for chunked writing to the stream.
* NOTE(review): presumably writes `value` in reverse byte order (endianness support) — confirm against the implementation.
* @param value The bytes to write.
*/
writeUint8ArrayBackwards(value: Uint8Array): Promise<void>;
/**
* Buffer an array of byte values for chunked writing to the stream.
* NOTE(review): assumes each element is a byte (0-255) — confirm against the implementation.
* @param value The byte values to write.
*/
writeArray(value: number[]): Promise<void>;
/**
* Buffer an array of byte values for chunked writing to the stream.
* NOTE(review): presumably writes `value` in reverse order (endianness support) — confirm against the implementation.
* @param value The byte values to write.
*/
writeArrayBackwards(value: number[]): Promise<void>;
}
/**
* Little-endian version of chunkingWritableStream
* @remarks You can generate this class yourself with `addDefaultEndianness(chunkingWritableStream, true)` or make an already created instance little endian via `instance.isLe = true`
*/
export declare const chunkingWritableStreamLE: typeof chunkingWritableStream;