UNPKG

node-pkware

Version:

Node.js implementation of StormLib's PKWARE compressor/decompressor

76 lines (75 loc) 2.35 kB
/**
 * Decompressor for PKWARE DCL "implode"-compressed data, ported from StormLib's
 * explode routine. Construct with the compressed input, then call {@link getResult}
 * to obtain the decompressed bytes.
 */
export declare class Explode {
    // Raw compressed input as passed to the constructor.
    private readonly inputBuffer;
    /**
     * Used for accessing the data within inputBuffer
     */
    private readonly inputBufferView;
    /**
     * Used for caching inputBuffer.byteLength as that getter is doing some uncached computation to measure the length of
     * inputBuffer
     */
    private readonly inputBufferSize;
    /**
     * The explode algorithm works by trimming off the beginning of inputBuffer byte by byte. Instead of actually
     * adjusting the inputBuffer every time a byte is handled we store the beginning of the unhandled section and use it
     * when indexing data that is being read.
     */
    private inputBufferStartIndex;
    // Decompressed output and its view/size bookkeeping (mirrors the input-side trio above).
    private outputBuffer;
    private outputBufferView;
    private outputBufferSize;
    // Set when decoding stalls for lack of input — presumably checked to raise AbortedError; TODO confirm against implementation.
    private needMoreInput;
    // Bit-level read state — names suggest a bit accumulator and a count of pending bits; verify against implementation.
    private extraBits;
    private bitBuffer;
    // Decoding tables for match lengths and distance positions (Huffman-style lookup tables in StormLib's explode).
    private readonly lengthCodes;
    private readonly distPosCodes;
    // Header-derived parameters read by readHeader (see its @throws contract below).
    private compressionType;
    private dictionarySize;
    private dictionarySizeMask;
    // Per-character bit widths for the ASCII (literal) decoding mode — inferred from the name; confirm in implementation.
    private chBitsAsc;
    /**
     * the naming comes from stormlib, the 2C34 refers to the table's position in memory
     */
    private asciiTable2C34;
    /**
     * the naming comes from stormlib, the 2D34 refers to the table's position in memory
     */
    private asciiTable2D34;
    /**
     * the naming comes from stormlib, the 2E34 refers to the table's position in memory
     */
    private asciiTable2E34;
    /**
     * the naming comes from stormlib, the 2EB4 refers to the table's position in memory
     */
    private asciiTable2EB4;
    /**
     * @param input - the compressed data to be decompressed
     */
    constructor(input: ArrayBufferLike);
    /**
     * Runs the decompression and returns the decompressed data.
     *
     * @throws `InvalidCompressionTypeError`
     * @throws `InvalidDictionarySizeError`
     * @throws `AbortedError`
     */
    getResult(): ArrayBuffer;
    // Builds the asciiTable2C34/2D34/2E34/2EB4 lookup tables — inferred from the name.
    private generateAsciiTables;
    /**
     * Discards bits from the bit buffer, refilling from the input as needed.
     *
     * @throws `AbortedError` when there isn't enough data to be wasted
     */
    private wasteBits;
    /**
     * @throws `AbortedError`
     */
    private decodeNextLiteral;
    /**
     * @throws `AbortedError`
     */
    private decodeDistance;
    // Main decode loop over the remaining input.
    private processInput;
    // First-pass setup performed before the main decode loop — presumably reads the header; confirm in implementation.
    private parseInitialData;
    /**
     * This function assumes there are at least 2 bytes of data in the buffer
     *
     * @throws `InvalidCompressionTypeError`
     * @throws `InvalidDictionarySizeError`
     */
    private readHeader;
}