@crawlee/core
Version:
The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.
32 lines • 1.68 kB
TypeScript
import { Readable } from 'node:stream';
/**
 * Uses Gzip compression to take an array of values, which can be anything
 * from entries in a Dataset to Requests in a RequestList, and compresses
 * them to a single Buffer in a memory-efficient way (streaming the items
 * into the compressor one by one). Ideally, the largest chunk of memory
 * consumed will be the final compressed Buffer. This could be further
 * improved by outputting a Stream, if and when apify-client supports
 * streams.
 *
 * @param data Array of JSON-serializable values to compress.
 * @returns Promise resolving to the Gzip-compressed JSON of `data`.
 * @internal
 */
export declare function serializeArray<T>(data: T[]): Promise<Buffer>;
/**
 * Decompresses a Buffer previously created with {@link serializeArray}
 * (technically, any Gzip-compressed JSON that is an Array) and collects
 * it into an Array of values in a memory-efficient way (streaming the
 * array items one by one instead of creating a fully decompressed buffer
 * -> full JSON -> full Array all in memory at once). Could be further
 * optimized to ingest a Stream if and when apify-client supports streams.
 *
 * @param compressedData Gzip-compressed JSON array, as produced by
 *   {@link serializeArray}.
 * @returns Promise resolving to the array of deserialized items.
 * @internal
 */
export declare function deserializeArray<T extends string | Buffer>(compressedData: Buffer | Uint8Array): Promise<T[]>;
/**
 * Creates a stream that decompresses a Buffer previously created with
 * {@link serializeArray} (technically, any Gzip-compressed JSON that is
 * an Array) and collects it into an Array of values in a memory-efficient
 * way (streaming the array items one by one instead of creating a fully
 * decompressed buffer -> full JSON -> full Array all in memory at once).
 * Could be further optimized to ingest a Stream if and when apify-client
 * supports streams.
 *
 * @param compressedData Gzip-compressed JSON array, as produced by
 *   {@link serializeArray}.
 * @returns Readable stream emitting the deserialized items one by one.
 * @internal
 */
export declare function createDeserialize(compressedData: Buffer | Uint8Array): Readable;
//# sourceMappingURL=serialization.d.ts.map