/**
 * unpak.js — Modern TypeScript library for reading Unreal Engine pak files
 * and assets, inspired by CUE4Parse.
 * (Type declarations; original listing metadata: 139 lines, 4.44 kB, TypeScript.)
 */
import { EventEmitter } from 'events';
/**
* Incremental parser for processing very large UE4/UE5 archives
* Addresses roadmap item: "Performance Optimization - Incremental parsing for large files (>100GB)"
*
* This system enables processing of massive archives by:
* - Streaming file data instead of loading into memory
* - Processing assets in batches with configurable chunk sizes
* - Providing progress tracking and cancellation support
* - Memory-efficient handling of large asset collections
*/
export declare class IncrementalParser extends EventEmitter {
/** Items per chunk (see IncrementalParsingOptions.chunkSize; default set in the implementation, not visible here). */
private readonly chunkSize;
/** Cap on concurrent per-item operations (see IncrementalParsingOptions.maxConcurrency). */
private readonly maxConcurrency;
/** Memory-usage threshold in bytes before cleanup is triggered (see IncrementalParsingOptions.maxMemoryUsage). */
private readonly maxMemoryUsage;
/** True while a run is in flight; surfaced via the isActive getter and getStatistics(). */
private isProcessing;
/** Cancellation flag — presumably set by cancel() and polled during processing; confirm in implementation. */
private cancelled;
/** Items completed so far in the current run (reported by getStatistics()). */
private processedItems;
/** Total number of items in the current run (reported by getStatistics()). */
private totalItems;
/** Tracked memory usage in bytes (reported by getStatistics()). */
private currentMemoryUsage;
/**
 * @param options Optional tuning knobs; see IncrementalParsingOptions for the
 *                meaning of each field. Defaults are implementation-defined.
 */
constructor(options?: IncrementalParsingOptions);
/**
 * Process a large collection of items incrementally.
 *
 * @param items     Full collection to work through in chunks.
 * @param processor Async callback invoked once per item with a
 *                  ProcessingContext describing its position and memory state.
 * @param options   Per-run behavior flags (failFast, progressInterval, verbose).
 * @returns Promise resolving to the processor results.
 */
processIncrementally<T, R>(items: T[], processor: (item: T, context: ProcessingContext) => Promise<R>, options?: ProcessingOptions): Promise<R[]>;
/**
 * Process a single chunk of items with concurrency control
 */
private processChunk;
/**
 * Create chunks from input array
 */
private createChunks;
/**
 * Memory management and cleanup
 */
private manageMemory;
/**
 * Force garbage collection if available
 */
private forceGarbageCollection;
/**
 * Get current memory usage
 */
private getCurrentMemoryUsage;
/**
 * Estimate object size for memory tracking
 */
private estimateObjectSize;
/**
 * Yield control to the event loop
 */
private yieldToEventLoop;
/**
 * Cancel the current processing (cooperative — presumably takes effect at the
 * next chunk/item boundary; confirm against the implementation).
 */
cancel(): void;
/**
 * Check if parser is currently processing
 */
get isActive(): boolean;
/**
 * Get a snapshot of the current processing statistics.
 */
getStatistics(): ProcessingStatistics;
}
/**
 * Configuration options for incremental parsing.
 * All fields are optional; defaults are applied by the IncrementalParser
 * constructor (default values are not visible in this declaration file).
 */
export interface IncrementalParsingOptions {
/** Number of items to process in each chunk */
chunkSize?: number;
/** Maximum number of concurrent operations */
maxConcurrency?: number;
/** Maximum memory usage in bytes before triggering cleanup */
maxMemoryUsage?: number;
}
/**
 * Options for a single processing operation (passed to processIncrementally).
 */
export interface ProcessingOptions {
/** Stop processing on first error (otherwise errors are presumably collected/skipped — confirm in implementation) */
failFast?: boolean;
/** Custom progress reporting interval (units not declared here — presumably an item count or milliseconds; confirm) */
progressInterval?: number;
/** Enable detailed logging */
verbose?: boolean;
}
/**
 * Context provided to each processing function, describing where the current
 * item sits within the overall run and the parser's memory state.
 */
export interface ProcessingContext {
/** Index within current chunk */
index: number;
/** Current chunk index */
chunkIndex: number;
/** Global index across all items */
totalIndex: number;
/** Current memory usage (bytes, per ProcessingStatistics.memoryUsage) */
memoryUsage: number;
/**
 * Function to check if processing should be cancelled.
 * NOTE(review): the name suggests "is cancellation possible", but the doc
 * describes "has cancellation been requested" — confirm the intended
 * semantics against the implementation.
 */
canCancel: () => boolean;
}
/**
 * Snapshot of the processing operation's state, as returned by
 * IncrementalParser.getStatistics().
 */
export interface ProcessingStatistics {
/** Whether processing is currently active */
isProcessing: boolean;
/** Number of items processed so far */
processedItems: number;
/** Total number of items to process */
totalItems: number;
/** Completion percentage (presumably 0-100 — confirm in implementation) */
percentage: number;
/** Current memory usage in bytes */
memoryUsage: number;
/** Maximum allowed memory usage in bytes */
maxMemoryUsage: number;
}
/**
 * Specialized incremental parser for large archive processing.
 *
 * The archive/options parameters were previously typed `any`, which disables
 * all type checking at call sites. They are now `unknown` — the type-safe top
 * type — which is backward-compatible in parameter position (callers may
 * still pass anything; implementations must narrow before use).
 */
export declare class LargeArchiveProcessor extends IncrementalParser {
/**
 * @param options Optional tuning knobs; see IncrementalParsingOptions.
 */
constructor(options?: IncrementalParsingOptions);
/**
 * Process a large collection of files from an archive.
 *
 * @param files     Paths of the archive entries to process.
 * @param archive   Archive handle the entries are read from (shape not
 *                  declared here; implementations must narrow before use).
 * @param processor Async callback receiving the entry path, its raw bytes,
 *                  and the per-item ProcessingContext.
 * @returns One result per file; `null` entries presumably mark files that
 *          failed or were skipped — confirm against the implementation.
 */
processArchiveFiles<T>(files: string[], archive: unknown, processor: (filePath: string, data: Buffer, context: ProcessingContext) => Promise<T>): Promise<(T | null)[]>;
/**
 * Extract assets with automatic batching and memory management.
 *
 * @param archive           Archive handle to extract from (narrow before use).
 * @param filePattern       Optional pattern restricting which entries are extracted.
 * @param extractionOptions Opaque options forwarded to extraction (shape not
 *                          declared here).
 * @returns Extracted assets. The element type is left `any[]` deliberately:
 *          tightening it to `unknown[]` would break existing consumers —
 *          consider doing so in a major release.
 */
extractAssetsIncrementally(archive: unknown, filePattern?: string, extractionOptions?: unknown): Promise<any[]>;
}
/**
 * Factory function to create an appropriately-tuned parser for different
 * input-size scenarios.
 *
 * @param scenario Rough size class of the workload — presumably selects
 *                 chunk-size / concurrency / memory presets; the concrete
 *                 values are implementation-defined and not visible here.
 * @returns An IncrementalParser configured for the given scenario.
 */
export declare function createIncrementalParser(scenario: 'small' | 'medium' | 'large' | 'massive'): IncrementalParser;
//# sourceMappingURL=IncrementalParser.d.ts.map