@andrejs1979/document
A MongoDB-compatible NoSQL document database
/**
* NoSQL - Bulk Operations and Streaming
* High-performance bulk document operations with streaming support
*/
import {
  Document,
  BulkWriteOperation,
  BulkWriteOptions,
  BulkWriteResult,
  DocumentStreamConfig,
  StreamingInsertOptions,
  ObjectId,
  QueryFilter,
  UpdateOperators,
  DocumentDatabaseConfig
} from '../types';
import { DocumentStorage } from '../storage/document-storage';
/**
* Bulk operations manager with streaming capabilities
*/
export declare class BulkOperationsManager {
private storage;
private config;
private activeStreams;
constructor(storage: DocumentStorage, config: DocumentDatabaseConfig);
/**
* Execute bulk write operations
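 *
 * A usage sketch (illustrative only): it assumes `manager` is a constructed
 * BulkOperationsManager and that BulkWriteOperation mirrors MongoDB's
 * insertOne/updateOne/deleteOne variants, which this declaration file does
 * not spell out.
 *
 * @example
 * const result = await manager.bulkWrite('users', [
 *   { insertOne: { document: { name: 'Ada' } } },
 *   { updateOne: { filter: { name: 'Bob' }, update: { $set: { active: true } } } },
 *   { deleteOne: { filter: { retired: true } } },
 * ], { ordered: false });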
*/
bulkWrite(collection: string, operations: BulkWriteOperation[], options?: BulkWriteOptions): Promise<BulkWriteResult>;
/**
 * Stream-insert large datasets from an async iterable
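 *
 * A sketch; `readDocs` is a hypothetical generator, and the batchSize
 * option name is an assumption (StreamingInsertOptions is not expanded
 * in this file).
 *
 * @example
 * async function* readDocs() {
 *   for (let i = 0; i < 100_000; i++) yield { seq: i };
 * }
 * const { insertedCount, failedCount, errors } =
 *   await manager.streamInsert('events', readDocs(), { batchSize: 1_000 }); // batchSize assumed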
*/
streamInsert(collection: string, documentStream: AsyncIterable<Document>, options?: StreamingInsertOptions): Promise<{
insertedCount: number;
failedCount: number;
errors: Array<{
document: Document;
error: string;
}>;
}>;
/**
* Create a document stream for real-time processing
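 *
 * A sketch; the config field names below (batchSize, flushIntervalMs) are
 * assumptions, since DocumentStreamConfig is not expanded in this file. The
 * returned DocumentStream is documented further down.
 *
 * @example
 * const stream = manager.createDocumentStream('metrics', {
 *   batchSize: 500,        // assumed field name
 *   flushIntervalMs: 250,  // assumed field name
 * });
 * await stream.write({ cpu: 0.42 });
 * await stream.stop(); // flushes whatever is still buffered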
*/
createDocumentStream(collection: string, config: DocumentStreamConfig): DocumentStream;
/**
* Bulk update with streaming
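 *
 * A sketch, assuming QueryFilter and UpdateOperators follow MongoDB's
 * query and $set/$inc conventions (suggested by the type names, not
 * confirmed here):
 *
 * @example
 * const res = await manager.bulkUpdate('orders', [
 *   { filter: { status: 'pending' }, update: { $set: { status: 'shipped' } }, multi: true },
 *   { filter: { sku: 'A-100' }, update: { $inc: { retries: 1 } }, upsert: true },
 * ], { ordered: false, batchSize: 100, parallelism: 4 });
 * console.log(res.matchedCount, res.modifiedCount, res.upsertedIds);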
*/
bulkUpdate(collection: string, updates: Array<{
filter: QueryFilter;
update: UpdateOperators;
upsert?: boolean;
multi?: boolean;
}>, options?: {
ordered?: boolean;
batchSize?: number;
parallelism?: number;
}): Promise<{
matchedCount: number;
modifiedCount: number;
upsertedCount: number;
upsertedIds: ObjectId[];
}>;
/**
* Bulk delete with streaming
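 *
 * A sketch, again assuming MongoDB-style query operators such as $lt; each
 * filter selects documents to delete, and only a total count is returned.
 *
 * @example
 * const { deletedCount } = await manager.bulkDelete('sessions', [
 *   { expiresAt: { $lt: new Date() } },
 *   { revoked: true },
 * ], { ordered: false, parallelism: 2 });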
*/
bulkDelete(collection: string, filters: QueryFilter[], options?: {
ordered?: boolean;
batchSize?: number;
parallelism?: number;
}): Promise<{
deletedCount: number;
}>;
/**
 * Parallel document processing with a worker-like pattern
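 *
 * A sketch: fan matching documents out to an async processor with bounded
 * concurrency and collect the per-document results.
 *
 * @example
 * const lengths = await manager.parallelProcess<number>(
 *   'articles',
 *   { published: true },
 *   async (doc) => JSON.stringify(doc).length,
 *   {
 *     parallelism: 8,
 *     onProgress: (done, total) => console.log(`${done}/${total}`),
 *     onError: (doc, err) => console.error('processing failed:', err.message),
 *   },
 * );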
*/
parallelProcess<T>(collection: string, filter: QueryFilter, processor: (document: Document) => Promise<T>, options?: {
batchSize?: number;
parallelism?: number;
onProgress?: (processed: number, total: number) => void;
onError?: (document: Document, error: Error) => void;
}): Promise<T[]>;
/**
* Get active stream statistics
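 *
 * A monitoring sketch using only the fields declared below:
 *
 * @example
 * for (const s of manager.getActiveStreams()) {
 *   console.log(s.id, s.collection, s.documentsProcessed, s.bytesProcessed);
 * }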
*/
getActiveStreams(): Array<{
id: string;
collection: string;
documentsProcessed: number;
bytesProcessed: number;
startTime: Date;
isActive: boolean;
}>;
/**
* Stop all active streams
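 *
 * A graceful-shutdown sketch:
 *
 * @example
 * process.on('SIGINT', () => { void manager.stopAllStreams(); });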
*/
stopAllStreams(): Promise<void>;
private executeOrderedBulkOperations;
private executeUnorderedBulkOperations;
private executeOperation;
private processBatch;
private processBulkUpdateBatch;
private processBulkDeleteBatch;
private updateStats;
private mergeBulkUpdateResult;
private createBatches;
private limitConcurrency;
private getEmptyBulkResult;
private generateStreamId;
}
/**
* Document stream for real-time processing
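 *
 * Lifecycle sketch; streams are normally obtained from
 * BulkOperationsManager.createDocumentStream rather than constructed
 * directly. Documents buffer until a flush (the private flushTimer suggests
 * timer-driven flushing; size-driven flushing is an assumption), and stop()
 * performs a final flush.
 *
 * @example
 * await stream.write({ event: 'click' }); // buffered, not yet persisted
 * await stream.flush();                   // force-write the buffer now
 * console.log(stream.stats.documentsProcessed, stream.isActive);
 * await stream.stop();                    // final flush, stream deactivates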
*/
export declare class DocumentStream {
readonly id: string;
readonly collection: string;
private config;
private storage;
private buffer;
private flushTimer;
readonly stats: {
documentsProcessed: number;
bytesProcessed: number;
startTime: Date;
lastFlush: Date;
};
private _isActive;
constructor(id: string, collection: string, config: DocumentStreamConfig, storage: DocumentStorage);
get isActive(): boolean;
/**
 * Add a document to the stream
*/
write(document: Document): Promise<void>;
/**
* Flush buffered documents
*/
flush(): Promise<void>;
/**
* Stop the stream and flush remaining documents
*/
stop(): Promise<void>;
private startFlushTimer;
}
//# sourceMappingURL=bulk-operations.d.ts.map