@naturalcycles/db-lib
Lowest Common Denominator API to supported Databases
import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
import type { AsyncMapper, UnixTimestamp } from '@naturalcycles/js-lib/types';
import type { TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
import type { CommonDB } from '../commondb/common.db.js';
import type { CommonDBSaveOptions } from '../db.model.js';
export interface DBPipelineCopyOptions extends TransformLogProgressOptions {
dbInput: CommonDB;
dbOutput: CommonDB;
/**
* List of tables to dump. If undefined, CommonDB.getTables() will be called and ALL returned tables will be dumped.
*/
tables?: string[];
/**
* How many tables to dump in parallel.
*
* @default 16
* Set to `1` for serial (one-at-a-time) processing or for debugging.
*/
concurrency?: number;
/**
* @default 100
*
* Determines the number of rows passed to each .saveBatch() call.
*/
chunkSize?: number;
/**
* @default ErrorMode.SUPPRESS
*
* Used in the high-level pMap(tables, ...) call.
* Also used as the default `errorMode` in TransformMapOptions.
*/
errorMode?: ErrorMode;
/**
* @default undefined
* If set, at most that many rows will be dumped per table.
*/
limit?: number;
/**
* If set, an "incremental backup" (not a full one) will be done: only entities with `updated >= sinceUpdated` will be copied.
*
* @default undefined
*/
sinceUpdated?: UnixTimestamp;
/**
* Optionally, you can provide a mapper to run for each table.
*
* @default `{}`
* The default mapper is a "passthroughMapper" (passes all data through as-is).
*/
mapperPerTable?: Record<string, AsyncMapper>;
/**
* You can alter default `transformMapOptions` here.
*
* @default (see the code)
* The goal is to have default values that are reasonable for such a job, producing resilient output (forgiving individual errors).
* `metric` will be set to the table name.
*/
transformMapOptions?: TransformMapOptions;
saveOptionsPerTable?: Record<string, CommonDBSaveOptions<any>>;
}
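A hedged sketch of the error-handling options above. It assumes `ErrorMode.THROW_AGGREGATED` and `ErrorMode.SUPPRESS` are members of js-lib's ErrorMode, and that `TransformMapOptions` carries its own `errorMode` (as the comments above imply); this is an illustration, not the library's documented recipe.

import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
import type { DBPipelineCopyOptions } from '@naturalcycles/db-lib'; // export path assumed

// Fail the whole job at the end if any table failed to copy,
// while forgiving individual row errors inside each table's stream.
const errorHandling: Partial<DBPipelineCopyOptions> = {
  errorMode: ErrorMode.THROW_AGGREGATED,
  transformMapOptions: { errorMode: ErrorMode.SUPPRESS },
};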
/**
* Pipeline from input stream(s) to CommonDB .saveBatch().
* The input stream can be a stream from CommonDB.streamQuery().
* Allows defining a mapper and a predicate to map/filter objects between input and output.
* Handles backpressure.
*/
export declare function dbPipelineCopy(opt: DBPipelineCopyOptions): Promise<NDJsonStats>;
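A minimal usage sketch, not from the source: the root export path, the CommonDB instances, the table names, the `users` mapper, and the `(row, index)` AsyncMapper call signature are all assumptions for illustration.

import { dbPipelineCopy } from '@naturalcycles/db-lib'; // export path assumed
import type { CommonDB } from '@naturalcycles/db-lib'; // export path assumed
import type { UnixTimestamp } from '@naturalcycles/js-lib/types';

// Any two CommonDB implementations (hypothetical instances).
declare const dbInput: CommonDB;
declare const dbOutput: CommonDB;

const stats = await dbPipelineCopy({
  dbInput,
  dbOutput,
  tables: ['users', 'orders'], // omit to copy ALL tables returned by dbInput.getTables()
  concurrency: 4, // copy 4 tables in parallel (default: 16); set to 1 for serial processing
  chunkSize: 500, // rows per .saveBatch() call (default: 100)
  sinceUpdated: 1704067200 as UnixTimestamp, // incremental: only rows updated since 2024-01-01 UTC
  mapperPerTable: {
    // Hypothetical mapper: strip an `email` field from `users`; unlisted tables pass through as-is.
    users: (row: any) => ({ ...row, email: undefined }),
  },
});

console.log(stats); // NDJsonStats, resolved once all tables have been processed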