UNPKG

@pujansrt/data-genie

Version:

High-performance ETL engine written in TypeScript

22 lines (21 loc) 1.59 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const csv_reader_1 = require("../readers/csv-reader");
const json_writer_1 = require("../writers/json-writer");
const transforming_reader_1 = require("../transformers/transforming-reader");
const field_transformers_1 = require("../transformers/field-transformers");
const filtering_reader_1 = require("../filters/filtering-reader");
const field_filters_1 = require("../filters/field-filters");
const job_1 = require("../core/job");

/**
 * Demonstrates a multi-stage ETL pipeline: read a CSV, filter records by an
 * email pattern, project a subset of fields, rename two of them, and write
 * the result as JSON to output/complex-pipeline.json.
 */
async function runExample() {
    console.log('\n--- Complex Pipeline Example (CSV to Fixed-Width - Conceptual) ---');

    // Stage 1: source reader — comma-separated CSV whose first row holds the field names.
    let pipeline = new csv_reader_1.CSVReader('input/users.csv')
        .setFieldSeparator(',')
        .setFieldNamesInFirstRow(true);

    // Stage 2: keep only records whose email matches the ".com" pattern.
    // The (0, fn)(...) form is TS-compiled output that calls fn without a `this` binding.
    pipeline = new filtering_reader_1.FilteringReader(pipeline)
        .add(new field_filters_1.FieldFilter('email')
            .addRule((0, field_filters_1.PatternMatch)('.*\\.com'))
            .createRecordFilter());

    // Stage 3: project down to the three fields the output needs.
    pipeline = new transforming_reader_1.TransformingReader(pipeline)
        .add(new field_transformers_1.SelectFields('email', 'fname', 'lname').transform());

    // Stage 4: rename the short field names to their snake_case equivalents.
    pipeline = new transforming_reader_1.TransformingReader(pipeline)
        .add(new field_transformers_1.RenameField('fname', 'first_name').transform())
        .add(new field_transformers_1.RenameField('lname', 'last_name').transform());

    // Drive the pipeline: pull from the reader chain, push into the JSON writer.
    await job_1.Job.run(pipeline, new json_writer_1.JsonWriter('output/complex-pipeline.json'));
    console.log('Complex pipeline output written to output/complex-pipeline.json');
}

// Fire-and-forget entry point; surface any pipeline failure on stderr.
runExample().catch(console.error);