UNPKG

@forzalabs/remora

Version:

A powerful CLI tool for seamless data translation.

76 lines (75 loc) 4.12 kB
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const Affirm_1 = __importDefault(require("../core/Affirm"));
const Environment_1 = __importDefault(require("./Environment"));
const FileCompiler_1 = __importDefault(require("./file/FileCompiler"));
/**
 * Translates delimited text (CSV, or header-less TXT) into plain row objects,
 * using a producer definition to decide which columns to keep and how to
 * rename them.
 *
 * NOTE(review): rows are split with a plain `String.prototype.split` on the
 * delimiter — quoted fields containing the delimiter are NOT supported.
 */
class ParseManagerClass {
    constructor() {
        /**
         * Parses an array of CSV lines into row objects.
         *
         * @param csv      Lines of the file (already split on newlines).
         * @param producer Producer definition describing the expected columns.
         * @returns Array of objects keyed by each column's `nameInProducer`.
         */
        this.csvToJson = (csv, producer) => {
            (0, Affirm_1.default)(csv, 'Invalid csv content');
            Affirm_1.default.hasValue(csv.length, 'Invalid csv content length');
            return this.csvLinesToJson(csv, producer);
        };
        /**
         * Core line-to-object conversion.
         *
         * @param lines    Raw lines, header included (unless synthesized — see
         *                 `_getClassifiedRows`).
         * @param producer Producer definition (supplies delimiter + columns).
         * @param discover When truthy, columns found in the file header are
         *                 accepted even if not declared on the producer.
         * @returns Array of row objects keyed by `saveAs` names.
         */
        this.csvLinesToJson = (lines, producer, discover) => {
            (0, Affirm_1.default)(lines, 'Invalid csv lines');
            Affirm_1.default.hasValue(lines.length, 'Invalid csv lines length');
            const delimiterChar = producer.settings.delimiter ?? ',';
            const { header, records } = this._getClassifiedRows(lines, delimiterChar, producer);
            const headerColumns = this._extractHeader(header, delimiterChar, producer, discover);
            // One object per record: each mapped column pulls its cell by index
            // and is stored under the producer-facing name.
            return records.map((record) => {
                const cells = record.split(delimiterChar).map((cell) => cell.trim());
                return Object.fromEntries(headerColumns.map((column) => [column.saveAs, cells[column.index]]));
            });
        };
        /**
         * Splits raw lines into a header line and data records.
         *
         * For header-less TXT files a synthetic header is generated from the
         * producer's compiled column names so the downstream mapping works
         * unchanged; otherwise the first line is taken as the header.
         */
        this._getClassifiedRows = (lines, delimiterChar, producer) => {
            const isHeaderlessTxt = producer.settings.fileType === 'TXT' && !producer.settings.hasHeaderRow;
            if (!isHeaderlessTxt) {
                return { header: lines[0], records: lines.slice(1) };
            }
            // Header-less TXT: fabricate a header that maps 1:1 onto the producer.
            const source = Environment_1.default.getSource(producer.source);
            const columns = FileCompiler_1.default.compileProducer(producer, source);
            const syntheticHeader = columns.map((column) => column.nameInProducer).join(delimiterChar);
            return { header: syntheticHeader, records: lines };
        };
        /**
         * Resolves the producer's columns against the file's header line.
         *
         * Each compiled producer column is located in the header by its alias
         * (falling back to its name); a missing column is a hard error. In
         * discovery mode the header's own columns are appended, since the
         * producer's dimensions are not yet known.
         *
         * @returns `{ index, name, saveAs }` descriptors, one per mapped column.
         */
        this._extractHeader = (headerLine, delimiter, producer, discover) => {
            (0, Affirm_1.default)(headerLine, `Invalid CSV header line for producer "${producer.name}"`);
            (0, Affirm_1.default)(delimiter, 'Invalid CSV delimiter');
            (0, Affirm_1.default)(producer, 'Invalid producer');
            const source = Environment_1.default.getSource(producer.source);
            const columns = FileCompiler_1.default.compileProducer(producer, source);
            const headerColumns = headerLine.split(delimiter).map((name) => name.trim());
            // Discovery mode: dimensions are unset, so trust the file's header.
            if (discover) {
                columns.push(...headerColumns.map((name) => ({ nameInProducer: name })));
            }
            const csvColumns = [];
            for (const pColumn of columns) {
                const columnKey = pColumn.aliasInProducer ?? pColumn.nameInProducer;
                const csvColumnIndex = headerColumns.indexOf(columnKey);
                (0, Affirm_1.default)(csvColumnIndex > -1, `The column "${pColumn.nameInProducer}" (with key "${columnKey}") of producer "${producer.name}" doesn't exist in the underlying dataset.`);
                csvColumns.push({ index: csvColumnIndex, name: columnKey, saveAs: pColumn.nameInProducer });
            }
            return csvColumns;
        };
    }
}
// Module-level singleton, matching the original transpiled export shape.
const ParseManager = new ParseManagerClass();
exports.default = ParseManager;