@alinex/datastore
Version:
Read, work with, and write data structures from and to different locations and formats.
192 lines • 6.21 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = require("fs");
const yargs = require("yargs");
const core_1 = require("@alinex/core");
const index_1 = require("./index");
const compression_1 = require("./compression");
const format_1 = require("./format");
// Detect "quiet" mode early: suppress the banner while shell completion is
// running, when the completion script is requested, or on -q/--quiet.
const silentFlags = ['--get-yargs-completions', 'bashrc', '-q', '--quiet'];
const quiet = silentFlags.some(flag => process.argv.includes(flag));
if (!quiet)
    console.log(core_1.default.logo('Alinex Data Store'));
// Route any unexpected failure through the central exit handler (code 1).
process.on('uncaughtException', err => core_1.default.exit(err, 1));
process.on('unhandledRejection', (err) => core_1.default.exit(err, 1));
// Command line definition: one default command transforming a single input,
// plus a `definition` command that treats the input as a list of sources.
yargs
    .env('DATASTORE')
    .usage('Usage: $0 [options]')
    .example('$0 -i /etc/my.json -o /etc/my.yaml', 'transform data structure')
    .example('$0 --input-format json --output-format yaml', 'transform stdin to stdout')
    .example('$0 definition --input my.json', 'use input structure defined in file')
    .option('input', {
    alias: 'i',
    describe: 'input path or URI to read from',
    type: 'string',
    group: 'Input Options:'
})
    .option('input-format', {
    describe: 'format used to parse input',
    choices: format_1.formats,
    group: 'Input Options:'
})
    .option('input-compression', {
    describe: 'compression method to be used',
    choices: compression_1.compressions,
    group: 'Input Options:'
})
    .option('records', {
    describe: 'flag to read the CSV always as with records',
    type: 'boolean',
    group: 'Input Options:'
})
    .option('filter', {
    alias: 'f',
    describe: 'filter rule to modify data structure',
    type: 'string',
    group: 'Input Options:'
})
    .option('output', {
    alias: 'o',
    describe: 'output path or URI to write to',
    type: 'string',
    group: 'Output Options:'
})
    .option('output-format', {
    describe: 'format used to transform for output',
    choices: format_1.formats,
    group: 'Output Options:'
})
    .option('output-compression', {
    describe: 'compression method to be used',
    choices: compression_1.compressions,
    group: 'Output Options:'
})
    .option('compression-level', {
    describe: 'compression level 0 (no compression) to 9 (best compression, default)',
    type: 'number',
    group: 'Output Options:'
})
    .option('module', {
    describe: 'use module format in storing JavaScript, to load it using require or import',
    type: 'boolean',
    group: 'Output Options:'
})
    .option('rootName', {
    describe: 'root element name in formatting as XML',
    type: 'string',
    group: 'Output Options:'
})
    // FIX: `proxy` was a copy-paste of `http-method` (HTTP-verb choices and the
    // wrong description). It is forwarded as the `proxy` load option and must
    // accept a proxy server URI instead.
    .option('proxy', {
    describe: 'proxy server URI to route the request through',
    type: 'string',
    group: 'Protocol Options:'
})
    .option('http-method', {
    describe: 'HTTP method to use for call',
    choices: ['get', 'delete', 'head', 'options', 'post', 'put', 'patch'],
    group: 'Protocol Options:'
})
    .option('http-header', {
    describe: 'send additional HTTP header',
    type: 'array',
    group: 'Protocol Options:'
})
    .option('http-data', {
    describe: 'send additional HTTP POST or PUT data',
    type: 'string',
    group: 'Protocol Options:'
})
    .option('ignore-error', {
    describe: 'ignore HTTP error response codes',
    type: 'boolean',
    group: 'Protocol Options:'
})
    .option('sftp-privatekey', {
    describe: 'private key file',
    type: 'string',
    group: 'Protocol Options:'
})
    .option('sftp-passphrase', {
    describe: 'passphrase for private key, if needed',
    type: 'string',
    group: 'Protocol Options:'
})
    .option('quiet', {
    alias: 'q',
    describe: 'only output result',
    type: 'boolean'
})
    .command('*', 'work with single input')
    .command('definition', 'use input as definition (allow multiple sources)')
    .wrap(yargs.terminalWidth())
    .alias('V', 'version')
    .alias('h', 'help')
    .help()
    .completion('bashrc-script', false)
    .strict()
    .epilog('Copyright Alexander Schilling 2019');
// Don't leave the async entry point floating: route rejections through the
// central exit handler (same as the unhandledRejection hook, but explicit).
main(yargs.argv).catch(err => core_1.default.exit(err, 1));
/**
 * CLI entry point: load a data structure from the input source, optionally
 * apply a filter, and either save it to the output target or print it to
 * STDOUT in the requested format.
 * @param {object} args - parsed yargs arguments
 * @throws {Error} if STDIN is used without an explicit --input-format, or if
 *   a `definition` input does not contain an array of sources
 */
async function main(args) {
    // The SFTP backend needs the key contents, not the path the user gave.
    if (args['sftp-privatekey'])
        args['sftp-privatekey'] = fs_1.readFileSync(args['sftp-privatekey']);
    const ds = new index_1.default();
    if (!args.input) {
        // STDIN has no file extension, so the format cannot be auto-detected.
        // FIX: throw a real Error instead of a bare string (consistent with the
        // `definition` check below; preserves the exact message).
        if (!args['input-format'])
            throw new Error('No --input-format for STDIN defined.');
        args.input = 'file:/dev/stdin';
    }
    await ds.load({
        source: args.input,
        options: {
            proxy: args.proxy,
            httpMethod: args['http-method'],
            httpHeader: args['http-header'],
            httpData: args['http-data'],
            ignoreError: args['ignore-error'],
            privateKey: args['sftp-privatekey'],
            passphrase: args['sftp-passphrase'],
            format: args['input-format'],
            compression: args['input-compression'],
            records: args.records
        }
    });
    // `definition` command: the loaded structure is itself a list of load
    // specifications, so reload using each of them as a source.
    if (args._.length && args._[0] === 'definition') {
        if (!Array.isArray(ds.data))
            throw new Error('The loaded definition of input is no array!');
        await ds.load(...ds.data);
    }
    // Work on a second store so the original source reference stays intact
    // for the status message below.
    const dt = new index_1.default();
    dt.data = ds.get(args.filter);
    if (args.output) {
        try {
            await dt.save({
                source: args.output,
                options: {
                    privateKey: args['sftp-privatekey'],
                    passphrase: args['sftp-passphrase'],
                    format: args['output-format'],
                    compression: args['output-compression'],
                    module: args.module,
                    rootName: args.rootName
                }
            });
            if (!args.quiet)
                console.log(`Transformed ${ds.source} to ${dt.source}.`);
        }
        catch (err) {
            // Save failures exit with a distinct code (2) vs. generic errors (1).
            core_1.default.exit(err, 2);
        }
    }
    else {
        // No output target: pretty-print to STDOUT, defaulting to JSON.
        const format = args['output-format'] || 'json';
        if (!args.quiet) {
            console.log(`Data of ${ds.source} formatted as ${format}:`);
            console.log();
        }
        console.log((await dt.format('file:/dev/stdout', { format })).toString());
    }
}
//# sourceMappingURL=cli.js.map