zcatalyst-cli

Command Line Tool for CATALYST

'use strict';
// Async/await helpers emitted by the TypeScript compiler.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ansi_colors_1 = require("ansi-colors");
const path_1 = require("path");
const endpoints_1 = require("../../endpoints");
const index_js_1 = __importDefault(require("../../error/index.js"));
const command_1 = __importDefault(require("../../internal/command"));
const prompt_1 = __importDefault(require("../../prompt"));
const runtime_store_1 = __importDefault(require("../../runtime-store"));
const scopes_js_1 = __importDefault(require("../../authentication/constants/scopes.js"));
const env_1 = require("../../util_modules/env");
const fs_1 = require("../../util_modules/fs");
const index_1 = require("../../util_modules/logger/index");
const option_1 = require("../../util_modules/option");
const shell_1 = require("../../util_modules/shell");
// Read and parse the import config JSON from the given path.
// Returns an empty object when the file is missing or unparsable.
const getWriteConfigFromPath = (pth) => __awaiter(void 0, void 0, void 0, function* () {
    const configJSON = yield fs_1.ASYNC.readJSONFile((0, path_1.resolve)(pth), { checkpath: true, throws: false });
    if (!configJSON || Object.keys(configJSON).length === 0) {
        (0, index_1.debug)('No parsable import config given');
        return {};
    }
    return configJSON;
});
// `ds:import [file]`: schedule a bulk write of records into a Data Store table.
exports.default = new command_1.default('ds:import [file]')
    .description('Bulk write records to a table in the Catalyst Data Store')
    .option('--table <name|id>', 'Name or ID of the table to which data to be written (e.g. "UserDetails")')
    .option('--config <path>', 'Path of the configuration json file to be used for import.')
    .option('--production', 'flag for pointing to production environment')
    .needs('auth', [scopes_js_1.default.projects, scopes_js_1.default.datastore, scopes_js_1.default.row])
    .needs('config', { optional: true })
    .needs('rc', { optional: true })
    .action((uploadFile) => __awaiter(void 0, void 0, void 0, function* () {
        var _a, _b, _c;
        const env = (0, option_1.getOptionValue)('production', false) ? 'Production' : 'Development';
        const configPath = (0, option_1.getOptionValue)('config', false);
        const importConfig = configPath ? yield getWriteConfigFromPath(configPath) : {};
        // --table overrides table_identifier from the config file.
        const tableId = (0, option_1.getOptionValue)('table', importConfig.table_identifier);
        if (!tableId) {
            throw new index_js_1.default('Table identifier missing', { exit: 0, errorId: 'IMP-1' });
        }
        // When the config names no existing file or object, upload the given CSV to a Stratus bucket first.
        if (!importConfig.file_id && !((_a = importConfig.object_details) === null || _a === void 0 ? void 0 : _a.object_key)) {
            if (!uploadFile) {
                throw new index_js_1.default('No CSV file provided', { exit: 0, errorId: 'IMP-2' });
            }
            const bucketAPI = yield (0, endpoints_1.stratusAPI)({ env });
            const allBuckets = yield bucketAPI.getAllBuckets();
            if (allBuckets.length > 0) {
                const choices = allBuckets.map((bucketDetail) => prompt_1.default.choice(bucketDetail.bucket_name, { value: bucketDetail }));
                // Prompt only when the config does not already name a bucket.
                const bucketAns = yield prompt_1.default.ask(prompt_1.default.question('bucket', 'Select a bucket to which you want to upload the object to : ', {
                    type: 'list',
                    choices,
                    when: !((_b = importConfig.object_details) === null || _b === void 0 ? void 0 : _b.bucket_name)
                }));
                const bucket = bucketAns.bucket;
                const bucketName = bucket.bucket_name === undefined
                    ? allBuckets.find((bucketDetails) => { var _a; return bucketDetails.bucket_name === ((_a = importConfig.object_details) === null || _a === void 0 ? void 0 : _a.bucket_name); })
                    : bucket.bucket_name;
                if (!bucketName) {
                    throw new index_js_1.default('Bucket not present', {
                        exit: 1,
                        errorId: 'IMP-4',
                        arg: [
                            ansi_colors_1.bold.red(((_c = importConfig.object_details) === null || _c === void 0 ? void 0 : _c.bucket_name) || ''),
                            (0, ansi_colors_1.italic)(configPath || ''),
                            (0, ansi_colors_1.bold)(uploadFile),
                            (0, ansi_colors_1.bold)('Details(bucket_name and object_key)'),
                            (0, ansi_colors_1.italic)('object_details')
                        ]
                    });
                }
                const fileName = uploadFile ? (0, path_1.basename)(uploadFile) : '';
                try {
                    yield bucketAPI.uploadObject(bucketName, fileName, fs_1.SYNC.getReadStream((0, path_1.resolve)(uploadFile)));
                }
                catch (err) {
                    throw new index_js_1.default('Error while uploading the object to the stratus. Use --verbose for error details', { exit: 1, original: err });
                }
                const objDetail = { bucket_name: bucketName, object_key: fileName };
                importConfig.object_details = objDetail;
            }
            else {
                throw new index_js_1.default('bucket needed', { exit: 0, errorId: 'IMP-3' });
            }
        }
        // Schedule the bulk write and report the job id.
        const bulkAPI = yield (0, endpoints_1.bulkDSAPI)({ env });
        const writeResponse = yield bulkAPI.write(tableId + '', importConfig);
        (0, index_1.info)();
        (0, index_1.success)(`Successfully scheduled import job for table "${tableId}" with jobid "${writeResponse.job_id}"`);
        // In a primary (interactive) shell, chain into `ds:status import <job_id>`
        // with the same global options so the job can be tracked immediately.
        if ((0, env_1.isPrimaryShell)()) {
            const exitListeners = process.listeners('exit');
            process.removeAllListeners('exit');
            const allGlobalOpts = runtime_store_1.default.get('context.opts.globalOpts', {});
            const optsArr = Object.keys(allGlobalOpts).reduce((arr, key) => {
                arr.push('--' + key);
                if (allGlobalOpts[key]) {
                    arr.push(allGlobalOpts[key]);
                }
                return arr;
            }, (env === 'Production' ? ['--production'] : []));
            (0, shell_1.spawn)('catalyst', ['ds:status', 'import', writeResponse.job_id + '', ...optsArr], { shell: true, stdio: 'inherit' }).SYNC();
            (exitListeners || []).forEach((listner) => {
                process.addListener('exit', listner);
            });
        }
    }));
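
For quick reference, the command resolves its inputs in this order: `--table` takes precedence over `table_identifier` in the `--config` JSON, and when the config supplies neither a `file_id` nor an `object_details.object_key`, the positional CSV file is uploaded to a Stratus bucket before the import is scheduled. As a minimal sketch, a config limited to the fields this command actually reads could look like the following (the file name `import-config.json` and all values are hypothetical, and the bulk write API may accept further fields that are simply passed through here):

    {
        "table_identifier": "UserDetails",
        "object_details": {
            "bucket_name": "my-bucket"
        }
    }

Because `bucket_name` is pre-set, the interactive bucket prompt is skipped and the positional CSV is uploaded into that bucket. A matching invocation, using only the flags defined above, would be:

    catalyst ds:import ./users.csv --config ./import-config.json --production

On success the command logs the scheduled job ID and, when running in a primary shell, spawns `catalyst ds:status import <job_id>` with the same global options so the job can be tracked right away.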