UNPKG

@getanthill/datastore

Version:

Event-Sourced Datastore

452 lines (367 loc) 10.8 kB
import type { Services } from '../typings';
import type { Step } from '../sdk/aggregator/Aggregator';

import fs from 'fs';

import { Command, Option } from 'commander';
import yaml from 'js-yaml';
import pick from 'lodash/pick';
import omit from 'lodash/omit';

import * as utils from './utils';
import { Aggregator } from '../sdk';

/**
 * Logs a command failure: prefers the HTTP response payload when the
 * error carries one (API errors), otherwise logs the error itself.
 * Shared by every command action below so error output stays consistent.
 */
function logCommandError(err: any, format: string) {
  if (err.response) {
    utils.log(err.response.data, format);
    return;
  }
  utils.log(err, format);
}

/**
 * Selects the fixtures whose `id` is in `ids`, together with all of
 * their transitive `links` dependencies. Dependencies come first in the
 * returned array; duplicates are removed (keyed by `id`).
 *
 * @param fixtures - full fixture list to filter
 * @param ids - explicitly requested fixture ids
 * @param seen - ids already collected; guards against circular `links`
 *   (a -> b -> a previously recursed forever)
 */
function filterFixtures(
  fixtures: any,
  ids: string[],
  seen: Set<string> = new Set(),
) {
  if (ids.length === 0) {
    return [];
  }

  // Only descend into ids not visited yet, otherwise a cycle in `links`
  // would cause unbounded recursion.
  const toAdd = fixtures.filter(
    (fixture: any) => ids.includes(fixture.id) && !seen.has(fixture.id),
  );
  toAdd.forEach((f: any) => seen.add(f.id));

  const dependencies: any[] = [];
  for (const addition of toAdd) {
    dependencies.push(
      ...filterFixtures(
        fixtures,
        (addition.links || []).map((f: any) => f.id),
        seen,
      ),
    );
  }

  // Map keyed by id dedupes; insertion order keeps dependencies ahead of
  // the explicitly requested fixtures.
  const filteredFixtures = new Map();
  dependencies.forEach((f) => {
    filteredFixtures.set(f.id, f);
  });
  toAdd.forEach((f: any) => {
    filteredFixtures.set(f.id, f);
  });

  return Array.from(filteredFixtures.values());
}

/**
 * `aggregate <file_path>` action: loads a YAML pipeline of Aggregator
 * steps and runs it against the configured datastores.
 */
export function aggregate(services: Services) {
  return async (filePath: string, cmd: any) => {
    const aggregator = new Aggregator(services.datastores);
    try {
      const pipeline = yaml.load(fs.readFileSync(filePath, 'utf8')) as Step[];
      const data = await aggregator.aggregate(pipeline);

      if (cmd.verbose === true) {
        aggregator.logs.forEach((logLine) => {
          utils.log(logLine, cmd.format);
        });
      }

      utils.log(data, cmd.format);
    } catch (err: any) {
      // Always dump the aggregation logs on failure to ease debugging.
      aggregator.logs.forEach((logLine) => {
        utils.log(logLine, cmd.format);
      });
      logCommandError(err, cmd.format);
    }
  };
}

/**
 * `import <file_path>` action: reads fixtures from a YAML (default) or
 * JSON file, optionally filters them by `--ids` (plus linked
 * dependencies), and imports them into the selected datastore.
 */
export function importData(services: Services) {
  return async (filePath: string, cmd: any) => {
    try {
      const datastore = services.datastores.get(cmd.datastore);

      if (!datastore) {
        return;
      }

      let fixtures;
      if (cmd.json === true) {
        // NOTE(review): `require` resolves relative paths against this
        // module, not the CWD — presumably callers pass absolute paths.
        fixtures = require(filePath);
      } else {
        fixtures = yaml.load(fs.readFileSync(filePath, 'utf8'));
      }

      /**
       * @deprecated Backward compatibility for `fixtures` key
       */
      if ('fixtures' in fixtures) {
        fixtures = fixtures.fixtures;
      }

      if (cmd.ids) {
        fixtures = filterFixtures(fixtures, cmd.ids);
      }

      const { data: modelConfigs } = await datastore.getModels();

      const entities = await datastore.import(fixtures, modelConfigs, {
        dryRun: cmd.dryRun,
      });

      let result: any[] = Array.from(entities.values());

      // --diff-only: keep entries with pending updates (or no update
      // marker at all).
      if (cmd.diffOnly === true) {
        result = result.filter(
          (r) => r.__update__ === undefined || r.__update__.length > 0,
        );
      }

      utils.log(result, cmd.format);
    } catch (err: any) {
      logCommandError(err, cmd.format);
    }
  };
}

/**
 * `replay <file_path>` action: re-applies exported events to the
 * datastore, oldest first (sorted by `event.created_at`), with the
 * `replay: 'true'` flag set on every apply call.
 */
export function replayEvents(services: Services) {
  return async (filePath: string, cmd: any) => {
    try {
      const datastore = services.datastores.get(cmd.datastore);

      if (!datastore) {
        return;
      }

      let fixtures;
      if (cmd.json === true) {
        fixtures = require(filePath);
      } else {
        fixtures = yaml.load(fs.readFileSync(filePath, 'utf8'));
      }

      if (cmd.ids) {
        fixtures = filterFixtures(fixtures, cmd.ids);
      }

      // Apply events in chronological order.
      fixtures = fixtures.sort((a: any, b: any) =>
        a.event.created_at.localeCompare(b.event.created_at),
      );

      const { data: modelConfigs } = await datastore.getModels();

      const result = [];
      for (const e of fixtures) {
        const modelConfig = modelConfigs[e.model];
        const event = e.event;

        // `datastore` is guaranteed non-null by the guard above, so the
        // previous optional chaining (`datastore?.apply`) was dropped.
        const { data: res } = await datastore.apply(
          e.model,
          event[modelConfig.correlation_field],
          event.type,
          event.v,
          event,
          {
            replay: 'true',
          },
        );

        result.push(res);
      }

      utils.log(result, cmd.format);
    } catch (err: any) {
      logCommandError(err, cmd.format);
    }
  };
}

/**
 * `export <model> <file_path>` action: streams every entity (or event,
 * per `--source`) matching `--query` into a YAML or JSON file,
 * optionally wrapped in the import envelope (model / id / idempotency /
 * entity|event) unless `--raw` is set.
 */
export function exportData(services: Services) {
  return async (model: string, filePath: string, cmd: any) => {
    try {
      const datastore = services.datastores.get(cmd.datastore);

      if (!datastore) {
        return;
      }

      const writeStream = fs.createWriteStream(filePath);

      const { data: modelConfigs } = await datastore.getModel(model);
      const modelConfig = modelConfigs[model];

      if (cmd.format === 'json') {
        writeStream.write('[');
      }

      const query: any = cmd.query || {};
      // Pre-count so we know when to stop emitting JSON separators.
      const total: number = await datastore.count(model, query);
      let count = 0;

      await datastore.walk(
        model,
        query,
        async (entity: any) => {
          const data =
            cmd.raw === true
              ? entity
              : {
                  model,
                  // Events get a `/v<version>` suffix so every event of
                  // the same correlation id keeps a unique export id.
                  id: `${entity[modelConfig.correlation_field]}${
                    cmd.source === 'events' ? '/v' + entity.version : ''
                  }`,
                  idempotency: pick(
                    entity,
                    // NOTE(review): without --idempotency-keys this uses
                    // the correlation field's *value* as the key to pick,
                    // which would usually yield {} — the key name
                    // `modelConfig.correlation_field` looks intended.
                    // Behavior preserved; confirm before changing.
                    cmd.idempotencyKeys ||
                      entity[modelConfig.correlation_field],
                  ),
                  event: cmd.source !== 'events' ? undefined : entity,
                  entity:
                    cmd.source !== 'entities'
                      ? undefined
                      : omit(
                          entity,
                          modelConfig.correlation_field,
                          'created_at',
                          'updated_at',
                          'version',
                        ),
                };

          writeStream.write(
            cmd.format === 'json'
              ? JSON.stringify(data, null, 2)
              : yaml.dump([data]),
            'utf-8',
          );

          count += 1;

          if (cmd.format === 'json' && count < total) {
            writeStream.write(', ');
          }
        },
        500,
        cmd.source,
        {},
        {
          sleep: 500,
        },
      );

      if (cmd.format === 'json') {
        writeStream.write(']');
      }

      writeStream.end();

      utils.log(
        {
          msg: 'Export succeed',
          count,
          file_path: filePath,
        },
        cmd.format,
      );
    } catch (err: any) {
      logCommandError(err, cmd.format);
    }
  };
}

/**
 * `validate <model>` action: walks every entity matching `--query`,
 * counting processed entities; a failing walk is tallied as an error
 * and final stats are always reported.
 */
export function validateData(services: Services) {
  return async (model: string, cmd: any) => {
    try {
      const datastore = services.datastores.get(cmd.datastore);

      if (!datastore) {
        return;
      }

      const query: any = cmd.query || {};
      const total: number = await datastore.count(model, query);

      utils.log(
        {
          msg: 'Starting the validation',
          total,
        },
        cmd.format,
      );

      const stats = {
        total,
        processed: 0,
        errors: 0,
      };

      try {
        await datastore.walk(
          model,
          query,
          async () => {
            stats.processed += 1;
          },
          100,
          'entities',
          {},
          {
            sleep: 500,
          },
        );
      } catch (err: any) {
        // Keep going after a failed walk so the final stats still print.
        stats.errors += 1;
        console.error(err?.response?.data);
        utils.log(
          {
            msg: 'Validation error',
          },
          cmd.format,
        );
      }

      utils.log(
        {
          msg: 'Validation ended',
          stats,
        },
        cmd.format,
      );
    } catch (err: any) {
      logCommandError(err, cmd.format);
    }
  };
}

/**
 * Registers the data utilities sub-commands (aggregate, import, replay,
 * export, validate) and returns the configured commander program.
 *
 * @param services - application services (datastore registry, …)
 * @param name - sub-command group name, defaults to 'data'
 */
export default function register(services: Services, name = 'data') {
  const program = new Command(name);

  program.summary('Data utilities commands');

  // Builds a fresh `--format` Option per command (one instance each).
  const formatOption = () =>
    new Option('--format <format>', 'Response format').choices([
      'json',
      'yaml',
    ]);

  // Aggregate
  program
    .command('aggregate <file_path>')
    .addOption(formatOption())
    .option('--verbose', 'Print the aggregation logs', false)
    .description('Perform an aggregation on the datastore')
    .action(aggregate(services));

  // Import
  let c = program
    .command('import')
    .argument('<file_path>', 'Path of the data file to import');
  utils.addDatastoreOptions(c, services);
  c.option('--ids <ids...>', 'IDs to import in the file')
    .addOption(formatOption())
    .option('--json', 'Input as JSON', false)
    .option('--dry-run', 'If present, does not perform the import', false)
    .option('--diff-only', 'Show changes only', false)
    .description('Import entities into the Datastore')
    .action(importData(services));

  // Replay
  c = program
    .command('replay')
    .argument('<file_path>', 'Path of the data file to import');
  utils.addDatastoreOptions(c, services);
  c.option('--ids <ids...>', 'IDs to import in the file')
    .addOption(formatOption())
    .option('--json', 'Input as JSON', false)
    .option('--dry-run', 'If present, does not perform the import', false)
    .description('Replay events into the Datastore')
    .action(replayEvents(services));

  // Export
  c = program.command('export <model> <file_path>');
  utils.addDatastoreOptions(c, services);
  c.addOption(
    new Option('-s, --source <source>', 'Count source: entities or events')
      .default('entities')
      .choices(['entities', 'events']),
  );
  c.option(
    '-i, --idempotency-keys <idempotency_keys...>',
    'Idempotency keys to use',
  )
    .option('-r, --raw', 'Export raw data without import logic')
    .option(
      '-f, --format <format>',
      'Export data into the given format',
      'yaml',
    )
    .option('-q, --query <query>', 'Export query to apply', JSON.parse)
    .description('Export data from the Datastore')
    .action(exportData(services));

  // Validate
  c = program.command('validate <model>');
  utils.addDatastoreOptions(c, services);
  c.option('-q, --query <query>', 'Validate query to apply', JSON.parse)
    .description('Validate data from the Datastore')
    .action(validateData(services));

  return program;
}