UNPKG

@getanthill/datastore

Version:

Event-Sourced Datastore

267 lines (241 loc) 6.55 kB
// Benchmark script: boots a local datastore instance, bulk-loads entities
// into a `projections` model, then walks every entity back out, reporting
// write and read throughput. Requires a MongoDB instance on localhost:27017.

// Must be set BEFORE requiring services so the OTEL Prometheus exporter
// does not start its own HTTP server during the benchmark.
process.env.OTEL_PROMETHEUS_EXPORTER_PREVENT_SERVER_START = 'true';

const { default: services } = require('../../dist/services');
const { default: App } = require('../../dist/App');
const { Datastore } = require('../../dist/sdk');
const { walkMulti } = require('../../dist/sdk/utils');

// Base model definition; `start()` clones and renames it to `projections`.
// `firstname` is declared as an encrypted field to exercise the
// encryption/decryption path during the benchmark.
const ModelConfig = {
  is_enabled: true,
  db: 'datastore',
  name: 'things',
  correlation_field: 'thing_id',
  retry_duration: 30000,
  encrypted_fields: ['firstname'],
  schema: {
    model: {
      properties: {
        firstname: { type: 'string' },
        count: { type: 'number' },
      },
    },
    events: {
      CREATED: {
        '0_0_0': {
          properties: {
            firstname: { type: 'string' },
            count: { type: 'number' },
          },
        },
      },
      UPDATED: {
        '0_0_0': {
          properties: {
            firstname: { type: 'string' },
            count: { type: 'number' },
          },
        },
      },
      RESTORED: { '0_0_0': { additionalProperties: true } },
      ROLLBACKED: { '0_0_0': { additionalProperties: true } },
      PATCHED: { '0_0_0': { additionalProperties: true } },
    },
  },
};

// NOTE(review): not an RFC 4122 UUID — a pseudo-random numeric suffix used
// only to make benchmark entity names unique. Collisions are improbable at
// this scale but not impossible.
function uuid() {
  return 'uuid' + (Math.random() * 1e16).toFixed(0);
}

// Elapsed time in milliseconds since `from` (a process.hrtime() tuple).
function time(from) {
  const hrTime = process.hrtime(from);
  return (hrTime[0] * 1000000000 + hrTime[1]) / 1000000;
}

// Configures and starts the datastore app, creates the `projections` model
// and its indexes, and returns { app, sdk } ready for benchmarking.
async function start() {
  services.config.mode = 'development';
  services.config.port = 3002;
  services.config.security.tokens = [
    {
      id: 'admin',
      level: 'admin',
      token: 'token',
    },
  ];
  services.config.features.api.admin = true;
  services.config.features.api.openAPI.isEnabled = true;
  services.config.features.api.updateSpecOnModelsChange = true;
  services.config.features.initInternalModels = false;
  services.config.security.activeNumberEncryptionKeys = 3;
  services.config.security.encryptionKeys = {
    all: [
      '0171568cb939a6a678f80a2a5204cec8',
      '0171568cb939a6a678f80a2a5204cec9',
      '0171568cb939a6a678f80a2a5204ceca',
    ],
  };

  // Fixed database name so data survives across runs — the load phase in
  // main() is skipped when the database is already populated.
  const dbName = 'datastore_bench';
  services.config.mongodb.databases[0].url = `mongodb://localhost:27017/${dbName}`;
  services.config.mongodb.databases[1].url = `mongodb://localhost:27017/${dbName}`;

  const app = new App(services);
  const sdk = new Datastore({
    baseUrl: `http://localhost:${app.services.config.port}`,
    debug: false,
    token: 'token',
    timeout: 300000,
  });

  await app.start();

  const modelConfig = {
    ...ModelConfig,
    name: 'projections',
    correlation_field: 'projection_id',
    encrypted_fields: ['firstname'],
    indexes: [
      {
        collection: 'projections',
        fields: { 'firstname::hash': 1 },
        opts: { name: 'email_hash_1' },
      },
    ],
  };

  try {
    await sdk.createModel(modelConfig);
    await sdk.createModelIndexes(modelConfig);
    // Give the app time to pick up the new model before restarting.
    await new Promise((resolve) => setTimeout(resolve, 2500));
    await app.restart();
  } catch (err) {
    // Best-effort: the model may already exist from a previous run.
    // FIX: surface the error instead of swallowing it silently so genuine
    // setup failures (bad config, dead MongoDB) are visible.
    services.telemetry.logger.warn('[bench#load] Model setup skipped', {
      err: err?.message,
    });
  }

  return { app, sdk };
}

// Runs the benchmark: load phase (parallel creates) then read phase
// (walkMulti over every entity), logging throughput for each phase.
async function main() {
  services.telemetry.logger.info('[bench#load] Starting...');
  const { app, sdk } = await start();
  services.telemetry.logger.info('[bench#load] Datastore is up and running ✅');

  let count = await sdk.count('projections', {}, 'entities');

  // When the database is already populated, target === count so total === 0
  // and the load phase is skipped; on an empty database, load 5M entities.
  const target = count || 5_000_000;
  const parallel = 100;
  const total = Math.max(0, target - count);
  const iterations = Math.ceil(total / parallel);

  services.telemetry.logger.info('[bench#load] Adding entities', {
    iterations,
    parallel,
    total,
    count,
  });

  // Mute per-request info logs during the hot loop.
  services.telemetry.logger.level = 'warn';
  let tic = process.hrtime();
  for (let i = 0; i < iterations; i++) {
    // Batches of `parallel` concurrent creates, awaited batch-by-batch to
    // bound the number of in-flight requests.
    await Promise.all(
      new Array(parallel).fill(1).map(() =>
        sdk.create('projections', {
          firstname: `alice:${uuid()}`,
        }),
      ),
    );
  }
  let duration = time(tic);
  services.telemetry.logger.level = 'info';

  services.telemetry.logger.info(
    '[bench#load] Entities imported successfully ✅',
    {
      duration: `${Math.floor(duration / 1000)}s`,
      per_request: total > 0 ? `${duration / total}ms` : -1,
    },
  );

  count = await sdk.count('projections', {}, 'entities');
  services.telemetry.logger.info('[bench#load] Reading all entities...', {
    count,
  });

  let processed = 0;
  tic = process.hrtime();
  await walkMulti(
    new Map([['datastore', sdk]]),
    [
      {
        datastore: 'datastore',
        model: 'projections',
        query: {},
        source: 'entities',
        headers: {},
      },
    ],
    parallel,
    (input, query) => {
      processed += 1;
      // Progress report every 10k entities.
      if (processed % 10000 === 0) {
        duration = time(tic);
        services.telemetry.logger.info('[bench#load] Reading all entities...', {
          count,
          processed,
          progress: `${Math.floor(10000 * (processed / count)) / 100}%`,
          duration: `${Math.floor(duration / 1000)}s`,
          per_request: `${duration / processed}ms`,
        });
      }
    },
  );

  duration = time(tic);
  services.telemetry.logger.info(
    '[bench#load] All entities read successfully ✅',
    {
      duration: `${Math.floor(duration / 1000)}s`,
      per_request: `${duration / processed}ms`,
    },
  );

  await app.stop();
}

main()
  .then(() => {
    services.telemetry.logger.info('[bench#load] Ended successfully');
  })
  .catch((err) => {
    services.telemetry.logger.error('[bench#load] Error', {
      err,
      data: err?.response?.data,
    });
    process.exit(1);
  });