// @getanthill/datastore — Event-Sourced Datastore
// Version: (unspecified)
// 401 lines • 17 kB — compiled JavaScript output
;
/**
 * TypeScript interop helper: re-exports property `k` of module `m` on
 * object `o` under the (optionally renamed) key `k2`.
 * When property descriptors are supported, a live getter is installed so
 * the binding tracks later mutations of `m[k]`; otherwise the value is
 * copied once.
 */
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (o, m, k, k2) {
        var target = k2 === undefined ? k : k2;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        // Re-wrap in a getter unless the source is a genuine ES module
        // exposing an accessor that is neither writable nor configurable.
        var needsWrapper = !desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable);
        if (needsWrapper) {
            desc = { enumerable: true, get: function () { return m[k]; } };
        }
        Object.defineProperty(o, target, desc);
    }
    : function (o, m, k, k2) {
        var target = k2 === undefined ? k : k2;
        o[target] = m[k];
    });
/**
 * TypeScript interop helper: attaches `v` as the `default` export of the
 * namespace object `o` (non-writable when defineProperty is available,
 * plain assignment otherwise).
 */
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (o, v) {
        Object.defineProperty(o, "default", { enumerable: true, value: v });
    }
    : function (o, v) {
        o["default"] = v;
    });
// TypeScript interop helper: converts a CommonJS module into an ES-style
// namespace object. Genuine ES modules (flagged by `__esModule`) are
// returned as-is; otherwise every own key except "default" is re-bound on
// a fresh object and the module itself becomes the `default` binding.
var __importStar = (this && this.__importStar) || (function () {
// Picks a key-enumeration strategy on first use, then memoizes it by
// overwriting `ownKeys` with the selected implementation.
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
// Skip "default": it is set explicitly below via __setModuleDefault.
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
/**
 * TypeScript interop helper: re-exports every enumerable key of `m` on
 * `exports`, skipping "default" and any key `exports` already owns.
 */
var __exportStar = (this && this.__exportStar) || function (m, exports) {
    for (var p in m) {
        if (p === "default") continue;
        if (Object.prototype.hasOwnProperty.call(exports, p)) continue;
        __createBinding(exports, m, p);
    }
};
/**
 * TypeScript interop helper: wraps a CommonJS export in `{ default: ... }`
 * unless it is already an ES module carrying its own default binding.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// CommonJS/ESM interop marker plus public exports. `main` and
// `syncPostgreSQL` are placeholders here and assigned at the bottom of
// the file; the helper functions are exported by direct assignment
// (function declarations are hoisted, so this is safe).
Object.defineProperty(exports, "__esModule", { value: true });
exports.syncPostgreSQL = exports.main = void 0;
exports.getProjectionConfiguration = getProjectionConfiguration;
exports.getDestinationDatastore = getDestinationDatastore;
exports.initDestinationModel = initDestinationModel;
exports.getTriggers = getTriggers;
const node_assert_1 = require("node:assert");
const get_1 = __importDefault(require("lodash/get"));
const cloneDeep_1 = __importDefault(require("lodash/cloneDeep"));
const ajv_1 = __importDefault(require("ajv"));
const telemetry = __importStar(require("@getanthill/telemetry"));
const Aggregator_1 = __importDefault(require("../aggregator/Aggregator"));
const things_json_1 = __importDefault(require("../../templates/examples/things.json"));
const pg_1 = __importDefault(require("../../services/pg"));
const services_1 = require("../../services");
__exportStar(require("./utils"), exports);
const schemas_json_1 = __importDefault(require("../schemas.json"));
// Shared Ajv instance compiled against the bundled schema collection.
// `useDefaults` is off so validation never injects defaults into payloads;
// `coerceTypes` is on so string inputs (e.g. query-string values) coerce to
// the schema's scalar types; `strict` is relaxed to tolerate non-standard
// schema keywords.
const validator = new ajv_1.default({
schemas: schemas_json_1.default,
useDefaults: false,
coerceTypes: true,
strict: false,
});
/**
 * Validates `data` against the schema registered under `schemaPath`.
 * Returns undefined on success; on failure throws an Error whose `details`
 * array carries the schema path, the offending payload, and the raw Ajv
 * error objects.
 */
function validate(schemaPath, data) {
    if (validator.validate(schemaPath, data)) {
        return;
    }
    const err = new Error('Validation failed');
    // @ts-ignore — `details` is an ad-hoc property consumed by callers/logs.
    err.details = [
        { path: schemaPath },
        { data },
        // @ts-ignore — validator.errors is non-null after a failed validate.
        ...validator.errors,
    ];
    throw err;
}
/**
 * Resolves the projection configuration from the runner URL.
 *
 * Resolution order:
 *   1. `configuration_path` — loaded from disk via require();
 *   2. `projection_id` alone — fetched by id from the source datastore;
 *   3. `projection_field` + `projection_id` — first match on that field.
 * An optional `path` query parameter then extracts a nested
 * sub-configuration from the loaded document.
 *
 * @throws when no configuration is found or it fails schema validation.
 */
async function getProjectionConfiguration(url, datastores) {
    const names = Array.from(datastores.keys());
    const params = url.searchParams;
    const projectionSource = params.get('source') ?? names[0];
    const projectionEntityType = params.get('entity_type') ?? 'projections';
    const projectionConfigurationPath = params.get('path') ?? null;
    const configurationPath = params.get('configuration_path') ?? null;
    const projectionField = params.get('projection_field');
    const projectionId = params.get('projection_id');
    let configuration = null;
    if (configurationPath) {
        // NOTE(review): dynamic require of a caller-supplied path — assumes
        // the runner URL comes from a trusted operator, not end users.
        configuration = require(configurationPath);
    }
    else if (!projectionField && projectionId) {
        const res = await datastores
            .get(projectionSource)
            ?.get(projectionEntityType, projectionId);
        configuration = res.data;
    }
    else if (projectionField && projectionId) {
        const res = await datastores
            .get(projectionSource)
            ?.find(projectionEntityType, { [projectionField]: projectionId });
        configuration = res.data[0];
    }
    if (projectionConfigurationPath) {
        configuration = (0, get_1.default)(configuration, projectionConfigurationPath, null);
    }
    (0, node_assert_1.ok)(configuration, 'Projection configuration not found');
    validate('/schemas/datastore/projection', configuration);
    return configuration;
}
/**
 * Picks the destination datastore for a projection. Priority order:
 * the `destination` query parameter, then the configuration's
 * `destination` field, then the last registered datastore.
 */
function getDestinationDatastore(url, datastores, configuration) {
    const names = Array.from(datastores.keys());
    const fallback = names[names.length - 1];
    const name = url.searchParams.get('destination') || configuration?.destination || fallback;
    return datastores.get(name);
}
/**
 * Creates (or, on a 409 conflict, updates) the destination model backing a
 * projection, then ensures its indexes exist. The model config is the
 * bundled `things` template merged with `configuration.model`, with the
 * name forced to the projection's name.
 *
 * @throws on any createModel error other than HTTP 409, or when the
 *   configuration lacks a `name` or a `correlation_field`.
 */
async function initDestinationModel(url, datastores, configuration) {
    const destination = getDestinationDatastore(url, datastores, configuration);
    (0, node_assert_1.ok)(!!configuration.name, 'Missing Datastores configuration name');
    const projectionModelConfig = {
        ...things_json_1.default,
        ...configuration.model,
        name: configuration.name,
    };
    (0, node_assert_1.ok)(!!projectionModelConfig.correlation_field, 'Missing projection correlation_field');
    try {
        await destination.createModel(projectionModelConfig);
    }
    catch (err) {
        // 409 means the model already exists: fall back to an update.
        if (err.response?.status === 409) {
            await destination.updateModel(projectionModelConfig);
        }
        else {
            throw err;
        }
    }
    await destination.createModelIndexes(projectionModelConfig);
}
/**
 * Builds the list of source triggers driving a projection.
 *
 * Uses `configuration.triggers` when present; otherwise synthesizes a single
 * trigger from `configuration.from` + `configuration.trigger`. Each trigger's
 * query is pre-processed through the aggregator's map step.
 *
 * When `is_incremental=true` is set on the URL, the query is narrowed to
 * documents strictly newer than the latest already-projected document
 * (compared on `incremental_field`, default "updated_at").
 *
 * @throws when a resulting trigger fails schema validation.
 */
async function getTriggers(url, datastores, aggregator, configuration) {
var _a, _b, _c, _d;
const destination = getDestinationDatastore(url, datastores, configuration);
// Trigger configuration: explicit list, or one trigger built from from/trigger.
const triggers = ((_a = configuration.triggers) !== null && _a !== void 0 ? _a : [
{
...configuration.from,
...configuration.trigger,
},
]).map((trigger) => ({
...trigger,
query: aggregator.applyMap(trigger.query, trigger.map, {}),
}));
// Incremental feature: resume from the most recent destination document.
const isIncremental = ((_b = url.searchParams.get('is_incremental')) !== null && _b !== void 0 ? _b : 'false') === 'true';
if (isIncremental === true) {
const incrementalField = (_c = url.searchParams.get('incremental_field')) !== null && _c !== void 0 ? _c : 'updated_at';
const incrementalSourceField = (_d = url.searchParams.get('incremental_source_field')) !== null && _d !== void 0 ? _d : incrementalField;
// Fetch only the single most-recent document to find the resume point.
const { data: [lastDocument], } = await destination.find(configuration.name, {
_sort: { [incrementalField]: -1 },
_fields: {
[incrementalField]: 1,
},
}, 0, 1);
if (lastDocument) {
// Narrow every trigger's query in place to strictly-newer documents.
triggers.forEach((trigger) => {
trigger.query[incrementalSourceField] = {
'date($gt)': lastDocument === null || lastDocument === void 0 ? void 0 : lastDocument[incrementalField],
};
});
}
}
triggers.forEach((trigger) => validate('/schemas/datastore/runner/trigger', trigger));
return triggers;
}
/**
 * Builds a projection runner for the given runner URL.
 *
 * Reads the `heartbeat`, `progress` and `init` query parameters, resolves
 * the projection configuration and its triggers, then returns
 * `{ triggers, start, stop, handler }`, where `handler(entity, metadata)`
 * projects a single source entity through the configured aggregation
 * pipeline while tracking processed/skipped/failed counters.
 */
const main = async function (url,
/* istanbul ignore next */
services = (0, services_1.build)()) {
var _a, _b, _c, _d;
const datastores = services.datastores;
// `heartbeat` defaults to 'true'; any value other than 'false' pings all datastores.
const heartbeat = (_a = url.searchParams.get('heartbeat')) !== null && _a !== void 0 ? _a : 'true';
// Progress-log cadence (every N handled entities), default 1000.
const progress = Number.parseInt((_b = url.searchParams.get('progress')) !== null && _b !== void 0 ? _b : '1000', 10);
const withInit = url.searchParams.get('init') === 'true';
// Checking the Datastore configurations.
await Promise.all(Array.from(services.datastores.values()).map((ds) => heartbeat !== 'false' && ds.heartbeat()));
const aggregator = new Aggregator_1.default(services.datastores);
const configuration = await getProjectionConfiguration(url, datastores);
if (withInit === true) {
await initDestinationModel(url, datastores, configuration);
}
const triggers = await getTriggers(url, datastores, aggregator, configuration);
// Count the matching source documents per trigger to derive overall progress.
const counts = [];
for (const trigger of triggers) {
const count = (_d = (await ((_c = datastores
.get(trigger.datastore)) === null || _c === void 0 ? void 0 : _c.count(trigger.model, trigger.query, trigger.source)))) !== null && _d !== void 0 ? _d : 0;
counts.push(count);
}
// Stats shared by closure with the handler below.
const stats = {
count: 0,
total: counts.reduce((s, c) => s + c, 0),
processed: 0,
failed: 0,
skipped: 0,
};
telemetry.logger.debug('[projections] Initialization', {
configuration,
});
telemetry.logger.info('[projections] Initialization', {
datastores_count: datastores.size,
name: configuration.name,
datastores_names: Array.from(datastores.keys()),
from: configuration.from,
triggers,
});
return {
triggers,
start: async () => ({
datastores: Object.fromEntries(datastores),
}),
stop: async () => {
return;
},
// Invoked once per source entity.
handler: async (entity, metadata) => {
var _a;
stats.count += 1;
telemetry.logger.debug('[projections] Projecting entity...', {
entity,
name: configuration.name,
metadata,
});
// Fresh aggregator per entity so buffered logs/state do not leak across entities.
const _aggregator = new Aggregator_1.default(datastores, configuration.aggregator);
let data = null;
try {
data = await _aggregator.aggregate(configuration.pipeline, {
...configuration.state,
metadata,
entity,
});
stats.processed += 1;
// Replay the aggregator's buffered logs at their original levels.
_aggregator.logs.forEach((l) =>
/* @ts-ignore */
telemetry.logger[l.level](l.msg, l.context));
telemetry.logger.debug('[projections] Entity successfully projected', {
entity,
name: configuration.name,
data,
});
}
catch (err) {
telemetry.logger.debug('[projections] Projection error - logs', {
logs: _aggregator.logs,
});
// Validation failures and missing entities are expected: count as skipped.
if (err.message === Aggregator_1.default.ERROR_VALIDATE_STEP_FAILED.message) {
stats.skipped += 1;
telemetry.logger.debug('[projections] Skipped entity on validation failed step', {
entity,
name: configuration.name,
err,
});
}
else if (err === Aggregator_1.default.ERROR_ENTITY_NOT_FOUND) {
stats.skipped += 1;
telemetry.logger.debug('[projections] Skipped entity on fetch step as entity not found', {
entity,
name: configuration.name,
err,
});
}
else {
// Unexpected error: count as failed and propagate to the caller.
stats.failed += 1;
telemetry.logger.error('[projections] Projection error', {
entity,
name: configuration.name,
details: (_a = err === null || err === void 0 ? void 0 : err.response) === null || _a === void 0 ? void 0 : _a.data,
err,
});
throw err;
}
}
// Periodic progress log every `progress` entities.
stats.count % progress === 0 &&
telemetry.logger.info('[projections] Stats', {
name: configuration.name,
...stats,
progress: stats.count / stats.total,
});
return { stats, data };
},
};
};
exports.main = main;
/**
 * Builds a runner that mirrors a datastore's models into PostgreSQL.
 *
 * Query parameters: `datastore` (SDK name, default "datastore"), `source`
 * (default "events"), `query` (JSON filter), `skip`/`only` (comma-separated
 * model deny/allow lists), `with_encrypted_data`, `ns` (PostgreSQL
 * namespace override), and `init`/`clean` ('1' to bootstrap/reset the
 * database schema on start).
 *
 * Returns `{ triggers, start, stop, handler }`, where
 * `handler(event, query)` inserts one event into the matching table.
 */
const syncPostgreSQL = async function (url, services = (0, services_1.build)()) {
var _a, _b, _c, _d, _e, _f;
const datastoreName = (_a = url.searchParams.get('datastore')) !== null && _a !== void 0 ? _a : 'datastore';
const source = ((_b = url.searchParams.get('source')) !== null && _b !== void 0 ? _b : 'events');
const query = JSON.parse((_c = url.searchParams.get('query')) !== null && _c !== void 0 ? _c : '{}');
// `skip`/`only`: comma-separated model names, trimmed, empties dropped.
const skip = ((_d = url.searchParams.get('skip')) !== null && _d !== void 0 ? _d : '')
.split(',')
.map((v) => v.trim())
.filter((v) => !!v);
const only = ((_e = url.searchParams.get('only')) !== null && _e !== void 0 ? _e : '')
.split(',')
.map((v) => v.trim())
.filter((v) => !!v);
const withEncryptedData = url.searchParams.get('with_encrypted_data') === 'true';
// Overwriting the pg namespace:
const pgConfig = (0, cloneDeep_1.default)({
...services.config.pg,
namespace: (_f = url.searchParams.get('ns')) !== null && _f !== void 0 ? _f : services.config.pg.namespace,
});
const pg = new pg_1.default(pgConfig);
const sdk = services.datastores.get(datastoreName);
if (!sdk) {
throw new Error('Unknown datastore');
}
// @ts-ignore
services.pg = pg;
const { data: models } = await sdk.getModels();
// One trigger per model, after applying the only/skip filters.
const triggers = Object.keys(models)
.filter((m) => {
if (only.length > 0 && !only.includes(m)) {
return false;
}
if (skip.length > 0 && skip.includes(m)) {
return false;
}
return true;
})
.map((model) => ({
datastore: datastoreName,
model,
source,
query,
}));
// Per-model document counts, used for the progress ratio below.
const counts = [];
for (const trigger of triggers) {
const count = await sdk.count(trigger.model, trigger.query, trigger.source);
counts.push(count);
}
services.telemetry.logger.info('[projections#syncPostgreSQL] Counts', triggers.reduce((s, c, i) => {
s[c.model] = counts[i];
return s;
}, {}));
const stats = {
count: 0,
total: counts.reduce((s, c) => s + c, 0),
success: 0,
error: 0,
};
return {
triggers,
start: async () => {
services.telemetry.logger.info('[projections#syncPostgreSQL] Starting', {
datastore: datastoreName,
source,
query,
});
services.telemetry.logger.debug('[projections#syncPostgreSQL] Connecting to PostgreSQL');
await pg.connect();
if (url.searchParams.get('init') === '1') {
services.telemetry.logger.debug('[projections#syncPostgreSQL] Database creation if not exists');
await pg_1.default.init(services.config.pg);
services.telemetry.logger.debug('[projections#syncPostgreSQL] Database schema initialization');
await pg.queryAll(pg_1.default.getSqlSchemaForModels(Object.values(models), url.searchParams.get('clean') === '1', pgConfig.namespace));
}
return services;
},
stop: async () => {
services.telemetry.logger.info('[projections#syncPostgreSQL] Ending', stats);
await pg.disconnect();
},
// Invoked once per event; insert failures are counted and logged, not rethrown.
handler: async (event, query) => {
stats.count % 1000 === 0 &&
services.telemetry.logger.info('[projections#syncPostgreSQL] Syncing...', {
...stats,
progress: stats.count / stats.total,
});
try {
stats.count += 1;
services.telemetry.logger.debug('[projections#syncPostgreSQL] New event', {
event,
query,
stats,
});
await pg.insert(models[query.model], query.source, event, {
with_encrypted_data: withEncryptedData,
});
stats.success += 1;
}
catch (err) {
stats.error += 1;
services.telemetry.logger.error('[projections#syncPostgreSQL] Error', err);
}
},
};
};
exports.syncPostgreSQL = syncPostgreSQL;
//# sourceMappingURL=index.js.map