UNPKG

@resin/pinejs

Version:

Pine.js is a sophisticated rules-driven API engine that enables you to define rules in a structured subset of English. Those rules are used in order for Pine.js to generate a database schema and the associated [OData](http://www.odata.org/) API. This makes it possible to define your data model and business rules once, in one place, and have both the database and the API generated and kept consistent from that single specification.

1,181 lines • 44.7 kB
"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.setup = exports.addPureHook = exports.addSideEffectHook = exports.executeStandardModels = exports.handleODataRequest = exports.getAffectedIds = exports.getAbstractSqlModel = exports.runURI = exports.api = exports.PinejsClient = exports.runRule = exports.getID = exports.deleteModel = exports.executeModels = exports.executeModel = exports.generateModels = exports.generateAbstractSqlModel = exports.generateLfModel = exports.validateModel = exports.resolveNavigationResource = exports.resolveSynonym = exports.sbvrTypes = exports.db = void 0; const Bluebird = require("bluebird"); const _ = require("lodash"); Bluebird.config({ cancellation: true, }); const cached_compile_1 = require("./cached-compile"); const AbstractSQLCompiler = require("@resin/abstract-sql-compiler"); const package_json_1 = require("@resin/abstract-sql-compiler/package.json"); const LF2AbstractSQL = require("@balena/lf-to-abstract-sql"); const odata_to_abstract_sql_1 = require("@resin/odata-to-abstract-sql"); const sbvrTypes = require("@resin/sbvr-types"); exports.sbvrTypes = sbvrTypes; const deepFreeze = require("deep-freeze"); const pinejs_client_core_1 = require("pinejs-client-core"); const extended_sbvr_parser_1 = require("../extended-sbvr-parser/extended-sbvr-parser"); const migrator = require("../migrator/migrator"); const odata_metadata_generator_1 = require("../odata-metadata/odata-metadata-generator"); const devModel = require('./dev.sbvr'); const 
permissions = require("./permissions"); __exportStar(require("./permissions"), exports); const errors_1 = require("./errors"); const uriParser = require("./uri-parser"); __exportStar(require("./errors"), exports); const hooks_1 = require("./hooks"); const memoize = require("memoizee"); const memoizeWeak = require("memoizee/weak"); const controlFlow = require("./control-flow"); const { DEBUG } = process.env; exports.db = undefined; const package_json_2 = require("@balena/lf-to-abstract-sql/package.json"); const package_json_3 = require("@resin/sbvr-types/package.json"); const abstract_sql_1 = require("./abstract-sql"); var abstract_sql_2 = require("./abstract-sql"); Object.defineProperty(exports, "resolveOdataBind", { enumerable: true, get: function () { return abstract_sql_2.resolveOdataBind; } }); const odataResponse = require("./odata-response"); const LF2AbstractSQLTranslator = LF2AbstractSQL.createTranslator(sbvrTypes); const LF2AbstractSQLTranslatorVersion = `${package_json_2.version}+${package_json_3.version}`; const models = {}; const hookNames = [ 'PREPARSE', 'POSTPARSE', 'PRERUN', 'POSTRUN', 'PRERESPOND', 'POSTRUN-ERROR', ]; const isValidHook = (x) => hookNames.includes(x); const apiHooks = { all: {}, GET: {}, PUT: {}, POST: {}, PATCH: {}, MERGE: {}, DELETE: {}, OPTIONS: {}, }; apiHooks.MERGE = apiHooks.PATCH; const memoizedResolvedSynonym = memoizeWeak((abstractSqlModel, resourceName) => { const sqlName = odata_to_abstract_sql_1.odataNameToSqlName(resourceName); return _(sqlName) .split('-') .map((namePart) => { const synonym = abstractSqlModel.synonyms[namePart]; if (synonym != null) { return synonym; } return namePart; }) .join('-'); }, { primitive: true }); exports.resolveSynonym = (request) => { const abstractSqlModel = exports.getAbstractSqlModel(request); return memoizedResolvedSynonym(abstractSqlModel, request.resourceName); }; const memoizedResolveNavigationResource = memoizeWeak((abstractSqlModel, resourceName, navigationName) => { const 
navigation = _(odata_to_abstract_sql_1.odataNameToSqlName(navigationName)) .split('-') .flatMap((namePart) => memoizedResolvedSynonym(abstractSqlModel, namePart).split('-')) .concat('$') .value(); const resolvedResourceName = memoizedResolvedSynonym(abstractSqlModel, resourceName); const mapping = _.get(abstractSqlModel.relationships[resolvedResourceName], navigation); if (mapping == null) { throw new Error(`Cannot navigate from '${resourceName}' to '${navigationName}'`); } if (mapping.length < 2) { throw new Error(`'${resourceName}' to '${navigationName}' is a field not a navigation`); } return odata_to_abstract_sql_1.sqlNameToODataName(abstractSqlModel.tables[mapping[1][0]].name); }, { primitive: true }); exports.resolveNavigationResource = (request, navigationName) => { const abstractSqlModel = exports.getAbstractSqlModel(request); return memoizedResolveNavigationResource(abstractSqlModel, request.resourceName, navigationName); }; const prettifyConstraintError = (err, request) => { if (err instanceof exports.db.ConstraintError) { let matches = null; if (err instanceof exports.db.UniqueConstraintError) { switch (exports.db.engine) { case 'mysql': matches = /ER_DUP_ENTRY: Duplicate entry '.*?[^\\]' for key '(.*?[^\\])'/.exec(err.message); break; case 'postgres': const resourceName = exports.resolveSynonym(request); const abstractSqlModel = exports.getAbstractSqlModel(request); matches = new RegExp('"' + abstractSqlModel.tables[resourceName].name + '_(.*?)_key"').exec(err.message); break; } if (matches == null) { throw new exports.db.UniqueConstraintError('Unique key constraint violated'); } const columns = matches[1].split('_'); throw new exports.db.UniqueConstraintError('"' + columns.map(odata_to_abstract_sql_1.sqlNameToODataName).join('" and "') + '" must be unique.'); } if (err instanceof exports.db.ForeignKeyConstraintError) { switch (exports.db.engine) { case 'mysql': matches = /ER_ROW_IS_REFERENCED_: Cannot delete or update a parent row: a foreign key 
constraint fails \(".*?"\.(".*?").*/.exec(err.message); break; case 'postgres': const resourceName = exports.resolveSynonym(request); const abstractSqlModel = exports.getAbstractSqlModel(request); const tableName = abstractSqlModel.tables[resourceName].name; matches = new RegExp('"' + tableName + '" violates foreign key constraint ".*?" on table "(.*?)"').exec(err.message); if (matches == null) { matches = new RegExp('"' + tableName + '" violates foreign key constraint "' + tableName + '_(.*?)_fkey"').exec(err.message); } break; } if (matches == null) { throw new exports.db.ForeignKeyConstraintError('Foreign key constraint violated'); } throw new exports.db.ForeignKeyConstraintError('Data is referenced by ' + odata_to_abstract_sql_1.sqlNameToODataName(matches[1]) + '.'); } throw err; } }; exports.validateModel = (tx, modelName, request) => { return Bluebird.map(models[modelName].sql.rules, async (rule) => { if (!abstract_sql_1.isRuleAffected(rule, request)) { return; } const values = await abstract_sql_1.getAndCheckBindValues({ vocabulary: modelName, odataBinds: [], values: {}, engine: exports.db.engine, }, rule.bindings); const result = await tx.executeSql(rule.sql, values); const v = result.rows[0].result; if (v === false || v === 0 || v === '0') { throw new errors_1.SbvrValidationError(rule.structuredEnglish); } }).return(); }; exports.generateLfModel = (seModel) => cached_compile_1.cachedCompile('lfModel', extended_sbvr_parser_1.ExtendedSBVRParser.version, seModel, () => extended_sbvr_parser_1.ExtendedSBVRParser.matchAll(seModel, 'Process')); exports.generateAbstractSqlModel = (lfModel) => cached_compile_1.cachedCompile('abstractSqlModel', LF2AbstractSQLTranslatorVersion, lfModel, () => LF2AbstractSQLTranslator(lfModel, 'Process')); exports.generateModels = (model, targetDatabaseEngine) => { const { apiRoot: vocab, modelText: se } = model; let { abstractSql: maybeAbstractSql } = model; let lf; if (se) { try { lf = exports.generateLfModel(se); } catch (e) { 
console.error(`Error parsing model '${vocab}':`, e); throw new Error(`Error parsing model '${vocab}': ` + e); } try { maybeAbstractSql = exports.generateAbstractSqlModel(lf); } catch (e) { console.error(`Error translating model '${vocab}':`, e); throw new Error(`Error translating model '${vocab}': ` + e); } } const abstractSql = maybeAbstractSql; const odataMetadata = cached_compile_1.cachedCompile('metadata', odata_metadata_generator_1.generateODataMetadata.version, { vocab, abstractSqlModel: abstractSql }, () => odata_metadata_generator_1.generateODataMetadata(vocab, abstractSql)); let sql; try { sql = cached_compile_1.cachedCompile('sqlModel', package_json_1.version + '+' + targetDatabaseEngine, abstractSql, () => AbstractSQLCompiler[targetDatabaseEngine].compileSchema(abstractSql)); } catch (e) { console.error(`Error compiling model '${vocab}':`, e); throw new Error(`Error compiling model '${vocab}': ` + e); } return { vocab, se, lf, abstractSql, sql, odataMetadata }; }; exports.executeModel = (tx, model) => exports.executeModels(tx, [model]); exports.executeModels = Bluebird.method(async (tx, execModels) => { try { await Bluebird.map(execModels, async (model) => { var _a, _b, _c, _d; const { apiRoot } = model; await migrator.run(tx, model); const compiledModel = exports.generateModels(model, exports.db.engine); for (const createStatement of compiledModel.sql.createSchema) { const promise = tx.executeSql(createStatement); if (exports.db.engine === 'websql') { promise.catch((err) => { console.warn("Ignoring errors in the create table statements for websql as it doesn't support CREATE IF NOT EXISTS", err); }); } await promise; } await migrator.postRun(tx, model); odataResponse.prepareModel(compiledModel.abstractSql); deepFreeze(compiledModel.abstractSql); models[apiRoot] = compiledModel; await exports.validateModel(tx, apiRoot); exports.api[apiRoot] = new PinejsClient('/' + apiRoot + '/'); exports.api[apiRoot].logger = _.cloneDeep(console); if (model.logging != 
null) { const defaultSetting = (_b = (_a = model.logging) === null || _a === void 0 ? void 0 : _a.default) !== null && _b !== void 0 ? _b : true; for (const k of Object.keys(model.logging)) { const key = k; if (typeof exports.api[apiRoot].logger[key] === 'function' && !((_d = (_c = model.logging) === null || _c === void 0 ? void 0 : _c[key]) !== null && _d !== void 0 ? _d : defaultSetting)) { exports.api[apiRoot].logger[key] = _.noop; } } } return compiledModel; }).map((model) => { const updateModel = async (modelType) => { var _a; if (model[modelType] == null) { return exports.api.dev.delete({ resource: 'model', passthrough: { tx, req: permissions.root, }, options: { $filter: { is_of__vocabulary: model.vocab, model_type: modelType, }, }, }); } const result = (await exports.api.dev.get({ resource: 'model', passthrough: { tx, req: permissions.rootRead, }, options: { $select: 'id', $filter: { is_of__vocabulary: model.vocab, model_type: modelType, }, }, })); let method = 'POST'; let uri = '/dev/model'; const body = { is_of__vocabulary: model.vocab, model_value: model[modelType], model_type: modelType, }; const id = (_a = result === null || result === void 0 ? void 0 : result[0]) === null || _a === void 0 ? 
void 0 : _a.id; if (id != null) { uri += '(' + id + ')'; method = 'PATCH'; body.id = id; } else { uri += '?returnResource=false'; } return exports.runURI(method, uri, body, tx, permissions.root); }; return Bluebird.map(['se', 'lf', 'abstractSql', 'sql', 'odataMetadata'], updateModel); }); } catch (err) { await Bluebird.map(execModels, ({ apiRoot }) => cleanupModel(apiRoot)); throw err; } }); const cleanupModel = (vocab) => { delete models[vocab]; delete exports.api[vocab]; }; const mergeHooks = (a, b) => { return _.mergeWith({}, a, b, (x, y) => { if (Array.isArray(x)) { return x.concat(y); } }); }; const getResourceHooks = (vocabHooks, resourceName) => { if (vocabHooks == null) { return {}; } if (resourceName == null) { return vocabHooks['all']; } return mergeHooks(vocabHooks[resourceName], vocabHooks['all']); }; const getVocabHooks = (methodHooks, vocabulary, resourceName) => { if (methodHooks == null) { return {}; } return mergeHooks(getResourceHooks(methodHooks[vocabulary], resourceName), getResourceHooks(methodHooks['all'], resourceName)); }; const getMethodHooks = memoize((method, vocabulary, resourceName) => mergeHooks(getVocabHooks(apiHooks[method], vocabulary, resourceName), getVocabHooks(apiHooks['all'], vocabulary, resourceName)), { primitive: true }); const getHooks = (request) => { let { resourceName } = request; if (resourceName != null) { resourceName = exports.resolveSynonym(request); } return hooks_1.instantiateHooks(getMethodHooks(request.method, request.vocabulary, resourceName)); }; getHooks.clear = () => getMethodHooks.clear(); const runHooks = Bluebird.method(async (hookName, hooksList, args) => { if (hooksList == null) { return; } const hooks = hooksList[hookName]; if (hooks == null || hooks.length === 0) { return; } const { request, req, tx } = args; if (request != null) { const { vocabulary } = request; Object.defineProperty(args, 'api', { get: _.once(() => exports.api[vocabulary].clone({ passthrough: { req, tx }, })), }); } await 
Bluebird.map(hooks, async (hook) => { await hook.run(args); }); }); exports.deleteModel = Bluebird.method(async (vocabulary) => { await exports.db.transaction((tx) => { const dropStatements = models[vocabulary].sql.dropSchema.map((dropStatement) => tx.executeSql(dropStatement)); return Promise.all(dropStatements.concat([ exports.api.dev.delete({ resource: 'model', passthrough: { tx, req: permissions.root, }, options: { $filter: { is_of__vocabulary: vocabulary, }, }, }), ])); }); await cleanupModel(vocabulary); }); const isWhereNode = (x) => x[0] === 'Where'; const isEqualsNode = (x) => x[0] === 'Equals'; exports.getID = (vocab, request) => { if (request.abstractSqlQuery == null) { throw new Error('Can only get the id if an abstractSqlQuery is provided'); } const { idField } = models[vocab].abstractSql.tables[request.resourceName]; for (const whereClause of request.abstractSqlQuery) { if (isWhereNode(whereClause)) { for (const comparison of whereClause.slice(1)) { if (isEqualsNode(comparison)) { if (comparison[1][2] === idField) { return comparison[2][1]; } if (comparison[2][2] === idField) { return comparison[1][1]; } } } } } return 0; }; exports.runRule = (() => { const LF2AbstractSQLPrepHack = LF2AbstractSQL.LF2AbstractSQLPrep._extend({ CardinalityOptimisation() { this._pred(false); }, }); const translator = LF2AbstractSQL.LF2AbstractSQL.createInstance(); translator.addTypes(sbvrTypes); return Bluebird.method(async (vocab, rule) => { const seModel = models[vocab].se; const { logger } = exports.api[vocab]; let lfModel; let slfModel; let abstractSqlModel; try { lfModel = extended_sbvr_parser_1.ExtendedSBVRParser.matchAll(seModel + '\nRule: ' + rule, 'Process'); } catch (e) { logger.error('Error parsing rule', rule, e); throw new Error(`Error parsing rule'${rule}': ${e}`); } const ruleLF = lfModel.pop(); try { slfModel = LF2AbstractSQL.LF2AbstractSQLPrep.match(lfModel, 'Process'); slfModel.push(ruleLF); slfModel = LF2AbstractSQLPrepHack.match(slfModel, 'Process'); 
translator.reset(); abstractSqlModel = translator.match(slfModel, 'Process'); } catch (e) { logger.error('Error compiling rule', rule, e); throw new Error(`Error compiling rule '${rule}': ${e}`); } const formulationType = ruleLF[1][0]; let resourceName; if (ruleLF[1][1][0] === 'LogicalNegation') { resourceName = ruleLF[1][1][1][1][2][1]; } else { resourceName = ruleLF[1][1][1][2][1]; } let fetchingViolators = false; const ruleAbs = _.last(abstractSqlModel.rules); if (ruleAbs == null) { throw new Error('Unable to generate rule'); } const ruleBody = ruleAbs.find((node) => node[0] === 'Body'); if (ruleBody[1][0] === 'Not' && ruleBody[1][1][0] === 'Exists' && ruleBody[1][1][1][0] === 'SelectQuery') { ruleBody[1] = ruleBody[1][1][1]; fetchingViolators = true; } else if (ruleBody[1][0] === 'Exists' && ruleBody[1][1][0] === 'SelectQuery') { ruleBody[1] = ruleBody[1][1]; } else { throw new Error('Unsupported rule formulation'); } const wantNonViolators = formulationType in ['PossibilityFormulation', 'PermissibilityFormulation']; if (wantNonViolators === fetchingViolators) { ruleBody[1] = ruleBody[1].map((queryPart) => { if (queryPart[0] !== 'Where') { return queryPart; } if (queryPart.length > 2) { throw new Error('Unsupported rule formulation'); } return ['Where', ['Not', queryPart[1]]]; }); } ruleBody[1] = ruleBody[1].map((queryPart) => { if (queryPart[0] !== 'Select') { return queryPart; } return ['Select', '*']; }); const compiledRule = AbstractSQLCompiler[exports.db.engine].compileRule(ruleBody); if (Array.isArray(compiledRule)) { throw new Error('Unexpected query generated'); } const values = await abstract_sql_1.getAndCheckBindValues({ vocabulary: vocab, odataBinds: [], values: {}, engine: exports.db.engine, }, compiledRule.bindings); const result = await exports.db.executeSql(compiledRule.query, values); const table = models[vocab].abstractSql.tables[resourceName]; const odataIdField = odata_to_abstract_sql_1.sqlNameToODataName(table.idField); let ids = 
result.rows.map((row) => row[table.idField]); ids = _.uniq(ids); ids = ids.map((id) => odataIdField + ' eq ' + id); let filter; if (ids.length > 0) { filter = ids.join(' or '); } else { filter = '0 eq 1'; } const odataResult = (await exports.runURI('GET', '/' + vocab + '/' + odata_to_abstract_sql_1.sqlNameToODataName(table.resourceName) + '?$filter=' + filter, undefined, undefined, permissions.rootRead)); odataResult.__formulationType = formulationType; odataResult.__resourceName = resourceName; return odataResult; }); })(); class PinejsClient extends pinejs_client_core_1.PinejsClientCoreFactory(Bluebird) { _request({ method, url, body, tx, req, custom, }) { return exports.runURI(method, url, body, tx, req, custom); } } exports.PinejsClient = PinejsClient; exports.api = {}; exports.runURI = (method, uri, body = {}, tx, req, custom) => { let user; let apiKey; if (req != null && _.isObject(req)) { user = req.user; apiKey = req.apiKey; } else { if (req != null) { console.warn('Non-object req passed to runURI?', req, new Error().stack); } user = { id: 0, actor: 0, permissions: [], }; } _.forEach(body, (v, k) => { if (v === undefined) { delete body[k]; } }); const emulatedReq = { on: _.noop, custom, user, apiKey, method, url: uri, body, params: {}, query: {}, tx, }; return new Bluebird((resolve, reject) => { const res = { __internalPinejs: true, on: _.noop, statusCode: 200, status(statusCode) { this.statusCode = statusCode; return this; }, sendStatus: (statusCode) => { if (statusCode >= 400) { const ErrorClass = errors_1.statusCodeToError[statusCode]; if (ErrorClass != null) { reject(new ErrorClass()); } else { reject(new errors_1.HttpError(statusCode)); } } else { resolve(); } }, send(statusCode) { if (statusCode == null) { statusCode = this.statusCode; } this.sendStatus(statusCode); }, json(data, statusCode) { if (_.isError(data)) { reject(data); return; } if (statusCode == null) { statusCode = this.statusCode; } if (statusCode >= 400) { const ErrorClass = 
errors_1.statusCodeToError[statusCode]; if (ErrorClass != null) { reject(new ErrorClass(data)); } else { reject(new errors_1.HttpError(statusCode, data)); } } else { resolve(data); } }, set: _.noop, type: _.noop, }; const next = (route) => { console.warn('Next called on a runURI?!', method, uri, route); res.sendStatus(500); }; exports.handleODataRequest(emulatedReq, res, next); }); }; exports.getAbstractSqlModel = (request) => { if (request.abstractSqlModel == null) { request.abstractSqlModel = models[request.vocabulary].abstractSql; } return request.abstractSqlModel; }; exports.getAffectedIds = Bluebird.method(async ({ req, request, tx, }) => { if (request.method === 'GET') { throw new Error('Cannot call `getAffectedIds` with a GET request'); } request = await uriParser.parseOData({ method: request.method, url: `/${request.vocabulary}${request.url}`, }); request.engine = exports.db.engine; const abstractSqlModel = exports.getAbstractSqlModel(request); const resourceName = exports.resolveSynonym(request); const resourceTable = abstractSqlModel.tables[resourceName]; if (resourceTable == null) { throw new Error('Unknown resource: ' + request.resourceName); } const { idField } = resourceTable; if (request.odataQuery.options == null) { request.odataQuery.options = {}; } request.odataQuery.options.$select = { properties: [{ name: idField }], }; delete request.odataQuery.options.$expand; await permissions.addPermissions(req, request); request.method = 'GET'; request = uriParser.translateUri(request); request = abstract_sql_1.compileRequest(request); let result; if (tx != null) { result = await runQuery(tx, request); } else { result = await runTransaction(req, (newTx) => runQuery(newTx, request)); } return result.rows.map((row) => row[idField]); }); exports.handleODataRequest = (req, res, next) => { const [, apiRoot] = req.url.split('/', 2); if (apiRoot == null || models[apiRoot] == null) { return next('route'); } if (DEBUG) { exports.api[apiRoot].logger.log('Parsing', 
req.method, req.url); } const mapSeries = controlFlow.getMappingFn(req.headers); const reqHooks = getHooks({ method: req.method, vocabulary: apiRoot, }); req.on('close', () => { handlePromise.cancel(); hooks_1.rollbackRequestHooks(reqHooks); }); res.on('close', () => { handlePromise.cancel(); hooks_1.rollbackRequestHooks(reqHooks); }); if (req.tx != null) { req.tx.on('rollback', () => { hooks_1.rollbackRequestHooks(reqHooks); }); } const handlePromise = runHooks('PREPARSE', reqHooks, { req, tx: req.tx }) .then(async () => { let requests; if (req.batch != null && req.batch.length > 0) { requests = req.batch; } else { const { method, url, body } = req; requests = [{ method, url, data: body }]; } const prepareRequest = async ($request) => { $request.engine = exports.db.engine; $request.hooks = getHooks($request); try { await runHooks('POSTPARSE', $request.hooks, { req, request: $request, tx: req.tx, }); const translatedRequest = await uriParser.translateUri($request); return await abstract_sql_1.compileRequest(translatedRequest); } catch (err) { hooks_1.rollbackRequestHooks(reqHooks); hooks_1.rollbackRequestHooks($request.hooks); throw err; } }; const results = await mapSeries(requests, async (requestPart) => { let request = await uriParser.parseOData(requestPart); if (Array.isArray(request)) { request = await Bluebird.mapSeries(request, prepareRequest); } else { request = await prepareRequest(request); } return runTransaction(req, async (tx) => { tx.on('rollback', () => { hooks_1.rollbackRequestHooks(reqHooks); if (Array.isArray(request)) { request.forEach(({ hooks }) => { hooks_1.rollbackRequestHooks(hooks); }); } else { hooks_1.rollbackRequestHooks(request.hooks); } }); if (Array.isArray(request)) { const env = await Bluebird.reduce(request, runChangeSet(req, res, tx), new Map()); return Array.from(env.values()); } else { return runRequest(req, res, tx, request); } }); }); const responses = results.map((result) => { if (_.isError(result)) { return 
convertToHttpError(result); } else { return result; } }); res.set('Cache-Control', 'no-cache'); if (req.batch == null || req.batch.length === 0) { let [response] = responses; if (_.isError(response)) { if (res.__internalPinejs === true) { return res.json(response); } else { response = { status: response.status, body: response.getResponseBody(), }; } } const { body, headers, status } = response; if (status) { res.status(status); } _.forEach(headers, (headerValue, headerName) => { res.set(headerName, headerValue); }); if (!body) { if (status != null) { res.sendStatus(status); } else { console.error('No status or body set', req.url, responses); res.sendStatus(500); } } else { if (status != null) { res.status(status); } res.json(body); } } else { res.status(200).sendMulti(responses.map((response) => { if (_.isError(response)) { return { status: response.status, body: response.getResponseBody(), }; } else { return response; } })); } }) .catch((e) => { console.error('An error occurred while constructing the response', e); res.sendStatus(500); }); return handlePromise; }; const convertToHttpError = (err) => { if (err instanceof errors_1.HttpError) { return err; } if (err instanceof errors_1.SbvrValidationError) { return new errors_1.BadRequestError(err); } if (err instanceof errors_1.PermissionError) { return new errors_1.UnauthorizedError(err); } if (err instanceof exports.db.ConstraintError) { return new errors_1.ConflictError(err); } if (err instanceof errors_1.SqlCompilationError || err instanceof errors_1.TranslationError || err instanceof errors_1.ParsingError || err instanceof errors_1.PermissionParsingError) { return new errors_1.InternalRequestError(); } console.error('Unexpected response error type', err); return new errors_1.NotFoundError(err); }; const runRequest = async (req, res, tx, request) => { const { logger } = exports.api[request.vocabulary]; if (DEBUG) { logger.log('Running', req.method, req.url); } let result; try { try { await runHooks('PRERUN', 
request.hooks, { req, request, tx }); switch (request.method) { case 'GET': result = await runGet(req, res, request, tx); break; case 'POST': result = await runPost(req, res, request, tx); break; case 'PUT': case 'PATCH': case 'MERGE': result = await runPut(req, res, request, tx); break; case 'DELETE': result = await runDelete(req, res, request, tx); break; } } catch (err) { if (err instanceof exports.db.DatabaseError) { prettifyConstraintError(err, request); logger.error(err); err.message = 'Database error'; throw err; } if (err instanceof uriParser.SyntaxError || err instanceof EvalError || err instanceof RangeError || err instanceof ReferenceError || err instanceof SyntaxError || err instanceof TypeError || err instanceof URIError) { logger.error(err); throw new errors_1.InternalRequestError(); } throw err; } await runHooks('POSTRUN', request.hooks, { req, request, result, tx }); } catch (err) { await runHooks('POSTRUN-ERROR', request.hooks, { req, request, tx, error: err, }); throw err; } return prepareResponse(req, res, request, result, tx); }; const runChangeSet = (req, res, tx) => async (env, request) => { request = updateBinds(env, request); const result = await runRequest(req, res, tx, request); if (request.id == null) { throw new Error('No request id'); } if (result.headers == null) { result.headers = {}; } result.headers['Content-Id'] = request.id; env.set(request.id, result); return env; }; const updateBinds = (env, request) => { if (request._defer) { request.odataBinds = request.odataBinds.map(([tag, id]) => { if (tag === 'ContentReference') { const ref = env.get(id); if (ref == null || ref.body == null || typeof ref.body === 'string' || ref.body.id === undefined) { throw new errors_1.BadRequestError('Reference to a non existing resource in Changeset'); } return uriParser.parseId(ref.body.id); } return [tag, id]; }); } return request; }; const prepareResponse = async (req, res, request, result, tx) => { switch (request.method) { case 'GET': return 
respondGet(req, res, request, result, tx); case 'POST': return respondPost(req, res, request, result, tx); case 'PUT': case 'PATCH': case 'MERGE': return respondPut(req, res, request, result, tx); case 'DELETE': return respondDelete(req, res, request, result, tx); case 'OPTIONS': return respondOptions(req, res, request, result, tx); default: throw new errors_1.MethodNotAllowedError(); } }; const runTransaction = (req, callback) => { if (req.tx != null) { return callback(req.tx); } else { return exports.db.transaction(callback); } }; const runQuery = async (tx, request, queryIndex, addReturning) => { const { vocabulary } = request; let { sqlQuery } = request; if (sqlQuery == null) { throw new errors_1.InternalRequestError('No SQL query available to run'); } if (request.engine == null) { throw new errors_1.InternalRequestError('No database engine specified'); } if (Array.isArray(sqlQuery)) { if (queryIndex == null) { throw new errors_1.InternalRequestError('Received a query index to run but the query is not an array'); } sqlQuery = sqlQuery[queryIndex]; } const { query, bindings } = sqlQuery; const values = await abstract_sql_1.getAndCheckBindValues(request, bindings); if (DEBUG) { exports.api[vocabulary].logger.log(query, values); } return tx.executeSql(query, values, addReturning); }; const runGet = (_req, _res, request, tx) => { if (request.sqlQuery != null) { return runQuery(tx, request); } }; const respondGet = async (req, res, request, result, tx) => { const vocab = request.vocabulary; if (request.sqlQuery != null) { const d = await odataResponse.process(vocab, exports.getAbstractSqlModel(request), request.resourceName, result.rows); await runHooks('PRERESPOND', request.hooks, { req, res, request, result, data: d, tx, }); return { body: { d }, headers: { contentType: 'application/json' } }; } else { if (request.resourceName === '$metadata') { return { body: models[vocab].odataMetadata, headers: { contentType: 'xml' }, }; } else { return { status: 404, }; } } }; 
const runPost = async (_req, _res, request, tx) => { const vocab = request.vocabulary; const { idField } = exports.getAbstractSqlModel(request).tables[exports.resolveSynonym(request)]; const { rowsAffected, insertId } = await runQuery(tx, request, undefined, idField); if (rowsAffected === 0) { throw new errors_1.PermissionError(); } await exports.validateModel(tx, vocab, request); return insertId; }; const respondPost = async (req, res, request, id, tx) => { var _a, _b; const vocab = request.vocabulary; const location = odataResponse.resourceURI(vocab, request.resourceName, id); if (DEBUG) { exports.api[vocab].logger.log('Insert ID: ', request.resourceName, id); } let result = { d: [{ id }] }; if (location != null && !['0', 'false'].includes((_b = (_a = request === null || request === void 0 ? void 0 : request.odataQuery) === null || _a === void 0 ? void 0 : _a.options) === null || _b === void 0 ? void 0 : _b.returnResource)) { try { result = (await exports.runURI('GET', location, undefined, tx, req)); } catch (_c) { } } await runHooks('PRERESPOND', request.hooks, { req, res, request, result, tx, }); return { status: 201, body: result.d[0], headers: { contentType: 'application/json', Location: location, }, }; }; const runPut = async (_req, _res, request, tx) => { const vocab = request.vocabulary; let rowsAffected; if (Array.isArray(request.sqlQuery)) { ({ rowsAffected } = await runQuery(tx, request, 1)); if (rowsAffected === 0) { ({ rowsAffected } = await runQuery(tx, request, 0)); } } else { ({ rowsAffected } = await runQuery(tx, request)); } if (rowsAffected > 0) { await exports.validateModel(tx, vocab, request); } return undefined; }; const respondPut = async (req, res, request, _result, tx) => { await runHooks('PRERESPOND', request.hooks, { req, res, request, tx, }); return { status: 200, headers: {}, }; }; const respondDelete = respondPut; const respondOptions = respondPut; const runDelete = async (_req, _res, request, tx) => { const vocab = 
request.vocabulary; const { rowsAffected } = await runQuery(tx, request); if (rowsAffected > 0) { await exports.validateModel(tx, vocab, request); } return undefined; }; exports.executeStandardModels = Bluebird.method(async (tx) => { try { await exports.executeModel(tx, { apiRoot: 'dev', modelText: devModel, logging: { log: false, }, migrations: { '11.0.0-modified-at': ` ALTER TABLE "model" ADD COLUMN IF NOT EXISTS "modified at" TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL; `, }, }); await exports.executeModels(tx, permissions.config.models); console.info('Successfully executed standard models.'); } catch (err) { console.error('Failed to execute standard models.', err); throw err; } }); exports.addSideEffectHook = (method, apiRoot, resourceName, hooks) => { const sideEffectHook = _.mapValues(hooks, (hook) => { if (hook != null) { return { HOOK: hook, effects: true, }; } }); addHook(method, apiRoot, resourceName, sideEffectHook); }; exports.addPureHook = (method, apiRoot, resourceName, hooks) => { const pureHooks = _.mapValues(hooks, (hook) => { if (hook != null) { return { HOOK: hook, effects: false, }; } }); addHook(method, apiRoot, resourceName, pureHooks); }; const addHook = (method, apiRoot, resourceName, hooks) => { const methodHooks = apiHooks[method]; if (methodHooks == null) { throw new Error('Unsupported method: ' + method); } if (apiRoot !== 'all' && models[apiRoot] == null) { throw new Error('Unknown api root: ' + apiRoot); } if (resourceName !== 'all') { const origResourceName = resourceName; resourceName = exports.resolveSynonym({ vocabulary: apiRoot, resourceName }); if (models[apiRoot].abstractSql.tables[resourceName] == null) { throw new Error('Unknown resource for api root: ' + origResourceName + ', ' + apiRoot); } } if (methodHooks[apiRoot] == null) { methodHooks[apiRoot] = {}; } const apiRootHooks = methodHooks[apiRoot]; if (apiRootHooks[resourceName] == null) { apiRootHooks[resourceName] = {}; } const resourceHooks = apiRootHooks[resourceName]; 
for (const hookType of Object.keys(hooks)) { if (!isValidHook(hookType)) { throw new Error('Unknown callback type: ' + hookType); } const hook = hooks[hookType]; if (resourceHooks[hookType] == null) { resourceHooks[hookType] = []; } if (hook != null) { resourceHooks[hookType].push(hook); } } getHooks.clear(); }; exports.setup = Bluebird.method(async (_app, $db) => { exports.db = exports.db = $db; try { await exports.db.transaction(async (tx) => { await exports.executeStandardModels(tx); await permissions.setup(); }); } catch (err) { console.error('Could not execute standard models', err); process.exit(1); } try { await exports.db.executeSql('CREATE UNIQUE INDEX "uniq_model_model_type_vocab" ON "model" ("is of-vocabulary", "model type");'); } catch (_a) { } }); //# sourceMappingURL=sbvr-utils.js.map