
@clickup/ent-framework (PgQueryUpdate.js)


A PostgreSQL graph-database-alike library with microsharding and row-level security

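PgQueryUpdate.js builds PostgreSQL UPDATE statements in two shapes: a single-row form assembled by singleBuilder, and a batched "WITH ... VALUES ... UPDATE ... FROM rows" form assembled by batchBuilder. As a rough sketch (the table and field names here are hypothetical, and the real text depends on the schema and on how PgRunner's fmt() expands its %T/%PK macros), the single-row form looks like:

// Hypothetical expansion of singleBuilder for input { name: "new", $cas: { name: "old" } };
// "users", "name", "updated_at" and the literal values are made up for illustration:
const singleRowSqlSketch =
    "UPDATE users SET name='new', updated_at=now()" +   // prefix + func1(input, $literal)
    " WHERE id='101'" +                                 // midfix + func2(input)
    " AND ROW(name) IS NOT DISTINCT FROM ROW('old')" +  // optional $cas guard
    " RETURNING id AS id";                              // suffix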
"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.PgQueryUpdate = void 0; const uniq_1 = __importDefault(require("lodash/uniq")); const misc_1 = require("../internal/misc"); const types_1 = require("../types"); const PgRunner_1 = require("./PgRunner"); class PgQueryUpdate { constructor(schema, id, input) { this.schema = schema; this.IS_WRITE = true; // A little hack to merge the updating row with its ID. this.input = { ...input, [types_1.ID]: id }; this.allFields = Object.keys(this.schema.table); } async run(client, annotation) { // Treat undefined as an absent key. This will hopefully be JITed very // efficiently, but could still be that it won't since we enumerate object // keys and use [] to access the values. const fields = this.allFields.filter((field) => field !== types_1.ID && this.input[field] !== undefined); const casFields = this.input.$cas ? this.allFields.filter((field) => field !== types_1.ID && this.input.$cas[field] !== undefined) : []; // If there are no known fields to update, skip the entire operation. We // return true since we don't know whether the row is in the DB or not, so // we assume it is. if (fields.length === 0 && !this.input.$literal) { return true; } // An UPDATE with $literal is a little hacky: we disable batching for it, // because we can't guarantee that the SET clause in "WITH ... VALUES ... // UPDATE ... SET ... FROM rows" batched query will be identical for all // input rows. const disableBatching = !!this.input.$literal; // Since UPDATE has partial list of fields, we have to cache runners per // updating fields list. Else we'd not be able to do a partial batched update. return client .batcher(this.constructor, this.schema, fields.join(":") + ":" + casFields.join(":"), disableBatching, () => // This is run only once per every unique combination of field names, // not per every row updated, so it's cheap to do whatever we want. new PgRunnerUpdate(this.schema, client, fields, casFields)) .run(this.input, annotation); } } exports.PgQueryUpdate = PgQueryUpdate; class PgRunnerUpdate extends PgRunner_1.PgRunner { constructor(schema, client, fieldsIn, casFieldsIn) { super(schema, client); this.op = "UPDATE"; this.maxBatchSize = 100; this.default = false; // If nothing is updated, we return false. // Always include all autoUpdate fields. const fields = (0, uniq_1.default)([ ...fieldsIn, ...Object.keys(this.schema.table).filter((field) => this.schema.table[field].autoUpdate !== undefined), ]); const casFields = casFieldsIn.map((field) => ({ field, alias: `$cas.${field}`, })); this.singleBuilder = { prefix: this.fmt("UPDATE %T SET "), func1: this.createUpdateKVsBuilder(fields), midfix: this.fmt(" WHERE %PK="), func2: (input) => this.escapeValue(types_1.ID, input[types_1.ID]), cas: casFields.length > 0 ? this.createValuesBuilder({ prefix: this.fmt(" AND ROW(%FIELDS) IS NOT DISTINCT FROM ROW", { fields: casFields.map(({ field }) => field), normalize: true, }), indent: "", fields: casFields, suffix: "", }) : null, suffix: this.fmt(` RETURNING %PK AS ${types_1.ID}`), }; // There can be several updates for same id (due to batching), so returning // all keys here. 
        this.batchBuilder = this.createWithBuilder({
            fields: [...this.addPK(fields, "prepend"), ...casFields],
            suffix: this.fmt("UPDATE %T SET %UPDATE_FIELD_VALUE_PAIRS(rows)\n" +
                "FROM rows WHERE %PK(%T)=%PK(rows)", { fields }) +
                (casFields.length > 0
                    ? " AND " +
                        this.fmt("ROW(%FIELDS(%T))", {
                            fields: casFields.map(({ field }) => field),
                            normalize: true,
                        }) +
                        " IS NOT DISTINCT FROM " +
                        this.fmt("ROW(%FIELDS(rows))", { fields: casFields })
                    : "") +
                this.fmt(" RETURNING rows._key"),
        });
    }
    key(input) {
        return (input[types_1.ID] +
            (input.$cas ? ":" + (0, misc_1.stringHash)(JSON.stringify(input.$cas)) : ""));
    }
    async runSingle(input, annotations) {
        const literal = input.$literal;
        const sql = this.singleBuilder.prefix +
            this.singleBuilder.func1(input, literal) +
            this.singleBuilder.midfix +
            this.singleBuilder.func2(input) +
            (this.singleBuilder.cas?.prefix ?? "") +
            (this.singleBuilder.cas?.func?.([["", input]]) ?? "") +
            (this.singleBuilder.cas?.suffix ?? "") +
            this.singleBuilder.suffix;
        const rows = await this.clientQuery(sql, annotations, 1);
        return rows.length > 0;
    }
    async runBatch(inputs, annotations) {
        const sql = this.batchBuilder.prefix +
            this.batchBuilder.func(inputs) +
            this.batchBuilder.suffix;
        const rows = await this.clientQuery(sql, annotations, inputs.size);
        const outputs = new Map();
        for (const row of rows) {
            outputs.set(row._key, true);
        }
        return outputs;
    }
}
PgRunnerUpdate.IS_WRITE = true;
//# sourceMappingURL=PgQueryUpdate.js.map
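For context, a minimal usage sketch under stated assumptions: only the PgQueryUpdate constructor and run() signature come from the code above; the shapes of schema, client, and annotation, and the helper name, are made up for illustration and are not the library's documented API.

// Hypothetical caller: update one row, guarded by a compare-and-set on "name".
async function renameIfUnchanged(client, schema, id, oldName, newName, annotation) {
    // Assumptions: schema.table is a map of field definitions (as read by the
    // PgQueryUpdate constructor), client exposes the batcher() used in run(),
    // and annotation is an opaque value passed through to clientQuery().
    const query = new PgQueryUpdate(schema, id, {
        name: newName,
        // $cas: the UPDATE applies only while the stored "name" still equals
        // oldName (compared via IS NOT DISTINCT FROM, so NULLs compare equal).
        $cas: { name: oldName },
    });
    // Resolves to true if a row was updated (or if there was nothing to
    // update), false if the row is missing or the CAS guard did not match.
    return query.run(client, annotation);
}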