@clickup/ent-framework
A PostgreSQL graph-database-alike library with microsharding and row-level security
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PgQueryInsert = void 0;
const QueryBase_1 = require("../abstract/QueryBase");
const types_1 = require("../types");
const PgRunner_1 = require("./PgRunner");
class PgQueryInsert extends QueryBase_1.QueryBase {
    constructor() {
        super(...arguments);
        /** @ignore */
        this.RUNNER_CLASS = PgRunnerInsert;
    }
}
exports.PgQueryInsert = PgQueryInsert;
class PgRunnerInsert extends PgRunner_1.PgRunner {
    constructor(schema, client) {
        super(schema, client);
        this.op = "INSERT";
        this.maxBatchSize = 100;
        this.default = null; // On a duplicate key conflict, the result defaults to null.
        const fields = this.addPK(Object.keys(this.schema.table), "append");
        this.singleBuilder = this.createValuesBuilder({
            prefix: this.fmt("INSERT INTO %T (%FIELDS) VALUES", { fields }),
            indent: " ",
            fields,
            suffix: this.fmt(` ON CONFLICT DO NOTHING RETURNING %PK AS ${types_1.ID}`),
        });
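        // Illustrative shape of the single-row SQL assembled above, assuming a
        // hypothetical table "tbl" (the %T, %FIELDS and %PK placeholders are
        // expanded by this.fmt(); the exact output may differ):
        //   INSERT INTO tbl (<fields>) VALUES (<values>)
        //   ON CONFLICT DO NOTHING RETURNING <pk> AS id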
        // We use a WITH clause in the INSERT, because "ON CONFLICT DO NOTHING"
        // emits nothing in the "RETURNING" clause, so without it we couldn't
        // distinguish the rows that were inserted from the rows that were not.
        // Having WITH solves this (see RETURNING below and the illustrative
        // sketch after this builder).
        this.batchBuilder = this.createWithBuilder({
            fields,
            suffix: this.fmt("INSERT INTO %T (%FIELDS)\n" +
                "SELECT %FIELDS FROM rows OFFSET 1\n" +
                `ON CONFLICT DO NOTHING RETURNING (SELECT _key FROM rows WHERE %PK(rows)=%PK(%T)), %PK AS ${types_1.ID}`, { fields }),
        });
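        // Illustrative shape of the batched SQL, assuming a hypothetical table
        // "tbl" with PK "id" (the exact output of createWithBuilder() may differ):
        //   WITH rows(_key, <fields>) AS (VALUES
        //     <leading row skipped by OFFSET 1>,
        //     ('key1', <values of input 1>),
        //     ('key2', <values of input 2>)
        //   )
        //   INSERT INTO tbl (<fields>)
        //   SELECT <fields> FROM rows OFFSET 1
        //   ON CONFLICT DO NOTHING
        //   RETURNING (SELECT _key FROM rows WHERE rows.id=tbl.id), id AS id
        // The correlated subquery in RETURNING maps each inserted PK back to the
        // _key of the input row that produced it; duplicates skipped by ON
        // CONFLICT return nothing and thus resolve to the default (null).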
    }
    key(input) {
        // We must NEVER dedup inserts, because:
        // 1. If the table DOESN'T have a unique key, then we must insert all
        //    input rows (no dedup allowed).
        // 2. If the table DOES have a unique key, then we must logically ensure
        //    that only one concurrent promise is resolved into an inserted row ID,
        //    and all others are resolved with null (aka "not inserted due to a
        //    duplicate").
        return super.key(input);
    }
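    // For example (hypothetical scenario): two concurrent, identical insert calls
    // against a table with a unique key are expected to resolve to one row ID and
    // one null ("not inserted due to a duplicate"), rather than being collapsed
    // into a single deduplicated query.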
    async runSingle(input, annotations) {
        // Assemble the one-row INSERT from the prefix, the VALUES for this
        // input, and the ON CONFLICT / RETURNING suffix.
        const sql = this.singleBuilder.prefix +
            this.singleBuilder.func([["", input]]) +
            this.singleBuilder.suffix;
        const rows = await this.clientQuery(sql, annotations, 1);
        if (!rows.length) {
            // ON CONFLICT DO NOTHING suppressed the insert (duplicate key).
            return undefined;
        }
        return rows[0][types_1.ID];
    }
    async runBatch(inputs, annotations) {
        const sql = this.batchBuilder.prefix +
            this.batchBuilder.func(inputs) +
            this.batchBuilder.suffix;
        const rows = await this.clientQuery(sql, annotations, inputs.size);
        // Map each input's _key to the ID of the row it inserted; inputs whose
        // key is absent here (skipped by ON CONFLICT DO NOTHING) fall back to
        // this.default, i.e. null.
        const outputs = new Map();
        for (const row of rows) {
            outputs.set(row._key, row[types_1.ID]);
        }
        return outputs;
    }
}
PgRunnerInsert.IS_WRITE = true;
//# sourceMappingURL=PgQueryInsert.js.map
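
Usage sketch (not part of the compiled file above): at the application level, this
runner typically surfaces through the Ent API's insertIfNotExists() call, which
resolves to the new row's ID, or to null when ON CONFLICT DO NOTHING skipped the
row. The EntTopic class, its slug field, and the vc viewer context below are
hypothetical placeholders, not exports of this module.

JavaScript
// Hypothetical Ent class and field -- for illustration only.
const id = await EntTopic.insertIfNotExists(vc, { slug: "hello" });
if (id === null) {
  // A row with the same unique key already existed: ON CONFLICT DO NOTHING
  // emitted nothing for this input, so it resolved to the runner's default, null.
} else {
  // id is the PK of the freshly inserted row, returned via the RETURNING clause.
}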