@clickup/ent-framework
A PostgreSQL graph-database-alike library with microsharding and row-level security
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PgQuerySelectBy = void 0;
const PgRunner_1 = require("./PgRunner");
class PgQuerySelectBy {
    constructor(schema, input) {
        this.schema = schema;
        this.input = input;
        this.IS_WRITE = false;
    }
    async run(client, annotation) {
        // Treat undefined as an absent key. Hopefully this will be JITed very
        // efficiently, although it may not be, since we enumerate object keys
        // and use [] to access the values.
        const fields = this.schema.uniqueKey.filter((field) => this.input[field] !== undefined);
        // If there are no known fields, skip the entire operation.
        if (fields.length === 0) {
            return [];
        }
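        // E.g. assuming a hypothetical uniqueKey of ["tenant_id", "email"]: an
        // input of { tenant_id: "42" } yields fields = ["tenant_id"], and an
        // input with all unique-key fields undefined returns [] here with no
        // DB round-trip at all.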
        // Since the list of fields is partial and depends on the query itself,
        // we have to cache runners per unique fields list; otherwise, queries
        // touching different subsets of the unique key could not be batched
        // together.
        return client
            .batcher(this.constructor, this.schema, fields.join(":"), false, () =>
                // This is run only once per unique combination of field names,
                // not per every row selected, so it's cheap to do whatever we want.
                new PgRunnerSelectBy(this.schema, client, fields))
            .run(this.input, annotation);
    }
}
exports.PgQuerySelectBy = PgQuerySelectBy;
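// Usage sketch (hypothetical identifiers, not part of this file): a query
// object is built per call, and the client batches concurrent calls sharing
// the same schema and fields list into a single PgRunnerSelectBy:
//
//   const query = new PgQuerySelectBy(schema, { tenant_id: "42", email: "a@b.c" });
//   const rows = await query.run(client, annotation);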
class PgRunnerSelectBy extends PgRunner_1.PgRunner {
    constructor(schema, client, fields) {
        super(schema, client);
        this.fields = fields;
        this.op = "SELECT_UNIQ_PFX";
        // Select by unique key is cheap, so we can have much bigger load
        // batches to accumulate more data from e.g. Shard 0 for the next
        // multi-Shard requests.
        this.maxBatchSize = 1000;
        this.default = []; // If no rows are found, returns [].
        this.builders = this.createWhereBuildersFieldsEq({
            prefix: this.fmt("SELECT %SELECT_FIELDS FROM %T "),
            fields,
            suffix: this.fmt(""),
        });
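        // A sketch of the resulting SQL, assuming createWhereBuildersFieldsEq()
        // concatenates the prefix, a WHERE clause over the listed fields, and
        // the suffix (the exact clause shape is built elsewhere):
        //   SELECT ... FROM the_table WHERE (tenant_id, email) IN (...)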
    }
    key(input) {
        return JSON.stringify(this.fields.map((field) => input[field]));
    }
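    // E.g. with fields = ["tenant_id", "email"], both an input and a fetched
    // row carrying { tenant_id: "42", email: "a@b.c" } map to the same key
    // '["42","a@b.c"]', which is how runBatch() below routes returned rows
    // back to their originating inputs.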
    async runSingle(input, annotations) {
        const sql = this.builders.plain.prefix +
            this.builders.plain.func([["", input]]) +
            this.builders.plain.suffix;
        return this.clientQuery(sql, annotations, 1);
    }
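    // runBatch() is the batched counterpart of runSingle(): it sends one SQL
    // statement covering all inputs, then regroups the returned rows per input
    // key (assuming the SELECT list carries the unique-key fields back).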
    async runBatch(inputs, annotations) {
        const sql = this.builders.optimized.prefix +
            this.builders.optimized.func(inputs) +
            this.builders.optimized.suffix;
        const rows = await this.clientQuery(sql, annotations, inputs.size);
        const outputs = new Map();
        for (const row of rows) {
            const key = this.key(row);
            let group = outputs.get(key);
            if (!group) {
                group = [];
                outputs.set(key, group);
            }
            group.push(row);
        }
        return outputs;
    }
}
PgRunnerSelectBy.IS_WRITE = false;
//# sourceMappingURL=PgQuerySelectBy.js.map