@clickup/ent-framework

A PostgreSQL graph-database-alike library with microsharding and row-level security

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.PgQuerySelect = void 0; const pickBy_1 = __importDefault(require("lodash/pickBy")); const QueryBase_1 = require("../abstract/QueryBase"); const misc_1 = require("../internal/misc"); const escapeLiteral_1 = require("./helpers/escapeLiteral"); const buildHintQueries_1 = require("./internal/buildHintQueries"); const escapeString_1 = require("./internal/escapeString"); const PgRunner_1 = require("./PgRunner"); const ALLOWED_ORDER = [ "ASC", "ASC NULLS LAST", "ASC NULLS FIRST", "DESC", "DESC NULLS LAST", "DESC NULLS FIRST", ]; class PgQuerySelect extends QueryBase_1.QueryBase { constructor() { super(...arguments); /** @ignore */ this.RUNNER_CLASS = PgRunnerSelect; } async run(client, annotation) { const custom = this.input.custom; const disableBatching = !!custom?.hints?.[buildHintQueries_1.RAW_PREPEND_HINT]; return client .batcher(this.constructor, this.schema, JSON.stringify(custom?.hints) || "", disableBatching, () => new this.RUNNER_CLASS(this.schema, client)) .run(this.input, annotation); } } exports.PgQuerySelect = PgQuerySelect; class PgRunnerSelect extends PgRunner_1.PgRunner { constructor(schema, client) { super(schema, client); this.prefix = this.fmt("SELECT %SELECT_FIELDS FROM %T "); this.prefixUnion = this.fmt("SELECT "); this.midfixUnion = this.fmt(" AS _key, %SELECT_FIELDS FROM %T "); this.op = "SELECT"; this.maxBatchSize = 10; // PG crashes on large queries with lots of UNION ALL, so we keep this value low. this.default = []; // We just need something here. this.builder = this.createWhereBuilder({ prefix: this.fmt(""), suffix: this.fmt(""), }); } key(input) { // Coalesce equal select queries. const json = JSON.stringify(input); return (0, misc_1.stringHash)(json); } async runSingle(input, annotations) { const { sql, hints } = this.buildCustom(input, this.prefix + this.builder.prefix + this.builder.func(input.where) + this.builder.suffix + this.buildOptionalOrder(input.order) + this.buildLimit(input.limit)); return this.clientQuery(sql, annotations, 1, hints); } async runBatch(inputs, annotations) { // SELECT '...' AS _key, ... FROM ... WHERE ... // UNION ALL // SELECT '...' AS _key, ... FROM ... WHERE ... const pieces = []; let allHints = {}; for (const [key, input] of inputs.entries()) { const { sql, hints } = this.buildCustom(input, this.prefixUnion + (0, escapeString_1.escapeString)(key) + this.midfixUnion + this.builder.prefix + this.builder.func(input.where) + this.builder.suffix + this.buildOptionalOrder(input.order) + this.buildLimit(input.limit)); pieces.push("(" + sql + ")"); allHints = { ...allHints, ...(0, pickBy_1.default)(hints, (v) => v !== undefined) }; } const unionRows = await this.clientQuery(pieces.join("\n UNION ALL\n"), annotations, inputs.size, Object.keys(allHints).length > 0 ? 
allHints : undefined); const outputs = new Map(); for (const { _key: key, ...row } of unionRows) { let rows = outputs.get(key); if (!rows) { rows = []; outputs.set(key, rows); } rows.push(row); } return outputs; } buildCustom(input, sql) { const custom = input.custom; if (custom?.joins?.length) { sql = sql.replace(/ FROM \S+\s+/, (m) => m + "\n" + custom.joins.map((join) => (0, escapeLiteral_1.escapeLiteral)(join)).join("\n") + "\n"); } else if (custom?.from?.length) { sql = sql.replace(/ FROM \S+/, () => " FROM " + (0, escapeLiteral_1.escapeLiteral)(custom.from)); } if (custom?.ctes?.length) { sql = "WITH\n " + custom.ctes.map((cte) => (0, escapeLiteral_1.escapeLiteral)(cte)).join(",\n ") + "\n" + sql; } return { sql, hints: custom?.hints }; } buildOptionalOrder(order) { if (!order) { return ""; } // TS tuples support is unfortunately weak: it has hard time treating arrays // as tuples, and also treating strings as literal strings. E.g. we can't do: // [["field", "ASC"], ...] // in the caller code and have Order<TType> to be tuple-based; the only // work-around would be // [tuple("field" as const, "ASC" as const), ...] // which is ugly. So we use object-based order specifiers and lots of run-time // checks around the data passed. const pieces = []; for (const item of order) { if ((0, misc_1.hasKey)("$literal", item)) { if (Object.keys(item).length > 1) { throw Error("Invalid order specification - $literal must be the only key: " + (0, misc_1.inspectCompact)(item)); } pieces.push((0, escapeLiteral_1.escapeLiteral)(item.$literal)); } else { for (const [field, dir] of Object.entries(item)) { if (!ALLOWED_ORDER.includes("" + dir)) { throw Error(`Invalid order specifier: ${dir}; allowed specifiers: ` + ALLOWED_ORDER.join(", ")); } pieces.push(`${this.escapeField(field)} ${dir}`); } } } return pieces.length > 0 ? " ORDER BY " + pieces.join(", ") : ""; } buildLimit(limit) { return " LIMIT " + (parseInt("" + limit) || 0); } } PgRunnerSelect.IS_WRITE = false; //# sourceMappingURL=PgQuerySelect.js.map
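
For illustration, the keyed UNION ALL batching that runBatch() performs can be sketched in isolation. The snippet below is a minimal sketch of the pattern, not part of the library's API: buildBatchSql() and demuxRows() are hypothetical helper names, and it assumes table names, field lists, and WHERE clauses are already safely escaped, whereas the real runner routes keys through escapeString() and builds its WHERE clauses and custom literals via the where-builder and escapeLiteral().

"use strict";
// Standalone sketch (hypothetical helpers) of the keyed UNION ALL batching above.
// Assumes pre-escaped identifiers and WHERE clauses; the real runner escapes
// the batch key via escapeString() and values via its where-builder.
function buildBatchSql(table, fields, inputs) {
    const pieces = [];
    for (const [key, where] of inputs.entries()) {
        // Tag every sub-select with its batch key so rows can be regrouped later.
        pieces.push("(SELECT '" + key + "' AS _key, " + fields.join(", ") +
            " FROM " + table + " WHERE " + where + ")");
    }
    return pieces.join("\n UNION ALL\n");
}
// Regroup the flat UNION ALL result set into a Map keyed by _key, dropping the
// _key column from each row (the same shape runBatch() returns).
function demuxRows(unionRows) {
    const outputs = new Map();
    for (const { _key: key, ...row } of unionRows) {
        let rows = outputs.get(key);
        if (!rows) {
            rows = [];
            outputs.set(key, rows);
        }
        rows.push(row);
    }
    return outputs;
}
// Example: two coalesced SELECTs sent to the server as a single round-trip.
const sql = buildBatchSql("users", ["id", "name"], new Map([
    ["q1", "team_id = 101"],
    ["q2", "team_id = 202"],
]));
console.log(sql);
console.log(demuxRows([
    { _key: "q1", id: "1", name: "Alice" },
    { _key: "q2", id: "2", name: "Bob" },
]));

Running this with Node prints the combined statement and a Map with "q1" and "q2" entries, mirroring the per-key outputs map that runBatch() assembles from the rows returned by clientQuery().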