forge-sql-orm (version unspecified)
Drizzle ORM integration for Atlassian @forge/sql. Provides a custom driver, schema migration, two levels of caching (local and global via @forge/kvs), optimistic locking, and query analysis.
257 lines • 10.1 kB • JavaScript
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.ForgeSQLAnalyseOperation = void 0;
const table_1 = require("drizzle-orm/table");
const luxon_1 = require("luxon");
/**
* Class implementing SQL analysis operations for ForgeSQL ORM.
* Provides methods for analyzing query performance, execution plans, and slow queries.
*/
/**
 * Class implementing SQL analysis operations for ForgeSQL ORM.
 * Provides methods for analyzing query performance, execution plans, and slow queries
 * via EXPLAIN / EXPLAIN ANALYZE and the information_schema statement/slow-query tables.
 */
class ForgeSQLAnalyseOperation {
    forgeOperations;
    /**
     * Creates a new instance of ForgeSQLAnalyseOperation.
     * @param {ForgeSqlOperation} forgeOperations - The ForgeSQL operations instance
     */
    constructor(forgeOperations) {
        this.forgeOperations = forgeOperations;
        // Bound so the method can be passed as a bare callback (e.g. results.map(...))
        // without losing `this`.
        this.mapToCamelCaseClusterStatement = this.mapToCamelCaseClusterStatement.bind(this);
    }
    /**
     * Maps a raw EXPLAIN / EXPLAIN ANALYZE driver row to the camelCase ExplainAnalyzeRow shape.
     * Shared by explainRaw and explainAnalyzeRaw to avoid duplicated field mapping.
     * @param {Record<string, any>} row - The raw result row from the SQL driver
     * @returns {ExplainAnalyzeRow} The mapped row
     */
    mapExplainRow(row) {
        return {
            id: row.id,
            estRows: row.estRows,
            actRows: row.actRows,
            task: row.task,
            accessObject: row["access object"],
            executionInfo: row["execution info"],
            operatorInfo: row["operator info"],
            memory: row.memory,
            disk: row.disk,
        };
    }
    /**
     * Executes EXPLAIN on a raw SQL query.
     * @param {string} query - The SQL query to analyze
     * @param {unknown[]} bindParams - The query parameters
     * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
     */
    async explainRaw(query, bindParams) {
        const results = await this.forgeOperations
            .fetch()
            .executeRawSQL(`EXPLAIN ${query}`, bindParams);
        return results.map((row) => this.mapExplainRow(row));
    }
    /**
     * Executes EXPLAIN on a Drizzle query.
     * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
     * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
     */
    async explain(query) {
        const { sql, params } = query.toSQL();
        return this.explainRaw(sql, params);
    }
    /**
     * Executes EXPLAIN ANALYZE on a raw SQL query.
     * @param {string} query - The SQL query to analyze
     * @param {unknown[]} bindParams - The query parameters
     * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
     */
    async explainAnalyzeRaw(query, bindParams) {
        const results = await this.forgeOperations
            .fetch()
            .executeRawSQL(`EXPLAIN ANALYZE ${query}`, bindParams);
        return results.map((row) => this.mapExplainRow(row));
    }
    /**
     * Executes EXPLAIN ANALYZE on a Drizzle query.
     * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
     * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
     */
    async explainAnalyze(query) {
        const { sql, params } = query.toSQL();
        return this.explainAnalyzeRaw(sql, params);
    }
    /**
     * Decodes a query execution plan from its tab-separated string representation.
     * Header names are converted to camelCase; data cells are matched to headers by
     * their tab position, so an empty cell no longer shifts the following columns
     * (the previous implementation filtered empty cells out of each row).
     * @param {string} input - The raw execution plan string
     * @returns {ExplainAnalyzeRow[]} The decoded execution plan rows
     */
    decodedPlan(input) {
        if (!input) {
            return [];
        }
        const lines = input.trim().split("\n");
        if (lines.length < 2) {
            return [];
        }
        // Remember each non-empty header's original tab index so data cells stay
        // aligned with their column even when some cells are empty.
        const columns = [];
        lines[0].split("\t").forEach((rawHeader, index) => {
            const header = rawHeader.trim();
            if (!header) {
                return;
            }
            const camelKey = header
                .replace(/\s+/g, " ")
                .replace(/[-\s]+(.)?/g, (_, c) => (c ? c.toUpperCase() : ""))
                .replace(/^./, (s) => s.toLowerCase());
            columns.push({ index, camelKey });
        });
        return lines.slice(1).map((line) => {
            const values = line.split("\t").map((s) => s.trim());
            const row = {};
            for (const { index, camelKey } of columns) {
                row[camelKey] = values[index] ?? "";
            }
            return row;
        });
    }
    /**
     * Normalizes a raw slow query row into a more structured format.
     * @param {SlowQueryRaw} row - The raw slow query data
     * @returns {SlowQueryNormalized} The normalized slow query data
     */
    normalizeSlowQuery(row) {
        return {
            time: row.Time,
            txnStartTs: row.Txn_start_ts,
            user: row.User,
            host: row.Host,
            connId: row.Conn_ID,
            db: row.DB,
            query: row.Query,
            digest: row.Digest,
            queryTime: row.Query_time,
            compileTime: row.Compile_time,
            optimizeTime: row.Optimize_time,
            processTime: row.Process_time,
            waitTime: row.Wait_time,
            parseTime: row.Parse_time,
            rewriteTime: row.Rewrite_time,
            copTime: row.Cop_time,
            copProcAvg: row.Cop_proc_avg,
            copProcMax: row.Cop_proc_max,
            copProcP90: row.Cop_proc_p90,
            copProcAddr: row.Cop_proc_addr,
            copWaitAvg: row.Cop_wait_avg,
            copWaitMax: row.Cop_wait_max,
            copWaitP90: row.Cop_wait_p90,
            copWaitAddr: row.Cop_wait_addr,
            memMax: row.Mem_max,
            diskMax: row.Disk_max,
            totalKeys: row.Total_keys,
            processKeys: row.Process_keys,
            requestCount: row.Request_count,
            kvTotal: row.KV_total,
            pdTotal: row.PD_total,
            resultRows: row.Result_rows,
            rocksdbBlockCacheHitCount: row.Rocksdb_block_cache_hit_count,
            rocksdbBlockReadCount: row.Rocksdb_block_read_count,
            rocksdbBlockReadByte: row.Rocksdb_block_read_byte,
            plan: row.Plan,
            binaryPlan: row.Binary_plan,
            planDigest: row.Plan_digest,
            parsedPlan: this.decodedPlan(row.Plan),
        };
    }
    /**
     * Builds a SQL query for retrieving cluster statement history.
     * Table names and dates are interpolated into the SQL text (the statements-summary
     * tables are queried without bind parameters), so single quotes in table names are
     * escaped to keep them inside the string literal.
     * @param {string[]} tables - The tables to analyze
     * @param {Date} [from] - The start date for the analysis
     * @param {Date} [to] - The end date for the analysis
     * @returns {string} The SQL query for cluster statement history
     */
    buildClusterStatementQuery(tables, from, to) {
        const formatDateTime = (date) => luxon_1.DateTime.fromJSDate(date).toFormat("yyyy-LL-dd'T'HH:mm:ss.SSS");
        // Escape single quotes so a table name cannot break out of the SQL literal.
        const escapeLiteral = (value) => String(value).replace(/'/g, "''");
        const tableConditions = tables
            .map((table) => `TABLE_NAMES LIKE CONCAT(SCHEMA_NAME, '.', '%', '${escapeLiteral(table)}', '%')`)
            .join(" OR ");
        const whereClauses = [];
        if (tableConditions) {
            whereClauses.push(`(${tableConditions})`);
        }
        if (from) {
            whereClauses.push(`SUMMARY_BEGIN_TIME >= '${formatDateTime(from)}'`);
        }
        if (to) {
            whereClauses.push(`SUMMARY_END_TIME <= '${formatDateTime(to)}'`);
        }
        return `
    SELECT *
    FROM (
      SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY
      UNION ALL
      SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
    ) AS combined
    ${whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""}
  `;
    }
    /**
     * Retrieves and analyzes slow queries from the database.
     * @returns {Promise<SlowQueryNormalized[]>} The normalized slow query data
     */
    // CLUSTER_SLOW_QUERY STATISTICS
    async analyzeSlowQueries() {
        const results = await this.forgeOperations.fetch().executeRawSQL(`
        SELECT *
        FROM information_schema.slow_query
        ORDER BY time DESC
      `);
        return results.map((row) => this.normalizeSlowQuery(row));
    }
    /**
     * Converts a cluster statement row to camelCase format.
     * @param {Record<string, any>} input - The input row data
     * @returns {ClusterStatementRowCamelCase} The converted row data
     */
    mapToCamelCaseClusterStatement(input) {
        if (!input) {
            return {};
        }
        const result = {};
        for (const key in input) {
            // Skip inherited enumerable properties; rows should be plain objects.
            if (!Object.hasOwn(input, key)) {
                continue;
            }
            const camelKey = key.toLowerCase().replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
            result[camelKey] = input[key];
        }
        // Assigned after the loop so a raw column cannot clobber the decoded plan.
        result.parsedPlan = this.decodedPlan(input["PLAN"] ?? "");
        return result;
    }
    /**
     * Analyzes query history for specific tables using raw table names.
     * @param {string[]} tables - The table names to analyze
     * @param {Date} [fromDate] - The start date for the analysis
     * @param {Date} [toDate] - The end date for the analysis
     * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
     */
    async analyzeQueriesHistoryRaw(tables, fromDate, toDate) {
        const results = await this.forgeOperations
            .fetch()
            .executeRawSQL(this.buildClusterStatementQuery(tables ?? [], fromDate, toDate));
        return results.map((r) => this.mapToCamelCaseClusterStatement(r));
    }
    /**
     * Analyzes query history for specific tables using Drizzle table objects.
     * @param {AnyMySqlTable[]} tables - The Drizzle table objects to analyze
     * @param {Date} [fromDate] - The start date for the analysis
     * @param {Date} [toDate] - The end date for the analysis
     * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
     */
    async analyzeQueriesHistory(tables, fromDate, toDate) {
        const tableNames = tables?.map((table) => (0, table_1.getTableName)(table)) ?? [];
        return this.analyzeQueriesHistoryRaw(tableNames, fromDate, toDate);
    }
}
// CommonJS named export; fulfills the `exports.ForgeSQLAnalyseOperation = void 0;` declared in the preamble.
exports.ForgeSQLAnalyseOperation = ForgeSQLAnalyseOperation;
//# sourceMappingURL=ForgeSQLAnalyseOperations.js.map