forge-sql-orm
Drizzle ORM integration for Forge-SQL in Atlassian Forge applications.
import { isTable, sql, eq, and } from "drizzle-orm";
import moment from "moment";
import { isSQLWrapper } from "drizzle-orm/sql/sql";
import { sql as sql$1, migrationRunner } from "@forge/sql";
import { drizzle } from "drizzle-orm/mysql-proxy";
import { customType, mysqlTable, timestamp, varchar, bigint } from "drizzle-orm/mysql-core";
import moment$1 from "moment/moment.js";
import { getTableName } from "drizzle-orm/table";
const parseDateTime = (value, format) => {
let result;
const m = moment(value, format, true);
if (!m.isValid()) {
const momentDate = moment(value);
if (momentDate.isValid()) {
result = momentDate.toDate();
} else {
result = new Date(value);
}
} else {
result = m.toDate();
}
if (isNaN(result.getTime())) {
result = new Date(value);
}
return result;
};
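// Illustrative usage (values are hypothetical): parseDateTime first attempts a
// strict parse against the given format, then a lenient moment parse, and
// finally falls back to the native Date constructor.
//
//   parseDateTime("2024-01-15T10:30:00.000", "YYYY-MM-DDTHH:mm:ss.SSS"); // strict format match
//   parseDateTime("2024-01-15", "YYYY-MM-DDTHH:mm:ss.SSS");              // falls back to lenient parsing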
function getPrimaryKeys(table) {
const { columns, primaryKeys } = getTableMetadata(table);
const columnPrimaryKeys = Object.entries(columns).filter(([, column]) => column.primary);
if (columnPrimaryKeys.length > 0) {
return columnPrimaryKeys;
}
if (Array.isArray(primaryKeys) && primaryKeys.length > 0) {
const primaryKeyColumns = /* @__PURE__ */ new Set();
primaryKeys.forEach((primaryKeyBuilder) => {
Object.entries(columns).filter(([, column]) => {
return primaryKeyBuilder.columns.includes(column);
}).forEach(([name, column]) => {
primaryKeyColumns.add([name, column]);
});
});
return Array.from(primaryKeyColumns);
}
return [];
}
function processForeignKeys(table, foreignKeysSymbol, extraSymbol) {
const foreignKeys = [];
if (foreignKeysSymbol) {
const fkArray = table[foreignKeysSymbol];
if (fkArray) {
fkArray.forEach((fk) => {
if (fk.reference) {
const item = fk.reference(fk);
foreignKeys.push(item);
}
});
}
}
if (extraSymbol) {
const extraConfigBuilder = table[extraSymbol];
if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
const configBuilderData = extraConfigBuilder(table);
if (configBuilderData) {
const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
(item) => item.value ?? item
);
configBuilders.forEach((builder) => {
if (!builder?.constructor) return;
const builderName = builder.constructor.name.toLowerCase();
if (builderName.includes("foreignkeybuilder")) {
foreignKeys.push(builder);
}
});
}
}
}
return foreignKeys;
}
function getTableMetadata(table) {
const symbols = Object.getOwnPropertySymbols(table);
const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys)"));
const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
const builders = {
indexes: [],
checks: [],
foreignKeys: [],
primaryKeys: [],
uniqueConstraints: [],
extras: []
};
builders.foreignKeys = processForeignKeys(table, foreignKeysSymbol, extraSymbol);
if (extraSymbol) {
const extraConfigBuilder = table[extraSymbol];
if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
const configBuilderData = extraConfigBuilder(table);
if (configBuilderData) {
const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
(item) => item.value ?? item
);
configBuilders.forEach((builder) => {
if (!builder?.constructor) return;
const builderName = builder.constructor.name.toLowerCase();
const builderMap = {
indexbuilder: builders.indexes,
checkbuilder: builders.checks,
primarykeybuilder: builders.primaryKeys,
uniqueconstraintbuilder: builders.uniqueConstraints
};
for (const [type, array] of Object.entries(builderMap)) {
if (builderName.includes(type)) {
array.push(builder);
break;
}
}
builders.extras.push(builder);
});
}
}
}
return {
tableName: nameSymbol ? table[nameSymbol] : "",
columns: columnsSymbol ? table[columnsSymbol] : {},
...builders
};
}
function generateDropTableStatements(tables) {
const dropStatements = [];
tables.forEach((tableName) => {
dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
dropStatements.push(`DROP SEQUENCE IF EXISTS \`${tableName}\`;`);
});
return dropStatements;
}
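// Illustrative output (table name is hypothetical): each table gets a DROP TABLE
// and a matching DROP SEQUENCE statement.
//
//   generateDropTableStatements(["users"]);
//   // => ["DROP TABLE IF EXISTS `users`;", "DROP SEQUENCE IF EXISTS `users`;"]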
function mapSelectTableToAlias(table, uniqPrefix, aliasMap) {
const { columns, tableName } = getTableMetadata(table);
const selectionsTableFields = {};
Object.keys(columns).forEach((name) => {
const column = columns[name];
const uniqName = `a_${uniqPrefix}_${tableName}_${column.name}`.toLowerCase();
const fieldAlias = sql.raw(uniqName);
selectionsTableFields[name] = sql`${column} as \`${fieldAlias}\``;
aliasMap[uniqName] = column;
});
return selectionsTableFields;
}
function isDrizzleColumn(column) {
return column && typeof column === "object" && "table" in column;
}
function mapSelectAllFieldsToAlias(selections, name, uniqName, fields, aliasMap) {
if (isTable(fields)) {
selections[name] = mapSelectTableToAlias(fields, uniqName, aliasMap);
} else if (isDrizzleColumn(fields)) {
const column = fields;
const uniqAliasName = `a_${uniqName}_${column.name}`.toLowerCase();
let aliasName = sql.raw(uniqAliasName);
selections[name] = sql`${column} as \`${aliasName}\``;
aliasMap[uniqAliasName] = column;
} else if (isSQLWrapper(fields)) {
selections[name] = fields;
} else {
const innerSelections = {};
Object.entries(fields).forEach(([iname, ifields]) => {
mapSelectAllFieldsToAlias(innerSelections, iname, `${uniqName}_${iname}`, ifields, aliasMap);
});
selections[name] = innerSelections;
}
return selections;
}
function mapSelectFieldsWithAlias(fields) {
if (!fields) {
throw new Error("fields is empty");
}
const aliasMap = {};
const selections = {};
Object.entries(fields).forEach(([name, fields2]) => {
mapSelectAllFieldsToAlias(selections, name, name, fields2, aliasMap);
});
return { selections, aliasMap };
}
function getAliasFromDrizzleAlias(value) {
const isSQL = value !== null && typeof value === "object" && isSQLWrapper(value) && "queryChunks" in value;
if (isSQL) {
const sql2 = value;
const queryChunks = sql2.queryChunks;
if (queryChunks.length > 3) {
const aliasNameChunk = queryChunks[queryChunks.length - 2];
if (isSQLWrapper(aliasNameChunk) && "queryChunks" in aliasNameChunk) {
const aliasNameChunkSql = aliasNameChunk;
if (aliasNameChunkSql.queryChunks?.length === 1 && aliasNameChunkSql.queryChunks[0]) {
const queryChunksStringChunk = aliasNameChunkSql.queryChunks[0];
if ("value" in queryChunksStringChunk) {
const values = queryChunksStringChunk.value;
if (values && values.length === 1) {
return values[0];
}
}
}
}
}
}
return void 0;
}
function transformValue(value, alias, aliasMap) {
const column = aliasMap[alias];
if (!column) return value;
let customColumn = column;
const fromDriver = customColumn?.mapFrom;
if (fromDriver && value !== null && value !== void 0) {
return fromDriver(value);
}
return value;
}
function transformObject(obj, selections, aliasMap) {
const result = {};
for (const [key, value] of Object.entries(obj)) {
const selection = selections[key];
const alias = getAliasFromDrizzleAlias(selection);
if (alias && aliasMap[alias]) {
result[key] = transformValue(value, alias, aliasMap);
} else if (selection && typeof selection === "object" && !isSQLWrapper(selection)) {
result[key] = transformObject(
value,
selection,
aliasMap
);
} else {
result[key] = value;
}
}
return result;
}
function applyFromDriverTransform(rows, selections, aliasMap) {
return rows.map((row) => {
const transformed = transformObject(
row,
selections,
aliasMap
);
return processNullBranches(transformed);
});
}
function processNullBranches(obj) {
if (obj === null || typeof obj !== "object") {
return obj;
}
if (obj.constructor && obj.constructor.name !== "Object") {
return obj;
}
const result = {};
let allNull = true;
for (const [key, value] of Object.entries(obj)) {
if (value === null || value === void 0) {
result[key] = null;
continue;
}
if (typeof value === "object") {
const processed = processNullBranches(value);
result[key] = processed;
if (processed !== null) {
allNull = false;
}
} else {
result[key] = value;
allNull = false;
}
}
return allNull ? null : result;
}
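// Illustrative example (row shape is hypothetical): when a LEFT JOIN finds no
// match, every aliased column of the joined table comes back as null;
// processNullBranches collapses such an all-null branch into a single null so
// callers can test `row.order === null` instead of checking each field.
//
//   processNullBranches({ user: { id: 1, name: "a" }, order: { id: null, total: null } });
//   // => { user: { id: 1, name: "a" }, order: null }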
function formatLimitOffset(limitOrOffset) {
if (typeof limitOrOffset !== "number" || isNaN(limitOrOffset)) {
throw new Error("limitOrOffset must be a valid number");
}
return sql.raw(`${limitOrOffset}`);
}
function nextVal(sequenceName) {
return sql.raw(`NEXTVAL(${sequenceName})`);
}
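// Usage sketch (table and sequence names are hypothetical): both helpers emit
// raw SQL fragments, so only trusted, validated values should be passed to them.
//
//   db.select().from(users).limit(formatLimitOffset(pageSize)).offset(formatLimitOffset(page * pageSize));
//   db.insert(users).values({ id: nextVal("users_id_seq"), name: "example" });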
class ForgeSQLCrudOperations {
forgeOperations;
options;
/**
* Creates a new instance of ForgeSQLCrudOperations.
* @param forgeSqlOperations - The ForgeSQL operations instance
* @param options - Configuration options for the ORM
*/
constructor(forgeSqlOperations, options) {
this.forgeOperations = forgeSqlOperations;
this.options = options;
}
/**
* Inserts records into the database with optional versioning support.
* If a version field exists in the schema, versioning is applied.
*
* @template T - The type of the table schema
* @param {T} schema - The entity schema
* @param {Partial<InferInsertModel<T>>[]} models - Array of entities to insert
* @param {boolean} [updateIfExists=false] - Whether to update existing records
* @returns {Promise<number>} The insertId returned by the insert statement
* @throws {Error} If the insert operation fails
*/
async insert(schema, models, updateIfExists = false) {
if (!models?.length) return 0;
const { tableName, columns } = getTableMetadata(schema);
const versionMetadata = this.validateVersionField(tableName, columns);
const preparedModels = models.map(
(model) => this.prepareModelWithVersion(model, versionMetadata, columns)
);
const queryBuilder = this.forgeOperations.getDrizzleQueryBuilder().insert(schema).values(preparedModels);
const finalQuery = updateIfExists ? queryBuilder.onDuplicateKeyUpdate({
set: Object.fromEntries(
Object.keys(preparedModels[0]).map((key) => [key, schema[key]])
)
}) : queryBuilder;
const result = await finalQuery;
return result[0].insertId;
}
/**
* Deletes a record by its primary key with optional version check.
* If versioning is enabled, ensures the record hasn't been modified since last read.
*
* @template T - The type of the table schema
* @param {unknown} id - The ID of the record to delete
* @param {T} schema - The entity schema
* @returns {Promise<number>} Number of affected rows
* @throws {Error} If the delete operation fails
* @throws {Error} If multiple primary keys are found
*/
async deleteById(id, schema) {
const { tableName, columns } = getTableMetadata(schema);
const primaryKeys = this.getPrimaryKeys(schema);
if (primaryKeys.length !== 1) {
throw new Error("Only single primary key is supported");
}
const [primaryKeyName, primaryKeyColumn] = primaryKeys[0];
const versionMetadata = this.validateVersionField(tableName, columns);
const conditions = [eq(primaryKeyColumn, id)];
if (versionMetadata && columns) {
const versionField = columns[versionMetadata.fieldName];
if (versionField) {
const oldModel = await this.getOldModel({ [primaryKeyName]: id }, schema, [
versionMetadata.fieldName,
versionField
]);
conditions.push(eq(versionField, oldModel[versionMetadata.fieldName]));
}
}
const queryBuilder = this.forgeOperations.getDrizzleQueryBuilder().delete(schema).where(and(...conditions));
const result = await queryBuilder;
return result[0].affectedRows;
}
/**
* Updates a record by its primary key with optimistic locking support.
* If versioning is enabled:
* - Retrieves the current version
* - Checks for concurrent modifications
* - Increments the version on successful update
*
* @template T - The type of the table schema
* @param {Partial<InferInsertModel<T>>} entity - The entity with updated values
* @param {T} schema - The entity schema
* @returns {Promise<number>} Number of affected rows
* @throws {Error} If the primary key is not provided
* @throws {Error} If optimistic locking check fails
* @throws {Error} If multiple primary keys are found
*/
async updateById(entity, schema) {
const { tableName, columns } = getTableMetadata(schema);
const primaryKeys = this.getPrimaryKeys(schema);
if (primaryKeys.length !== 1) {
throw new Error("Only single primary key is supported");
}
const [primaryKeyName, primaryKeyColumn] = primaryKeys[0];
const versionMetadata = this.validateVersionField(tableName, columns);
if (!(primaryKeyName in entity)) {
throw new Error(`Primary key ${primaryKeyName} must be provided in the entity`);
}
const currentVersion = await this.getCurrentVersion(
entity,
primaryKeyName,
versionMetadata,
columns,
schema
);
const updateData = this.prepareUpdateData(entity, versionMetadata, columns, currentVersion);
const conditions = [
eq(primaryKeyColumn, entity[primaryKeyName])
];
if (versionMetadata && columns) {
const versionField = columns[versionMetadata.fieldName];
if (versionField) {
conditions.push(eq(versionField, currentVersion));
}
}
const queryBuilder = this.forgeOperations.getDrizzleQueryBuilder().update(schema).set(updateData).where(and(...conditions));
const result = await queryBuilder;
if (versionMetadata && result[0].affectedRows === 0) {
throw new Error(
`Optimistic locking failed: record with primary key ${entity[primaryKeyName]} has been modified`
);
}
return result[0].affectedRows;
}
/**
* Updates specified fields of records based on provided conditions.
* This method does not support versioning and should be used with caution.
*
* @template T - The type of the table schema
* @param {Partial<InferInsertModel<T>>} updateData - The data to update
* @param {T} schema - The entity schema
* @param {SQL<unknown>} where - The WHERE conditions
* @returns {Promise<number>} Number of affected rows
* @throws {Error} If WHERE conditions are not provided
* @throws {Error} If the update operation fails
*/
async updateFields(updateData, schema, where) {
if (!where) {
throw new Error("WHERE conditions must be provided");
}
const queryBuilder = this.forgeOperations.getDrizzleQueryBuilder().update(schema).set(updateData).where(where);
const result = await queryBuilder;
return result[0].affectedRows;
}
// Helper methods
/**
* Gets primary keys from the schema.
* @template T - The type of the table schema
* @param {T} schema - The table schema
* @returns {[string, AnyColumn][]} Array of primary key name and column pairs
* @throws {Error} If no primary keys are found
*/
getPrimaryKeys(schema) {
const primaryKeys = getPrimaryKeys(schema);
if (!primaryKeys?.length) {
throw new Error(`No primary keys found for schema: ${schema}`);
}
return primaryKeys;
}
/**
* Validates and retrieves version field metadata.
* @param {string} tableName - The name of the table
* @param {Record<string, AnyColumn>} columns - The table columns
* @returns {Object | undefined} Version field metadata if valid, undefined otherwise
*/
validateVersionField(tableName, columns) {
if (this.options.disableOptimisticLocking) {
return void 0;
}
const versionMetadata = this.options.additionalMetadata?.[tableName]?.versionField;
if (!versionMetadata) return void 0;
let fieldName = versionMetadata.fieldName;
let versionField = columns[versionMetadata.fieldName];
if (!versionField) {
const find = Object.entries(columns).find(([, c]) => c.name === versionMetadata.fieldName);
if (find) {
fieldName = find[0];
versionField = find[1];
}
}
if (!versionField) {
console.warn(
`Version field "${versionMetadata.fieldName}" not found in table ${tableName}. Versioning will be skipped.`
);
return void 0;
}
if (!versionField.notNull) {
console.warn(
`Version field "${versionMetadata.fieldName}" in table ${tableName} is nullable. Versioning may not work correctly.`
);
return void 0;
}
const fieldType = versionField.getSQLType();
const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal";
if (!isSupportedType) {
console.warn(
`Version field "${versionMetadata.fieldName}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. Versioning will be skipped.`
);
return void 0;
}
return { fieldName, type: fieldType };
}
/**
* Gets the current version of an entity.
* @template T - The type of the table schema
* @param {Partial<InferInsertModel<T>>} entity - The entity
* @param {string} primaryKeyName - The name of the primary key
* @param {Object | undefined} versionMetadata - Version field metadata
* @param {Record<string, AnyColumn>} columns - The table columns
* @param {T} schema - The table schema
* @returns {Promise<unknown>} The current version value
*/
async getCurrentVersion(entity, primaryKeyName, versionMetadata, columns, schema) {
if (!versionMetadata || !columns) return void 0;
const versionField = columns[versionMetadata.fieldName];
if (!versionField) return void 0;
if (versionMetadata.fieldName in entity) {
return entity[versionMetadata.fieldName];
}
const oldModel = await this.getOldModel(
{ [primaryKeyName]: entity[primaryKeyName] },
schema,
[versionMetadata.fieldName, versionField]
);
return oldModel[versionMetadata.fieldName];
}
/**
* Prepares a model for insertion with version field.
* @template T - The type of the table schema
* @param {Partial<InferInsertModel<T>>} model - The model to prepare
* @param {Object | undefined} versionMetadata - Version field metadata
* @param {Record<string, AnyColumn>} columns - The table columns
* @returns {InferInsertModel<T>} The prepared model
*/
prepareModelWithVersion(model, versionMetadata, columns) {
if (!versionMetadata || !columns) return model;
let fieldName = versionMetadata.fieldName;
let versionField = columns[versionMetadata.fieldName];
if (!versionField) {
const find = Object.entries(columns).find(([, c]) => c.name === versionMetadata.fieldName);
if (find) {
fieldName = find[0];
versionField = find[1];
}
}
if (!versionField) return model;
const modelWithVersion = { ...model };
const fieldType = versionField.getSQLType();
const versionValue = fieldType === "datetime" || fieldType === "timestamp" ? /* @__PURE__ */ new Date() : 1;
modelWithVersion[fieldName] = versionValue;
return modelWithVersion;
}
/**
* Prepares update data with version field.
* @template T - The type of the table schema
* @param {Partial<InferInsertModel<T>>} entity - The entity to update
* @param {Object | undefined} versionMetadata - Version field metadata
* @param {Record<string, AnyColumn>} columns - The table columns
* @param {unknown} currentVersion - The current version value
* @returns {Partial<InferInsertModel<T>>} The prepared update data
*/
prepareUpdateData(entity, versionMetadata, columns, currentVersion) {
const updateData = { ...entity };
if (versionMetadata && columns) {
const versionField = columns[versionMetadata.fieldName];
if (versionField) {
const fieldType = versionField.getSQLType();
updateData[versionMetadata.fieldName] = fieldType === "datetime" || fieldType === "timestamp" ? /* @__PURE__ */ new Date() : currentVersion + 1;
}
}
return updateData;
}
/**
* Retrieves an existing model by primary key.
* @template T - The type of the table schema
* @param {Record<string, unknown>} primaryKeyValues - The primary key values
* @param {T} schema - The table schema
* @param {[string, AnyColumn]} versionField - The version field name and column
* @returns {Promise<Awaited<T> extends Array<any> ? Awaited<T>[number] | undefined : Awaited<T> | undefined>} The existing model
* @throws {Error} If the record is not found
*/
async getOldModel(primaryKeyValues, schema, versionField) {
const [versionFieldName, versionFieldColumn] = versionField;
const primaryKeys = this.getPrimaryKeys(schema);
const [primaryKeyName, primaryKeyColumn] = primaryKeys[0];
const resultQuery = this.forgeOperations.getDrizzleQueryBuilder().select({
[primaryKeyName]: primaryKeyColumn,
[versionFieldName]: versionFieldColumn
}).from(schema).where(eq(primaryKeyColumn, primaryKeyValues[primaryKeyName]));
const model = await this.forgeOperations.fetch().executeQueryOnlyOne(resultQuery);
if (!model) {
throw new Error(`Record not found in table ${schema}`);
}
return model;
}
}
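// Usage sketch (schema, metadata, and values are hypothetical): the CRUD layer
// applies optimistic locking automatically when a version field is registered in
// `additionalMetadata` for the table.
//
//   const forgeSQL = new ForgeSQLORM({
//     additionalMetadata: { users: { versionField: { fieldName: "version" } } },
//   });
//   await forgeSQL.crud().insert(users, [{ name: "Bob" }]);            // version initialized on insert
//   await forgeSQL.crud().updateById({ id: 1, name: "Bobby" }, users); // throws if the row changed concurrently
//   await forgeSQL.crud().deleteById(1, users);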
class ForgeSQLSelectOperations {
options;
/**
* Creates a new instance of ForgeSQLSelectOperations.
* @param {ForgeSqlOrmOptions} options - Configuration options for the ORM
*/
constructor(options) {
this.options = options;
}
/**
* Executes a Drizzle query and returns a single result.
* Throws an error if more than one record is returned.
*
* @template T - The type of the query builder
* @param {T} query - The Drizzle query to execute
* @returns {Promise<Awaited<T> extends Array<any> ? Awaited<T>[number] | undefined : Awaited<T> | undefined>} A single result object or undefined
* @throws {Error} If more than one record is returned
*/
async executeQueryOnlyOne(query) {
const results = await query;
const datas = results;
if (!datas.length) {
return void 0;
}
if (datas.length > 1) {
throw new Error(`Expected 1 record but returned ${datas.length}`);
}
return datas[0];
}
/**
* Executes a raw SQL query and returns the results.
* Logs the query if logging is enabled.
*
* @template T - The type of the result objects
* @param {string} query - The raw SQL query to execute
* @param {SqlParameters[]} [params] - Optional SQL parameters
* @returns {Promise<T[]>} A list of results as objects
*/
async executeRawSQL(query, params) {
if (this.options.logRawSqlQuery) {
const paramsStr = params ? `, with params: ${JSON.stringify(params)}` : "";
console.debug(`Executing with SQL ${query}${paramsStr}`);
}
const sqlStatement = sql$1.prepare(query);
if (params) {
sqlStatement.bindParams(...params);
}
const result = await sqlStatement.execute();
return result.rows;
}
/**
* Executes a raw SQL update query.
* @param {string} query - The raw SQL update query
* @param {SqlParameters[]} [params] - Optional SQL parameters
* @returns {Promise<UpdateQueryResponse>} The update response containing affected rows
*/
async executeRawUpdateSQL(query, params) {
const sqlStatement = sql$1.prepare(query);
if (params) {
sqlStatement.bindParams(...params);
}
if (this.options.logRawSqlQuery) {
console.debug(
`Executing Update with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : "")
);
}
const updateQueryResponseResults = await sqlStatement.execute();
return updateQueryResponseResults.rows;
}
}
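// Usage sketch (query and schema are hypothetical): raw SQL goes through
// @forge/sql prepared statements, so parameters should be supplied as bind
// placeholders rather than interpolated into the query string.
//
//   const rows = await forgeSQL.fetch().executeRawSQL("SELECT id, name FROM users WHERE id = ?", [42]);
//   const one = await forgeSQL.fetch().executeQueryOnlyOne(
//     forgeSQL.getDrizzleQueryBuilder().select().from(users).where(eq(users.id, 42)),
//   );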
const forgeDriver = async (query, params, method) => {
try {
if (method === "execute") {
const sqlStatement = sql$1.prepare(query);
if (params) {
sqlStatement.bindParams(...params);
}
const updateQueryResponseResults = await sqlStatement.execute();
let result = updateQueryResponseResults.rows;
return { ...result, rows: [result] };
} else {
const sqlStatement = sql$1.prepare(query);
if (params) {
sqlStatement.bindParams(...params);
}
const result = await sqlStatement.execute();
const rows = result.rows.map((r) => Object.values(r));
return { rows };
}
} catch (error) {
console.error("SQL Error:", JSON.stringify(error));
throw error;
}
};
function injectSqlHints(query, hints) {
if (!hints) {
return query;
}
const normalizedQuery = query.trim().toUpperCase();
let queryHints;
if (normalizedQuery.startsWith("SELECT")) {
queryHints = hints.select;
} else if (normalizedQuery.startsWith("INSERT")) {
queryHints = hints.insert;
} else if (normalizedQuery.startsWith("UPDATE")) {
queryHints = hints.update;
} else if (normalizedQuery.startsWith("DELETE")) {
queryHints = hints.delete;
}
if (!queryHints || queryHints.length === 0) {
return query;
}
const hintsString = queryHints.join(" ");
const trimmedQuery = query.trim();
if (normalizedQuery.startsWith("SELECT")) {
return `SELECT /*+ ${hintsString} */ ${trimmedQuery.substring(6)}`;
} else if (normalizedQuery.startsWith("INSERT")) {
return `INSERT /*+ ${hintsString} */ ${trimmedQuery.substring(6)}`;
} else if (normalizedQuery.startsWith("UPDATE")) {
return `UPDATE /*+ ${hintsString} */ ${trimmedQuery.substring(6)}`;
} else if (normalizedQuery.startsWith("DELETE")) {
return `DELETE /*+ ${hintsString} */ ${trimmedQuery.substring(6)}`;
}
return query;
}
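// Illustrative example (hint is hypothetical): hints are injected right after the
// leading keyword as a TiDB-style optimizer hint comment.
//
//   injectSqlHints("SELECT * FROM users", { select: ["MAX_EXECUTION_TIME(1000)"] });
//   // => roughly "SELECT /*+ MAX_EXECUTION_TIME(1000) */ * FROM users"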
function createForgeDriverProxy(options, logRawSqlQuery) {
return async (query, params, method) => {
const modifiedQuery = injectSqlHints(query, options);
if (options && logRawSqlQuery && modifiedQuery !== query) {
console.warn("modified query: " + modifiedQuery);
}
return forgeDriver(modifiedQuery, params, method);
};
}
function createAliasedSelectBuilder(db, fields, selectFn) {
const { selections, aliasMap } = mapSelectFieldsWithAlias(fields);
const builder = selectFn(selections);
const wrapBuilder = (rawBuilder) => {
return new Proxy(rawBuilder, {
get(target, prop, receiver) {
if (prop === "execute") {
return async (...args) => {
const rows = await target.execute(...args);
return applyFromDriverTransform(rows, selections, aliasMap);
};
}
if (prop === "then") {
return (onfulfilled, onrejected) => target.execute().then((rows) => {
const transformed = applyFromDriverTransform(rows, selections, aliasMap);
return onfulfilled?.(transformed);
}, onrejected);
}
const value = Reflect.get(target, prop, receiver);
if (typeof value === "function") {
return (...args) => {
const result = value.apply(target, args);
if (typeof result === "object" && result !== null && "execute" in result) {
return wrapBuilder(result);
}
return result;
};
}
return value;
}
});
};
return wrapBuilder(builder);
}
function patchDbWithSelectAliased(db) {
db.selectAliased = function(fields) {
return createAliasedSelectBuilder(db, fields, (selections) => db.select(selections));
};
db.selectAliasedDistinct = function(fields) {
return createAliasedSelectBuilder(db, fields, (selections) => db.selectDistinct(selections));
};
return db;
}
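// Illustrative sketch (tables are hypothetical): selectAliased gives every column a
// unique alias so that joined tables with identically named columns (e.g. both
// having `id`) are not collapsed by Forge SQL, then maps the aliased rows back to
// nested objects with fromDriver conversions applied.
//
//   const rows = await db
//     .selectAliased({ user: users, order: orders })
//     .from(orders)
//     .innerJoin(users, eq(orders.userId, users.id));
//   // rows: [{ user: { ... }, order: { ... } }, ...]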
class ForgeSQLAnalyseOperation {
forgeOperations;
/**
* Creates a new instance of ForgeSQLAnalyseOperation.
* @param {ForgeSqlOperation} forgeOperations - The ForgeSQL operations instance
*/
constructor(forgeOperations) {
this.forgeOperations = forgeOperations;
this.mapToCamelCaseClusterStatement = this.mapToCamelCaseClusterStatement.bind(this);
}
/**
* Executes EXPLAIN on a raw SQL query.
* @param {string} query - The SQL query to analyze
* @param {unknown[]} bindParams - The query parameters
* @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
*/
async explainRaw(query, bindParams) {
const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ${query}`, bindParams);
return results.map((row) => ({
id: row.id,
estRows: row.estRows,
actRows: row.actRows,
task: row.task,
accessObject: row["access object"],
executionInfo: row["execution info"],
operatorInfo: row["operator info"],
memory: row.memory,
disk: row.disk
}));
}
/**
* Executes EXPLAIN on a Drizzle query.
* @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
* @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
*/
async explain(query) {
const { sql: sql2, params } = query.toSQL();
return this.explainRaw(sql2, params);
}
/**
* Executes EXPLAIN ANALYZE on a raw SQL query.
* @param {string} query - The SQL query to analyze
* @param {unknown[]} bindParams - The query parameters
* @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
*/
async explainAnalyzeRaw(query, bindParams) {
const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ANALYZE ${query}`, bindParams);
return results.map((row) => ({
id: row.id,
estRows: row.estRows,
actRows: row.actRows,
task: row.task,
accessObject: row["access object"],
executionInfo: row["execution info"],
operatorInfo: row["operator info"],
memory: row.memory,
disk: row.disk
}));
}
/**
* Executes EXPLAIN ANALYZE on a Drizzle query.
* @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
* @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
*/
async explainAnalyze(query) {
const { sql: sql2, params } = query.toSQL();
return this.explainAnalyzeRaw(sql2, params);
}
/**
* Decodes a query execution plan from its string representation.
* @param {string} input - The raw execution plan string
* @returns {ExplainAnalyzeRow[]} The decoded execution plan rows
*/
decodedPlan(input) {
if (!input) {
return [];
}
const lines = input.trim().split("\n");
if (lines.length < 2) return [];
const headersRaw = lines[0].split("\t").map((h) => h.trim()).filter(Boolean);
const headers = headersRaw.map((h) => {
return h.replace(/\s+/g, " ").replace(/[-\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "").replace(/^./, (s) => s.toLowerCase());
});
return lines.slice(1).map((line) => {
const values = line.split("\t").map((s) => s.trim()).filter(Boolean);
const row = {};
headers.forEach((key, i) => {
row[key] = values[i] ?? "";
});
return row;
});
}
/**
* Normalizes a raw slow query row into a more structured format.
* @param {SlowQueryRaw} row - The raw slow query data
* @returns {SlowQueryNormalized} The normalized slow query data
*/
normalizeSlowQuery(row) {
return {
time: row.Time,
txnStartTs: row.Txn_start_ts,
user: row.User,
host: row.Host,
connId: row.Conn_ID,
db: row.DB,
query: row.Query,
digest: row.Digest,
queryTime: row.Query_time,
compileTime: row.Compile_time,
optimizeTime: row.Optimize_time,
processTime: row.Process_time,
waitTime: row.Wait_time,
parseTime: row.Parse_time,
rewriteTime: row.Rewrite_time,
copTime: row.Cop_time,
copProcAvg: row.Cop_proc_avg,
copProcMax: row.Cop_proc_max,
copProcP90: row.Cop_proc_p90,
copProcAddr: row.Cop_proc_addr,
copWaitAvg: row.Cop_wait_avg,
copWaitMax: row.Cop_wait_max,
copWaitP90: row.Cop_wait_p90,
copWaitAddr: row.Cop_wait_addr,
memMax: row.Mem_max,
diskMax: row.Disk_max,
totalKeys: row.Total_keys,
processKeys: row.Process_keys,
requestCount: row.Request_count,
kvTotal: row.KV_total,
pdTotal: row.PD_total,
resultRows: row.Result_rows,
rocksdbBlockCacheHitCount: row.Rocksdb_block_cache_hit_count,
rocksdbBlockReadCount: row.Rocksdb_block_read_count,
rocksdbBlockReadByte: row.Rocksdb_block_read_byte,
plan: row.Plan,
binaryPlan: row.Binary_plan,
planDigest: row.Plan_digest,
parsedPlan: this.decodedPlan(row.Plan)
};
}
/**
* Builds a SQL query for retrieving cluster statement history.
* @param {string[]} tables - The tables to analyze
* @param {Date} [from] - The start date for the analysis
* @param {Date} [to] - The end date for the analysis
* @returns {string} The SQL query for cluster statement history
*/
buildClusterStatementQuery(tables, from, to) {
const formatDateTime = (date) => moment(date).format("YYYY-MM-DDTHH:mm:ss.SSS");
const tableConditions = tables.map((table) => `TABLE_NAMES LIKE CONCAT(SCHEMA_NAME, '.', '%', '${table}', '%')`).join(" OR ");
const timeConditions = [];
if (from) {
timeConditions.push(`SUMMARY_BEGIN_TIME >= '${formatDateTime(from)}'`);
}
if (to) {
timeConditions.push(`SUMMARY_END_TIME <= '${formatDateTime(to)}'`);
}
let whereClauses;
if (tableConditions?.length) {
whereClauses = [tableConditions ? `(${tableConditions})` : "", ...timeConditions];
} else {
whereClauses = timeConditions;
}
return `
SELECT *
FROM (
SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY
UNION ALL
SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
) AS combined
${whereClauses?.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""}
`;
}
/**
* Retrieves and analyzes slow queries from the database.
* @returns {Promise<SlowQueryNormalized[]>} The normalized slow query data
*/
// CLUSTER_SLOW_QUERY STATISTICS
async analyzeSlowQueries() {
const results = await this.forgeOperations.fetch().executeRawSQL(`
SELECT *
FROM information_schema.slow_query
ORDER BY time DESC
`);
return results.map((row) => this.normalizeSlowQuery(row));
}
/**
* Converts a cluster statement row to camelCase format.
* @param {Record<string, any>} input - The input row data
* @returns {ClusterStatementRowCamelCase} The converted row data
*/
mapToCamelCaseClusterStatement(input) {
if (!input) {
return {};
}
const result = {};
result.parsedPlan = this.decodedPlan(input["PLAN"] ?? "");
for (const key in input) {
const camelKey = key.toLowerCase().replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
result[camelKey] = input[key];
}
return result;
}
/**
* Analyzes query history for specific tables using raw table names.
* @param {string[]} tables - The table names to analyze
* @param {Date} [fromDate] - The start date for the analysis
* @param {Date} [toDate] - The end date for the analysis
* @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
*/
async analyzeQueriesHistoryRaw(tables, fromDate, toDate) {
const results = await this.forgeOperations.fetch().executeRawSQL(
this.buildClusterStatementQuery(tables ?? [], fromDate, toDate)
);
return results.map((r) => this.mapToCamelCaseClusterStatement(r));
}
/**
* Analyzes query history for specific tables using Drizzle table objects.
* @param {AnyMySqlTable[]} tables - The Drizzle table objects to analyze
* @param {Date} [fromDate] - The start date for the analysis
* @param {Date} [toDate] - The end date for the analysis
* @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
*/
async analyzeQueriesHistory(tables, fromDate, toDate) {
const tableNames = tables?.map((table) => getTableName(table)) ?? [];
return this.analyzeQueriesHistoryRaw(tableNames, fromDate, toDate);
}
}
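// Usage sketch (query is hypothetical): the analyze() facade exposes EXPLAIN and
// EXPLAIN ANALYZE for Drizzle queries plus normalized TiDB slow-query data.
//
//   const plan = await forgeSQL.analyze().explainAnalyze(
//     forgeSQL.getDrizzleQueryBuilder().select().from(users).where(eq(users.id, 1)),
//   );
//   const slowQueries = await forgeSQL.analyze().analyzeSlowQueries();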
class ForgeSQLORMImpl {
static instance = null;
drizzle;
crudOperations;
fetchOperations;
analyzeOperations;
/**
* Private constructor to enforce singleton behavior.
* @param options - Options for configuring ForgeSQL ORM behavior.
*/
constructor(options) {
try {
const newOptions = options ?? {
logRawSqlQuery: false,
disableOptimisticLocking: false
};
if (newOptions.logRawSqlQuery) {
console.debug("Initializing ForgeSQLORM...");
}
const proxiedDriver = createForgeDriverProxy(newOptions.hints, newOptions.logRawSqlQuery);
this.drizzle = patchDbWithSelectAliased(
drizzle(proxiedDriver, { logger: newOptions.logRawSqlQuery })
);
this.crudOperations = new ForgeSQLCrudOperations(this, newOptions);
this.fetchOperations = new ForgeSQLSelectOperations(newOptions);
this.analyzeOperations = new ForgeSQLAnalyseOperation(this);
} catch (error) {
console.error("ForgeSQLORM initialization failed:", error);
throw error;
}
}
/**
* Create the modify operations instance.
* @returns modify operations.
*/
modify() {
return this.crudOperations;
}
/**
* Returns the singleton instance of ForgeSQLORMImpl.
* @param options - Options for configuring ForgeSQL ORM behavior.
* @returns The singleton instance of ForgeSQLORMImpl.
*/
static getInstance(options) {
ForgeSQLORMImpl.instance ??= new ForgeSQLORMImpl(options);
return ForgeSQLORMImpl.instance;
}
/**
* Retrieves the CRUD operations instance.
* @returns CRUD operations.
*/
crud() {
return this.modify();
}
/**
* Retrieves the fetch operations instance.
* @returns Fetch operations.
*/
fetch() {
return this.fetchOperations;
}
/**
* Retrieves the query analysis operations instance.
* @returns Analyze operations.
*/
analyze() {
return this.analyzeOperations;
}
/**
* Returns a Drizzle query builder instance.
*
* ⚠️ IMPORTANT: This method should be used ONLY for query building purposes.
* The returned instance should NOT be used for direct database connections or query execution.
* All database operations should be performed through Forge SQL's executeRawSQL or executeRawUpdateSQL methods.
*
* @returns A Drizzle query builder instance for query construction only.
*/
getDrizzleQueryBuilder() {
return this.drizzle;
}
/**
* Creates a select query with unique field aliases to prevent field name collisions in joins.
* This is particularly useful when working with Atlassian Forge SQL, which collapses fields with the same name in joined tables.
*
* @template TSelection - The type of the selected fields
* @param {TSelection} fields - Object containing the fields to select, with table schemas as values
* @returns {MySqlSelectBuilder<TSelection, MySql2PreparedQueryHKT>} A select query builder with unique field aliases
* @throws {Error} If fields parameter is empty
* @example
* ```typescript
* await forgeSQL
* .select({user: users, order: orders})
* .from(orders)
* .innerJoin(users, eq(orders.userId, users.id));
* ```
*/
select(fields) {
if (!fields) {
throw new Error("fields is empty");
}
return this.drizzle.selectAliased(fields);
}
/**
* Creates a distinct select query with unique field aliases to prevent field name collisions in joins.
* This is particularly useful when working with Atlassian Forge SQL, which collapses fields with the same name in joined tables.
*
* @template TSelection - The type of the selected fields
* @param {TSelection} fields - Object containing the fields to select, with table schemas as values
* @returns {MySqlSelectBuilder<TSelection, MySql2PreparedQueryHKT>} A distinct select query builder with unique field aliases
* @throws {Error} If fields parameter is empty
* @example
* ```typescript
* await forgeSQL
* .selectDistinct({user: users, order: orders})
* .from(orders)
* .innerJoin(users, eq(orders.userId, users.id));
* ```
*/
selectDistinct(fields) {
if (!fields) {
throw new Error("fields is empty");
}
return this.drizzle.selectAliasedDistinct(fields);
}
}
class ForgeSQLORM {
ormInstance;
constructor(options) {
this.ormInstance = ForgeSQLORMImpl.getInstance(options);
}
/**
* Creates a select query with unique field aliases to prevent field name collisions in joins.
* This is particularly useful when working with Atlassian Forge SQL, which collapses fields with the same name in joined tables.
*
* @template TSelection - The type of the selected fields
* @param {TSelection} fields - Object containing the fields to select, with table schemas as values
* @returns {MySqlSelectBuilder<TSelection, MySql2PreparedQueryHKT>} A select query builder with unique field aliases
* @throws {Error} If fields parameter is empty
* @example
* ```typescript
* await forgeSQL
* .select({user: users, order: orders})
* .from(orders)
* .innerJoin(users, eq(orders.userId, users.id));
* ```
*/
select(fields) {
return this.ormInstance.select(fields);
}
/**
* Creates a distinct select query with unique field aliases to prevent field name collisions in joins.
* This is particularly useful when working with Atlassian Forge SQL, which collapses fields with the same name in joined tables.
*
* @template TSelection - The type of the selected fields
* @param {TSelection} fields - Object containing the fields to select, with table schemas as values
* @returns {MySqlSelectBuilder<TSelection, MySqlRemotePreparedQueryHKT>} A distinct select query builder with unique field aliases
* @throws {Error} If fields parameter is empty
* @example
* ```typescript
* await forgeSQL
* .selectDistinct({user: users, order: orders})
* .from(orders)
* .innerJoin(users, eq(orders.userId, users.id));
* ```
*/
selectDistinct(fields) {
return this.ormInstance.selectDistinct(fields);
}
/**
* Proxies the `crud` method from `ForgeSQLORMImpl`.
* @returns CRUD operations.
*/
crud() {
return this.ormInstance.modify();
}
/**
* Proxies the `modify` method from `ForgeSQLORMImpl`.
* @returns Modify operations.
*/
modify() {
return this.ormInstance.modify();
}
/**
* Proxies the `fetch` method from `ForgeSQLORMImpl`.
* @returns Fetch operations.
*/
fetch() {
return this.ormInstance.fetch();
}
/**
* Provides query analysis capabilities including EXPLAIN ANALYZE and slow query analysis.
* @returns {SchemaAnalyzeForgeSql} Interface for analyzing query performance
*/
analyze() {
return this.ormInstance.analyze();
}
/**
* Returns a Drizzle query builder instance.
*
* ⚠️ IMPORTANT: This method should be used ONLY for query building purposes.
* The returned instance should NOT be used for direct database connections or query execution.
* All database operations should be performed through Forge SQL's executeRawSQL or executeRawUpdateSQL methods.
*
* @returns A Drizzle query builder instance for query construction only.
*/
getDrizzleQueryBuilder() {
return this.ormInstance.getDrizzleQueryBuilder();
}
}
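// End-to-end usage sketch (schema definition is hypothetical): ForgeSQLORM is the
// default export and delegates to the singleton implementation above.
//
//   import ForgeSQLORM from "forge-sql-orm";
//   import { mysqlTable, varchar, bigint } from "drizzle-orm/mysql-core";
//
//   const users = mysqlTable("users", {
//     id: bigint("id", { mode: "number" }).primaryKey().autoincrement(),
//     name: varchar("name", { length: 255 }).notNull(),
//   });
//
//   const forgeSQL = new ForgeSQLORM({ logRawSqlQuery: true });
//   await forgeSQL.crud().insert(users, [{ name: "Alice" }]);
//   const rows = await forgeSQL.select({ user: users }).from(users);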
const forgeDateTimeString = customType({
dataType() {
return "datetime";
},
toDriver(value) {
return moment$1(value).format("YYYY-MM-DDTHH:mm:ss.SSS");
},
fromDriver(value) {
const format = "YYYY-MM-DDTHH:mm:ss.SSS";
return parseDateTime(value, format);
}
});
const forgeTimestampString = customType({
dataType() {
return "timestamp";
},
toDriver(value) {
return moment$1(new Date(value)).format("YYYY-MM-DDTHH:mm:ss.SSS");
},
fromDriver(value) {
const format = "YYYY-MM-DDTHH:mm:ss.SSS";
return parseDateTime(value, format);
}
});
const forgeDateString = customType({
dataType() {
return "date";
},
toDriver(value) {
return moment$1(value).format("YYYY-MM-DD");
},
fromDriver(value) {
const format = "YYYY-MM-DD";
return parseDateTime(value, format);
}
});
const forgeTimeString = customType({
dataType() {
return "time";
},
toDriver(value) {
return moment$1(value).format("HH:mm:ss.SSS");
},
fromDriver(value) {
return parseDateTime(value, "HH:mm:ss.SSS");
}
});
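// Illustrative schema sketch (table and columns are hypothetical): the forge*String
// custom types serialize values to the string formats Forge SQL expects and parse
// them back into Date objects via parseDateTime on read.
//
//   const events = mysqlTable("events", {
//     id: bigint("id", { mode: "number" }).primaryKey().autoincrement(),
//     startsAt: forgeDateTimeString("starts_at").notNull(),
//     day: forgeDateString("day"),
//   });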
const migrations = mysqlTable("__migrations", {
id: bigint("id", { mode: "number" }).primaryKey().autoincrement(),
name: varchar("name", { length: 255 }).notNull(),
migratedAt: timestamp("migratedAt").defaultNow().notNull()
});
async function getTables() {
const tables = await sql$1.executeDDL("SHOW TABLES");
return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
}
const forgeSystemTables = [migrations];
async function dropSchemaMigrations() {
try {
const tables = await getTables();
const dropStatements = generateDropTableStatements(tables);
for (const statement of dropStatements) {
console.warn(statement);
await sql$1.executeDDL(statement);
}
return getHttpResponse(
200,
"⚠️ All data in these tables has been permanently deleted. This operation cannot be undone."
);
} catch (error) {
console.error(error);
const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
return getHttpResponse(500, errorMessage);
}
}
const applySchemaMigrations = async (migration) => {
try {
if (typeof migration !== "function") {
throw new Error("migration is not a function");
}
console.log("Provisioning the database");
await sql$1._provision();
console.info("Running schema migrations");
const migrations2 = await migration(migrationRunner);
const successfulMigrations = await migrations2.run();
console.info("Migrations applied:", successfulMigrations);
const migrationList = await migrationRunner.list();
const migrationHistory = Array.isArray(migrationList) && migrationList.length > 0 ? migrationList.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n") : "No migrations found";
console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
return {
headers: { "Content-Type": ["application/json"] },
statusCode: 200,
statusText: "OK",
body: "Migrations successfully executed"
};
} catch (error) {
console.error("Error during migration:", error);
return {
headers: { "Content-Type": ["application/json"] },
statusCode: 500,
statusText: "Internal Server Error",
body: error instanceof Error ? error.message : "Unknown error during migration"
};
}
};
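// Usage sketch for a Forge web trigger (migration names and SQL are hypothetical,
// assuming the @forge/sql migrationRunner enqueue/run API): applySchemaMigrations
// provisions the database and then runs the enqueued migrations.
//
//   export const handler = () =>
//     applySchemaMigrations((runner) =>
//       runner.enqueue("v001_create_users", "CREATE TABLE IF NOT EXISTS users (id INT PRIMARY KEY)"),
//     );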
async function fetchSchemaWebTrigger() {
try {
const tables = await getTables();
const createTableStatements = await generateCreateTableStatements(tables);
const sqlStatements = wrapWithForeignKeyChecks(createTableStatements);
return getHttpResponse(200, sqlStatements.join(";\n"));
} catch (error) {
console.error(JSON.stringify(error));
const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
return getHttpResponse(500, errorMessage);
}
}
async function generateCreateTableStatements(tables) {
const statements = [];
for (const table of tables) {
const createTableResult = await sql$1.executeDDL(`SHOW CREATE TABLE "${table}"`);
const createTableStatements = createTableResult.rows.filter((row) => !isSystemTable(row.Table)).map((row) => formatCreateTableStatement(row["Create Table"]));
statements.push(...createTableStatements);
}
return statements;
}
function isSystemTable(tableName) {
return forgeSystemTables.some((st) => getTableName(st) === tableName);
}
function formatCreateTableStatement(statement) {
return statement.replace(/"/g, "").replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS");
}
function wrapWithForeignKeyChecks(statements) {
return ["SET foreign_key_checks = 0", ...statements, "SET foreign_key_checks = 1"];
}
const getHttpResponse = (statusCode, body) => {
let statusText;
if (statusCode === 200) {
statusText = "OK";
} else if (statusCode === 500) {
statusText = "Internal Server Error";
} else {
statusText = "Bad Request";
}
return {
headers: { "Content-Type": ["application/json"] },
statusCode,
statusText,
body
};
};
export {
ForgeSQLCrudOperations,
ForgeSQLSelectOperations,
applyFromDriverTransform,
applySchemaMigrations,
ForgeSQLORM as default,
dropSchemaMigrations,
fetchSchemaWebTrigger,
forgeDateString,
forgeDateTimeString,
forgeDriver,
forgeSystemTables,
forgeTimeString,
forgeTimestampString,
formatLimitOffset,
generateDropTableStatements,
getHttpResponse,
getPrimaryKeys,
getTableMetadata,
getTables,
mapSelectAllFieldsToAlias,
mapSelectFieldsWithAlias,
migrations,
nextVal,
parseDateTime,
patchDbWithSelectAliased
};
//# sourceMappingURL=ForgeSQLORM.mjs.map