cross-schema
Cross-database schema inspector for Node.js. Retrieve unified metadata for tables, columns, indexes, enums, and more across MySQL, PostgreSQL, SQLite, and SQL Server.
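A minimal usage sketch (illustrative; assumes a PostgreSQL connection through knex — the connection settings and the `users` table are placeholders, and this bundle exposes the class on the `default` export as seen in the code below):

const knex = require("knex")({ client: "pg", connection: { /* host, user, password, database */ } });
const CrossSchema = require("cross-schema").default;

const cs = new CrossSchema({ platform: "pg", client: knex });
cs.getTableSchema("users", "public").then((schema) => {
  console.log(schema.primaryKeys, schema.columns.map((c) => c.name));
});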
JavaScript
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __glob = (map) => (path) => {
var fn = map[path];
if (fn) return fn();
throw new Error("Module not found in bundle: " + path);
};
var __esm = (fn, res) => function __init() {
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/platforms/types.js
var types_exports = {};
__export(types_exports, {
GENERAL_TYPES: () => GENERAL_TYPES,
MYSQL_MAP: () => MYSQL_MAP,
POSTGRES_MAP: () => POSTGRES_MAP,
SQLITE_MAP: () => SQLITE_MAP,
SQLSRV_TYPES: () => SQLSRV_TYPES
});
var MYSQL_MAP, SQLITE_MAP, POSTGRES_MAP, SQLSRV_TYPES, GENERAL_TYPES;
var init_types = __esm({
"src/platforms/types.js"() {
MYSQL_MAP = {
tinyint: "TINYINT",
bool: "TINYINT",
boolean: "TINYINT",
bit: "INTEGER",
smallint: "SMALLINT",
mediumint: "INTEGER",
int: "INTEGER",
integer: "INTEGER",
bigint: "BIGINT",
float: "FLOAT",
double: "DOUBLE",
"double precision": "DOUBLE",
real: "FLOAT",
decimal: "DECIMAL",
numeric: "DECIMAL",
dec: "DECIMAL",
fixed: "DECIMAL",
tinytext: "TEXT",
mediumtext: "TEXT",
longtext: "TEXT",
longblob: "BINARY",
blob: "BINARY",
text: "TEXT",
varchar: "STRING",
string: "STRING",
char: "CHAR",
datetime: "DATETIME",
year: "DATE",
date: "DATE",
time: "TIME",
timestamp: "TIMESTAMP",
enum: "STRING",
set: "STRING",
binary: "BINARY",
varbinary: "BINARY",
json: "JSON"
};
SQLITE_MAP = {
text: "TEXT",
varchar: "TEXT",
char: "TEXT",
int: "INTEGER",
integer: "INTEGER",
real: "REAL",
blob: "BLOB",
numeric: "NUMERIC"
};
POSTGRES_MAP = {
bit: "INTEGER",
"bit varying": "INTEGER",
varbit: "INTEGER",
bool: "BOOLEAN",
boolean: "BOOLEAN",
box: "STRING",
circle: "STRING",
point: "STRING",
line: "STRING",
lseg: "STRING",
polygon: "STRING",
path: "STRING",
character: "CHAR",
char: "CHAR",
bpchar: "CHAR",
"character varying": "STRING",
varchar: "STRING",
text: "TEXT",
bytea: "BINARY",
cidr: "STRING",
inet: "STRING",
macaddr: "STRING",
real: "FLOAT",
float4: "FLOAT",
"double precision": "DOUBLE",
float8: "DOUBLE",
decimal: "DECIMAL",
numeric: "DECIMAL",
money: "MONEY",
smallint: "SMALLINT",
int2: "SMALLINT",
int4: "INTEGER",
int: "INTEGER",
integer: "INTEGER",
bigint: "BIGINT",
int8: "BIGINT",
oid: "BIGINT",
smallserial: "SMALLINT",
serial2: "SMALLINT",
serial4: "INTEGER",
serial: "INTEGER",
bigserial: "BIGINT",
serial8: "BIGINT",
pg_lsn: "BIGINT",
date: "DATE",
interval: "STRING",
"time without time zone": "TIME",
time: "TIME",
"time with time zone": "TIME",
timetz: "TIME",
"timestamp without time zone": "TIMESTAMP",
timestamp: "TIMESTAMP",
"timestamp with time zone": "TIMESTAMP",
timestamptz: "TIMESTAMP",
abstime: "TIMESTAMP",
tsquery: "STRING",
tsvector: "STRING",
txid_snapshot: "STRING",
unknown: "STRING",
uuid: "STRING",
json: "JSON",
jsonb: "JSON",
xml: "STRING"
};
SQLSRV_TYPES = {
// exact numbers
bigint: "BIGINT",
numeric: "DECIMAL",
bit: "SMALLINT",
smallint: "SMALLINT",
decimal: "DECIMAL",
smallmoney: "MONEY",
int: "INTEGER",
tinyint: "TINYINT",
money: "MONEY",
// approximate numbers
float: "FLOAT",
double: "DOUBLE",
real: "FLOAT",
// date and time
date: "DATE",
datetimeoffset: "DATETIME",
datetime2: "DATETIME",
smalldatetime: "DATETIME",
datetime: "DATETIME",
time: "TIME",
// character strings
char: "CHAR",
varchar: "STRING",
text: "TEXT",
// unicode character strings
nchar: "CHAR",
nvarchar: "STRING",
ntext: "TEXT",
// binary strings
binary: "BINARY",
varbinary: "BINARY",
image: "BINARY",
// other data types
// 'cursor' type cannot be used with tables
timestamp: "TIMESTAMP",
hierarchyid: "STRING",
uniqueidentifier: "STRING",
sql_variant: "STRING",
xml: "STRING",
table: "STRING"
};
GENERAL_TYPES = {
integer: ["TINYINT", "SMALLINT", "INTEGER", "BIGINT"],
boolean: ["BOOLEAN"],
double: ["FLOAT", "DOUBLE", "DECIMAL"],
string: ["STRING", "TEXT", "CHAR"],
resource: ["BINARY"],
array: ["JSON"]
};
}
});
// src/platforms/mysql.js
var mysql_exports = {};
__export(mysql_exports, {
default: () => mysql_default
});
function parseEnumValues(columnType) {
if (!/^enum|set/i.test(columnType)) return [];
return columnType.replace(/(enum|set)\((.*)\)/i, "$2").split(",").map((v) => v.trim().replace(/^'(.*)'$/, "$1"));
}
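// Illustrative: parseEnumValues("enum('draft','published')") returns ["draft", "published"];
// non-enum/set column types such as "varchar(255)" return [].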
async function listDatabases(knex) {
const rows = await knex("information_schema.schemata").select("schema_name");
return rows.map((row) => row.SCHEMA_NAME || row.schema_name);
}
async function listTables(knex, schema) {
const rows = await knex("information_schema.tables").select("table_name").where("table_type", "BASE TABLE").andWhere("table_schema", schema || knex.raw("DATABASE()"));
return rows.map((row) => row.TABLE_NAME || row.table_name);
}
async function listViews(knex, schema) {
const rows = await knex("information_schema.views").select("table_name").where("table_schema", schema || knex.raw("DATABASE()"));
return rows.map((row) => row.TABLE_NAME || row.table_name);
}
async function listColumns(knex, table, schema) {
const rows = await knex("information_schema.columns").select(
"COLUMN_NAME",
"COLUMN_TYPE",
"DATA_TYPE",
"IS_NULLABLE",
"COLUMN_DEFAULT",
"COLUMN_KEY",
"EXTRA",
"COLUMN_COMMENT",
"CHARACTER_MAXIMUM_LENGTH",
"NUMERIC_PRECISION",
"NUMERIC_SCALE",
"COLUMN_TYPE"
).where("table_schema", schema || knex.raw("DATABASE()")).andWhere("table_name", table);
return rows.map((col) => {
const dbTypeRaw = col.DATA_TYPE.toLowerCase();
const dbType = MYSQL_MAP[dbTypeRaw] || "STRING";
// resolve the unified dbType (e.g. "INTEGER") to its general category (e.g. "integer")
const type = Object.keys(GENERAL_TYPES).find((general) => GENERAL_TYPES[general].includes(dbType)) || "string";
return {
name: col.COLUMN_NAME,
allowNull: col.IS_NULLABLE === "YES",
autoIncrement: col.EXTRA.includes("auto_increment"),
comment: col.COLUMN_COMMENT || "",
rawType: col.DATA_TYPE,
dbType,
type,
defaultValue: col.COLUMN_DEFAULT,
enumValues: parseEnumValues(col.COLUMN_TYPE),
isPrimaryKey: col.COLUMN_KEY === "PRI",
precision: col.NUMERIC_PRECISION || null,
scale: col.NUMERIC_SCALE || null,
size: col.CHARACTER_MAXIMUM_LENGTH || null,
unsigned: /unsigned/i.test(col.COLUMN_TYPE)
};
});
}
async function listIndexes(knex, table, schema) {
const rows = await knex("information_schema.statistics").select(
knex.raw("COLUMN_NAME as column_name"),
knex.raw("INDEX_NAME as index_name"),
knex.raw("NOT non_unique AS index_is_unique"),
knex.raw("index_name = 'PRIMARY' AS index_is_primary")
).where("table_schema", schema || knex.raw("DATABASE()")).andWhere("index_schema", schema || knex.raw("DATABASE()")).andWhere("table_name", table);
return rows.map((row) => ({
column_name: row.column_name,
name: row.index_name,
index_is_unique: !!row.index_is_unique,
index_is_primary: !!row.index_is_primary
}));
}
async function listConstraints(knex, table, schema) {
const rows = await knex.select([
"kcu.constraint_name as constraint_name",
"kcu.column_name as column_name",
"kcu.referenced_table_name as referenced_table_name",
"kcu.referenced_column_name as referenced_column_name"
]).from("information_schema.referential_constraints as rc").join("information_schema.key_column_usage as kcu", function() {
this.on("kcu.constraint_schema", "=", "rc.constraint_schema").andOn("kcu.constraint_name", "=", "rc.constraint_name").andOn(function() {
this.on("kcu.constraint_catalog", "=", "rc.constraint_catalog").orOn(
knex.raw(
"kcu.constraint_catalog IS NULL AND rc.constraint_catalog IS NULL"
)
);
});
}).where("rc.constraint_schema", "=", schema || knex.raw("DATABASE()")).andWhere("kcu.table_schema", "=", schema || knex.raw("DATABASE()")).andWhere("rc.table_name", "=", table).andWhere("kcu.table_name", "=", table);
return rows;
}
async function getTableSchema(knex, table, schema) {
const columns = await listColumns(knex, table, schema);
const indexes = await listIndexes(knex, table, schema);
const foreignKeys = await listConstraints(knex, table, schema);
let sequenceName2 = null;
for (const column of columns) {
if (column.autoIncrement) {
sequenceName2 = column.name;
break;
}
}
const primaryKeys = indexes.filter((idx) => idx.index_is_primary).map((idx) => idx.column_name);
return {
schemaName: schema || await getDatabaseName(knex),
tableName: table,
primaryKeys,
sequenceName: sequenceName2,
foreignKeys,
columns
};
}
async function getDatabaseVersion(knex) {
const rows = await knex.raw("SELECT VERSION() as version");
return rows[0][0].version;
}
async function getDatabaseName(knex) {
const rows = await knex.raw("SELECT DATABASE() as db");
return rows[0][0].db;
}
var mysql_default;
var init_mysql = __esm({
"src/platforms/mysql.js"() {
init_types();
mysql_default = {
listDatabases,
listTables,
listViews,
listColumns,
listIndexes,
listConstraints,
getTableSchema,
getDatabaseVersion
};
}
});
// src/utils/helper.js
function versionCompare(v1, v2) {
const splitAndPad = (v) => v.split(".").map(Number);
const a = splitAndPad(v1);
const b = splitAndPad(v2);
const len = Math.max(a.length, b.length);
for (let i = 0; i < len; i++) {
const num1 = a[i] || 0;
const num2 = b[i] || 0;
if (num1 > num2) return 1;
if (num1 < num2) return -1;
}
return 0;
}
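// Illustrative: versionCompare("12.4", "12") === 1, versionCompare("9.6", "12.0") === -1,
// and versionCompare("12.0", "12.0.0") === 0 (missing segments are treated as 0).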
var init_helper = __esm({
"src/utils/helper.js"() {
}
});
// src/platforms/postgres.js
var postgres_exports = {};
__export(postgres_exports, {
default: () => postgres_default
});
async function listDatabases2(knex) {
const result = await knex.raw(
`SELECT datname FROM pg_database WHERE datistemplate = false;`
);
return result.rows.map((row) => row.datname);
}
async function listTables2(knex, schema = "public") {
const rows = await knex("information_schema.tables").select("table_name").where("table_type", "BASE TABLE").andWhere("table_schema", schema);
return rows.map((row) => row.table_name);
}
async function listViews2(knex, schema = "public") {
const rows = await knex("information_schema.views").select("table_name").where("table_schema", schema);
return rows.map((row) => row.table_name);
}
async function listColumns2(knex, table, schema = "public") {
var orIdentity = "";
if (versionCompare(await getDatabaseVersion2(knex), "12.0") >= 0) {
orIdentity = "OR attidentity != ''";
}
const result = await knex.raw(
`SELECT
d.nspname AS table_schema,
C.relname AS table_name,
A.attname AS column_name,
COALESCE ( td.typname, tb.typname, T.typname ) AS data_type,
COALESCE ( td.typtype, tb.typtype, T.typtype ) AS type_type,
( SELECT nspname FROM pg_namespace WHERE OID = COALESCE ( td.typnamespace, tb.typnamespace, T.typnamespace ) ) AS type_scheme,
A.attlen AS character_maximum_length,
pg_catalog.col_description ( C.OID, A.attnum ) AS column_comment,
A.atttypmod AS modifier,
A.attnotnull = FALSE AS is_nullable,
CAST ( pg_get_expr ( ad.adbin, ad.adrelid ) AS VARCHAR ) AS column_default,
COALESCE ( pg_get_expr ( ad.adbin, ad.adrelid ) ~ 'nextval', FALSE ) ${orIdentity} AS is_autoinc,
pg_get_serial_sequence ( quote_ident( d.nspname ) || '.' || quote_ident( C.relname ), A.attname ) AS sequence_name,
CASE
WHEN COALESCE ( td.typtype, tb.typtype, T.typtype ) = 'e' :: CHAR THEN
array_to_string( ( SELECT ARRAY_AGG ( enumlabel ) FROM pg_enum WHERE enumtypid = COALESCE ( td.OID, tb.OID, A.atttypid ) ) :: VARCHAR [], ',' ) ELSE NULL
END AS enum_values,
CASE
atttypid
WHEN 21 /*int2*/
THEN
16
WHEN 23 /*int4*/
THEN
32
WHEN 20 /*int8*/
THEN
64
WHEN 1700 /*numeric*/
THEN
CASE
WHEN atttypmod = - 1 THEN
NULL ELSE ( ( atttypmod - 4 ) >> 16 ) & 65535
END
WHEN 700 /*float4*/
THEN
24 /*FLT_MANT_DIG*/
WHEN 701 /*float8*/
THEN
53 /*DBL_MANT_DIG*/
ELSE NULL
END AS numeric_precision,
CASE
WHEN atttypid IN ( 21, 23, 20 ) THEN
0
WHEN atttypid IN ( 1700 ) THEN
CASE
WHEN atttypmod = - 1 THEN
NULL ELSE ( atttypmod - 4 ) & 65535
END ELSE NULL
END AS numeric_scale,
CAST ( information_schema._pg_char_max_length ( information_schema._pg_truetypid ( A, T ), information_schema._pg_truetypmod ( A, T ) ) AS NUMERIC ) AS size,
A.attnum = ANY ( ct.conkey ) AS is_pkey,
COALESCE ( NULLIF ( A.attndims, 0 ), NULLIF ( T.typndims, 0 ), ( T.typcategory = 'A' ) :: INT ) AS dimension
FROM
pg_class
C LEFT JOIN pg_attribute A ON A.attrelid = C.
OID LEFT JOIN pg_attrdef ad ON A.attrelid = ad.adrelid
AND A.attnum = ad.adnum
LEFT JOIN pg_type T ON A.atttypid = T.
OID LEFT JOIN pg_type tb ON ( A.attndims > 0 OR T.typcategory = 'A' )
AND T.typelem > 0
AND T.typelem = tb.OID
OR T.typbasetype > 0
AND T.typbasetype = tb.
OID LEFT JOIN pg_type td ON T.typndims > 0
AND T.typbasetype > 0
AND tb.typelem = td.
OID LEFT JOIN pg_namespace d ON d.OID = C.relnamespace
LEFT JOIN pg_constraint ct ON ct.conrelid = C.OID
AND ct.contype = 'p'
WHERE
A.attnum > 0
AND T.typname != ''
AND NOT A.attisdropped
AND C.relname = ?
AND d.nspname = ?
ORDER BY
A.attnum`,
[table, schema]
);
return result.rows.map((col) => {
const dbTypeRaw = col.data_type.toLowerCase();
const dbType = POSTGRES_MAP[dbTypeRaw] || "STRING";
const type = Object.keys(GENERAL_TYPES).find((general) => GENERAL_TYPES[general].includes(dbType)) || "string";
if (col.sequence_name != null) {
sequenceName = col.sequence_name;
}
return {
name: col.column_name,
allowNull: !!col.is_nullable,
autoIncrement: col.is_autoinc || false,
comment: col.column_comment || "",
rawType: col.data_type,
dbType,
type,
defaultValue: col.column_default || null,
enumValues: col.enum_values ? col.enum_values.split(",") : null,
isPrimaryKey: col.is_pkey || false,
precision: col.numeric_precision || null,
scale: col.numeric_scale || null,
size: col.size || null,
unsigned: false
// PostgreSQL doesn't support unsigned explicitly
};
});
}
async function listIndexes2(knex, table, schema = "public") {
const result = await knex.raw(
`
SELECT
i.relname AS index_name,
a.attname AS column_name,
ix.indisunique AS index_is_unique,
ix.indisprimary AS index_is_primary
FROM pg_class t
JOIN pg_index ix ON t.oid = ix.indrelid
JOIN pg_class i ON i.oid = ix.indexrelid
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
WHERE t.relname = ?
AND t.relnamespace = (
SELECT oid FROM pg_namespace WHERE nspname = ?
)
`,
[table, schema]
);
return result.rows.map((row) => ({
column_name: row.column_name,
name: row.index_name,
index_is_unique: row.index_is_unique,
index_is_primary: row.index_is_primary
}));
}
async function listConstraints2(knex, table, schema = "public") {
const result = await knex.raw(
`
SELECT
tc.constraint_name,
kcu.column_name,
ccu.table_name AS referenced_table_name,
ccu.column_name AS referenced_column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name
AND ccu.table_schema = tc.table_schema
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_name = ?
AND tc.table_schema = ?
`,
[table, schema]
);
return result.rows;
}
async function getTableSchema2(knex, table, schema = "public") {
const columns = await listColumns2(knex, table, schema);
const indexes = await listIndexes2(knex, table, schema);
const foreignKeys = await listConstraints2(knex, table, schema);
const primaryKeys = indexes.filter((idx) => idx.index_is_primary).map((idx) => idx.column_name);
for (const col of columns) {
if (primaryKeys.includes(col.name)) {
col.isPrimaryKey = true;
}
}
return {
schemaName: schema,
tableName: table,
primaryKeys,
sequenceName,
foreignKeys,
columns
};
}
async function getDatabaseVersion2(knex) {
const result = await knex.raw(
"SELECT substring(version() from 'PostgreSQL ([0-9.]+)') AS version_number"
);
return result.rows[0].version_number;
}
var sequenceName, postgres_default;
var init_postgres = __esm({
"src/platforms/postgres.js"() {
init_helper();
init_types();
postgres_default = {
listDatabases: listDatabases2,
listTables: listTables2,
listViews: listViews2,
listColumns: listColumns2,
listIndexes: listIndexes2,
listConstraints: listConstraints2,
getTableSchema: getTableSchema2,
getDatabaseVersion: getDatabaseVersion2
};
}
});
// src/platforms/sqlite.js
var sqlite_exports = {};
__export(sqlite_exports, {
default: () => sqlite_default
});
async function listDatabases3(knex) {
return ["main"];
}
async function listTables3(knex, schema) {
const rows = await knex.select("name").from("sqlite_master").where("type", "table").andWhereNot("name", "like", "sqlite_%");
return rows.map((row) => row.name);
}
async function listViews3(knex) {
const rows = await knex.select("name").from("sqlite_master").where("type", "view");
return rows.map((row) => row.name);
}
async function listColumns3(knex, table, schema) {
const rows = await knex.raw(`PRAGMA table_info(${table})`);
return rows.map((col) => {
var _a;
const dbTypeRaw = col.type ? col.type.toLowerCase().split("(")[0] : "text";
const dbType = SQLITE_MAP[dbTypeRaw] || "STRING";
const type = Object.keys(GENERAL_TYPES).find((general) => GENERAL_TYPES[general].includes(dbType)) || "string";
return {
name: col.name,
allowNull: col.notnull === 0,
autoIncrement: col.pk === 1 && ((_a = col.type) == null ? void 0 : _a.toLowerCase()) === "integer",
comment: "",
// SQLite has no column comments
rawType: col.type,
dbType,
type,
defaultValue: col.dflt_value,
enumValues: [],
// SQLite has no enum type
isPrimaryKey: col.pk === 1,
precision: null,
scale: null,
size: null,
unsigned: false
};
});
}
async function listIndexes3(knex, table, schema) {
const rows = await knex.raw(`PRAGMA index_list(${table})`);
const indexes = [];
for (const idx of rows) {
const indexInfo = await knex.raw(`PRAGMA index_info(${idx.name})`);
for (const info of indexInfo) {
indexes.push({
column_name: info.name,
name: idx.name,
index_is_unique: idx.unique === 1,
index_is_primary: idx.name === "sqlite_autoindex_" + table + "_1"
});
}
}
return indexes;
}
async function listConstraints3(knex, table, schema) {
const rows = await knex.raw(`PRAGMA foreign_key_list(${table})`);
return rows.map((row) => ({
constraint_name: null,
// SQLite does not expose foreign key constraint names
column_name: row.from,
referenced_table_name: row.table,
referenced_column_name: row.to
}));
}
async function getTableSchema3(knex, table, schema) {
const columns = await listColumns3(knex, table, schema);
const indexes = await listIndexes3(knex, table, schema);
const foreignKeys = await listConstraints3(knex, table, schema);
let sequenceName2 = null;
for (const column of columns) {
if (column.autoIncrement) {
sequenceName2 = column.name;
break;
}
}
let primaryKeys = indexes.filter((idx) => idx.index_is_primary).map((idx) => idx.column_name);
if (primaryKeys.length === 0) {
primaryKeys = columns.filter((col) => col.isPrimaryKey).map((col) => col.name);
}
return {
schemaName: "main",
tableName: table,
primaryKeys,
sequenceName: sequenceName2,
foreignKeys,
columns
};
}
async function getDatabaseVersion3(knex) {
const rows = await knex.raw("SELECT sqlite_version() as version");
return rows[0]["version"];
}
var sqlite_default;
var init_sqlite = __esm({
"src/platforms/sqlite.js"() {
init_types();
sqlite_default = {
listDatabases: listDatabases3,
listTables: listTables3,
listViews: listViews3,
listColumns: listColumns3,
listIndexes: listIndexes3,
listConstraints: listConstraints3,
getTableSchema: getTableSchema3,
getDatabaseVersion: getDatabaseVersion3
};
}
});
// src/platforms/sqlsrv.js
var sqlsrv_exports = {};
__export(sqlsrv_exports, {
default: () => sqlsrv_default
});
async function listDatabases4(knex) {
throw new Error("listDatabases not implemented for platform sqlsrv");
}
async function listTables4(knex, schema = "dbo") {
const rows = await knex("INFORMATION_SCHEMA.TABLES as t").select("t.table_name").where("t.table_schema", schema || "dbo").whereIn("t.table_type", ["BASE TABLE"]).orderBy("t.table_name");
return rows.map((row) => row.table_name);
}
async function listViews4(knex, schema = "dbo") {
const rows = await knex("INFORMATION_SCHEMA.TABLES as t").select("t.table_name").where("t.table_schema", schema || "dbo").whereIn("t.table_type", ["VIEW"]).orderBy("t.table_name");
return rows.map((row) => row.table_name);
}
async function listColumns4(knex, table, schema) {
const pkRows = await knex.select("kcu.column_name AS field_name").from({ kcu: "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" }).leftJoin({ tc: "INFORMATION_SCHEMA.TABLE_CONSTRAINTS" }, function() {
this.on("kcu.table_schema", "=", "tc.table_schema").andOn("kcu.table_name", "=", "tc.table_name").andOn("kcu.constraint_name", "=", "tc.constraint_name");
}).where("kcu.table_name", table).andWhere("kcu.table_schema", schema || "dbo").andWhere("tc.CONSTRAINT_TYPE", "PRIMARY KEY");
var primaryKeys = pkRows.map((col) => {
return col.field_name;
});
const rows = await knex.raw(
`SELECT
[t1].[column_name],
[t1].[is_nullable],
CASE WHEN [t1].[data_type] IN ('char','varchar','nchar','nvarchar','binary','varbinary') THEN
CASE WHEN [t1].[character_maximum_length] IS NULL OR [t1].[character_maximum_length] = -1 THEN
[t1].[data_type]
ELSE
[t1].[data_type] + '(' + LTRIM(RTRIM(CONVERT(CHAR,[t1].[character_maximum_length]))) + ')'
END
ELSE
[t1].[data_type]
END AS 'data_type',
[t1].[column_default],
COLUMNPROPERTY(OBJECT_ID([t1].[table_schema] + '.' + [t1].[table_name]), [t1].[column_name], 'IsIdentity') AS is_identity,
COLUMNPROPERTY(OBJECT_ID([t1].[table_schema] + '.' + [t1].[table_name]), [t1].[column_name], 'IsComputed') AS is_computed,
(
SELECT CONVERT(VARCHAR, [t2].[value])
FROM [sys].[extended_properties] AS [t2]
WHERE
[t2].[class] = 1 AND
[t2].[class_desc] = 'OBJECT_OR_COLUMN' AND
[t2].[name] = 'MS_Description' AND
[t2].[major_id] = OBJECT_ID([t1].[TABLE_SCHEMA] + '.' + [t1].[table_name]) AND
[t2].[minor_id] = COLUMNPROPERTY(OBJECT_ID([t1].[TABLE_SCHEMA] + '.' + [t1].[TABLE_NAME]), [t1].[COLUMN_NAME], 'ColumnID')
) as comment
FROM [INFORMATION_SCHEMA].[COLUMNS] AS [t1]
WHERE [t1].[table_name] = ? AND [t1].[table_schema] = ?`,
[table, schema || "dbo"]
);
return rows.map((col) => {
const dbTypeRaw = col.data_type ? col.data_type.toLowerCase().split("(")[0] : "text";
const dbType = SQLSRV_TYPES[dbTypeRaw] || "STRING";
const type = Object.keys(GENERAL_TYPES).find((general) => GENERAL_TYPES[general].includes(dbType)) || "string";
return {
name: col.column_name,
allowNull: col.is_nullable == "YES",
autoIncrement: col.is_identity == 1,
comment: col.comment || "",
rawType: col.data_type,
dbType,
type,
defaultValue: col.column_default || null,
enumValues: [],
// SQL Server has no native enum type
isPrimaryKey: primaryKeys.indexOf(col.column_name) !== -1,
precision: null,
scale: null,
size: null,
unsigned: false
};
});
}
async function listIndexes4(knex, table, schema) {
const rows = await knex("sys.indexes as i").join("sys.index_columns as ic", function() {
this.on("ic.object_id", "=", "i.object_id").andOn(
"ic.index_id",
"=",
"i.index_id"
);
}).join("sys.columns as iccol", function() {
this.on("iccol.object_id", "=", "ic.object_id").andOn(
"iccol.column_id",
"=",
"ic.column_id"
);
}).select({
name: "i.name",
column_name: "iccol.name",
index_is_unique: "i.is_unique",
index_is_primary: "i.is_primary_key"
}).whereRaw("i.object_id = OBJECT_ID(?)", [table]).orderBy("ic.key_ordinal", "asc");
return rows.map((row) => ({
column_name: row.column_name,
name: row.name,
index_is_unique: row.index_is_unique == 1,
index_is_primary: row.index_is_primary == 1
}));
}
async function listConstraints4(knex, table, schema) {
const rows = await knex.raw(
`
SELECT
[fk].[name] AS [fk_name],
[cp].[name] AS [fk_column_name],
OBJECT_NAME([fk].[referenced_object_id]) AS [uq_table_name],
[cr].[name] AS [uq_column_name]
FROM [sys].[foreign_keys] AS [fk]
INNER JOIN [sys].[foreign_key_columns] AS [fkc]
ON [fk].[object_id] = [fkc].[constraint_object_id]
INNER JOIN [sys].[columns] AS [cp]
ON [fk].[parent_object_id] = [cp].[object_id]
AND [fkc].[parent_column_id] = [cp].[column_id]
INNER JOIN [sys].[columns] AS [cr]
ON [fk].[referenced_object_id] = [cr].[object_id]
AND [fkc].[referenced_column_id] = [cr].[column_id]
WHERE [fk].[parent_object_id] = OBJECT_ID(?)
`,
[table]
);
return rows.map((row) => ({
constraint_name: row.fk_name,
column_name: row.fk_column_name,
referenced_table_name: row.uq_table_name,
referenced_column_name: row.uq_column_name
}));
}
async function getTableSchema4(knex, table, schema) {
const columns = await listColumns4(knex, table, schema);
const indexes = await listIndexes4(knex, table, schema);
const foreignKeys = await listConstraints4(knex, table, schema);
let sequenceName2 = null;
for (const column of columns) {
if (column.autoIncrement) {
sequenceName2 = column.name;
break;
}
}
let primaryKeys = indexes.filter((idx) => idx.index_is_primary).map((idx) => idx.column_name);
if (primaryKeys.length === 0) {
primaryKeys = columns.filter((col) => col.isPrimaryKey).map((col) => col.name);
}
return {
schemaName: "main",
tableName: table,
primaryKeys,
sequenceName: sequenceName2,
foreignKeys,
columns
};
}
async function getDatabaseVersion4(knex) {
const rows = await knex.raw(
`SELECT SERVERPROPERTY('ProductVersion') AS ProductVersion`
);
return rows[0].ProductVersion;
}
var sqlsrv_default;
var init_sqlsrv = __esm({
"src/platforms/sqlsrv.js"() {
init_types();
sqlsrv_default = {
listDatabases: listDatabases4,
listTables: listTables4,
listViews: listViews4,
listColumns: listColumns4,
listIndexes: listIndexes4,
listConstraints: listConstraints4,
getTableSchema: getTableSchema4,
getDatabaseVersion: getDatabaseVersion4
};
}
});
// src/index.js
var index_exports = {};
__export(index_exports, {
default: () => CrossSchema
});
module.exports = __toCommonJS(index_exports);
// import("./platforms/**/*.js") in src/index.js
var globImport_platforms_js = __glob({
"./platforms/mysql.js": () => Promise.resolve().then(() => (init_mysql(), mysql_exports)),
"./platforms/postgres.js": () => Promise.resolve().then(() => (init_postgres(), postgres_exports)),
"./platforms/sqlite.js": () => Promise.resolve().then(() => (init_sqlite(), sqlite_exports)),
"./platforms/sqlsrv.js": () => Promise.resolve().then(() => (init_sqlsrv(), sqlsrv_exports)),
"./platforms/types.js": () => Promise.resolve().then(() => (init_types(), types_exports))
});
// src/index.js
var CrossSchema = class {
/**
* @param {Object} config
* @param {'mysql'|'postgres'|'sqlite'|'sqlsrv'|string} config.platform - Platform name or knex client alias (e.g. 'mysql2', 'pg', 'pgsql', 'mssql', 'better-sqlite3')
* @param {Knex} config.client - An initialized knex instance connected to the target database
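* @example
* // illustrative; `knex` is an existing knex instance connected to PostgreSQL
* const cs = new CrossSchema({ platform: 'pg', client: knex });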
*/
constructor({ platform, client }) {
const platformMap = {
mysql2: "mysql",
mariadb: "mysql",
pg: "postgres",
pgsql: "postgres",
"pg-native": "postgres",
mssql: "sqlsrv",
sqlserver: "sqlsrv",
tedious: "sqlsrv",
sqlite3: "sqlite",
"better-sqlite3": "sqlite"
};
this.platform = platformMap[platform] || platform;
this.client = client;
this.driver = null;
}
/**
* Internal method to load the driver for the given platform
* @private
* @throws {Error} if the platform is unsupported
*/
async _loadDriver() {
if (!this.driver) {
try {
const module2 = await globImport_platforms_js(`./platforms/${this.platform}.js`);
this.driver = module2.default;
} catch {
throw new Error(`Unsupported platform: ${this.platform}`);
}
}
}
/**
* Get a list of databases in the given connection
* @return {Promise<string[]>} a list of database names
* @throws {Error} if `listDatabases` is not implemented for the given platform
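* @example
* const databases = await cs.listDatabases(); // e.g. ['app_db', 'information_schema'] (illustrative)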
*/
async listDatabases() {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).listDatabases) == null ? void 0 : _b.call(_a, this.client)) ?? Promise.reject(
new Error(`listDatabases not implemented for ${this.platform}`)
);
}
/**
* Get a list of tables in the specified schema
* @param {string} schema - The schema name to list tables from
* @return {Promise<string[]>} a list of table names
* @throws {Error} if the driver fails to load
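* @example
* const tables = await cs.listTables('public'); // e.g. ['users', 'orders'] (illustrative)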
*/
async listTables(schema) {
await this._loadDriver();
return this.driver.listTables(this.client, schema);
}
/**
* Get a list of views in the specified schema
* @param {string} schema - The schema name to list views from
* @return {Promise<string[]>} a list of view names
* @throws {Error} if the driver fails to load or listViews is not implemented for the given platform
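* @example
* const views = await cs.listViews('public'); // e.g. ['active_users'] (illustrative)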
*/
async listViews(schema) {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).listViews) == null ? void 0 : _b.call(_a, this.client, schema)) ?? Promise.reject(
new Error(`listViews not implemented for ${this.platform}`)
);
}
/**
* Retrieves detailed information about all columns in a given table.
*
* This method queries the underlying database driver for the column definitions of a table
* and returns a normalized array of column metadata. The output format is unified across different
* database platforms (MySQL, PostgreSQL, SQLite, SQL Server) to make schema inspection easier.
*
* @param {string} table - The name of the table whose columns should be listed.
* @param {string} [schema] - Optional schema name (useful for PostgreSQL or SQL Server).
* @returns {Promise<Array<{
* name: string,
* allowNull: boolean,
* autoIncrement: boolean,
* comment: string,
* rawType: string,
* dbType: string,
* defaultValue: any,
* enumValues: string[],
* isPrimaryKey: boolean,
* type: string,
* precision: number|null,
* scale: number|null,
* size: number|null,
* unsigned: boolean
* }>>} A promise that resolves to an array of column metadata objects.
*
* @example
* const columns = await cs.listColumns('users');
* console.log(columns[0].name); // e.g., "id"
*/
async listColumns(table, schema) {
await this._loadDriver();
return this.driver.listColumns(this.client, table, schema);
}
/**
* Retrieves index definitions from the specified table and schema.
*
* This function returns metadata about the indexes available in a given table,
* including information such as the index name, the column it refers to,
* and whether it is unique or a primary key.
*
* The structure is normalized to be consistent across supported platforms
* such as MySQL, PostgreSQL, SQLite, and SQL Server.
*
* @param {string} table - The name of the table to inspect.
* @param {string} [schema] - The optional schema name (relevant for some databases like PostgreSQL).
* @returns {Promise<Array<{
* name: string, // Index name defined in the database
* column_name: string, // Column name the index refers to
* index_is_unique: boolean, // Whether the index enforces uniqueness
* index_is_primary: boolean // Whether the index is the primary key
* }>>} A promise resolving to an array of index definitions.
*
* @example
* const indexes = await crossSchema.listIndexes('users');
* indexes.forEach(i => console.log(i.name));
*/
async listIndexes(table, schema) {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).listIndexes) == null ? void 0 : _b.call(_a, this.client, table, schema)) ?? Promise.reject(
new Error(`listIndexes not implemented for ${this.platform}`)
);
}
/**
* Retrieves foreign key constraints from a specific table.
*
* This method returns an array of foreign key definitions for a given table,
* including the local column name, the name of the foreign table, and the foreign column name.
* It is especially useful when analyzing or generating relationships across tables,
* such as when building ER diagrams or generating ORM models.
*
* The structure of each constraint object includes:
*
* - `constraint_name` (string|null): The name of the foreign key constraint in the database (null on SQLite, which does not expose constraint names).
* - `column_name` (string): The column in the current table that holds the foreign key.
* - `referenced_table_name` (string): The name of the table being referenced.
* - `referenced_column_name` (string): The specific column in the referenced table.
*
* If the current database platform or driver does not support foreign key inspection,
* this method will throw a descriptive error instead of returning a result.
*
* Example output:
* ```js
* [
* {
* constraint_name: "fk_role_has_permissions_permission_id",
* column_name: "permission_id",
* referenced_table_name: "permissions",
* referenced_column_name: "id"
* },
* {
* constraint_name: "fk_role_has_permissions_role_id",
* column_name: "role_id",
* referenced_table_name: "roles",
* referenced_column_name: "id"
* }
* ]
* ```
*
* @param {string} table - The name of the table to inspect.
* @param {string} [schema] - Optional schema name (used in databases like PostgreSQL or SQL Server).
* @returns {Promise<Array<{
* constraint_name: string|null,
* column_name: string,
* referenced_table_name: string,
* referenced_column_name: string
* }>>}
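* @example
* // illustrative table name
* const fks = await cs.listConstraints('role_has_permissions');
* fks.forEach((fk) => console.log(fk.column_name, '->', fk.referenced_table_name));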
*/
async listConstraints(table, schema) {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).listConstraints) == null ? void 0 : _b.call(_a, this.client, table, schema)) ?? Promise.reject(
new Error(`listConstraints not implemented for ${this.platform}`)
);
}
/**
* Retrieves the complete schema definition for a specific table, including:
* - Column metadata (name, type, size, nullability, default, enum, etc.)
* - Primary key(s)
* - Auto-increment column
* - Foreign keys (if any)
*
* This is a comprehensive introspection function that gives a structured overview
* of a table’s layout in the connected database platform.
*
* @param {string} table - The name of the table whose schema will be retrieved.
* @param {string} [schema] - Optional schema name. Used in platforms that support multiple schemas (e.g., PostgreSQL).
* @returns {Promise<{
* schemaName: string,
* tableName: string,
* primaryKeys: string[],
* sequenceName?: string,
* foreignKeys: Array<any>,
* columns: Array<{
* name: string,
* allowNull: boolean,
* autoIncrement: boolean,
* comment: string,
* rawType: string,
* dbType: string,
* defaultValue: any,
* enumValues: string[],
* isPrimaryKey: boolean,
* type: string,
* precision: number|null,
* scale: number|null,
* size: number|null,
* unsigned: boolean
* }>
* }>} - A promise that resolves to the schema structure of the specified table.
* @throws {Error} If the method is not implemented for the current platform.
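* @example
* // illustrative; inspects a hypothetical `users` table
* const tableSchema = await cs.getTableSchema('users', 'public');
* console.log(tableSchema.primaryKeys); // e.g. ['id']
* console.log(tableSchema.columns.map((c) => c.name));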
*/
async getTableSchema(table, schema) {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).getTableSchema) == null ? void 0 : _b.call(_a, this.client, table, schema)) ?? Promise.reject(
new Error(`getTableSchema not implemented for ${this.platform}`)
);
}
/**
* Get the version of the connected database
* @return {Promise<string>} the database version
* @throws {Error} if the driver fails to load or getDatabaseVersion is not implemented for the given platform
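* @example
* const version = await cs.getDatabaseVersion(); // e.g. '8.0.36' on MySQL (illustrative)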
*/
async getDatabaseVersion() {
var _a, _b;
await this._loadDriver();
return ((_b = (_a = this.driver).getDatabaseVersion) == null ? void 0 : _b.call(_a, this.client)) ?? Promise.reject(
new Error(`getDatabaseVersion not implemented for ${this.platform}`)
);
}
};