@dbml/connector
Version:
This package was created to fetch the schema JSON from many kinds of databases.
310 lines (309 loc) • 11 kB
JavaScript
/* eslint-disable camelcase */
//
// Description:
// Snowflake info schema lacks reference direction for relationships.
// Constraints and uniqueness cannot be auto-generated from Snowflake info schema.
// Snowflake does not support ENUMs.
// Snowflake does not support indexes for standard tables.
//
// TypeScript interop helper: wrap a CommonJS export so it can be consumed
// as if it were an ES module with a `default` export. Reuses an existing
// helper on `this` when one is already defined (tsc-emitted boilerplate).
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// Mark this module as an ES-module-compatible CommonJS module and
// pre-declare the single public export (assigned at the bottom of the file).
Object.defineProperty(exports, "__esModule", { value: true });
exports.fetchSchemaJson = void 0;
// Official Snowflake Node.js driver (callback-based API).
const snowflake_sdk_1 = __importDefault(require("snowflake-sdk"));
// Shared helper that splits a comma-separated schema list into an array.
const parseSchema_1 = require("../utils/parseSchema");
/**
 * Parse a semicolon-delimited connection string ("KEY=value;KEY2=value2")
 * into a plain object. Keys and values are trimmed of surrounding spaces.
 */
const parseConnectionString = (connectionString) => {
    const params = {};
    for (const [, rawKey, rawValue] of connectionString.matchAll(/([^;=]+)=([^;]*)/g)) {
        params[rawKey.trim()] = rawValue.trim();
    }
    return params;
};
/**
 * Promisify the driver's callback-based connect().
 * Resolves with no value on success, rejects with the driver error otherwise.
 */
const connect = async (connection) => {
    console.log('Attempting to connect...');
    return new Promise((resolve, reject) => {
        connection.connect((err) => (err ? reject(err) : resolve()));
    });
};
/**
 * Promisify the driver's callback-based execute().
 * Resolves with the result rows on success, rejects with the driver error.
 *
 * Bug fix: the original only resolved when `rows` was truthy, so a
 * successful statement that returned no row array (e.g. DDL) left the
 * promise pending forever. We now resolve with an empty array instead.
 */
const executeQuery = (connection, sqlText) => {
    return new Promise((resolve, reject) => {
        console.log('Executing query...');
        connection.execute({
            sqlText: sqlText,
            complete: (err, stmt, rows) => {
                if (err) {
                    reject(err);
                    return;
                }
                console.log('Query executed successfully.');
                // Never leave the promise unsettled on success.
                resolve(rows ?? []);
            }
        });
    });
};
// Type guard: is `value` one of the log levels accepted by the Snowflake SDK?
const isLogLevel = (value) => {
    const validLevels = new Set(['ERROR', 'WARN', 'INFO', 'DEBUG', 'TRACE']);
    return validLevels.has(value);
};
/**
 * Configure the Snowflake SDK, build connection options from the parsed
 * connection-string config, open the connection, and smoke-test it with a
 * trivial query. Returns the live connection.
 * Throws for authenticators other than SNOWFLAKE_JWT / SNOWFLAKE / unset.
 */
const connectToSnowflake = async (config) => {
    // Fall back to INFO when no (valid) log level was supplied.
    const logLevel = isLogLevel(config.LOG_LEVEL) ? config.LOG_LEVEL : 'INFO';
    const isDebugMode = config.IS_DEBUG_MODE === 'true';
    snowflake_sdk_1.default.configure({
        logLevel,
        additionalLogToConsole: isDebugMode,
    });
    // https://docs.snowflake.com/en/developer-guide/node-js/nodejs-driver-options#authentication-options
    let authConfig;
    if (config.AUTHENTICATOR === 'SNOWFLAKE_JWT') {
        // https://docs.snowflake.com/en/developer-guide/node-js/nodejs-driver-authenticate#label-nodejs-key-pair-authentication
        // Key-pair authentication via a private key file.
        authConfig = {
            privateKeyPath: config.PRIVATE_KEY_PATH,
            privateKeyPass: config.PASSPHRASE,
        };
    }
    else if (config.AUTHENTICATOR === 'SNOWFLAKE' || config.AUTHENTICATOR === undefined) {
        // Default username/password authentication.
        authConfig = {
            password: config.PWD,
        };
    }
    else {
        throw new Error('Unsupported authenticator');
    }
    // "SERVER=myaccount.snowflakecomputing.com;UID=myusername;PWD=mypassword;DATABASE=mydatabase;WAREHOUSE=mywarehouse;ROLE=myrole";
    const connection = snowflake_sdk_1.default.createConnection({
        authenticator: config.AUTHENTICATOR,
        account: config.SERVER,
        username: config.UID,
        database: config.DATABASE,
        warehouse: config.WAREHOUSE,
        sfRetryMaxLoginRetries: 3,
        timeout: 10000,
        ...authConfig,
    });
    // Connect, then verify the session works before handing it back.
    await connect(connection);
    await executeQuery(connection, 'SELECT CURRENT_VERSION();');
    return connection;
};
// Snowflake's info schema encodes booleans as 'YES'/'NO' strings.
const convertQueryBoolean = (val) => {
    return val === 'YES';
};
/**
 * Render a column's DBML type name, appending length or precision/scale
 * when the info schema provides them (e.g. VARCHAR(16), NUMBER(38,0)).
 *
 * Bug fix: the original used truthiness (`numeric_precision && numeric_scale`),
 * so a scale of 0 — the Snowflake default for integer NUMBER columns —
 * silently dropped the precision/scale suffix. Use explicit null checks.
 */
const getFieldType = (data_type, character_maximum_length, numeric_precision, numeric_scale) => {
    if (character_maximum_length) {
        return `${data_type}(${character_maximum_length})`;
    }
    // `!= null` matches both null and undefined but keeps 0 as a valid scale.
    if (numeric_precision != null && numeric_scale != null) {
        return `${data_type}(${numeric_precision},${numeric_scale})`;
    }
    return data_type;
};
/**
 * Build the dbdefault descriptor for a column.
 * String defaults arrive wrapped in quotes, so strip the first and last
 * character; every other default type is passed through verbatim.
 */
const getDbdefault = (column_default, default_type) => {
    const value = default_type === 'string'
        ? column_default.slice(1, -1)
        : column_default;
    return { type: default_type, value };
};
/**
 * Map one information_schema.columns result row (uppercase keys) onto the
 * DBML field shape used by the parser.
 */
const generateField = (row) => {
    const {
        COLUMN_NAME: column_name,
        DATA_TYPE: data_type,
        CHARACTER_MAXIMUM_LENGTH: character_maximum_length,
        NUMERIC_PRECISION: numeric_precision,
        NUMERIC_SCALE: numeric_scale,
        IDENTITY_INCREMENT: identity_increment,
        IS_NULLABLE: is_nullable,
        COLUMN_DEFAULT: column_default,
        DEFAULT_TYPE: default_type,
        COLUMN_COMMENT: column_comment,
    } = row;
    const typeName = getFieldType(data_type, character_maximum_length, numeric_precision, numeric_scale);
    return {
        name: column_name,
        type: { type_name: typeName, schemaName: null },
        // Columns with no default get a null dbdefault rather than a descriptor.
        dbdefault: column_default ? getDbdefault(column_default, default_type) : null,
        not_null: !convertQueryBoolean(is_nullable),
        // Identity columns report a non-null IDENTITY_INCREMENT.
        increment: Boolean(identity_increment),
        note: { value: column_comment || '' },
    };
};
// Fetch every base-table column (optionally restricted to `schemas`) and
// group the rows into DBML table objects plus a per-table field list.
// Returns { tables: Table[], fields: { '<schema>.<table>': Field[] } }.
const generateTablesAndFields = async (conn, schemas) => {
const fields = {};
// Optional IN (...) filter built from the requested schema names.
// NOTE(review): names are interpolated into the SQL with simple quoting;
// they come from the user's own connection string, but escaping of quotes
// inside schema names is worth confirming.
const schemaSql = schemas.length > 0 ? `AND c.table_schema IN (${schemas.map((schema) => `'${schema}'`).join(',')})` : '';
// The CASE expression classifies column_default as string / boolean /
// number / expression so getDbdefault can format it downstream.
const tablesAndFieldsSql = `
SELECT
c.table_schema,
c.table_name,
c.column_name,
c.data_type,
c.character_maximum_length,
c.numeric_precision,
c.numeric_scale,
c.is_nullable,
c.column_default,
CASE
WHEN c.column_default IS NULL THEN NULL
WHEN c.column_default LIKE '''%' THEN 'string'
WHEN LOWER(c.column_default) = 'true' OR LOWER(c.column_default) = 'false' THEN 'boolean'
WHEN c.column_default REGEXP '^-?[0-9]+(\\.[0-9]+)?$' THEN 'number'
ELSE 'expression'
END AS default_type,
t.comment AS table_comment,
c.comment AS column_comment,
c.identity_increment
FROM
information_schema.columns c
JOIN information_schema.tables t
ON c.table_name = t.table_name
AND c.table_schema = t.table_schema
WHERE
t.table_type = 'BASE TABLE'
AND t.table_schema NOT IN ('INFORMATION_SCHEMA')
${schemaSql}
ORDER BY
c.table_schema,
t.created,
c.ordinal_position;
`;
const tablesAndFieldsResult = await executeQuery(conn, tablesAndFieldsSql);
// Fold rows into a map keyed by '<schema>.<table>': the first row seen for
// a table creates the table entry; every row contributes one field.
const tables = tablesAndFieldsResult.reduce((acc, row) => {
const { TABLE_SCHEMA: table_schema, TABLE_NAME: table_name, TABLE_COMMENT: table_comment } = row;
const key = `${table_schema}.${table_name}`;
if (!acc[key]) {
acc[key] = {
name: table_name,
schemaName: table_schema,
note: table_comment ? { value: table_comment } : { value: '' },
};
}
if (!fields[key])
fields[key] = [];
const field = generateField(row);
fields[key].push(field);
return acc;
}, {});
return {
tables: Object.values(tables),
fields,
};
};
/**
 * Group SHOW PRIMARY/UNIQUE KEYS rows by constraint.
 * Rows outside the requested `schemas` (when non-empty) are dropped;
 * rows sharing a constraint accumulate their column names in order.
 * `constraintType` ('primary' | 'unique') is set to true on each entry.
 */
const createConstraintKeysMap = (keys, schemas, constraintType) => {
    const constraintsByKey = {};
    for (const { schema_name, table_name, column_name, constraint_name } of keys) {
        // An empty schema list means "all schemas".
        if (schemas.length > 0 && !schemas.includes(schema_name)) {
            continue;
        }
        const key = `${schema_name}.${table_name}.${constraint_name}`;
        const existing = constraintsByKey[key];
        if (existing) {
            existing.columnNames.push(column_name);
        }
        else {
            constraintsByKey[key] = {
                schemaName: schema_name,
                tableName: table_name,
                constraintName: constraint_name,
                columnNames: [column_name],
                type: '',
                [constraintType]: true,
            };
        }
    }
    return constraintsByKey;
};
/**
 * Collect primary-key and unique constraints via SHOW commands (Snowflake's
 * information schema does not expose them) and split them into:
 * - indexes: composite (multi-column) constraints, keyed '<schema>.<table>'
 * - tableConstraints: single-column constraints as inline pk/unique flags
 *
 * Bug fix: the original assigned a fresh object per single-column
 * constraint (`acc.tableConstraints[key] = {...}`), so a table with more
 * than one single-column constraint (e.g. a 1-column PK plus a 1-column
 * UNIQUE) kept only the last one. Entries are now merged per table and
 * per column.
 */
const generateIndexes = async (conn, databaseName, schemas) => {
    const getPrimaryKeysSql = `SHOW PRIMARY KEYS IN DATABASE ${databaseName};`;
    const getUniqueKeysSql = `SHOW UNIQUE KEYS IN DATABASE ${databaseName};`;
    const primaryKeys = await executeQuery(conn, getPrimaryKeysSql);
    const uniqueKeys = await executeQuery(conn, getUniqueKeysSql);
    const primaryKeysByConstraint = createConstraintKeysMap(primaryKeys, schemas, 'primary');
    const uniqueKeysByConstraint = createConstraintKeysMap(uniqueKeys, schemas, 'unique');
    const allConstraints = [
        ...Object.values(primaryKeysByConstraint),
        ...Object.values(uniqueKeysByConstraint),
    ];
    const { indexes, tableConstraints } = allConstraints.reduce((acc, row) => {
        const { schemaName, tableName, constraintName, columnNames, type, primary, unique } = row;
        const key = `${schemaName}.${tableName}`;
        if (columnNames.length < 2) {
            // Single-column constraint: merge into the table's column map so
            // earlier constraints on the same table/column are preserved.
            const columnName = columnNames[0];
            if (!acc.tableConstraints[key]) {
                acc.tableConstraints[key] = {};
            }
            const existing = acc.tableConstraints[key][columnName] || {};
            acc.tableConstraints[key][columnName] = {
                pk: existing.pk || primary,
                unique: existing.unique || unique,
            };
            return acc;
        }
        // Composite constraint: expose it as a table-level index.
        const index = {
            name: constraintName,
            type,
            unique,
            pk: primary,
            columns: columnNames.map((columnName) => ({
                type: 'column',
                value: columnName,
            })),
        };
        if (acc.indexes[key]) {
            acc.indexes[key].push(index);
        }
        else {
            acc.indexes[key] = [index];
        }
        return acc;
    }, { indexes: {}, tableConstraints: {} });
    return {
        indexes,
        tableConstraints,
    };
};
/**
 * Entry point: connect to Snowflake using a semicolon-delimited connection
 * string, fetch tables/fields and index/constraint metadata in parallel,
 * and return the schema JSON. Snowflake cannot provide refs (no FK
 * direction in its info schema) or enums, so those are always empty.
 *
 * Fixes: the connection is now destroyed in a `finally` block, so it is
 * released even when a metadata query rejects (the original leaked the
 * session on error). The destroy callback also logs instead of throwing —
 * a `throw` inside that async callback was uncatchable by callers.
 */
const fetchSchemaJson = async (connection) => {
    const config = parseConnectionString(connection);
    const conn = await connectToSnowflake(config);
    if (conn instanceof Error) {
        throw conn;
    }
    // Schemas: schema1,schema2,schema3
    const schemas = config.SCHEMAS ? (0, parseSchema_1.parseSchema)(config.SCHEMAS) : [];
    const databaseName = config.DATABASE;
    try {
        // Both metadata queries are independent; run them concurrently.
        const [tablesAndFieldsRes, indexesRes] = await Promise.all([
            generateTablesAndFields(conn, schemas),
            generateIndexes(conn, databaseName, schemas),
        ]);
        const { tables, fields } = tablesAndFieldsRes;
        const { indexes, tableConstraints } = indexesRes;
        return {
            tables,
            fields,
            refs: [],
            enums: [],
            indexes,
            tableConstraints,
        };
    }
    finally {
        conn.destroy((err) => {
            if (err) {
                console.error('Failed to destroy connection:', err);
            }
            else {
                console.log('Connection destroyed successfully.');
            }
        });
    }
};
// Public API: the single export consumed by @dbml/connector callers.
exports.fetchSchemaJson = fetchSchemaJson;
;