@botport/core

Unified framework for Discord bot products, published by BotPort. Combines docky and framework functionality.

import mysql from "mysql2/promise";
import * as dotenv from "dotenv";
import fs from "fs/promises";
import path from "path";
import logger from "../logger/logger.js";
import { getEnvPath, getSrcDir } from "../utils/pathResolver.js";

dotenv.config({ path: getEnvPath() });

const pool = mysql.createPool({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  port: parseInt(process.env.DB_PORT || "3306"),
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0,
  enableKeepAlive: true,
  keepAliveInitialDelay: 0
});

const db = pool;

async function findJsonFiles(dir, jsonFiles = []) {
  try {
    const files = await fs.readdir(dir, { withFileTypes: true });
    for (const file of files) {
      const fullPath = path.join(dir, file.name);
      if (file.isDirectory()) {
        await findJsonFiles(fullPath, jsonFiles);
      } else if (file.isFile() && file.name.endsWith(".json")) {
        jsonFiles.push(fullPath);
      }
    }
  } catch (error) {
    logger.warn(`Could not read directory:`, error.message);
  }
  return jsonFiles;
}

function ensureIfNotExists(query) {
  if (
    query.trim().toUpperCase().startsWith("CREATE TABLE") &&
    !query.toUpperCase().includes("IF NOT EXISTS")
  ) {
    return query.replace(/CREATE TABLE\s+/i, "CREATE TABLE IF NOT EXISTS ");
  }
  return query;
}

function validateQuery(query) {
  // Only allow CREATE TABLE statements
  if (!query.trim().toUpperCase().startsWith("CREATE TABLE")) {
    throw new Error("Only CREATE TABLE statements are allowed in database configs");
  }
  // Prevent dangerous keywords
  const dangerous = ['DROP', 'DELETE', 'TRUNCATE', 'ALTER USER', 'GRANT', 'REVOKE'];
  const upperQuery = query.toUpperCase();
  for (const keyword of dangerous) {
    if (upperQuery.includes(keyword)) {
      throw new Error(`Dangerous SQL keyword detected: ${keyword}`);
    }
  }
  return true;
}

async function createFromJson() {
  try {
    const srcDir = getSrcDir(); // Use path resolver
    try {
      const srcStats = await fs.stat(srcDir);
      if (!srcStats.isDirectory()) {
        logger.warn(`Source path exists but is not a directory`);
        return;
      }
    } catch (statError) {
      logger.error(`Source directory does not exist or is not accessible`);
      return;
    }

    const jsonFiles = await findJsonFiles(srcDir);
    logger.info(`Found ${jsonFiles.length} JSON files to process`);

    const allQueries = [];

    for (const fullPath of jsonFiles) {
      let cfg;
      try {
        const fileContent = await fs.readFile(fullPath, "utf8");
        cfg = JSON.parse(fileContent);
      } catch (parseError) {
        logger.warn(`Could not parse JSON file, skipping`);
        continue;
      }

      if (cfg.Database && Array.isArray(cfg.Database)) {
        for (let i = 0; i < cfg.Database.length; i++) {
          const originalQuery = cfg.Database[i];
          const query = ensureIfNotExists(originalQuery);
          try {
            validateQuery(query);
            allQueries.push({
              query,
              source: `${path.basename(fullPath)}:Query ${i + 1}`,
              index: i + 1
            });
          } catch (validationError) {
            logger.error(`Invalid query in ${path.basename(fullPath)}:`, validationError.message);
          }
        }
      } else if (cfg.database && typeof cfg.database === "object") {
        for (const [tableName, originalSql] of Object.entries(cfg.database)) {
          const sql = ensureIfNotExists(originalSql);
          try {
            validateQuery(sql);
            allQueries.push({
              query: sql,
              source: `${path.basename(fullPath)}:${tableName}`,
              tableName
            });
          } catch (validationError) {
            logger.error(`Invalid query for ${tableName}:`, validationError.message);
          }
        }
      }
    }

    if (allQueries.length === 0) {
      logger.info('No database queries found to execute');
      return;
    }

    logger.debug(`Executing ${allQueries.length} database queries with concurrency control...`);

    // Execute with concurrency limit to prevent pool exhaustion
    const CONCURRENCY_LIMIT = 5; // Never exceed half your pool size
    let successCount = 0;
    let skippedCount = 0;
    let errorCount = 0;

    for (let i = 0; i < allQueries.length; i += CONCURRENCY_LIMIT) {
      const batch = allQueries.slice(i, i + CONCURRENCY_LIMIT);
      const results = await Promise.allSettled(
        batch.map(({ query, source, tableName, index }) =>
          db.execute(query)
            .then(() => ({ success: true, source, tableName, index }))
            .catch(err => {
              if (err.message.includes("already exists")) {
                return { success: true, source, tableName, index, skipped: true };
              }
              return { success: false, source, tableName, index, error: err.message };
            })
        )
      );

      results.forEach((result) => {
        if (result.status === 'fulfilled') {
          const { success, source, tableName, index, skipped, error } = result.value;
          if (success && !skipped) {
            if (tableName) {
              logger.info(`Table ${tableName} is ready`);
            } else if (index) {
              logger.info(`Query ${index} executed successfully`);
            }
            successCount++;
          } else if (skipped) {
            logger.debug(`Skipped ${source} (already exists)`);
            skippedCount++;
          } else {
            logger.error(`Error in ${source}:`, error);
            errorCount++;
          }
        } else {
          logger.error(`Query failed:`, result.reason);
          errorCount++;
        }
      });
    }

    logger.info(`Database setup complete: ${successCount} created, ${skippedCount} skipped, ${errorCount} errors`);
  } catch (error) {
    logger.error("Error reading JSON configuration files:", error);
  }
}

async function initDatabases() {
  const startTime = Date.now();
  await createFromJson();
  const duration = Date.now() - startTime;
  logger.info(`Database initialization completed in ${duration}ms`);
}

async function closeDatabase() {
  try {
    await pool.end();
    logger.info('Database connection pool closed');
  } catch (error) {
    logger.error('Error closing database pool:', error);
    throw error;
  }
}

export { db, initDatabases, closeDatabase };
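
The loader above accepts two JSON shapes anywhere under the resolved source directory: a "Database" array of raw statements, or a "database" object keyed by table name. Only CREATE TABLE statements pass validateQuery, and ensureIfNotExists appends IF NOT EXISTS when it is missing. A minimal config sketch follows; the file location, table, and column names are illustrative, not part of the package.

Array form (e.g. a feature's config.json):

{
  "Database": [
    "CREATE TABLE guild_settings (guild_id VARCHAR(32) PRIMARY KEY, prefix VARCHAR(8) NOT NULL)"
  ]
}

Equivalent object form, keyed by table name:

{
  "database": {
    "guild_settings": "CREATE TABLE guild_settings (guild_id VARCHAR(32) PRIMARY KEY, prefix VARCHAR(8) NOT NULL)"
  }
}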
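
A minimal consumption sketch, assuming the package root re-exports this module; the "@botport/core" import specifier is inferred from the package name and not confirmed by this file. The pool reads DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, and optionally DB_PORT from the env file located by getEnvPath().

// Hypothetical consumer of the exported API.
import { db, initDatabases, closeDatabase } from "@botport/core";

async function main() {
  // Create any tables declared in the JSON configs found under the source directory.
  await initDatabases();

  // `db` is the mysql2/promise pool, so ordinary queries work directly.
  const [rows] = await db.execute("SELECT 1 AS ok");
  console.log(rows);

  // Release all pooled connections on shutdown.
  await closeDatabase();
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});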