// @joystick.js/db-canary
// Version:
// JoystickDB - A minimalist database server for the Joystick framework
// 470 lines (385 loc) • 14.9 kB
// JavaScript
/**
* @fileoverview Isolated enterprise scale bulk insert tests for JoystickDB.
* These tests are designed to run in complete isolation to prevent memory conflicts
* and segmentation faults when running as part of the full test suite.
*/
import test from 'ava';
import { spawn } from 'child_process';
import { existsSync, rmSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
/**
* Runs a test in a completely isolated Node.js process to prevent memory conflicts.
* @param {string} testScript - Path to the test script to run
* @param {Object} options - Test options
* @returns {Promise<Object>} Test result
*/
const run_isolated_test = (testScript, options = {}) => {
return new Promise((resolve, reject) => {
const {
timeout = 600000, // 10 minutes default timeout
maxMemory = '4096', // 4GB memory limit
gcInterval = '100' // GC every 100ms during test
} = options;
// Node.js arguments for optimal memory management
const nodeArgs = [
'--expose-gc',
'--max-old-space-size=' + maxMemory,
'--optimize-for-size',
'--gc-interval=' + gcInterval,
'--no-lazy',
'--trace-gc-verbose'
];
const child = spawn('node', [...nodeArgs, testScript], {
stdio: ['pipe', 'pipe', 'pipe'],
env: {
...process.env,
NODE_ENV: 'test',
FORCE_COLOR: '0', // Disable colors for cleaner output parsing
ISOLATED_TEST: 'true'
}
});
let stdout = '';
let stderr = '';
let timedOut = false;
// Set up timeout
const timeoutId = setTimeout(() => {
timedOut = true;
child.kill('SIGKILL');
reject(new Error(`Test timed out after ${timeout}ms`));
}, timeout);
child.stdout.on('data', (data) => {
stdout += data.toString();
});
child.stderr.on('data', (data) => {
stderr += data.toString();
});
child.on('close', (code, signal) => {
clearTimeout(timeoutId);
if (timedOut) return; // Already handled by timeout
if (code === 0) {
// Parse test results from stdout
try {
const result = {
success: true,
code,
signal,
stdout,
stderr,
performance: parse_performance_metrics(stdout)
};
resolve(result);
} catch (error) {
resolve({
success: true,
code,
signal,
stdout,
stderr,
parseError: error.message
});
}
} else {
reject(new Error(`Test failed with code ${code}, signal ${signal}\nSTDOUT: ${stdout}\nSTDERR: ${stderr}`));
}
});
child.on('error', (error) => {
clearTimeout(timeoutId);
reject(error);
});
});
};
/**
* Parses performance metrics from test output.
* @param {string} output - Test output
* @returns {Object} Parsed performance metrics
*/
const parse_performance_metrics = (output) => {
const metrics = {};
// Extract duration
const durationMatch = output.match(/Duration: ([\d.]+)\s*seconds?/i);
if (durationMatch) {
metrics.duration_seconds = parseFloat(durationMatch[1]);
}
// Extract throughput
const throughputMatch = output.match(/Throughput: ([\d,]+)\s*docs\/sec/i);
if (throughputMatch) {
metrics.documents_per_second = parseInt(throughputMatch[1].replace(/,/g, ''));
}
// Extract memory usage
const memoryMatch = output.match(/Peak Memory: ([\d.]+)MB/i);
if (memoryMatch) {
metrics.peak_memory_mb = parseFloat(memoryMatch[1]);
}
return metrics;
};
/**
* Creates an isolated test script for enterprise scale testing.
* @param {number} documentCount - Number of documents to test
* @param {string} testName - Name of the test
* @returns {string} Path to the created test script
*/
const create_isolated_test_script = (documentCount, testName) => {
const scriptPath = join(__dirname, `isolated_${documentCount}_test.js`);
const scriptContent = `
import { rmSync, existsSync } from 'fs';
import { initialize_database, cleanup_database } from '../../src/server/lib/query_engine.js';
import { bulk_insert_with_metrics } from '../../src/server/lib/bulk_insert_optimizer.js';
const TEST_DB_PATH = './test_data/isolated_${documentCount}_test';
const TEST_DATABASE = 'isolated_db_${documentCount}';
const TEST_COLLECTION = 'isolated_collection';
// Generate minimal test documents
const generate_documents = (count) => {
const documents = [];
const test_id = Date.now().toString(36);
for (let i = 0; i < count; i++) {
documents.push({
_id: \`iso_\${test_id}_\${i.toString().padStart(8, '0')}\`,
idx: i,
cat: i % 50,
val: i % 1000,
ts: Date.now() + i
});
}
return documents;
};
// Aggressive memory management
const force_cleanup = async () => {
await cleanup_database(true);
// Force multiple GC cycles
if (global.gc) {
for (let i = 0; i < 5; i++) {
global.gc();
await new Promise(resolve => setTimeout(resolve, 50));
}
}
// Wait for LMDB resources to be released
await new Promise(resolve => setTimeout(resolve, 200));
};
// Main test execution
const run_test = async () => {
try {
console.log('š Starting ${testName} (${documentCount.toLocaleString()} documents)');
// Clean setup
if (existsSync(TEST_DB_PATH)) {
rmSync(TEST_DB_PATH, { recursive: true, force: true });
}
// Initial memory state
const initial_memory = process.memoryUsage();
console.log(\`Initial Memory: \${Math.round(initial_memory.heapUsed / (1024 * 1024))}MB heap used\`);
initialize_database(TEST_DB_PATH);
// Generate documents
console.log('Generating documents...');
const documents = generate_documents(${documentCount});
// Run test with optimal settings for isolation
const start_time = Date.now();
const result = await bulk_insert_with_metrics(TEST_DATABASE, TEST_COLLECTION, documents, {
disable_indexing: true,
pre_allocate_map_size: true,
sort_keys: true,
stream_processing: true,
batch_size: ${documentCount >= 5000000 ? '250' : '500'} // Smaller batches for very large datasets
});
const total_duration = Date.now() - start_time;
const duration_seconds = total_duration / 1000;
// Output results in parseable format
console.log(\`\\nā
${testName.toUpperCase()} RESULTS:\`);
console.log(\`Duration: \${duration_seconds.toFixed(2)} seconds\`);
console.log(\`Throughput: \${result.performance.documents_per_second.toLocaleString()} docs/sec\`);
console.log(\`Memory Delta: \${result.performance.memory_usage.delta_heap_mb}MB\`);
console.log(\`Peak Memory: \${result.performance.memory_usage.peak_heap_mb}MB\`);
console.log(\`Success Rate: 100%\`);
// Validate results
if (!result.acknowledged) {
throw new Error('Insert not acknowledged');
}
if (result.inserted_count !== ${documentCount}) {
throw new Error(\`Expected \${${documentCount}} inserts, got \${result.inserted_count}\`);
}
// Performance validation
const max_duration = ${documentCount >= 10000000 ? '300' : documentCount >= 5000000 ? '180' : '120'};
const min_throughput = ${documentCount >= 10000000 ? '30000' : documentCount >= 5000000 ? '25000' : '20000'};
const max_memory = ${documentCount >= 5000000 ? '2048' : '1024'};
if (duration_seconds > max_duration) {
throw new Error(\`Duration \${duration_seconds}s exceeds \${max_duration}s limit\`);
}
if (result.performance.documents_per_second < min_throughput) {
throw new Error(\`Throughput \${result.performance.documents_per_second} below \${min_throughput} docs/sec target\`);
}
if (result.performance.memory_usage.peak_heap_mb > max_memory) {
throw new Error(\`Memory \${result.performance.memory_usage.peak_heap_mb}MB exceeds \${max_memory}MB limit\`);
}
console.log(\`\\nš ${testName.toUpperCase()} VALIDATION:\`);
console.log(\`ā
Performance targets met\`);
console.log(\`ā
Memory usage within limits\`);
console.log(\`ā
All \${${documentCount}} documents inserted successfully\`);
// Cleanup
await force_cleanup();
const final_memory = process.memoryUsage();
console.log(\`Final Memory: \${Math.round(final_memory.heapUsed / (1024 * 1024))}MB heap used\`);
console.log('\\nš Test completed successfully');
process.exit(0);
} catch (error) {
console.error(\`\\nā Test failed: \${error.message}\`);
console.error(error.stack);
try {
await force_cleanup();
} catch (cleanupError) {
console.error('Cleanup error:', cleanupError.message);
}
process.exit(1);
}
};
// Handle process signals
process.on('SIGTERM', async () => {
console.log('Received SIGTERM, cleaning up...');
try {
await force_cleanup();
} catch (error) {
console.error('Cleanup error:', error.message);
}
process.exit(1);
});
process.on('SIGINT', async () => {
console.log('Received SIGINT, cleaning up...');
try {
await force_cleanup();
} catch (error) {
console.error('Cleanup error:', error.message);
}
process.exit(1);
});
// Add uncaught exception handlers
process.on('uncaughtException', async (error) => {
console.error('\\nš„ Uncaught Exception:', error.message);
console.error(error.stack);
try {
await force_cleanup();
} catch (cleanupError) {
console.error('Cleanup error:', cleanupError.message);
}
process.exit(1);
});
process.on('unhandledRejection', async (reason, promise) => {
console.error('\\nš„ Unhandled Rejection at:', promise, 'reason:', reason);
try {
await force_cleanup();
} catch (cleanupError) {
console.error('Cleanup error:', cleanupError.message);
}
process.exit(1);
});
// Run the test
run_test();
`;
// Write the script file
import('fs').then(fs => {
fs.writeFileSync(scriptPath, scriptContent);
});
return scriptPath;
};
// Test setup and cleanup
test.beforeEach(async () => {
// Clean up any existing isolated test scripts
const testDir = __dirname;
const files = await import('fs').then(fs => fs.readdirSync(testDir));
for (const file of files) {
if (file.startsWith('isolated_') && file.endsWith('_test.js')) {
const filePath = join(testDir, file);
try {
await import('fs').then(fs => fs.unlinkSync(filePath));
} catch (error) {
// Ignore cleanup errors
}
}
}
});
test.afterEach(async () => {
// Clean up isolated test scripts
const testDir = __dirname;
const files = await import('fs').then(fs => fs.readdirSync(testDir));
for (const file of files) {
if (file.startsWith('isolated_') && file.endsWith('_test.js')) {
const filePath = join(testDir, file);
try {
await import('fs').then(fs => fs.unlinkSync(filePath));
} catch (error) {
// Ignore cleanup errors
}
}
}
});
// Isolated enterprise scale tests
test('isolated: 5M documents - enterprise scale test', async t => {
console.log('\nš Running 5M document test in isolated process...');
const scriptPath = create_isolated_test_script(5000000, '5M Document Enterprise Scale Test');
try {
const result = await run_isolated_test(scriptPath, {
timeout: 300000, // 5 minutes
maxMemory: '3072', // 3GB limit
gcInterval: '50'
});
t.true(result.success, 'Isolated test should succeed');
t.truthy(result.performance, 'Should have performance metrics');
if (result.performance.duration_seconds) {
t.true(result.performance.duration_seconds <= 180, `Duration ${result.performance.duration_seconds}s should be ⤠180s`);
}
if (result.performance.documents_per_second) {
t.true(result.performance.documents_per_second >= 25000, `Throughput ${result.performance.documents_per_second} should be ā„ 25K docs/sec`);
}
if (result.performance.peak_memory_mb) {
t.true(result.performance.peak_memory_mb <= 2048, `Memory ${result.performance.peak_memory_mb}MB should be ⤠2GB`);
}
console.log('ā
5M document isolated test completed successfully');
} catch (error) {
console.error('ā 5M document isolated test failed:', error.message);
t.fail(`Isolated test failed: ${error.message}`);
}
});
test('isolated: 10M documents - maximum enterprise scale test', async t => {
console.log('\nš Running 10M document test in isolated process...');
const scriptPath = create_isolated_test_script(10000000, '10M Document Maximum Enterprise Scale Test');
try {
const result = await run_isolated_test(scriptPath, {
timeout: 600000, // 10 minutes
maxMemory: '4096', // 4GB limit
gcInterval: '25'
});
t.true(result.success, 'Isolated test should succeed');
t.truthy(result.performance, 'Should have performance metrics');
if (result.performance.duration_seconds) {
t.true(result.performance.duration_seconds <= 300, `Duration ${result.performance.duration_seconds}s should be ⤠300s`);
}
if (result.performance.documents_per_second) {
t.true(result.performance.documents_per_second >= 30000, `Throughput ${result.performance.documents_per_second} should be ā„ 30K docs/sec`);
}
if (result.performance.peak_memory_mb) {
t.true(result.performance.peak_memory_mb <= 2048, `Memory ${result.performance.peak_memory_mb}MB should be ⤠2GB`);
}
console.log('ā
10M document isolated test completed successfully');
} catch (error) {
console.error('ā 10M document isolated test failed:', error.message);
t.fail(`Isolated test failed: ${error.message}`);
}
});
// Validation test to ensure isolation works
test('isolated: process isolation validation', async t => {
console.log('\nš Validating process isolation...');
const scriptPath = create_isolated_test_script(100000, 'Process Isolation Validation Test');
try {
const result = await run_isolated_test(scriptPath, {
timeout: 60000, // 1 minute
maxMemory: '1024', // 1GB limit
gcInterval: '100'
});
t.true(result.success, 'Isolation validation should succeed');
t.truthy(result.stdout, 'Should have stdout output');
t.true(result.stdout.includes('Test completed successfully'), 'Should complete successfully');
console.log('ā
Process isolation validation completed');
} catch (error) {
console.error('ā Process isolation validation failed:', error.message);
t.fail(`Isolation validation failed: ${error.message}`);
}
});