/**
 * @fileoverview Isolated 1M document bulk insert test for JoystickDB.
 * Exercises the bulk insert optimizer's ability to handle enterprise-scale data loads safely.
 */
import test from 'ava';
import { rmSync, existsSync } from 'fs';
import { initialize_database, cleanup_database } from '../../src/server/lib/query_engine.js';
import { bulk_insert_with_metrics } from '../../src/server/lib/bulk_insert_optimizer.js';
const TEST_DB_PATH = './test_data/bulk_1m_test';
const TEST_DATABASE = 'million_db';
const TEST_COLLECTION = 'million_collection';
/**
 * Generates lightweight test documents for the 1M document test.
 * @param {number} count - Number of documents to generate
 * @returns {Array<Object>} Array of test documents
 */
const generate_lightweight_documents = (count) => {
  const documents = [];

  for (let i = 0; i < count; i++) {
    documents.push({
      _id: `doc_${i.toString().padStart(7, '0')}`,
      index: i,
      category: `cat_${i % 100}`,
      active: i % 2 === 0,
      score: i % 1000,
      timestamp: Date.now() + i
    });
  }

  return documents;
};
/**
 * Sets up a fresh test database before each test.
 */
test.beforeEach(async () => {
  if (existsSync(TEST_DB_PATH)) {
    rmSync(TEST_DB_PATH, { recursive: true, force: true });
  }

  initialize_database(TEST_DB_PATH);
});
/**
 * Cleans up the test database after each test.
 */
test.afterEach(async () => {
  await cleanup_database(true);
});
test('1M documents - enterprise scale bulk insert test', async t => {
  console.log('\nStarting 1M Document Enterprise Scale Test...');
  console.log('Generating 1,000,000 lightweight documents...');

  const documents = generate_lightweight_documents(1000000);
  const estimated_size_mb = Math.round(documents.length * 100 / (1024 * 1024)); // ~100 bytes per doc

  console.log(`Test Configuration:`);
  console.log(`  Documents: ${documents.length.toLocaleString()}`);
  console.log(`  Estimated Size: ${estimated_size_mb}MB`);
  console.log(`  Optimization: All features enabled`);
  console.log(`  Memory Management: Streaming with 1K batches`);

  const start_time = Date.now();
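
  /*
   * Assumed shape of `result`, inferred from the assertions and logging in
   * this test rather than from library documentation:
   *   result.acknowledged                             {boolean}
   *   result.inserted_count / result.inserted_ids     {number} / {Array}
   *   result.performance.documents_per_second         {number}
   *   result.performance.memory_usage.delta_heap_mb   {number}
   *   result.performance.memory_usage.peak_heap_mb    {number}
   */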
  const result = await bulk_insert_with_metrics(TEST_DATABASE, TEST_COLLECTION, documents, {
    disable_indexing: true,
    pre_allocate_map_size: true,
    sort_keys: true,
    stream_processing: true,
    batch_size: 1000 // Smaller batches for memory safety
  });

  const total_duration = Date.now() - start_time;
  const duration_seconds = total_duration / 1000;
  console.log(`\n1M DOCUMENT TEST RESULTS:`);
  console.log(`  Duration: ${duration_seconds.toFixed(2)} seconds`);
  console.log(`  Throughput: ${result.performance.documents_per_second.toLocaleString()} docs/sec`);
  console.log(`  Memory Delta: ${result.performance.memory_usage.delta_heap_mb}MB`);
  console.log(`  Peak Memory: ${result.performance.memory_usage.peak_heap_mb}MB`);
  console.log(`  Success Rate: 100%`);
  // Validate results
  t.true(result.acknowledged);
  t.is(result.inserted_count, 1000000);
  t.is(result.inserted_ids.length, 1000000);

  // Performance targets for 1M documents
  t.true(duration_seconds < 300, `Duration ${duration_seconds}s exceeds 5 minute limit`);
  t.true(result.performance.documents_per_second >= 3000, `Throughput ${result.performance.documents_per_second} below 3K docs/sec target`);
  t.true(result.performance.memory_usage.peak_heap_mb < 1024, `Memory ${result.performance.memory_usage.peak_heap_mb}MB exceeds 1GB limit`);
  // Performance classification
  if (duration_seconds <= 30) {
    console.log(`  PERFORMANCE: EXCELLENT (≤30s)`);
  } else if (duration_seconds <= 60) {
    console.log(`  PERFORMANCE: VERY GOOD (≤60s)`);
  } else if (duration_seconds <= 120) {
    console.log(`  PERFORMANCE: GOOD (≤2min)`);
  } else {
    console.log(`  PERFORMANCE: ACCEPTABLE (≤5min)`);
  }
  console.log(`\nENTERPRISE SCALE VALIDATION:`);
  console.log(`  No crashes or segmentation faults`);
  console.log(`  Stable memory usage under 1GB`);
  console.log(`  Consistent throughput throughout operation`);
  console.log(`  All 1M documents inserted successfully`);
});