memory-watch

Advanced Node.js memory monitoring with stack trace analysis, user code detection, and memory leak identification
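The gist of the API, distilled from the example script below — a minimal sketch only, assuming the constructor options (threshold, interval, actions), the alert payload fields, and the start()/stop()/captureContext() methods behave as that example uses them; the require path "memory-watch" is likewise an assumption (the bundled example requires "../dist/index").

// Minimal usage sketch — field and option names are taken from the example
// below, not from reference documentation.
const { MemoryWatch } = require("memory-watch"); // assumed entry point

const watch = new MemoryWatch({
  threshold: 0.8, // fraction of memory at which actions fire (0.5 = 50% in the example)
  interval: 5000, // sample every 5 seconds
  actions: [
    (data) => {
      console.log(`Memory at ${(data.percentage * 100).toFixed(1)}%`);
      // Frames recorded via watch.captureContext(...) appear here
      for (const frame of data.context?.stackTrace ?? []) {
        console.log(`  ${frame.functionName} (${frame.fileName}:${frame.lineNumber})`);
      }
    },
  ],
});

watch.start();
// ... run your workload, optionally calling
// watch.captureContext("myFunction", __filename, 42) at hot spots ...
watch.stop();

The example script that ships with the package exercises the same API end to end.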

237 lines (198 loc) • 6.74 kB
const { MemoryWatch, generateDiagnosticReport } = require("../dist/index");

// Create a shared watch instance
let watch;

// Simulate a real application with manual context tracking
class DataProcessor {
  constructor() {
    this.data = [];
    this.cache = new Map();
  }

  processLargeDataset(records) {
    // Manually capture context for better tracking
    watch?.captureContext("processLargeDataset", __filename, 14);

    console.log("šŸ“Š Processing large dataset...");
    const processedData = [];

    for (let i = 0; i < records; i++) {
      // Simulate heavy data processing
      const record = {
        id: i,
        data: new Array(200).fill(`record-${i}`),
        metadata: {
          processed: true,
          timestamp: new Date(),
          extra: new Array(100).fill(`meta-${i}`),
        },
        analytics: this.calculateAnalytics(i),
      };

      processedData.push(record);
      this.cache.set(i, record); // Also cache it (more memory usage)
    }

    this.data = processedData;
    console.log(`āœ… Processed ${records} records`);
    return processedData;
  }

  calculateAnalytics(id) {
    // This function also uses memory
    watch?.captureContext("calculateAnalytics", __filename, 38);

    return {
      score: Math.random(),
      details: new Array(50).fill(`analytics-${id}`),
      history: new Array(30).fill({
        event: `event-${id}`,
        data: new Array(20).fill(`history-${id}`),
      }),
    };
  }
}

class APIService {
  constructor() {
    this.processor = new DataProcessor();
    this.requestCount = 0;
  }

  async handleRequest(size = 1000) {
    this.requestCount++;

    // Track API context
    watch?.captureContext("handleRequest", __filename, 58);

    console.log(
      `🌐 API Request #${this.requestCount} - Processing ${size} records`
    );

    try {
      // This will show up in memory analysis
      const result = this.processor.processLargeDataset(size);

      // Simulate additional processing
      const response = this.formatResponse(result);

      console.log(`āœ… API Request #${this.requestCount} completed`);
      return response;
    } catch (error) {
      console.error("āŒ API Request failed:", error);
      throw error;
    }
  }

  formatResponse(data) {
    // Another function that might use memory
    watch?.captureContext("formatResponse", __filename, 78);

    return {
      status: "success",
      count: data.length,
      data: data.slice(0, 100), // Only return first 100 for response
      metadata: {
        processed: new Date(),
        total: data.length,
        summary: new Array(50).fill("summary-data"),
      },
    };
  }
}

// Setup memory monitoring
watch = new MemoryWatch({
  threshold: 0.5, // 50% threshold for quick testing
  interval: 3000, // Check every 3 seconds
  actions: [
    (data) => {
      console.log("\n" + "🚨".repeat(25));
      console.log("MEMORY ALERT - User Code Detection Test");
      console.log("🚨".repeat(25));
      console.log(
        `šŸ“Š Memory: ${(data.percentage * 100).toFixed(1)}% (${Math.round(
          data.usedBytes / 1024 / 1024
        )}MB)`
      );

      // Show user code in stack trace
      if (data.context?.stackTrace && data.context.stackTrace.length > 0) {
        console.log("\nšŸŽÆ USER CODE STACK TRACE:");

        let userCodeFound = false;
        data.context.stackTrace.forEach((trace, index) => {
          if (
            trace.fileName &&
            (trace.fileName.includes(".js") || trace.fileName.includes(".ts"))
          ) {
            const fileName = trace.fileName.split("/").pop(); // Get just filename
            console.log(
              `  ${index + 1}. ⭐ ${trace.functionName} → ${fileName}:${
                trace.lineNumber
              }`
            );
            userCodeFound = true;
          } else if (
            trace.functionName &&
            !trace.functionName.includes("node:")
          ) {
            console.log(`  ${index + 1}. ${trace.functionName}`);
          }
        });

        if (!userCodeFound) {
          console.log("  āš ļø No user code detected in stack trace");
        }
      }

      // Analyze which functions are problematic
      console.log("\nšŸ” ANALYSIS:");
      const stackTrace = data.context?.stackTrace || [];

      if (
        stackTrace.some((t) => t.functionName?.includes("processLargeDataset"))
      ) {
        console.log("  šŸŽÆ Memory spike in processLargeDataset function");
        console.log("  šŸ’” Suggestion: Process data in smaller chunks");
      }

      if (
        stackTrace.some((t) => t.functionName?.includes("calculateAnalytics"))
      ) {
        console.log("  šŸŽÆ Memory spike in calculateAnalytics function");
        console.log("  šŸ’” Suggestion: Cache analytics results");
      }

      if (stackTrace.some((t) => t.functionName?.includes("handleRequest"))) {
        console.log("  šŸŽÆ Memory spike in API handleRequest");
        console.log("  šŸ’” Suggestion: Implement request rate limiting");
      }

      if (stackTrace.some((t) => t.functionName?.includes("formatResponse"))) {
        console.log("  šŸŽÆ Memory spike in formatResponse");
        console.log("  šŸ’” Suggestion: Stream large responses");
      }

      // Memory breakdown
      console.log("\nšŸ“ˆ Memory Details:");
      console.log(
        `  Heap: ${Math.round(data.breakdown.heapUsed / 1024 / 1024)}MB`
      );
      console.log(`  RSS: ${Math.round(data.breakdown.rss / 1024 / 1024)}MB`);
      console.log(
        `  External: ${Math.round(data.breakdown.external / 1024)}KB`
      );

      console.log("🚨".repeat(25) + "\n");
    },
  ],
});

async function runTest() {
  console.log("šŸš€ Testing User Code Detection...");
  console.log("   Features: Manual context capture, improved stack traces");
  console.log("   Threshold: 50%\n");

  watch.start();

  const apiService = new APIService();

  // Test with increasing load
  setTimeout(async () => {
    try {
      await apiService.handleRequest(500); // Small request
    } catch (error) {
      console.error("Error in small request:", error);
    }
  }, 2000);

  setTimeout(async () => {
    try {
      await apiService.handleRequest(1500); // Medium request
    } catch (error) {
      console.error("Error in medium request:", error);
    }
  }, 6000);

  setTimeout(async () => {
    try {
      await apiService.handleRequest(3000); // Large request
    } catch (error) {
      console.error("Error in large request:", error);
    }
  }, 10000);

  // Stop after 20 seconds
  setTimeout(() => {
    watch.stop();
    console.log("āœ… User code detection test completed");
    process.exit(0);
  }, 20000);
}

runTest();