// Package: @tinytapanalytics/sdk
// Behavioral psychology platform that detects visitor frustration, predicts
// abandonment, and helps you save at-risk conversions in real-time.
/**
* Tests for PerformanceMonitoring feature
*/
import { PerformanceMonitoring } from '../PerformanceMonitoring';
import { TinyTapAnalyticsConfig } from '../../types/index';
describe('PerformanceMonitoring', () => {
let performanceMonitoring: PerformanceMonitoring;
let mockConfig: TinyTapAnalyticsConfig;
let mockSdk: any;
let mockPerformanceObserver: jest.Mock;
beforeEach(() => {
  // Fresh config and SDK doubles for every test.
  mockConfig = {
    apiKey: 'test-key',
    endpoint: 'https://api.test.com',
    debug: false
  };
  mockSdk = {
    track: jest.fn()
  };
  // Mock PerformanceObserver so the feature can register observers in jsdom.
  mockPerformanceObserver = jest.fn().mockImplementation((callback) => ({
    observe: jest.fn(),
    disconnect: jest.fn()
  }));
  global.PerformanceObserver = mockPerformanceObserver as any;
  // Mock window.performance with deterministic legacy navigation-timing
  // values (navigationStart = 1000); suites below assert deltas from these.
  Object.defineProperty(window, 'performance', {
    configurable: true,
    writable: true,
    value: {
      timing: {
        navigationStart: 1000,
        domainLookupStart: 1050,
        domainLookupEnd: 1100,
        connectStart: 1100,
        connectEnd: 1200,
        responseStart: 1300,
        domInteractive: 2000,
        domContentLoadedEventEnd: 2500,
        loadEventEnd: 3000
      },
      mark: jest.fn(),
      measure: jest.fn(),
      getEntries: jest.fn(() => []),
      getEntriesByName: jest.fn(() => []),
      getEntriesByType: jest.fn(() => [])
    }
  });
  // Swallow listener registration by default; the 'event listener callbacks'
  // suite re-mocks these to capture the registered callbacks.
  jest.spyOn(window, 'addEventListener').mockImplementation(() => {});
  jest.spyOn(document, 'addEventListener').mockImplementation(() => {});
});
// Restore spied implementations first, then wipe recorded call history,
// so no mock state leaks between tests.
afterEach(() => {
  jest.restoreAllMocks();
  jest.clearAllMocks();
});
describe('start/stop', () => {
it('should start performance monitoring', () => {
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.start();
expect(window.addEventListener).toHaveBeenCalled();
});
it('should not start if already active', () => {
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.start();
const firstCallCount = (window.addEventListener as jest.Mock).mock.calls.length;
performanceMonitoring.start();
expect((window.addEventListener as jest.Mock).mock.calls.length).toBe(firstCallCount);
});
it('should not start if Performance API is not available', () => {
delete (window as any).performance;
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.start();
const metrics = performanceMonitoring.getMetrics();
expect(Object.keys(metrics).length).toBe(0);
});
it('should log in debug mode', () => {
const debugConfig = { ...mockConfig, debug: true };
performanceMonitoring = new PerformanceMonitoring(debugConfig, mockSdk);
const consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
performanceMonitoring.start();
expect(consoleLogSpy).toHaveBeenCalledWith('TinyTapAnalytics: Performance monitoring started');
consoleLogSpy.mockRestore();
});
it('should stop performance monitoring', () => {
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.start();
performanceMonitoring.stop();
expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', expect.any(Object));
});
it('should not stop if already inactive', () => {
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.stop();
expect(mockSdk.track).not.toHaveBeenCalled();
});
});
describe('metric collection', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should collect initial metrics', () => {
    const metrics = performanceMonitoring.getMetrics();
    // All values derive from the performance.timing mock (navigationStart = 1000).
    expect(metrics.domContentLoaded).toBe(1500); // 2500 - 1000
    expect(metrics.loadComplete).toBe(2000); // 3000 - 1000
    expect(metrics.domInteractive).toBe(1000); // 2000 - 1000
    expect(metrics.dnsLookup).toBe(50); // 1100 - 1050
    expect(metrics.tcpConnect).toBe(100); // 1200 - 1100
    expect(metrics.ttfb).toBe(300); // 1300 - 1000
  });

  it('should return copy of metrics', () => {
    const first = performanceMonitoring.getMetrics();
    const second = performanceMonitoring.getMetrics();
    expect(first).toEqual(second); // same content...
    expect(first).not.toBe(second); // ...but a defensive copy each time
  });

  it('should clear performance data', () => {
    performanceMonitoring.clearData();
    expect(Object.keys(performanceMonitoring.getMetrics()).length).toBe(0);
  });
});
describe('custom marks and measures', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should add custom performance mark', () => {
    performanceMonitoring.mark('my-mark');
    expect(window.performance.mark).toHaveBeenCalledWith('my-mark');
  });

  it('should add custom performance measure', () => {
    performanceMonitoring.measure('my-measure', 'start', 'end');
    expect(window.performance.measure).toHaveBeenCalledWith('my-measure', 'start', 'end');
  });

  it('should add measure with start mark only', () => {
    performanceMonitoring.measure('my-measure', 'start');
    expect(window.performance.measure).toHaveBeenCalledWith('my-measure', 'start');
  });

  it('should add measure without marks', () => {
    performanceMonitoring.measure('my-measure');
    expect(window.performance.measure).toHaveBeenCalledWith('my-measure');
  });

  it('should not add mark if not active', () => {
    performanceMonitoring.stop();
    performanceMonitoring.mark('my-mark');
    expect(window.performance.mark).not.toHaveBeenCalled();
  });

  it('should handle mark errors', () => {
    // Debug-enabled monitor so the failure path logs a warning.
    const monitor = new PerformanceMonitoring({ ...mockConfig, debug: true }, mockSdk);
    monitor.start();
    const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation();
    (window.performance.mark as jest.Mock).mockImplementation(() => {
      throw new Error('Mark failed');
    });
    monitor.mark('bad-mark');
    expect(consoleWarnSpy).toHaveBeenCalledWith(
      "TinyTapAnalytics: Failed to add mark 'bad-mark'",
      expect.any(Error)
    );
    consoleWarnSpy.mockRestore();
  });

  it('should handle measure errors', () => {
    const monitor = new PerformanceMonitoring({ ...mockConfig, debug: true }, mockSdk);
    monitor.start();
    const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation();
    (window.performance.measure as jest.Mock).mockImplementation(() => {
      throw new Error('Measure failed');
    });
    monitor.measure('bad-measure');
    expect(consoleWarnSpy).toHaveBeenCalledWith(
      "TinyTapAnalytics: Failed to add measure 'bad-measure'",
      expect.any(Error)
    );
    consoleWarnSpy.mockRestore();
  });
});
describe('Core Web Vitals scoring', () => {
beforeEach(() => {
performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
performanceMonitoring.start();
});
it('should score LCP as good', () => {
// Manually set metrics for testing
(performanceMonitoring as any).metrics.lcp = 2000;
(performanceMonitoring as any).metrics.fid = 50;
(performanceMonitoring as any).metrics.cls = 0.05;
const score = performanceMonitoring.getCoreWebVitalsScore();
expect(score.lcp).toBe('good');
expect(score.fid).toBe('good');
expect(score.cls).toBe('good');
expect(score.overall).toBe('good');
});
it('should score metrics as needs-improvement', () => {
(performanceMonitoring as any).metrics.lcp = 3000;
(performanceMonitoring as any).metrics.fid = 150;
(performanceMonitoring as any).metrics.cls = 0.15;
const score = performanceMonitoring.getCoreWebVitalsScore();
expect(score.lcp).toBe('needs-improvement');
expect(score.fid).toBe('needs-improvement');
expect(score.cls).toBe('needs-improvement');
expect(score.overall).toBe('needs-improvement');
});
it('should score metrics as poor', () => {
(performanceMonitoring as any).metrics.lcp = 5000;
(performanceMonitoring as any).metrics.fid = 400;
(performanceMonitoring as any).metrics.cls = 0.5;
const score = performanceMonitoring.getCoreWebVitalsScore();
expect(score.lcp).toBe('poor');
expect(score.fid).toBe('poor');
expect(score.cls).toBe('poor');
expect(score.overall).toBe('poor');
});
it('should handle unknown metrics', () => {
performanceMonitoring.clearData();
const score = performanceMonitoring.getCoreWebVitalsScore();
expect(score.lcp).toBe('unknown');
expect(score.fid).toBe('unknown');
expect(score.cls).toBe('unknown');
expect(score.overall).toBe('poor');
});
it('should score overall as poor if any metric is poor', () => {
(performanceMonitoring as any).metrics.lcp = 2000; // good
(performanceMonitoring as any).metrics.fid = 50; // good
(performanceMonitoring as any).metrics.cls = 0.5; // poor
const score = performanceMonitoring.getCoreWebVitalsScore();
expect(score.overall).toBe('poor');
});
});
describe('performance threshold checking', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should track performance issues when thresholds exceeded', () => {
    const internalMetrics = (performanceMonitoring as any).metrics;
    internalMetrics.lcp = 3000;
    internalMetrics.fid = 150;
    internalMetrics.cls = 0.2;
    performanceMonitoring.stop();
    // Three simultaneous violations are reported with high severity.
    expect(mockSdk.track).toHaveBeenCalledWith('performance_issues', {
      issues: expect.arrayContaining(['LCP > 2.5s', 'FID > 100ms', 'CLS > 0.1']),
      metrics: expect.any(Object),
      severity: 'high'
    });
  });

  it('should track medium severity for fewer issues', () => {
    const internalMetrics = (performanceMonitoring as any).metrics;
    internalMetrics.lcp = 3000; // violation
    internalMetrics.fid = 50; // within budget
    performanceMonitoring.stop();
    expect(mockSdk.track).toHaveBeenCalledWith(
      'performance_issues',
      expect.objectContaining({ severity: 'medium' })
    );
  });

  it('should not track issues when all metrics are good', () => {
    const internalMetrics = (performanceMonitoring as any).metrics;
    internalMetrics.lcp = 2000;
    internalMetrics.fid = 50;
    internalMetrics.cls = 0.05;
    performanceMonitoring.stop();
    expect(mockSdk.track).not.toHaveBeenCalledWith('performance_issues', expect.any(Object));
  });
});
describe('resource type detection', () => {
  // Shorthand around the private URL classifier under test.
  const typeOf = (url: string): string =>
    (performanceMonitoring as any).getResourceType(url);

  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
  });

  it('should detect script resources', () => {
    expect(typeOf('https://example.com/app.js')).toBe('script');
  });

  it('should detect stylesheet resources', () => {
    expect(typeOf('https://example.com/styles.css')).toBe('stylesheet');
  });

  it('should detect image resources', () => {
    expect(typeOf('https://example.com/logo.png')).toBe('image');
  });

  it('should detect font resources', () => {
    expect(typeOf('https://example.com/font.woff2')).toBe('font');
  });

  it('should detect video resources', () => {
    expect(typeOf('https://example.com/video.mp4')).toBe('video');
  });

  it('should detect audio resources', () => {
    expect(typeOf('https://example.com/audio.mp3')).toBe('audio');
  });

  it('should detect XHR resources', () => {
    // No recognized file extension on an /api/ path -> treated as XHR.
    expect(typeOf('https://example.com/api/data')).toBe('xhr');
  });

  it('should return other for unknown types', () => {
    expect(typeOf('https://example.com/file.xyz')).toBe('other');
  });
});
describe('connection information', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
  });

  it('should get connection info when available', () => {
    Object.defineProperty(navigator, 'connection', {
      configurable: true,
      value: {
        effectiveType: '4g',
        downlink: 10,
        rtt: 50,
        saveData: false
      }
    });
    const info = (performanceMonitoring as any).getConnectionInfo();
    expect(info).toEqual({
      effectiveType: '4g',
      downlink: 10,
      rtt: 50,
      saveData: false
    });
  });

  it('should return null when connection API not available', () => {
    // Store original values so the deletions below can be undone.
    const originalConnection = (navigator as any).connection;
    const originalMozConnection = (navigator as any).mozConnection;
    const originalWebkitConnection = (navigator as any).webkitConnection;
    // Remove all vendor-prefixed connection properties.
    delete (navigator as any).connection;
    delete (navigator as any).mozConnection;
    delete (navigator as any).webkitConnection;
    try {
      const info = (performanceMonitoring as any).getConnectionInfo();
      expect(info).toBeNull();
    } finally {
      // Fix: restore in `finally` so a failing expectation cannot leave
      // navigator.connection deleted for the rest of the suite (previously
      // the restore code was unconditionally after the expect and was
      // skipped whenever the assertion threw).
      if (originalConnection !== undefined) (navigator as any).connection = originalConnection;
      if (originalMozConnection !== undefined) (navigator as any).mozConnection = originalMozConnection;
      if (originalWebkitConnection !== undefined) (navigator as any).webkitConnection = originalWebkitConnection;
    }
  });
});
describe('Performance API checks', () => {
  it('should detect when Performance API is available', () => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    expect((performanceMonitoring as any).isPerformanceAPIAvailable()).toBe(true);
  });

  it('should detect when PerformanceObserver is not available', () => {
    delete (global as any).PerformanceObserver;
    try {
      performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
      expect((performanceMonitoring as any).isPerformanceAPIAvailable()).toBe(false);
    } finally {
      // Fix: restore in `finally` so a failing expectation cannot leave the
      // global deleted for later tests (the old restore line ran only after
      // a passing expect).
      global.PerformanceObserver = mockPerformanceObserver as any;
    }
  });
});
describe('performance data sending', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should send performance data with correct structure', () => {
    performanceMonitoring.stop();
    // The payload carries metrics plus page/browser context.
    expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', {
      metrics: expect.any(Object),
      url: window.location.href,
      userAgent: navigator.userAgent,
      timestamp: expect.any(Number),
      viewport: {
        width: window.innerWidth,
        height: window.innerHeight
      },
      connection: expect.anything()
    });
  });

  it('should log performance data in debug mode', () => {
    const monitor = new PerformanceMonitoring({ ...mockConfig, debug: true }, mockSdk);
    monitor.start();
    const consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
    monitor.stop();
    expect(consoleLogSpy).toHaveBeenCalledWith(
      'TinyTapAnalytics: Performance data sent',
      expect.any(Object)
    );
    consoleLogSpy.mockRestore();
  });
});
describe('PerformanceObserver callbacks', () => {
  let observerCallbacks: Map<string, any>;

  // Drive a captured observer callback with a fake entry list.
  const emit = (entryType: string, entries: any[]) => {
    const callback = observerCallbacks.get(entryType);
    callback({ getEntries: () => entries });
  };

  beforeEach(() => {
    observerCallbacks = new Map();
    // Record each observer's callback, keyed by the entry type it observes.
    mockPerformanceObserver.mockImplementation((callback) => ({
      observe: jest.fn((options: any) => {
        observerCallbacks.set(options.entryTypes[0], callback);
      }),
      disconnect: jest.fn()
    }));
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should process LCP entries', () => {
    emit('largest-contentful-paint', [
      { startTime: 1500, name: 'largest-contentful-paint' },
      { startTime: 2000, name: 'largest-contentful-paint' }
    ]);
    // LCP is taken from the most recent candidate entry.
    expect(performanceMonitoring.getMetrics().lcp).toBe(2000);
  });

  it('should process FID entries', () => {
    emit('first-input', [
      { startTime: 100, processingStart: 120, name: 'first-input' }
    ]);
    // FID = input delay = processingStart - startTime.
    expect(performanceMonitoring.getMetrics().fid).toBe(20);
  });

  it('should process CLS entries without recent input', () => {
    emit('layout-shift', [
      { value: 0.05, hadRecentInput: false },
      { value: 0.03, hadRecentInput: false },
      { value: 0.1, hadRecentInput: true } // user-initiated shift: ignored
    ]);
    expect(performanceMonitoring.getMetrics().cls).toBe(0.08); // 0.05 + 0.03
  });

  it('should process FCP entries', () => {
    emit('paint', [
      { name: 'first-paint', startTime: 1000 },
      { name: 'first-contentful-paint', startTime: 1200 }
    ]);
    expect(performanceMonitoring.getMetrics().fcp).toBe(1200);
  });

  it('should process resource entries and track slow resources', () => {
    emit('resource', [
      {
        name: 'https://example.com/slow.js',
        duration: 1500, // slow (>1000ms)
        startTime: 100,
        transferSize: 50000
      },
      {
        name: 'https://example.com/fast.css',
        duration: 500,
        startTime: 200,
        transferSize: 10000
      }
    ]);
    const metrics = performanceMonitoring.getMetrics();
    expect(metrics.resourceCount).toBe(2);
    expect(metrics.totalResourceSize).toBe(60000);
    // Only the >1000ms resource is reported as slow.
    expect(mockSdk.track).toHaveBeenCalledWith('slow_resources', {
      count: 1,
      resources: [
        {
          name: 'https://example.com/slow.js',
          type: 'script',
          duration: 1500
        }
      ]
    });
  });

  it('should process navigation timing entries', () => {
    emit('navigation', [
      {
        domainLookupStart: 10,
        domainLookupEnd: 60,
        connectStart: 60,
        connectEnd: 160,
        responseStart: 200,
        startTime: 0
      }
    ]);
    const metrics = performanceMonitoring.getMetrics();
    expect(metrics.dnsLookup).toBe(50);
    expect(metrics.tcpConnect).toBe(100);
    // The navigation observer overwrites the ttfb (300) derived from the
    // initial performance.timing mock.
    expect(metrics.ttfb).toBe(200);
  });

  it('should process user timing mark entries', () => {
    emit('mark', [
      { name: 'custom-mark-1', startTime: 1000 },
      { name: 'custom-mark-2', startTime: 2000 }
    ]);
    expect(performanceMonitoring.getMetrics().customMarks).toEqual({
      'custom-mark-1': 1000,
      'custom-mark-2': 2000
    });
  });

  it('should process user timing measure entries', () => {
    emit('measure', [
      { name: 'custom-measure-1', duration: 500 },
      { name: 'custom-measure-2', duration: 750 }
    ]);
    expect(performanceMonitoring.getMetrics().customMeasures).toEqual({
      'custom-measure-1': 500,
      'custom-measure-2': 750
    });
  });

  it('should handle observer errors in debug mode', () => {
    const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation();
    mockPerformanceObserver.mockImplementation(() => {
      throw new Error('Observer not supported');
    });
    const monitor = new PerformanceMonitoring({ ...mockConfig, debug: true }, mockSdk);
    monitor.start();
    expect(consoleWarnSpy).toHaveBeenCalled();
    consoleWarnSpy.mockRestore();
  });
});
describe('event listener callbacks', () => {
  let loadCallback: any;
  let beforeUnloadCallback: any;
  let visibilityCallback: any;
  let domContentLoadedCallback: any;

  // jest.useFakeTimers() clears the window.performance mock installed by the
  // outer beforeEach; this reinstalls the same timing fixture.
  const reinstallTimingMock = () => {
    Object.defineProperty(window, 'performance', {
      configurable: true,
      writable: true,
      value: {
        timing: {
          navigationStart: 1000,
          domainLookupStart: 1050,
          domainLookupEnd: 1100,
          connectStart: 1100,
          connectEnd: 1200,
          responseStart: 1300,
          domInteractive: 2000,
          domContentLoadedEventEnd: 2500,
          loadEventEnd: 3000
        }
      }
    });
  };

  beforeEach(() => {
    // Pretend the document is still loading so every listener gets added.
    Object.defineProperty(document, 'readyState', {
      configurable: true,
      writable: true,
      value: 'loading'
    });
    // Capture the callbacks the feature registers on window/document.
    (window.addEventListener as jest.Mock).mockImplementation((event, callback) => {
      if (event === 'load') loadCallback = callback;
      if (event === 'beforeunload') beforeUnloadCallback = callback;
    });
    (document.addEventListener as jest.Mock).mockImplementation((event, callback) => {
      if (event === 'visibilitychange') visibilityCallback = callback;
      if (event === 'DOMContentLoaded') domContentLoadedCallback = callback;
    });
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should send performance data on load event', () => {
    jest.useFakeTimers();
    reinstallTimingMock();
    loadCallback();
    // loadEventEnd (3000) - navigationStart (1000)
    expect(performanceMonitoring.getMetrics().loadComplete).toBe(2000);
    // The load handler defers the send through a 1000ms setTimeout.
    jest.advanceTimersByTime(1000);
    expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', expect.any(Object));
    jest.useRealTimers();
  });

  it('should send performance data on beforeunload', () => {
    beforeUnloadCallback();
    expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', expect.any(Object));
  });

  it('should send performance data on visibilitychange to hidden', () => {
    Object.defineProperty(document, 'hidden', {
      configurable: true,
      writable: true,
      value: true
    });
    visibilityCallback();
    expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', expect.any(Object));
  });

  it('should not send performance data when page is visible', () => {
    Object.defineProperty(document, 'hidden', {
      configurable: true,
      writable: true,
      value: false
    });
    mockSdk.track.mockClear();
    visibilityCallback();
    expect(mockSdk.track).not.toHaveBeenCalled();
  });

  it('should collect domContentLoaded metric when DOMContentLoaded fires', () => {
    domContentLoadedCallback();
    // domContentLoadedEventEnd (2500) - navigationStart (1000)
    expect(performanceMonitoring.getMetrics().domContentLoaded).toBe(1500);
  });

  it('should collect metrics immediately when page is already loaded', () => {
    jest.useFakeTimers();
    reinstallTimingMock();
    // A fully loaded document should bypass the event listeners entirely.
    Object.defineProperty(document, 'readyState', {
      configurable: true,
      writable: true,
      value: 'complete'
    });
    mockSdk.track.mockClear();
    (window.addEventListener as jest.Mock).mockClear();
    (document.addEventListener as jest.Mock).mockClear();
    const pm = new PerformanceMonitoring(mockConfig, mockSdk);
    pm.start();
    const metrics = pm.getMetrics();
    expect(metrics.domContentLoaded).toBe(1500);
    expect(metrics.loadComplete).toBe(2000);
    // Advance past the deferred send.
    jest.advanceTimersByTime(1000);
    expect(mockSdk.track).toHaveBeenCalledWith('performance_metrics', expect.any(Object));
    jest.useRealTimers();
  });
});
describe('threshold checks', () => {
  beforeEach(() => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
  });

  it('should track FCP threshold violations', () => {
    (performanceMonitoring as any).metrics.fcp = 2000; // above the 1800ms budget
    performanceMonitoring.stop();
    expect(mockSdk.track).toHaveBeenCalledWith(
      'performance_issues',
      expect.objectContaining({ issues: expect.arrayContaining(['FCP > 1.8s']) })
    );
  });

  it('should track TTFB threshold violations', () => {
    (performanceMonitoring as any).metrics.ttfb = 700; // above the 600ms budget
    performanceMonitoring.stop();
    expect(mockSdk.track).toHaveBeenCalledWith(
      'performance_issues',
      expect.objectContaining({ issues: expect.arrayContaining(['TTFB > 600ms']) })
    );
  });
});
describe('debug logging', () => {
  let debugMonitoring: PerformanceMonitoring;
  let consoleLogSpy: jest.SpyInstance;

  beforeEach(() => {
    // Spy first, then construct/start, so the "started" log is captured too.
    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
    debugMonitoring = new PerformanceMonitoring({ ...mockConfig, debug: true }, mockSdk);
    debugMonitoring.start();
  });

  afterEach(() => {
    consoleLogSpy.mockRestore();
  });

  it('should log when adding marks in debug mode', () => {
    debugMonitoring.mark('test-mark');
    expect(consoleLogSpy).toHaveBeenCalledWith("TinyTapAnalytics: Performance mark 'test-mark' added");
  });

  it('should log when adding measures in debug mode', () => {
    debugMonitoring.measure('test-measure');
    expect(consoleLogSpy).toHaveBeenCalledWith("TinyTapAnalytics: Performance measure 'test-measure' added");
  });

  it('should log when clearing data in debug mode', () => {
    debugMonitoring.clearData();
    expect(consoleLogSpy).toHaveBeenCalledWith('TinyTapAnalytics: Performance data cleared');
  });

  it('should log when stopping in debug mode', () => {
    // Drop the "started" log so only the stop message is asserted.
    consoleLogSpy.mockClear();
    debugMonitoring.stop();
    expect(consoleLogSpy).toHaveBeenCalledWith('TinyTapAnalytics: Performance monitoring stopped');
  });
});
describe('edge cases', () => {
  it('should handle missing performance.timing', () => {
    // A performance object without the legacy `timing` section.
    Object.defineProperty(window, 'performance', {
      configurable: true,
      writable: true,
      value: {
        mark: jest.fn(),
        measure: jest.fn()
      }
    });
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
    const metrics = performanceMonitoring.getMetrics();
    // Timing-derived metrics must simply be absent, not NaN or throwing.
    expect(metrics.domContentLoaded).toBeUndefined();
    expect(metrics.loadComplete).toBeUndefined();
  });

  it('should not measure when not active', () => {
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    // start() is intentionally never called.
    performanceMonitoring.measure('test');
    expect(window.performance.measure).not.toHaveBeenCalled();
  });

  it('should not measure when performance.measure not available', () => {
    Object.defineProperty(window, 'performance', {
      configurable: true,
      writable: true,
      value: {
        timing: {},
        mark: jest.fn()
        // no measure() implementation
      }
    });
    performanceMonitoring = new PerformanceMonitoring(mockConfig, mockSdk);
    performanceMonitoring.start();
    // Fix: assert the call itself is safe instead of the tautological
    // `expect(true).toBe(true)` the test previously used, which passed even
    // if measure() threw before reaching it... actually could never fail.
    expect(() => performanceMonitoring.measure('test')).not.toThrow();
  });
});
});