/*
 * @perceptr/web-sdk
 * Version: (unspecified)
 * Perceptr Web SDK for recording and monitoring user sessions
 * 412 lines (411 loc) • 19.3 kB • JavaScript
 */
"use strict";
// Down-leveled async/await helper emitted by the TypeScript compiler: drives the
// generator produced from an `async` function body, adapting each awaited value
// into a promise of type P (defaults to the global Promise constructor).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain value in a P-promise unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done; resolve with its final return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.EventBuffer = void 0;
const uuid_1 = require("uuid");
const sessionrecording_utils_1 = require("./utils/sessionrecording-utils");
const defaults_1 = require("./common/defaults");
const logger_1 = require("./utils/logger");
class EventBuffer {
    /**
     * In-memory buffer of session-recording events. Events are flushed to the
     * server through the `onFlush` callback on a timer, when the buffer grows
     * large or old, and are persisted to localStorage on page hide/unload so
     * they survive reloads. Flush failures trigger exponential backoff.
     *
     * @param {{ staleThreshold?: number }} config - Only `staleThreshold` (ms) is
     *   read; it controls when a resumed page starts a brand-new session.
     * @param {(snapshot: object) => Promise<void>} onFlush - Transmits one snapshot batch.
     */
    constructor(config, onFlush) {
        var _a;
        this.buffer = [];
        this.bufferSize = 0; // estimated byte size of `buffer`
        this.isFlushInProgress = false;
        this.flushFailures = 0; // consecutive failures, drives exponential backoff
        this.backoffUntil = 0; // epoch ms before which flushes are skipped
        this.unloadHandlerAttached = false;
        this.onFlush = onFlush;
        // Internal configuration - not exposed to users
        this.config = {
            maxBufferSize: 1024 * 1024, // 1MB default
            flushInterval: 60000, // 1 minute default
            maxBufferAge: 300000, // 5 minutes default
            compressionThreshold: 100 * 1024, // 100KB
            useCompression: false,
            staleBufferThreshold: (_a = config.staleThreshold) !== null && _a !== void 0 ? _a : 3600000, // 1 hour default
            backoffInterval: 5000, // 5 seconds initial backoff
            maxBackoffInterval: 300000, // 5 minutes max backoff
            persistenceEnabled: true,
            persistenceKey: "perceptr_buffer_data",
        };
        this.startFlushTimer();
        this.setupUnloadHandler();
        // Fire-and-forget: a rejected resume must not escape the constructor
        // as an unhandled promise rejection.
        this.handleSessionResume().catch((error) => logger_1.logger.error("Failed to resume session:", error));
    }
    /** Resets all per-session state under a freshly generated session id. */
    startNewSession() {
        this.sessionId = (0, uuid_1.v4)();
        this.startTime = Date.now();
        this.lastFlushTime = this.startTime;
        this.buffer = [];
        this.bufferSize = 0;
        this.lastBatchEndTime = undefined;
    }
    /**
     * Handles session resumption logic: checks persisted buffers and determines if a new session is needed.
     * If the most recent buffer is older than `staleBufferThreshold`, generates a
     * new sessionId and resets buffer state; otherwise the previous session
     * continues. Persisted buffers are flushed to the server either way.
     */
    handleSessionResume() {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.config.persistenceEnabled || typeof localStorage === "undefined")
                return;
            const persistedDataStr = localStorage.getItem(this.config.persistenceKey);
            if (!persistedDataStr) {
                this.startNewSession();
                logger_1.logger.debug("Starting new session due to no persisted data");
                return;
            }
            let persistedData;
            try {
                persistedData = JSON.parse(persistedDataStr);
            }
            catch (error) {
                // Corrupted storage: discard it and start fresh instead of rejecting.
                localStorage.removeItem(this.config.persistenceKey);
                this.startNewSession();
                logger_1.logger.debug("Starting new session due to corrupted buffer data");
                return;
            }
            if (!Array.isArray(persistedData) || persistedData.length === 0) {
                this.startNewSession();
                logger_1.logger.debug("Starting new session due to malformed buffer data");
                return;
            }
            // Find the most recent buffer
            const mostRecent = persistedData.reduce((a, b) => a.endTime > b.endTime ? a : b);
            const now = Date.now();
            if (now - mostRecent.endTime > this.config.staleBufferThreshold) {
                this.startNewSession();
                logger_1.logger.debug("Starting new session due to inactivity beyond stale threshold");
            }
            else {
                // Continue with previous sessionId
                this.sessionId = mostRecent.sessionId;
                this.startTime = mostRecent.startTime;
                logger_1.logger.debug("Continuing previous session (inactivity below stale threshold)");
            }
            // flush any persisted data
            yield this.flushStoredBuffers(persistedData);
        });
    }
    /** Attaches (once) unload/visibility handlers that persist or resume the buffer. */
    setupUnloadHandler() {
        if (typeof window === "undefined" || this.unloadHandlerAttached)
            return;
        // Handle browser close/refresh
        window.addEventListener("beforeunload", () => {
            this.persistBufferData();
        });
        // Handle mobile browser pausing
        document.addEventListener("visibilitychange", () => {
            if (document.visibilityState === "hidden") {
                this.persistBufferData();
            }
            else if (document.visibilityState === "visible") {
                // Fire-and-forget; avoid an unhandled rejection inside the event handler.
                this.handleSessionResume().catch((error) => logger_1.logger.error("Failed to resume session:", error));
            }
        });
        this.unloadHandlerAttached = true;
    }
    /**
     * Sends each persisted buffer through `onFlush`, removing it from storage on
     * success and keeping it for a later retry on failure.
     * @param {Array<object>} storedBuffers - Parsed contents of the persistence key.
     */
    flushStoredBuffers(storedBuffers) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                logger_1.logger.debug(`Found ${storedBuffers.length} persisted buffer(s) to send`);
                // Process each persisted buffer (iterate a copy; we splice the original)
                for (const data of [...storedBuffers]) {
                    // Skip and remove empty buffers
                    if (!Array.isArray(data.events) || data.events.length === 0) {
                        storedBuffers.splice(storedBuffers.indexOf(data), 1);
                        logger_1.logger.debug(`Removed empty persisted buffer for session ${data.sessionId}`);
                        continue;
                    }
                    try {
                        // For persisted buffers from the current session, determine if we should use
                        // continuous chronology or start fresh based on the stored start time
                        const isSameSession = data.sessionId === this.sessionId;
                        const useContiguousTime = isSameSession && this.lastBatchEndTime;
                        // Choose appropriate start time
                        // - For current session with lastBatchEndTime set: use lastBatchEndTime
                        // - Otherwise: use the persisted start time
                        const batchStartTime = useContiguousTime
                            ? this.lastBatchEndTime
                            : data.startTime;
                        // Create a snapshot from the persisted data
                        const snapshot = {
                            isSessionEnded: !isSameSession,
                            sessionId: data.sessionId,
                            batchId: data.batchId,
                            startTime: batchStartTime || data.startTime, // Fallback to original start time if needed
                            endTime: data.endTime,
                            size: data.size,
                            data: data.events,
                            metadata: {
                                // Mirror the live-flush metadata shape: report the persisted
                                // payload size (was erroneously a random UUID in `bufferCount`).
                                bufferSize: data.size,
                                eventCount: data.events.length,
                                compressed: false,
                                persisted: true,
                            },
                            userIdentity: data.userIdentity,
                        };
                        // Send the persisted data
                        yield this.onFlush(snapshot);
                        // Update lastBatchEndTime only for current session
                        if (isSameSession) {
                            this.lastBatchEndTime = data.endTime;
                        }
                        storedBuffers.splice(storedBuffers.indexOf(data), 1);
                        logger_1.logger.debug(`Successfully sent persisted buffer for session ${data.sessionId}`);
                    }
                    catch (error) {
                        // Keep this buffer in storage; it will be retried on the next resume.
                        logger_1.logger.error(`Failed to send persisted buffer for session ${data.sessionId}:`, error);
                    }
                }
                // Write back whatever could not be sent; clear the key when all done.
                if (storedBuffers.length > 0) {
                    localStorage.setItem(this.config.persistenceKey, JSON.stringify(storedBuffers));
                }
                else {
                    localStorage.removeItem(this.config.persistenceKey);
                }
                logger_1.logger.debug(`Processed ${storedBuffers.length} persisted events`);
            }
            catch (error) {
                logger_1.logger.error("Error processing persisted buffer data:", error);
                // Clear potentially corrupted data
                localStorage.removeItem(this.config.persistenceKey);
            }
        });
    }
    /**
     * Snapshots the in-memory buffer into localStorage so events survive a
     * refresh/close. Merges into an existing record for the same session.
     */
    persistBufferData() {
        if (!this.config.persistenceEnabled ||
            typeof localStorage === "undefined" ||
            this.buffer.length === 0)
            return;
        try {
            // Get existing persisted data
            let persistedData = [];
            const existingData = localStorage.getItem(this.config.persistenceKey);
            if (existingData) {
                try {
                    persistedData = JSON.parse(existingData);
                    if (!Array.isArray(persistedData)) {
                        persistedData = [];
                    }
                }
                catch (e) {
                    // Unreadable existing data: best-effort overwrite with fresh state.
                    persistedData = [];
                }
            }
            const existingSession = persistedData.find((data) => data.sessionId === this.sessionId);
            if (existingSession) {
                // Refresh every derived field, not just the events, so the stored
                // record stays consistent (stale endTime would break the staleness
                // check in handleSessionResume).
                existingSession.events = [...this.buffer];
                existingSession.endTime = Date.now();
                existingSession.size = this.bufferSize;
                existingSession.userIdentity = this.userIdentity;
            }
            else {
                // Add current buffer to persisted data
                persistedData.push({
                    sessionId: this.sessionId,
                    batchId: (0, uuid_1.v4)(),
                    startTime: this.startTime,
                    endTime: Date.now(),
                    events: [...this.buffer],
                    userIdentity: this.userIdentity,
                    size: this.bufferSize,
                });
            }
            // Limit the number of persisted sessions (keep last 3)
            if (persistedData.length > 3) {
                persistedData = persistedData.slice(-3);
            }
            // Store the data
            // TODO: we can use IndexedDB instead of localStorage for persistence
            localStorage.setItem(this.config.persistenceKey, JSON.stringify(persistedData));
            logger_1.logger.debug(` Persisted ${this.buffer.length} events to storage`);
        }
        catch (error) {
            logger_1.logger.error("Failed to persist buffer data:", error);
        }
    }
    /**
     * Appends one event to the buffer and schedules an idle-time flush when the
     * buffer is nearly full (90% of maxBufferSize) or older than maxBufferAge.
     * Internal SDK console logs are filtered out to avoid feedback loops.
     */
    addEvent(event) {
        // Skip internal SDK logs
        if (this.isInternalSdkLog(event)) {
            return;
        }
        // Estimate the size of the event
        const eventSize = (0, sessionrecording_utils_1.estimateSize)(event);
        const now = Date.now();
        // Add the event to the buffer
        this.buffer.push(event);
        this.bufferSize += eventSize;
        // Check if we should attempt to flush
        const shouldAttemptFlush =
        // Buffer is getting full
        this.bufferSize > this.config.maxBufferSize * 0.9 ||
            // Buffer is too old
            now - this.lastFlushTime > this.config.maxBufferAge;
        // Only attempt to flush if we're not in a backoff period and not already flushing
        if (shouldAttemptFlush &&
            !this.isFlushInProgress &&
            now > this.backoffUntil) {
            logger_1.logger.debug("Scheduling buffer flush");
            (0, sessionrecording_utils_1.scheduleIdleTask)(() => this.flush());
        }
    }
    /** Appends a batch of events, applying the same filtering/flush logic per event. */
    addEvents(events) {
        for (const event of events) {
            this.addEvent(event);
        }
    }
    /**
     * Set the user identity for this buffer
     */
    setUserIdentity(identity) {
        this.userIdentity = identity;
    }
    /**
     * Sends the buffered events as one snapshot batch via `onFlush`.
     * No-op while another flush is in progress, and skipped during a failure
     * backoff window unless the session is ending.
     * @param {boolean} [isSessionEnded=false] - Marks the batch as final for the session.
     */
    flush() {
        return __awaiter(this, arguments, void 0, function* (isSessionEnded = false) {
            if (this.buffer.length === 0 || this.isFlushInProgress)
                return;
            const now = Date.now();
            // Check if we're in a backoff period
            if (now < this.backoffUntil && !isSessionEnded) {
                logger_1.logger.debug(`In backoff period, skipping flush. Will retry in ${Math.ceil((this.backoffUntil - now) / 1000)}s`);
                return;
            }
            this.isFlushInProgress = true;
            const bufferData = [...this.buffer];
            const bufferSize = this.bufferSize;
            logger_1.logger.debug(`Flushing buffer with ${bufferData.length} events (${bufferSize} bytes)`);
            // Determine if compression should be used
            const shouldCompress = this.config.useCompression &&
                bufferSize > this.config.compressionThreshold;
            // Use appropriate startTime - either original session start time (for first batch)
            // or the end time of the last batch (for subsequent batches)
            const batchStartTime = this.lastBatchEndTime || this.startTime;
            // Create a single snapshot buffer
            const snapshot = {
                isSessionEnded,
                sessionId: this.sessionId,
                batchId: (0, uuid_1.v4)(),
                startTime: batchStartTime,
                endTime: now,
                size: bufferSize,
                data: bufferData,
                metadata: {
                    bufferSize: this.bufferSize,
                    eventCount: bufferData.length,
                    compressed: shouldCompress,
                },
                userIdentity: this.userIdentity,
            };
            // Compress data if needed
            if (shouldCompress) {
                yield this.compressSnapshot(snapshot);
            }
            // Send the data to the server
            try {
                yield this.onFlush(snapshot);
                // Update lastBatchEndTime for the next batch
                this.lastBatchEndTime = now;
                // Success! Drop only the events that were sent: events appended while
                // `onFlush` was awaiting must survive for the next batch.
                this.buffer = this.buffer.slice(bufferData.length);
                this.recalculateBufferSize();
                this.lastFlushTime = now;
                this.flushFailures = 0;
                this.backoffUntil = 0;
                // Reset the flush timer
                this.resetFlushTimer();
                logger_1.logger.debug(`Flushed ${bufferData.length} events to server`);
            }
            catch (error) {
                // Increment failure count and implement exponential backoff
                this.flushFailures++;
                // Calculate backoff time with exponential increase
                const backoffTime = Math.min(this.config.backoffInterval * Math.pow(2, this.flushFailures - 1), this.config.maxBackoffInterval);
                this.backoffUntil = now + backoffTime;
                logger_1.logger.error(`Failed to flush buffer (attempt ${this.flushFailures}). Backing off for ${backoffTime / 1000}s until ${new Date(this.backoffUntil).toISOString()}`);
                // If buffer is getting too large despite failures, we might need to drop some events
                if (this.bufferSize > defaults_1.SEVEN_MEGABYTES * 20) {
                    const eventsToKeep = Math.floor(this.buffer.length * 0.8); // Keep 80% of events
                    logger_1.logger.debug(`Buffer too large after flush failures. Dropping ${this.buffer.length - eventsToKeep} oldest events`);
                    this.buffer = this.buffer.slice(-eventsToKeep);
                    this.recalculateBufferSize();
                }
                logger_1.logger.error("Failed to flush buffer:", error);
            }
            finally {
                this.isFlushInProgress = false;
            }
        });
    }
    /** Recomputes `bufferSize` from scratch after the buffer array is replaced. */
    recalculateBufferSize() {
        this.bufferSize = this.buffer.reduce((size, event) => size + (0, sessionrecording_utils_1.estimateSize)(event), 0);
    }
    /**
     * Gzip-compresses `snapshot.data` in place (base64-encoded) when the browser
     * supports CompressionStream; on failure the snapshot is left uncompressed
     * and annotated accordingly. Resolves either way.
     */
    compressSnapshot(snapshot) {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.config.useCompression)
                return;
            try {
                // Use CompressionStream if available (modern browsers)
                if (typeof CompressionStream !== "undefined") {
                    const jsonString = JSON.stringify(snapshot.data);
                    const encoder = new TextEncoder();
                    const uint8Array = encoder.encode(jsonString);
                    const compressedStream = new Blob([uint8Array])
                        .stream()
                        .pipeThrough(new CompressionStream("gzip"));
                    const compressedBlob = yield new Response(compressedStream).blob();
                    const compressedBuffer = yield compressedBlob.arrayBuffer();
                    // Convert to base64 for transmission.
                    // NOTE(review): spreading a large Uint8Array into fromCharCode can
                    // exceed the argument/stack limit for multi-MB payloads; the catch
                    // below falls back to uncompressed data if that happens.
                    const base64 = btoa(String.fromCharCode(...new Uint8Array(compressedBuffer)));
                    // Replace data with compressed version
                    snapshot.data = base64;
                    snapshot.metadata = Object.assign(Object.assign({}, snapshot.metadata), { compressed: true, originalSize: snapshot.size, compressionRatio: (compressedBuffer.byteLength / snapshot.size).toFixed(2) });
                    snapshot.size = compressedBuffer.byteLength;
                }
            }
            catch (error) {
                logger_1.logger.debug("[SDK] Compression failed, sending uncompressed data:", error);
                snapshot.metadata = Object.assign(Object.assign({}, snapshot.metadata), { compressed: false, originalSize: snapshot.size, compressionRatio: 0 });
            }
        });
    }
    /** Stops the flush timer and attempts a final session-ending flush. */
    destroy() {
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
        }
        // Persist any remaining events before destroying; don't let the async
        // flush reject unhandled.
        if (this.buffer.length > 0) {
            this.flush(true).catch((error) => logger_1.logger.error("Failed to flush on destroy:", error));
        }
        // Reset batch time tracking
        this.lastBatchEndTime = undefined;
    }
    /** Arms the periodic flush timeout (re-armed after each successful flush). */
    startFlushTimer() {
        this.flushTimer = setTimeout(() => {
            logger_1.logger.debug("[SDK] Flushing buffer due to timer");
            this.flush();
        }, this.config.flushInterval);
    }
    /** Restarts the periodic flush timeout from zero. */
    resetFlushTimer() {
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
        }
        this.startFlushTimer();
    }
    /**
     * Check if an event is an internal SDK log that should be filtered out
     */
    isInternalSdkLog(event) {
        // Check if it's a console event (type 6 in rrweb); guard `data` so a
        // malformed event cannot throw from the addEvent hot path.
        if (event.type === 6 &&
            event.data &&
            event.data.plugin === defaults_1.CONSOLE_LOG_PLUGIN_NAME) {
            // Check if it's a console log with SDK prefix
            const consoleData = event.data.payload;
            if (consoleData.payload && Array.isArray(consoleData.payload)) {
                // Check the first argument of the console log
                const firstArg = consoleData.payload[0];
                // If it's a string containing [SDK], it's an internal log
                if (typeof firstArg === "string" && firstArg.includes("[SDK]")) {
                    return true;
                }
            }
        }
        return false;
    }
}
exports.EventBuffer = EventBuffer;