// Package: @segment/analytics-node (compiled publisher module)
// Registry: https://www.npmjs.com/package/@segment/analytics-node
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Publisher = void 0;
const analytics_core_1 = require("@segment/analytics-core");
const create_url_1 = require("../../lib/create-url");
const analytics_generic_utils_1 = require("@segment/analytics-generic-utils");
const context_batch_1 = require("./context-batch");
const token_manager_1 = require("../../lib/token-manager");
/**
 * Returns a promise that resolves after the given delay.
 * @param {number} timeoutInMs - Milliseconds to wait before resolving.
 * @returns {Promise<void>} resolves with no value once the timer fires.
 */
function sleep(timeoutInMs) {
    return new Promise((done) => {
        setTimeout(done, timeoutInMs);
    });
}
// Shared no-op, used to swallow rejections from fire-and-forget send() calls.
function noop() { }
/**
 * The Publisher is responsible for batching events and sending them to the Segment API.
 */
class Publisher {
    // Timer handle for the interval-based flush of the current batch.
    pendingFlushTimeout;
    // The currently open batch accepting new events (undefined when none).
    _batch;
    // Milliseconds a batch may stay open before being force-sent.
    _flushInterval;
    // Maximum number of events per batch (clamped to at least 1).
    _flushAt;
    // Number of retry attempts made after the initial send attempt.
    _maxRetries;
    // Fully-formatted batch endpoint URL.
    _url;
    // Set by flush() during shutdown: events still pending across the client.
    _flushPendingItemsCount;
    // Per-request timeout in milliseconds (defaults to 10000).
    _httpRequestTimeout;
    // Emits 'http_request' diagnostic events before each request.
    _emitter;
    // When true, HTTP requests are skipped and events resolve immediately.
    _disable;
    _httpClient;
    _writeKey;
    // Present only when OAuth settings were supplied to the constructor.
    _tokenManager;
    constructor({ host, path, maxRetries, flushAt, flushInterval, writeKey, httpRequestTimeout, httpClient, disable, oauthSettings, }, emitter) {
        this._emitter = emitter;
        this._maxRetries = maxRetries;
        // A batch must hold at least one event.
        this._flushAt = Math.max(flushAt, 1);
        this._flushInterval = flushInterval;
        this._url = (0, create_url_1.tryCreateFormattedUrl)(host ?? 'https://api.segment.io', path ?? '/v1/batch');
        this._httpRequestTimeout = httpRequestTimeout ?? 10000;
        this._disable = Boolean(disable);
        this._httpClient = httpClient;
        this._writeKey = writeKey;
        if (oauthSettings) {
            // OAuth client/retries fall back to the publisher-wide settings.
            this._tokenManager = new token_manager_1.TokenManager({
                ...oauthSettings,
                httpClient: oauthSettings.httpClient ?? httpClient,
                maxRetries: oauthSettings.maxRetries ?? maxRetries,
            });
        }
    }
    /**
     * Replaces the current batch with a fresh one and schedules its
     * interval-based flush; any previously scheduled flush is cancelled.
     * @returns the new ContextBatch.
     */
    createBatch() {
        this.pendingFlushTimeout && clearTimeout(this.pendingFlushTimeout);
        const batch = new context_batch_1.ContextBatch(this._flushAt);
        this._batch = batch;
        this.pendingFlushTimeout = setTimeout(() => {
            // Detach only if this batch is still the active one — it may have
            // already been replaced via enqueue()/clearBatch().
            if (batch === this._batch) {
                this._batch = undefined;
            }
            this.pendingFlushTimeout = undefined;
            if (batch.length) {
                this.send(batch).catch(noop);
            }
        }, this._flushInterval);
        return batch;
    }
    /**
     * Cancels any scheduled interval flush and detaches the current batch
     * without sending it.
     */
    clearBatch() {
        this.pendingFlushTimeout && clearTimeout(this.pendingFlushTimeout);
        this._batch = undefined;
    }
    /**
     * Called on shutdown with the number of events still pending anywhere in
     * the client; sends the current batch when no more items are expected.
     * @param pendingItemsCount - Globally pending event count (batch + plugins).
     */
    flush(pendingItemsCount) {
        if (!pendingItemsCount) {
            // if number of pending items is 0, there will never be anything else entering the batch, since the app is closed.
            if (this._tokenManager) {
                this._tokenManager.stopPoller();
            }
            return;
        }
        this._flushPendingItemsCount = pendingItemsCount;
        // if batch is empty, there's nothing to flush, and when things come in, enqueue will handle them.
        if (!this._batch)
            return;
        // the number of globally pending items will always be larger or the same as batch size.
        // Any mismatch is because some globally pending items are in plugins.
        const isExpectingNoMoreItems = this._batch.length === pendingItemsCount;
        if (isExpectingNoMoreItems) {
            this.send(this._batch)
                .catch(noop)
                .finally(() => {
                // stop the token poller so the program can exit.
                if (this._tokenManager) {
                    this._tokenManager.stopPoller();
                }
            });
            this.clearBatch();
        }
    }
    /**
     * Enqueues the context for future delivery.
     * @param ctx - Context containing a Segment event.
     * @returns a promise that resolves with the context after the event has been delivered.
     */
    enqueue(ctx) {
        const batch = this._batch ?? this.createBatch();
        const { promise: ctxPromise, resolve } = (0, analytics_generic_utils_1.createDeferred)();
        const pendingItem = {
            context: ctx,
            resolver: resolve,
        };
        /*
        The following logic ensures that a batch is never orphaned,
        and is always sent before a new batch is created.
        Add an event to the existing batch.
        Success: Check if batch is full or no more items are expected to come in (i.e. closing). If so, send batch.
        Failure: Assume event is too big to fit in current batch - send existing batch.
        Add an event to the new batch.
        Success: Check if batch is full and send if it is.
        Failure: Event exceeds maximum size (it will never fit), fail the event.
        */
        const addStatus = batch.tryAdd(pendingItem);
        if (addStatus.success) {
            const isExpectingNoMoreItems = batch.length === this._flushPendingItemsCount;
            const isFull = batch.length === this._flushAt;
            if (isFull || isExpectingNoMoreItems) {
                this.send(batch).catch(noop);
                this.clearBatch();
            }
            return ctxPromise;
        }
        // If the new item causes the maximum event size to be exceeded, send the current batch and create a new one.
        if (batch.length) {
            this.send(batch).catch(noop);
            this.clearBatch();
        }
        const fallbackBatch = this.createBatch();
        const fbAddStatus = fallbackBatch.tryAdd(pendingItem);
        if (fbAddStatus.success) {
            const isExpectingNoMoreItems = fallbackBatch.length === this._flushPendingItemsCount;
            if (isExpectingNoMoreItems) {
                this.send(fallbackBatch).catch(noop);
                this.clearBatch();
            }
            return ctxPromise;
        }
        else {
            // this should only occur if max event size is exceeded
            ctx.setFailedDelivery({
                reason: new Error(fbAddStatus.message),
            });
            return Promise.resolve(ctx);
        }
    }
    /**
     * Delivers a batch to the Segment API, retrying transient failures with
     * backoff up to _maxRetries additional attempts. Always resolves every
     * event in the batch: on terminal failure each context is marked via
     * setFailedDelivery rather than rejecting.
     * @param batch - The batch of events to deliver.
     */
    async send(batch) {
        // During shutdown, account for the items leaving via this send.
        if (this._flushPendingItemsCount) {
            this._flushPendingItemsCount -= batch.length;
        }
        const events = batch.getEvents();
        const maxAttempts = this._maxRetries + 1;
        let currentAttempt = 0;
        while (currentAttempt < maxAttempts) {
            currentAttempt++;
            let requestedRetryTimeout;
            let failureReason;
            try {
                if (this._disable) {
                    // Disabled client: settle events without any network traffic.
                    return batch.resolveEvents();
                }
                let authString = undefined;
                if (this._tokenManager) {
                    const token = await this._tokenManager.getAccessToken();
                    if (token && token.access_token) {
                        authString = `Bearer ${token.access_token}`;
                    }
                }
                const headers = {
                    'Content-Type': 'application/json',
                    'User-Agent': 'analytics-node-next/latest',
                    ...(authString ? { Authorization: authString } : {}),
                };
                const request = {
                    url: this._url,
                    method: 'POST',
                    headers: headers,
                    body: JSON.stringify({
                        batch: events,
                        writeKey: this._writeKey,
                        sentAt: new Date(),
                    }),
                    httpRequestTimeout: this._httpRequestTimeout,
                };
                // Surface the outgoing request for observability/debugging.
                this._emitter.emit('http_request', {
                    body: request.body,
                    method: request.method,
                    url: request.url,
                    headers: request.headers,
                });
                const response = await this._httpClient.makeRequest(request);
                if (response.status >= 200 && response.status < 300) {
                    // Successfully sent events, so exit!
                    batch.resolveEvents();
                    return;
                }
                else if (this._tokenManager &&
                    (response.status === 400 ||
                        response.status === 401 ||
                        response.status === 403)) {
                    // Retry with a new OAuth token if we have OAuth data
                    // (note: 400 is retried here too, unlike the non-OAuth path below).
                    this._tokenManager.clearToken();
                    failureReason = new Error(`[${response.status}] ${response.statusText}`);
                }
                else if (response.status === 400) {
                    // https://segment.com/docs/connections/sources/catalog/libraries/server/http-api/#max-request-size
                    // Request either malformed or size exceeded - don't retry.
                    resolveFailedBatch(batch, new Error(`[${response.status}] ${response.statusText}`));
                    return;
                }
                else if (response.status === 429) {
                    // Rate limited, wait for the reset time
                    if (response.headers && 'x-ratelimit-reset' in response.headers) {
                        const rateLimitResetTimestamp = parseInt(response.headers['x-ratelimit-reset'], 10);
                        if (isFinite(rateLimitResetTimestamp)) {
                            // Header is treated as an absolute epoch-ms timestamp.
                            requestedRetryTimeout = rateLimitResetTimestamp - Date.now();
                        }
                    }
                    failureReason = new Error(`[${response.status}] ${response.statusText}`);
                }
                else {
                    // Treat other errors as transient and retry.
                    failureReason = new Error(`[${response.status}] ${response.statusText}`);
                }
            }
            catch (err) {
                // Network errors get thrown, retry them.
                failureReason = err;
            }
            // Final attempt failed, update context and resolve events.
            if (currentAttempt === maxAttempts) {
                resolveFailedBatch(batch, failureReason);
                return;
            }
            // Retry after attempt-based backoff.
            await sleep(requestedRetryTimeout
                ? requestedRetryTimeout
                : (0, analytics_core_1.backoff)({
                    attempt: currentAttempt,
                    minTimeout: 25,
                    maxTimeout: 1000,
                }));
        }
    }
}
exports.Publisher = Publisher;
/**
 * Marks every context in the batch as a failed delivery and then settles
 * all of the batch's pending event promises.
 * @param batch - The batch whose events could not be delivered.
 * @param reason - The error explaining why delivery failed.
 */
function resolveFailedBatch(batch, reason) {
    for (const ctx of batch.getContexts()) {
        ctx.setFailedDelivery({ reason });
    }
    batch.resolveEvents();
}
//# sourceMappingURL=publisher.js.map