
datadog-logger-integrations

119 lines (115 loc) 3.78 kB
import { Writable } from 'node:stream';
import { client, v2 } from '@datadog/datadog-api-client';

// Parse a value as JSON; pass objects through unchanged and fall back to an
// empty object on invalid JSON.
const safeParseJson = (input) => {
  if (typeof input === "object") {
    return input;
  }
  try {
    return JSON.parse(input);
  } catch {
    return {};
  }
};

// Normalize a written chunk into an array of log objects: plain objects pass
// through, arrays are flattened recursively, and strings are split into
// newline-delimited JSON lines.
const parseStreamLine = (input) => {
  if (typeof input === "object" && !Array.isArray(input)) {
    return [input];
  }
  const splittedString = Array.isArray(input)
    ? input.flatMap((v) => {
        return parseStreamLine(v);
      })
    : input.split(/\r?\n/);
  return splittedString.filter(Boolean).map((v) => {
    return safeParseJson(v);
  });
};

// Class-field helpers emitted by the bundler.
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => key in obj
  ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value })
  : obj[key] = value;
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);

// Object-mode Writable that batches log records and ships them to the
// Datadog Logs API (v2), either on a timer or when the batch fills up.
class DataDogWritableStream extends Writable {
  constructor(config) {
    super({ objectMode: true });
    this.config = config;
    __publicField(this, "apiInstance");
    __publicField(this, "batch", []);
    __publicField(this, "timer");
    __publicField(this, "flushJob");
    const clientConfig = client.createConfiguration(this.config.ddClientConfig);
    if (config.ddServerConfig) {
      clientConfig.setServerVariables(config.ddServerConfig);
    }
    this.apiInstance = new v2.LogsApi(clientConfig);
    // sendIntervalMs === 0 disables the timer; flushing then happens in _write.
    this.timer = this.config.sendIntervalMs !== 0
      ? setInterval(async () => {
          const batchToSend = this.batch;
          this.batch = [];
          this.flushJob = this.flush(batchToSend).finally(() => {
            this.flushJob = void 0;
          });
        }, this.config.sendIntervalMs ?? 3e3)
      : void 0;
  }

  async _write(item, _encoding, callback) {
    if (this.config.debug) {
      console.log(`[DataDogWritableStream] Log received: ${item}`);
    }
    const parsedItems = parseStreamLine(item);
    if (this.config.debug) {
      console.log(
        `[DataDogWritableStream] Parsed item: ${JSON.stringify(parsedItems)}`
      );
    }
    const transformed = parsedItems.map(this.config.logMessageBuilder);
    if (this.config.debug) {
      console.log(
        `[DataDogWritableStream] Enqueue: ${JSON.stringify(transformed)}`
      );
    }
    this.batch.push(...transformed);
    // Flush immediately when the batch is full or when the timer is disabled.
    if (this.batch.length >= (this.config?.batchSize ?? 10) || this.config.sendIntervalMs === 0) {
      const batchToSend = this.batch;
      this.batch = [];
      await this.flush(batchToSend);
    }
    callback();
  }

  async _final(callback) {
    clearInterval(this.timer);
    try {
      if (this.config.debug) {
        console.log("[DataDogWritableStream] _final flush");
      }
      // Wait for an in-flight timer flush; otherwise flush what is still buffered.
      if (this.flushJob) {
        await this.flushJob;
      } else {
        const batchToSend = this.batch;
        this.batch = [];
        await this.flush(batchToSend);
      }
      callback();
    } catch (err) {
      callback(err instanceof Error ? err : new Error(JSON.stringify(err)));
    }
  }

  // Submit one batch to the Datadog Logs intake; errors are logged, not thrown.
  async flush(batch) {
    if (this.config.debug) {
      console.log(`[DataDogWritableStream] Flush: ${JSON.stringify(batch)}`);
    }
    if (batch.length === 0) return;
    try {
      const params = { body: batch, contentEncoding: "deflate" };
      if (this.config.debug) {
        console.log(
          `[DataDogWritableStream] Outgoing param: ${JSON.stringify(params)}`
        );
      }
      await this.apiInstance.submitLog(params);
      if (this.config.debug) {
        console.log("[DataDogWritableStream] Log sent");
      }
    } catch (err) {
      console.error("[DataDogWritableStream] Batch send failed:", err);
    }
  }
}

export { DataDogWritableStream as D };
//# sourceMappingURL=DataDogWritableStream-DmZdC6Mh.mjs.map
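
For reference, a minimal usage sketch (not part of the published file). It assumes the package entry point re-exports DataDogWritableStream under its full name, that DD_API_KEY holds a valid Datadog API key, and that logMessageBuilder may return any object accepted by the v2 Logs submitLog endpoint; the config fields below are read off the constructor above rather than from documented API.

import { DataDogWritableStream } from 'datadog-logger-integrations';

const ddStream = new DataDogWritableStream({
  // Passed straight to client.createConfiguration from @datadog/datadog-api-client.
  ddClientConfig: { authMethods: { apiKeyAuth: process.env.DD_API_KEY } },
  // Maps each parsed log object to the item submitted to the Logs API.
  logMessageBuilder: (log) => ({
    ddsource: 'nodejs',
    service: 'my-service', // hypothetical service name
    message: JSON.stringify(log),
  }),
  sendIntervalMs: 3000, // 0 disables the timer and flushes on every write
  batchSize: 10,
  debug: false,
});

// Accepts plain objects, arrays, or newline-delimited JSON strings.
ddStream.write({ level: 30, msg: 'hello from my-service' });
ddStream.write('{"level":50,"msg":"boom"}\n{"level":30,"msg":"recovered"}');

// end() triggers _final, which flushes whatever is still buffered.
ddStream.end();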