/*
 * aws-logs-sink
 * Stream logs to Amazon CloudWatch Logs
 * (compiled JavaScript output)
 */
;
// ---- CommonJS interop helper shims emitted by the TypeScript compiler ----
// __createBinding: re-exposes property `k` of module `m` on `o` (under the
// name `k2` when renamed). Uses a getter where Object.create exists so the
// re-export stays a live binding; otherwise falls back to a plain copy.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attaches the whole CommonJS module as the `default`
// export of the synthesized namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulates `import * as ns from "mod"` — copies every own,
// non-default property onto a fresh namespace object and sets `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __importDefault: emulates `import x from "mod"` — wraps non-ESM modules
// so the whole module becomes the `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const stream = __importStar(require("stream"));
const os = __importStar(require("os"));
const client_cloudwatch_logs_1 = require("@aws-sdk/client-cloudwatch-logs");
const yargs_1 = __importDefault(require("yargs/yargs"));
const helpers_1 = require("yargs/helpers");
/**
 * Minimal namespaced console logger for this module.
 * `debug` output is gated on the DEBUG environment variable; `error` always prints.
 */
const logger = {
    debug: (...args) => {
        if (process.env.DEBUG) {
            console.debug("[aws-logs-sink] DEBUG", ...args);
        }
    },
    error: (...args) => {
        console.error("[aws-logs-sink] ERROR", ...args);
    },
};
/**
 * Create the CloudWatch log group named in `config` if it does not already exist.
 *
 * @param config object carrying `client` (CloudWatchLogs client) and `logGroupName`.
 * @returns true when the group was created, false when it already existed.
 * @throws any AWS error other than ResourceAlreadyExistsException.
 */
async function ensureLogGroup(config) {
    const { client, logGroupName } = config;
    try {
        const command = new client_cloudwatch_logs_1.CreateLogGroupCommand({
            logGroupName,
        });
        await client.send(command);
        return true;
    }
    catch (err) {
        // A pre-existing group is the expected "already set up" case.
        if (err.name === "ResourceAlreadyExistsException") {
            return false;
        }
        throw err;
    }
}
/**
 * Create the CloudWatch log stream named in `config` if it does not already exist.
 *
 * @param config object carrying `client`, `logGroupName` and `logStreamName`.
 * @returns true when the stream was created, false when it already existed.
 * @throws any AWS error other than ResourceAlreadyExistsException.
 */
async function ensureLogStream(config) {
    const { client, logGroupName, logStreamName } = config;
    try {
        const command = new client_cloudwatch_logs_1.CreateLogStreamCommand({
            logGroupName,
            logStreamName,
        });
        await client.send(command);
        return true;
    }
    catch (err) {
        // A pre-existing stream is the expected "already set up" case.
        if (err.name === "ResourceAlreadyExistsException") {
            return false;
        }
        throw err;
    }
}
/**
 * Look up the current upload sequence token for a log stream.
 *
 * Fixes two defects in the original:
 * - DescribeLogStreams matches by *prefix*, so the first result may be a
 *   different stream with a longer name; we now select the exact name.
 * - An empty `logStreams` array previously crashed with a TypeError on
 *   `logStreams[0]`; it now raises the descriptive error instead.
 *
 * @param config object carrying `client`, `logGroupName` and `logStreamName`.
 * @returns the stream's uploadSequenceToken (may be undefined for a fresh stream).
 * @throws Error when the stream cannot be found in the describe response.
 */
async function getLogStreamSequenceToken(config) {
    const { client, logGroupName, logStreamName } = config;
    const { logStreams } = await client.send(new client_cloudwatch_logs_1.DescribeLogStreamsCommand({
        logGroupName,
        logStreamNamePrefix: logStreamName,
    }));
    const match = logStreams?.find((s) => s.logStreamName === logStreamName);
    if (!match) {
        throw new Error(`Failed to query log stream ${logGroupName}/${logStreamName}`);
    }
    return match.uploadSequenceToken;
}
/**
 * Head of the pipeline: a pass-through Transform that holds back the first
 * chunk(s) until the target log group and log stream are known to exist.
 *
 * @param config object carrying `client`, `logGroupName` and `logStreamName`
 *               (consumed by ensureLogGroup/ensureLogStream).
 * @returns a stream.Transform that forwards chunks unchanged once setup completes,
 *          and destroys itself if setup fails.
 */
function cloudWatchInit(config) {
    // Kick off group/stream creation immediately; this promise settles once.
    const logStreamExists = Promise.resolve()
        .then(() => ensureLogGroup(config))
        .then(() => ensureLogStream(config));
    // First invocation waits on setup, then replaces itself with a plain
    // pass-through so later chunks skip the promise machinery entirely.
    let forwardChunk = (chunk, cb) => {
        logStreamExists
            .then(() => {
            forwardChunk = (chunk, cb) => cb(null, chunk);
            cb(null, chunk);
        })
            .catch(cb);
    };
    return new stream.Transform({
        transform: function (chunk, _encoding, cb) {
            forwardChunk(chunk, cb);
        },
        construct: function (cb) {
            // Surface setup failure even if no chunk is ever written through.
            logStreamExists.catch((err) => {
                this.destroy(err);
            });
            cb();
        },
    });
}
/**
 * Module entry point: wire up the full pipeline
 * (init gate -> line assembly -> CloudWatch writer) and return its writable head.
 *
 * @param config { logGroupName, logStreamName, client?, region?, profile?,
 *                 flushInterval?, tee?, eol? }
 * @returns the head Transform; pipe your log source into it.
 */
function sink(config) {
    // The SDK's default credential chain reads the profile from the environment.
    if (config.profile) {
        process.env.AWS_PROFILE = config.profile;
    }
    const client = config.client ??
        new client_cloudwatch_logs_1.CloudWatchLogsClient({
            region: config.region,
        });
    const resolvedConfig = { ...config, client };
    const pipelineHead = cloudWatchInit(resolvedConfig);
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    const swallowPipelineResult = () => { };
    stream.pipeline(pipelineHead, assembleLines(config.eol), cloudWatchLogsSink(resolvedConfig), swallowPipelineResult);
    return pipelineHead;
}
exports.default = sink;
/**
 * Terminal Writable: buffers incoming line chunks as CloudWatch log events and
 * flushes them periodically (and on stream end) via sendLogsToCloudWatch.
 *
 * @param config object carrying `client`, `logGroupName`, `logStreamName`,
 *               optional `flushInterval` (milliseconds, default 1000) and `tee`.
 * @returns a stream.Writable that destroys itself on the first flush failure.
 */
function cloudWatchLogsSink(config) {
    // Pending events, appended by writev and drained in batches by the flusher.
    const buffer = [];
    let flushTimer;
    // Token returned by the last PutLogEvents call; undefined before the first.
    let sequenceToken;
    // Serializes flushes: each new flush awaits the previous one first.
    let flushingBuffer = Promise.resolve(null);
    async function flushBufferToCloudWatchLogs() {
        // NOTE(review): if a flush rejects, `flushingBuffer` stays rejected and
        // every later call re-throws that error — the stream is destroyed on the
        // first failure anyway, so this looks intentional; confirm.
        await flushingBuffer;
        flushingBuffer = doFlushBufferToCloudWatchLogs();
        return flushingBuffer;
    }
    async function doFlushBufferToCloudWatchLogs() {
        let logEvents;
        // Drain the buffer in batches of up to 1000 events per PutLogEvents call.
        while ((logEvents = buffer.splice(0, 1000)).length) {
            sequenceToken = await sendLogsToCloudWatch({
                ...config,
                logEvents,
                sequenceToken,
            });
        }
        return null;
    }
    return new stream.Writable({
        construct: function (cb) {
            // Periodic background flush; errors destroy the stream so the
            // surrounding pipeline sees them.
            flushTimer = setInterval(() => {
                flushBufferToCloudWatchLogs().catch((err) => this.destroy(err));
            }, config.flushInterval ?? 1000);
            cb();
        },
        destroy: function (err, cb) {
            // Stop the background flusher on teardown (error or normal close).
            clearInterval(flushTimer);
            cb(err);
        },
        final: function (cb) {
            // Push out whatever is still buffered before the stream finishes.
            flushBufferToCloudWatchLogs().then(cb).catch(cb);
        },
        writev: function (chunks, cb) {
            for (const { chunk } of chunks) {
                const text = chunk.toString();
                // Optionally mirror every line to stdout.
                if (config.tee) {
                    console.info(text);
                }
                // Skip empty chunks; timestamp events at enqueue time.
                if (text) {
                    buffer.push({
                        message: text,
                        timestamp: Date.now(),
                    });
                }
            }
            cb();
        },
    });
}
/**
 * Perform a single PutLogEvents call.
 *
 * @param props object carrying `client`, `logGroupName`, `logStreamName`,
 *              `logEvents` and the current `sequenceToken`.
 * @returns the nextSequenceToken to use for the following batch.
 */
async function doSendLogstoCloudWatch(props) {
    const command = new client_cloudwatch_logs_1.PutLogEventsCommand({
        logEvents: props.logEvents,
        logGroupName: props.logGroupName,
        logStreamName: props.logStreamName,
        sequenceToken: props.sequenceToken,
    });
    const response = await props.client.send(command);
    return response.nextSequenceToken;
}
/**
 * Send a batch of log events, transparently recovering from a stale or
 * duplicate sequence token by re-querying it and retrying exactly once.
 *
 * @param props same shape as doSendLogstoCloudWatch's props.
 * @returns the nextSequenceToken for the following batch.
 * @throws any error that is not a recoverable sequence-token problem.
 */
async function sendLogsToCloudWatch(props) {
    const recoverable = new Set([
        "DataAlreadyAcceptedException",
        "InvalidSequenceTokenException",
    ]);
    try {
        return await doSendLogstoCloudWatch(props);
    }
    catch (err) {
        if (!recoverable.has(err.name ?? "__no_name_in_error__")) {
            throw err;
        }
        // The service rejected our token; fetch the current one and retry once.
        logger.debug("Syncing sequenceToken, was:", props.sequenceToken);
        const sequenceToken = await getLogStreamSequenceToken(props);
        logger.debug("New sequenceToken is:", sequenceToken);
        return await doSendLogstoCloudWatch({ ...props, sequenceToken });
    }
}
/**
 * Transform stream that re-chunks arbitrary byte input into complete lines.
 *
 * Each pushed chunk is exactly one line *including* its trailing EOL marker.
 * Fixes the original, which only checked whether the accumulated data *ended*
 * with EOL: a chunk containing several lines ("a\nb\n") was forwarded as one
 * multi-line message, and a chunk like "a\nb" buffered the already-complete
 * first line until a later chunk happened to end on an EOL boundary.
 *
 * @param eol line terminator to split on (defaults to the platform os.EOL;
 *            multi-byte terminators such as "\r\n" are handled, even split
 *            across chunk boundaries).
 * @returns a stream.Transform emitting one Buffer per line; any trailing
 *          unterminated data is flushed as-is when the stream ends.
 */
function assembleLines(eol = os.EOL) {
    const eolAsBuffer = Buffer.from(eol);
    const empty = Buffer.alloc(0);
    let buffered = empty;
    return new stream.Transform({
        transform: function (chunk, _encoding, cb) {
            let pending = Buffer.concat([buffered, chunk]);
            // Emit every complete line present in the accumulated data.
            let eolIndex;
            while ((eolIndex = pending.indexOf(eolAsBuffer)) !== -1) {
                const end = eolIndex + eolAsBuffer.length;
                this.push(pending.subarray(0, end));
                pending = pending.subarray(end);
            }
            // Keep the (possibly empty) partial tail for the next chunk.
            buffered = pending.length ? pending : empty;
            cb();
        },
        final: function (cb) {
            // Flush a trailing partial line so no input is silently dropped.
            if (buffered.length) {
                this.push(buffered);
            }
            cb();
        },
    });
}
// CLI entry point: parse arguments and pipe stdin into the CloudWatch sink.
if (require.main === module) {
    const args = (0, yargs_1.default)((0, helpers_1.hideBin)(process.argv))
        .command("$0 <log-group-name> <log-stream-name> [Options]", "Stream logs from stdin to AWS CloudWatch logs.")
        .string("log-group-name")
        .string("log-stream-name")
        .number("flush-interval")
        .alias("f", "flush-interval")
        .describe("f", "Flush to CloudWatch every X seconds")
        .default("f", 1)
        .boolean("tee")
        .describe("tee", "Also print all input to stdout")
        .string("eol")
        .describe("eol", "Line termination character(s)")
        .default("eol", os.EOL, JSON.stringify(os.EOL))
        .string("profile")
        .describe("profile", "AWS profile to use")
        .string("region")
        .describe("region", "AWS region to use")
        .demandCommand()
        .parseSync();
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const logGroupName = args["log-group-name"];
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const logStreamName = args["log-stream-name"];
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const flushIntervalSeconds = args["flush-interval"];
    const { tee, region, profile, eol } = args;
    const _sink = sink({
        logGroupName,
        logStreamName,
        // The CLI flag is documented in seconds (default 1), but sink() feeds
        // flushInterval straight into setInterval, which takes milliseconds.
        // Convert here so "--flush-interval 1" means 1 s — previously the raw
        // value was passed, making the default flush every 1 ms.
        flushInterval: flushIntervalSeconds === undefined
            ? undefined
            : flushIntervalSeconds * 1000,
        tee,
        profile,
        region,
        eol,
    });
    stream.pipeline(process.stdin, _sink, exit);
}
/**
 * Final pipeline callback: on failure, report the error and terminate the
 * process with a non-zero status; on success, emit a debug message.
 *
 * @param err pipeline error, if any.
 */
function exit(err) {
    if (!err) {
        logger.debug("Completed successfully");
        return;
    }
    logger.error(err);
    process.exit(1);
}