@awesomeniko/kafka-trail
Version:
A Node.js library for managing message queues with Kafka
340 lines • 15.7 kB
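A minimal usage sketch, inferred from the public API below. The broker URL, topic object, and handler are hypothetical stand-ins: real topics are KTTopicEvent instances whose topicSettings carry the codec and DLQ options, and initConsumer may need additional kafkaSettings (e.g. a consumer group id).

import { KTMessageQueue } from "@awesomeniko/kafka-trail";

// Hypothetical topic: consuming only relies on topicSettings.topic,
// topicSettings.createDLQ, and decode() in the code below.
const ordersTopic = {
    topicSettings: { topic: "orders", createDLQ: false },
    decode: (value) => JSON.parse(value.toString()),
};

const mq = new KTMessageQueue();

// Handlers must be registered before initConsumer() is called.
mq.registerHandlers([{
    topic: ordersTopic,
    // run() receives the decoded messages, the shared ctx, the queue
    // itself, and per-run metadata (partition, lastOffset, heartBeat).
    async run(messages, ctx) {
        for (const message of messages) {
            ctx.logger.info({ message }, "order received");
        }
    },
}]);

await mq.initProducer({ kafkaSettings: { brokerUrls: ["localhost:9092"] } });
await mq.initConsumer({ kafkaSettings: { brokerUrls: ["localhost:9092"], batchConsuming: false } });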
JavaScript
import { clearInterval, setInterval } from "node:timers";
import { context, SpanKind, SpanStatusCode, trace } from "@opentelemetry/api";
import { pino } from "pino";
import { ArgumentIsRequired, NoHandlersError } from "../custom-errors/kafka-errors.js";
import { KTKafkaConsumer } from "../kafka/kafka-consumer.js";
import { KTKafkaProducer } from "../kafka/kafka-producer.js";
import { DLQKTTopic } from "../kafka/topic.js";
import { KafkaMessageKey, KafkaTopicName } from "../libs/branded-types/kafka/index.js";
import { createHandlerTraceAttributes } from "../libs/helpers/observability.js";
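/**
 * Message-queue facade: wires a Kafka producer/consumer pair to the
 * registered per-topic handlers, wraps publish/consume in OpenTelemetry
 * spans, and optionally routes failed messages to a DLQ topic.
 */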
class KTMessageQueue {
#registeredHandlers = new Map();
#ktProducer;
#ktConsumer;
#logger = console;
#ctx;
#addPayloadToTrace = false;
constructor(params) {
// `ctx` is an optional factory; guard the call so a missing factory
// falls back to a fresh context instead of throwing.
let ctx = params?.ctx?.();
if (!ctx) {
ctx = {};
}
if (!ctx.logger) {
ctx.logger = pino();
}
this.#ctx = ctx;
// Route the queue's own warnings/errors through the context logger
// rather than the `console` default.
this.#logger = ctx.logger;
this.#addPayloadToTrace = params?.tracingSettings?.addPayloadToTrace ?? false;
}
getConsumer() {
return this.#ktConsumer;
}
getProducer() {
return this.#ktProducer;
}
getAdmin() {
return this.#ktProducer?.getAdmin();
}
async initProducer(params) {
const { kafkaSettings: { brokerUrls } } = params;
if (!brokerUrls || !brokerUrls.length) {
throw new ArgumentIsRequired('brokerUrls');
}
this.#ktProducer = new KTKafkaProducer({ ...params, logger: this.#ctx.logger });
await this.#ktProducer.init();
}
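// Handlers must be registered before the consumer starts; the consuming
// mode (per-message vs. batch) is selected by kafkaSettings.batchConsuming.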
async initConsumer(params) {
const registeredHandlers = [...this.#registeredHandlers.values()];
if (registeredHandlers.length === 0) {
throw new NoHandlersError('subscribe to consumer');
}
this.#ktConsumer = new KTKafkaConsumer({ ...params, logger: this.#ctx.logger });
await this.#ktConsumer.init();
if (params.kafkaSettings.batchConsuming) {
await this.#subscribeAll();
}
else {
await this.#subscribeAllEachMessages();
}
}
async destroyAll() {
await Promise.all([
this.destroyProducer(),
this.destroyConsumer(),
]);
}
async destroyProducer() {
if (this.#ktProducer) {
await this.#ktProducer.destroy();
}
}
async destroyConsumer() {
if (this.#ktConsumer) {
await this.#ktConsumer.destroy();
}
}
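// Per-message mode: each Kafka message gets its own consume/handler spans,
// is decoded with the topic's codec, and is dispatched to the handler
// registered for its topic.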
async #subscribeAllEachMessages() {
const topicNames = [...this.#registeredHandlers.values()].map(item => item.topic.topicSettings.topic);
await this.#ktConsumer.subscribeTopic(topicNames);
await this.#ktConsumer.consumer.run({
partitionsConsumedConcurrently: 1,
eachMessage: async (eachMessagePayload) => {
const tracer = trace.getTracer('kafka-trail', '1.0.0');
const span = tracer.startSpan('kafka-trail: eachMessage', {
kind: SpanKind.CONSUMER,
attributes: {
'messaging.system': 'kafka',
'messaging.destination': topicNames,
},
});
await context.with(trace.setSpan(context.active(), span), async () => {
try {
const { topic, message, partition } = eachMessagePayload;
const topicName = KafkaTopicName.fromString(topic);
const handler = this.#registeredHandlers.get(topicName);
if (handler) {
const batchedValues = [];
let lastOffset = undefined;
if (message.value) {
const decodedMessage = handler.topic.decode(message.value);
batchedValues.push(decodedMessage);
lastOffset = message.offset;
}
const attributes = createHandlerTraceAttributes({
topicName,
partition,
lastOffset,
batchedValues,
opts: {
addPayloadToTrace: this.#addPayloadToTrace,
},
});
const span = tracer.startSpan(`kafka-trail: handler ${topicName}`, {
kind: SpanKind.CONSUMER,
attributes,
});
await context.with(trace.setSpan(context.active(), span), async () => {
try {
await handler.run(batchedValues, this.#ctx, this, {
partition,
lastOffset,
heartBeat: () => eachMessagePayload.heartbeat(),
});
}
catch (err) {
let errorMessage = '';
if (err instanceof Error) {
errorMessage = err.message;
this.#logger.error(err);
}
// Mirror the publish paths: record the failure on the handler span.
span.recordException(err);
span.setStatus({ code: SpanStatusCode.ERROR, message: errorMessage || String(err) });
if (handler.topic.topicSettings.createDLQ) {
const Topic = DLQKTTopic(handler.topic.topicSettings);
const Payload = Topic({
originalOffset: lastOffset,
originalTopic: topicName,
originalPartition: partition,
key: KafkaMessageKey.fromString(message.key?.toString()),
value: batchedValues,
errorMessage,
failedAt: Date.now(),
}, {
messageKey: KafkaMessageKey.NULL,
meta: {}
});
await this.publishSingleMessage(Payload);
}
else {
throw err;
}
}
finally {
// End the handler span even when the error is rethrown.
span.end();
}
});
}
}
finally {
// Always end the consume span, even when a handler error propagates.
span.end();
}
});
},
});
}
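// Batch mode: collects up to batchMessageSizeToConsume decoded messages per
// batch, keeps the session alive with a manual heartbeat timer while the
// handler runs, and resolves the last processed offset on success.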
async #subscribeAll() {
const topicNames = [...this.#registeredHandlers.values()].map(item => item.topic.topicSettings.topic);
await this.#ktConsumer.subscribeTopic(topicNames);
await this.#ktConsumer.consumer.run({
eachBatchAutoResolve: false,
partitionsConsumedConcurrently: 1,
eachBatch: async (eachBatchPayload) => {
const tracer = trace.getTracer('kafka-trail', '1.0.0');
const span = tracer.startSpan('kafka-trail: eachBatch', {
kind: SpanKind.CONSUMER,
attributes: {
'messaging.system': 'kafka',
'messaging.destination': topicNames,
},
});
await context.with(trace.setSpan(context.active(), span), async () => {
try {
const { batch: { topic, messages, partition } } = eachBatchPayload;
const topicName = KafkaTopicName.fromString(topic);
const handler = this.#registeredHandlers.get(topicName);
if (handler) {
// Fire the manual heartbeat slightly before heartBeatInterval elapses
// (scaled by heartbeatEarlyFactor) to avoid session timeouts while a
// long-running handler processes the batch.
const heartBeatDelayMs = this.#ktConsumer.heartBeatInterval - Math.floor(this.#ktConsumer.heartBeatInterval * this.#ktConsumer.heartbeatEarlyFactor);
const heartBeatInterval = setInterval(async () => {
const span = tracer.startSpan('kafka-trail: manual-heartbeat', {
kind: SpanKind.CONSUMER,
attributes: {
'messaging.system': 'kafka',
'messaging.destination': topicNames,
},
});
await eachBatchPayload.heartbeat();
context.with(trace.setSpan(context.active(), span), () => {
span.end();
});
}, heartBeatDelayMs);
const batchedValues = [];
let lastOffset = undefined;
for (const message of messages) {
if (batchedValues.length < handler.topic.topicSettings.batchMessageSizeToConsume) {
if (message.value) {
const decodedMessage = handler.topic.decode(message.value);
batchedValues.push(decodedMessage);
lastOffset = message.offset;
}
}
else {
break;
}
}
const attributes = createHandlerTraceAttributes({
topicName,
partition,
lastOffset,
batchedValues,
opts: {
addPayloadToTrace: this.#addPayloadToTrace,
},
});
const span = tracer.startSpan(`kafka-trail: handler ${topicName}`, {
kind: SpanKind.CONSUMER,
attributes: attributes,
});
await context.with(trace.setSpan(context.active(), span), async () => {
try {
await handler.run(batchedValues, this.#ctx, this, {
partition,
lastOffset,
heartBeat: () => eachBatchPayload.heartbeat(),
resolveOffset: (offset) => eachBatchPayload.resolveOffset(offset),
});
}
catch (err) {
let errorMessage = '';
if (err instanceof Error) {
errorMessage = err.message;
this.#logger.error(err);
}
// Mirror the publish paths: record the failure on the handler span.
span.recordException(err);
span.setStatus({ code: SpanStatusCode.ERROR, message: errorMessage || String(err) });
if (handler.topic.topicSettings.createDLQ) {
const Topic = DLQKTTopic(handler.topic.topicSettings);
const Payload = Topic({
originalOffset: lastOffset,
originalTopic: topicName,
originalPartition: partition,
key: KafkaMessageKey.fromString(JSON.stringify(messages.map(m => m.key?.toString()))),
value: batchedValues,
errorMessage,
failedAt: Date.now(),
}, {
messageKey: KafkaMessageKey.NULL,
meta: {}
});
await this.publishSingleMessage(Payload);
}
else {
throw err;
}
}
finally {
// Stop the heartbeat timer and end the handler span even when the
// error is rethrown, so neither the timer nor the span leaks.
clearInterval(heartBeatInterval);
span.end();
}
});
if (lastOffset) {
eachBatchPayload.resolveOffset(lastOffset);
}
}
await eachBatchPayload.heartbeat();
}
finally {
// Always end the batch span, even when a handler error propagates.
span.end();
}
});
},
});
}
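// Creates every given topic (via the producer's admin client) before use.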
async initTopics(topicEvents) {
if (!this.#ktProducer) {
throw new Error("Producer field is required");
}
for (const topicEvent of topicEvents) {
if (!topicEvent) {
throw new Error("Attemt to create topic that doesn't exists (null, instead of KTTopicEvent)");
}
await this.#ktProducer.createTopic(topicEvent.topicSettings);
}
}
getRegisteredHandler(topic) {
return this.#registeredHandlers.get(topic);
}
registerHandlers(mqHandlers) {
for (const handler of mqHandlers) {
if (!this.#registeredHandlers.has(handler.topic.topicSettings.topic)) {
this.#registeredHandlers.set(handler.topic.topicSettings.topic, handler);
}
else {
this.#logger.warn(`Attempting to register an already registered handler ${handler.topic.topicSettings.topic}`);
}
}
}
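// Publishes one message inside a PRODUCER span; failures are recorded on
// the span and rethrown to the caller.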
publishSingleMessage(topic) {
const tracer = trace.getTracer('kafka-trail', '1.0.0');
const span = tracer.startSpan(`kafka-trail: publishSingleMessage ${topic.topicName}`, {
kind: SpanKind.PRODUCER,
});
return context.with(trace.setSpan(context.active(), span), async () => {
try {
const res = await this.#ktProducer.sendSingleMessage({
topicName: topic.topicName,
value: topic.message,
messageKey: topic.messageKey,
headers: topic.meta ?? {},
});
span.end();
return res;
}
catch (error) {
span.recordException(error);
span.setStatus({ code: SpanStatusCode.ERROR, message: String(error) });
span.end();
throw error;
}
});
}
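// Batch variant of publishSingleMessage for topic.messages.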
publishBatchMessages(topic) {
const tracer = trace.getTracer('kafka-trail', '1.0.0');
const span = tracer.startSpan(`kafka-trail: publishBatchMessages ${topic.topicName}`, {
kind: SpanKind.PRODUCER,
attributes: {
messageSize: topic.messages.length,
},
});
return context.with(trace.setSpan(context.active(), span), async () => {
try {
const res = await this.#ktProducer.sendBatchMessages(topic);
span.end();
return res;
}
catch (error) {
span.recordException(error);
span.setStatus({ code: SpanStatusCode.ERROR, message: String(error) });
span.end();
throw error;
}
});
}
}
export { KTMessageQueue };
//# sourceMappingURL=index.js.map