// @jescrich/nestjs-workflow: Workflow and State Machines for NestJS
// client.js (compiled TypeScript output)
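// Standard TypeScript emit helpers for decorator and metadata support.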
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var KafkaClient_1;
Object.defineProperty(exports, "__esModule", { value: true });
exports.KafkaClient = void 0;
const common_1 = require("@nestjs/common");
const kafkajs_1 = require("kafkajs");
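/**
 * Thin Kafka client built on kafkajs: produces events, consumes them
 * with retry and dead-letter handling, and exposes a health check.
 */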
let KafkaClient = KafkaClient_1 = class KafkaClient {
clientId;
brokers;
logger = new common_1.Logger(KafkaClient_1.name);
kafka;
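// clientId identifies this client to the brokers; brokers is a
// comma-separated list (e.g. "host1:9092,host2:9092") and defaults to
// localhost:9092 when not provided.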
constructor(clientId, brokers) {
this.clientId = clientId;
this.brokers = brokers;
this.kafka = new kafkajs_1.Kafka({
clientId: clientId,
brokers: brokers?.split(',') ?? ['localhost:9092'],
logLevel: kafkajs_1.logLevel.WARN,
});
}
async produce(topic, key, event) {
// A short-lived producer per call: connect, send, disconnect.
const producer = this.kafka.producer();
try {
await producer.connect();
await producer.send({
topic,
messages: [
{
key: key,
value: JSON.stringify(event),
},
],
});
this.logger.log(`Event dispatched`, topic, key);
}
catch (e) {
this.logger.error(`Error dispatching event. ${key}`, e, topic, key);
throw new Error(`Error dispatching event. ${key}`);
}
finally {
// Disconnect on both success and failure so a failed send does
// not leak the connection.
await producer.disconnect();
}
}
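/**
 * Consumes a topic with manual offset resolution. A failed message is
 * retried up to RETRY_LIMIT times, pausing the partition for
 * RETRY_DELAY_MS between attempts, and is then forwarded to the
 * `<topic>-dlq` dead-letter topic.
 */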
async consume(topic, groupId, handler) {
const consumer = this.kafka.consumer({ groupId });
const RETRY_DELAY_MS = 30000;
const RETRY_LIMIT = 3;
const retryCounts = new Map();
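// In-memory retry bookkeeping keyed by topic-partition-offset; counts
// do not survive a process restart.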
const processMessage = async (key, value, payload) => {
const content = value?.toString() ?? null;
if (content === null) {
this.logger.error('Event content is null', null, topic, key);
return;
}
const event = JSON.parse(content);
if (typeof handler === 'function') {
await handler({ key, event, payload: payload });
}
else {
await handler.handle({ key, event, payload: payload });
}
};
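// Forwards the raw message to a parallel `<topic>-dlq` topic so failed
// events can be inspected or replayed later.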
const sendToDeadLetterQueue = async (message) => {
const producer = this.kafka.producer();
await producer.connect();
await producer.send({
topic: `${topic}-dlq`,
messages: [
{
key: message.key,
value: message.value,
},
],
});
await producer.disconnect();
this.logger.warn(`Message offset ${message.offset} sent to DLQ.`, topic);
};
const runConsumer = async () => {
await consumer.connect();
await consumer.subscribe({ topic, fromBeginning: true });
await consumer.run({
eachBatchAutoResolve: false,
eachBatch: async ({ batch, resolveOffset, heartbeat, isRunning, isStale }) => {
for (const message of batch.messages) {
if (!isRunning() || isStale())
break;
const { topic, partition } = batch;
const offsetKey = `${topic}-${partition}-${message.offset}`;
const retries = retryCounts.get(offsetKey) || 0;
try {
const key = message.key?.toString() ?? '';
this.logger.log(`Processing message`, message.offset, topic, key);
await processMessage(key, message.value, message);
resolveOffset(message.offset);
this.logger.log(`Message processed successfully`, message.offset, topic, key);
retryCounts.delete(offsetKey);
}
catch (error) {
this.logger.error(`Error processing message: ${error}`, null, message.offset, topic, message.key?.toString());
if (retries < RETRY_LIMIT) {
retryCounts.set(offsetKey, retries + 1);
this.logger.warn(`Retrying message.`, message.offset, topic, message.key?.toString());
consumer.pause([{ topic, partitions: [partition] }]);
setTimeout(async () => {
this.logger.log(`Resuming message.`, message.offset, topic, message.key?.toString());
consumer.resume([{ topic, partitions: [partition] }]);
}, RETRY_DELAY_MS);
// Stop working through this batch: the failed offset stays
// unresolved, so it is redelivered once the partition resumes,
// and later offsets are not committed past it.
return;
}
else {
this.logger.warn(`Exceeded retry limit.`, message.offset, topic, message.key?.toString());
resolveOffset(message.offset);
await sendToDeadLetterQueue(message);
retryCounts.delete(offsetKey);
}
}
await heartbeat();
}
},
});
};
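// Fire-and-forget: consume() returns immediately while the consumer
// keeps running in the background.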
runConsumer().catch((error) => {
this.logger.error('Error starting Kafka consumer:', error);
});
}
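/**
 * Health check: connects an admin client and lists topics; any
 * failure reports unhealthy.
 */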
async isHealthy() {
try {
const admin = this.kafka.admin();
await admin.connect();
// Listing topics exercises the broker connection; its success is
// the health signal.
await admin.listTopics();
await admin.disconnect();
return true;
}
catch (error) {
this.logger.error('Kafka health check failed:', error);
return false;
}
}
};
exports.KafkaClient = KafkaClient;
exports.KafkaClient = KafkaClient = KafkaClient_1 = __decorate([
(0, common_1.Injectable)(),
__metadata("design:paramtypes", [String, String])
], KafkaClient);
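// Usage sketch (illustrative only; `order` and the literal broker
// address are assumptions, not part of this file):
//
//   const client = new KafkaClient('orders-service', 'localhost:9092');
//   await client.produce('orders', order.id, { name: 'order.created', order });
//   await client.consume('orders', 'orders-consumer', async ({ key, event }) => {
//     // handle the event; throwing here triggers the retry/DLQ path
//   });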
//# sourceMappingURL=client.js.map