@aks007/kafka-js

NodeBase code for basic tools

'use strict';

var kafkajs = require('kafkajs');
var nodeLogger = require('@aks007/node-logger');

// src/services/base-kafka.service.ts
var BaseKafkaService = class {
  constructor(host, clientId) {
    this._logger = void 0;
    this._numConsumers = 1;
    // Map kafkajs log levels onto winston levels.
    this.toWinstonLogLevel = (level) => {
      switch (level) {
        case kafkajs.logLevel.ERROR:
        case kafkajs.logLevel.NOTHING:
          return "error";
        case kafkajs.logLevel.WARN:
          return "warn";
        case kafkajs.logLevel.INFO:
          return "info";
        case kafkajs.logLevel.DEBUG:
          return "debug";
      }
    };
    this.kafka = new kafkajs.Kafka({
      clientId,
      brokers: host?.split(","),
      // The level passed to logCreator is ignored; the global winston level on
      // the injected logger controls filtering.
      logCreator: (levelKF) => ({ namespace, level, label, log }) => {
        const { message, ...extra } = log;
        this._logger?.log({ level: this.toWinstonLogLevel(level), message, extra });
      }
    });
  }
  // Fluent setters; each returns getThis() so calls can be chained.
  setLogger(value) {
    this._logger = value;
    return this.getThis();
  }
  setPollTimeInterval(value) {
    this._pollTimeInterval = value;
    return this.getThis();
  }
  clientId(value) {
    this._clientId = value;
    return this.getThis();
  }
  setPollTimeout(value) {
    this._pollTimeout = value;
    return this.getThis();
  }
  setNumConsumers(value) {
    this._numConsumers = value;
    return this.getThis();
  }
  setTopic(value) {
    this._topic = value;
    return this.getThis();
  }
  setGroup(value) {
    this._group = value;
    return this.getThis();
  }
  disconnect() {
    this.producer?.disconnect();
    this.consumers?.forEach((i) => i.disconnect());
  }
  async init() {
    await this.initSender();
    process.on("exit", () => {
      this.disconnect();
    });
  }
  setUp() {
  }
  async initSender() {
    this.producer = this.kafka.producer({
      createPartitioner: kafkajs.Partitioners.LegacyPartitioner
    });
    await this.producer.connect();
  }
  startPolling() {
  }
  async _poll(consumer) {
    if (!consumer) return;
    await consumer.run({
      eachBatch: async ({ batch, resolveOffset, heartbeat, commitOffsetsIfNecessary, uncommittedOffsets, isRunning, isStale, pause }) => {
        if (!batch?.messages) return;
        // Group raw message values by key: { [key]: string[] }.
        const result = {};
        for (const msg of batch.messages) {
          if (!msg.key) continue;
          const key = msg.key.toString();
          if (!Object.keys(result).includes(key)) result[key] = [];
          result[key].push(msg.value + "");
        }
        // An empty object is truthy, so check for actual keys before logging.
        if (Object.keys(result).length > 0) {
          const lbKafka = nodeLogger.LogBuilder.create("New Kafka message found")
            .addSection("Topic", this._topic);
          for (const key in result) {
            lbKafka.addSection("Key", key);
            for (const value of result[key]) {
              lbKafka.addSection("Message", value);
            }
          }
          this._logger?.info(lbKafka.build());
        }
        if (this.onNewMessageReceived) this.onNewMessageReceived(result);
        await heartbeat();
        // Await the delay, otherwise the poll interval has no effect.
        if (this._pollTimeInterval) {
          await new Promise((resolve) => setTimeout(resolve, this._pollTimeInterval));
        }
      }
    });
  }
  async setOnMessageReceived(handler) {
    this.onNewMessageReceived = handler;
    await this.initConsumer();
  }
  async initConsumer() {
    this.consumers = [];
    for (let i = 0; i < this._numConsumers; i++) {
      this.consumers.push(this.kafka.consumer({ groupId: this._group }));
    }
    if (!this.consumers || this.consumers.length === 0) return;
    this._logger?.info(
      new nodeLogger.LogBuilder("New consumer connecting to kafka")
        .addSection("Topic", this._topic)
        .addSection("Num consumers", this._numConsumers + "")
        .build()
    );
    for (const consumer of this.consumers) {
      await consumer.connect();
      await consumer.subscribe({ topic: this._topic, fromBeginning: true });
      await this._poll(consumer);
    }
  }
};

// src/services/kafka-queue.service.ts
var KafkaQueueService = class extends BaseKafkaService {
  constructor(host, clientId) {
    super(host, clientId);
  }
  // Serialize and send one message, or one message per array element, under the given key.
  async send(key, message) {
    if (!this.producer) return;
    await (Array.isArray(message)
      ? this.producer.send({
          topic: this._topic,
          messages: message.map((val) => ({ key, value: JSON.stringify(val) }))
        })
      : this.producer.send({
          topic: this._topic,
          messages: [{ key, value: JSON.stringify(message) }]
        }));
  }
  deInit() {
    throw new Error("Method not implemented.");
  }
  // Register a handler that receives values grouped by key and parsed back from JSON.
  async onReceive(handler) {
    await super.setOnMessageReceived((message) => {
      const result = {};
      for (const key of Object.keys(message)) {
        result[key] = message[key].map((i) => JSON.parse(i));
      }
      handler(result);
    });
    return this.getThis();
  }
  getThis() {
    return this;
  }
};

exports.BaseKafkaService = BaseKafkaService;
exports.KafkaQueueService = KafkaQueueService;
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.js.map
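
For context, here is a minimal usage sketch that is not part of the published file. It assumes a single local broker at localhost:9092 and illustrative client, topic, group, and key names; it only calls methods that appear in the bundle above (setTopic, setGroup, setNumConsumers, init, onReceive, send).

// Minimal usage sketch, assuming a local broker at localhost:9092 and
// illustrative topic/group/key names.
const { KafkaQueueService } = require('@aks007/kafka-js');

async function main() {
  const queue = new KafkaQueueService('localhost:9092', 'example-client')
    .setTopic('example-topic')   // topic used by both send() and the consumers
    .setGroup('example-group')   // consumer group id
    .setNumConsumers(1);

  await queue.init(); // connects the producer and registers the exit hook

  // onReceive() connects the consumers and subscribes to the topic; values
  // arrive grouped by key and already JSON.parse'd.
  await queue.onReceive((messages) => {
    console.log(messages); // e.g. { 'example-key': [{ hello: 'world' }] }
  });

  // send() JSON.stringify's the value; passing an array fans out to one
  // message per element under the same key.
  await queue.send('example-key', { hello: 'world' });
}

main().catch(console.error);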