@biorate/rdkafka
Rdkafka connector
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _RDKafkaConsumerStreamConnection_handle, _RDKafkaConsumerStreamConnection_setMetrics;
Object.defineProperty(exports, "__esModule", { value: true });
exports.RDKafkaConsumerStreamConnection = void 0;
const events_1 = require("events");
const lodash_1 = require("lodash");
const node_rdkafka_1 = require("node-rdkafka");
const tools_1 = require("@biorate/tools");
const prometheus_1 = require("@biorate/prometheus");
const enums_1 = require("../enums");
const errors_1 = require("../errors");
const helpers_1 = require("../helpers");
const util_1 = require("util");
class RDKafkaConsumerStreamConnection extends events_1.EventEmitter {
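// Tunables read from config, with defaults: pool size before back-pressure
// (buffer, 100), messages drained per iteration (concurrency, 10), and the
// delay between iterations in ms (delay, 0).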
get buffer() {
var _a;
return (_a = this.config.buffer) !== null && _a !== void 0 ? _a : 100;
}
get concurrency() {
var _a;
return (_a = this.config.concurrency) !== null && _a !== void 0 ? _a : 10;
}
get delay() {
var _a;
return (_a = this.config.delay) !== null && _a !== void 0 ? _a : 0;
}
constructor(config) {
super();
this.handler = null;
this.pool = [];
this.started = false;
this.assignment = [];
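// #handle: long-running poll loop (it runs for the lifetime of the process;
// `started` only gates whether work is done). Each iteration drains up to
// `concurrency` de-duplicated messages from the pool, invokes the handler
// per message (or once with the whole batch when `config.batch` is set),
// commits the highest offset seen per partition, and records metrics with
// status 200 on success or 500 on failure (failed messages are requeued).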
_RDKafkaConsumerStreamConnection_handle.set(this, async () => {
var _a, _b;
let messages = [];
let time;
const latest = new Map();
const counter = new Map();
const tasks = [];
while (true) {
try {
await tools_1.timer.wait(this.delay);
if (!this.started)
continue;
if (!this.pool.length)
continue;
time = (0, helpers_1.timeDiff)();
latest.clear();
counter.clear();
messages.length = 0;
tasks.length = 0;
messages.push(...(0, lodash_1.uniqWith)(this.pool.splice(0, this.concurrency), lodash_1.isEqual));
for (const message of messages) {
if (!this.config.batch)
tasks.push(this.handler(message));
const prev = latest.get(message.partition);
const last = !prev || message.offset > prev.offset ? message : prev;
latest.set(message.partition, last);
const key = `${message.topic}\0${message.partition}`;
counter.set(key, ((_a = counter.get(key)) !== null && _a !== void 0 ? _a : 0) + 1);
}
if (this.config.batch)
tasks.push(this.handler(messages));
await Promise.all(tasks);
for (const [, message] of latest) {
this.stream.consumer.commitMessage(message);
this.emit(enums_1.EventsConsumerStream.LatestMessage, message);
}
__classPrivateFieldGet(this, _RDKafkaConsumerStreamConnection_setMetrics, "f").call(this, counter, 200, time());
}
catch (e) {
if (messages.length)
this.pool.unshift(...messages);
counter.clear();
for (const message of messages) {
const key = `${message.topic}\0${message.partition}`;
counter.set(key, ((_b = counter.get(key)) !== null && _b !== void 0 ? _b : 0) + 1);
}
// `time` is undefined if the failure happened before timeDiff() ran
__classPrivateFieldGet(this, _RDKafkaConsumerStreamConnection_setMetrics, "f").call(this, counter, 500, time ? time() : 0);
console.error(e);
}
}
});
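// #setMetrics: resolves each `topic\0partition` key against the current
// partition assignment and updates the Prometheus counter and histogram
// with labels { topic, status, group, partition }.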
_RDKafkaConsumerStreamConnection_setMetrics.set(this, (counter, status, time) => {
for (const [key, count] of counter) {
const [topic, partition] = key.split('\0');
for (const item of this.assignment) {
if (topic !== item.topic || Number(partition) !== item.partition)
continue;
const labels = {
topic,
status,
group: this.config.global['group.id'] || 'unknown',
partition: item.partition,
};
this.counter.labels(labels).inc(count);
this.histogram.labels(labels).observe(time);
}
}
});
this.config = config;
}
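// subscribe: wires up a KafkaConsumer read stream whose rebalance callback
// tracks the current partition assignment, pools incoming messages with
// back-pressure (pausing the stream once `buffer` is reached), and starts
// the handle loop. Throws if a handler is already registered.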
subscribe(handler) {
if (this.handler)
throw new errors_1.RDKafkaConsumerStreamAlreadySubscribedError();
this.stream = node_rdkafka_1.KafkaConsumer.createReadStream(Object.assign({ rebalance_cb: (err, assignment) => {
this.assignment.length = 0;
if (err.code === node_rdkafka_1.CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
this.assignment = assignment;
this.stream.consumer.assign(assignment);
}
else if (err.code === node_rdkafka_1.CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
this.stream.consumer.unassign();
}
else {
throw err;
}
} }, this.config.global), this.config.topic, this.config.stream);
this.handler = handler;
this.stream.on('data', (message) => {
if (this.pool.length >= this.buffer)
this.stream.pause();
this.pool.push(message);
});
this.stream.consumer.on('event.error', (e) => void this.emit('error', e));
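// Periodically resume a paused stream once the pool drains below the
// buffer limit. Note that setInterval is called without a delay, so Node
// falls back to its minimum timer resolution (~1 ms per tick).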
this.timer = setInterval(() => {
if (this.pool.length < this.buffer)
this.stream.resume();
});
this.started = true;
__classPrivateFieldGet(this, _RDKafkaConsumerStreamConnection_handle, "f").call(this);
}
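// unsubscribe: pauses the stream, detaches the data listener and handler,
// stops the resume timer, and closes the underlying stream.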
async unsubscribe() {
this.stream.pause();
this.stream.removeAllListeners('data');
this.started = false;
this.handler = null;
clearInterval(this.timer);
await (0, util_1.promisify)(this.stream.close.bind(this.stream))();
}
}
_RDKafkaConsumerStreamConnection_handle = new WeakMap(), _RDKafkaConsumerStreamConnection_setMetrics = new WeakMap();
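// Prometheus instrumentation via @biorate/prometheus decorators: a message
// counter and a processing-time histogram, both labeled by topic / status /
// group / partition. The bucket values (5 to 10000) look like milliseconds,
// despite the `_seconds` suffix in the metric name.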
__decorate([
(0, prometheus_1.counter)({
name: 'kafka_consumer_count',
help: 'Kafka consumer count',
labelNames: ['topic', 'status', 'group', 'partition'],
}),
__metadata("design:type", Object)
], RDKafkaConsumerStreamConnection.prototype, "counter", void 0);
__decorate([
(0, prometheus_1.histogram)({
name: 'kafka_consumer_seconds',
help: 'Kafka consumer seconds bucket',
labelNames: ['topic', 'status', 'group', 'partition'],
buckets: [5, 10, 20, 50, 100, 300, 500, 1000, 2000, 3000, 5000, 10000],
}),
__metadata("design:type", Object)
], RDKafkaConsumerStreamConnection.prototype, "histogram", void 0);
exports.RDKafkaConsumerStreamConnection = RDKafkaConsumerStreamConnection;
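// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the module). The broker
// address, group id, and topic name below are placeholders; the config shape
// mirrors the fields this class reads: `global` and `topic` are librdkafka
// configs, `stream` is the options object passed to
// KafkaConsumer.createReadStream, and buffer / concurrency / delay / batch
// tune the handle loop.
//
// const { RDKafkaConsumerStreamConnection } = require('./consumer-stream');
//
// const connection = new RDKafkaConsumerStreamConnection({
//   global: {
//     'metadata.broker.list': 'localhost:9092',
//     'group.id': 'example-group',
//   },
//   topic: {},                              // topic-level librdkafka config
//   stream: { topics: ['example-topic'] },  // read-stream options
//   buffer: 100,     // pause the stream once this many messages are pooled
//   concurrency: 10, // messages drained from the pool per iteration
//   delay: 50,       // ms to wait between iterations
//   batch: false,    // true => handler receives the whole message array
// });
//
// connection.subscribe(async (message) => {
//   console.log(message.topic, message.partition, message.offset);
// });
//
// // Later: await connection.unsubscribe();
// ---------------------------------------------------------------------------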
//# sourceMappingURL=consumer-stream.js.map