/**
 * @biorate/rdkafka — Rdkafka connector (compiled JavaScript output).
 */
;
/**
 * TypeScript emit helper: applies `decorators` to `target`.
 * Called with 2 args for a class decorator, 4 args for a member
 * (property/method) decorator. Reuses a pre-existing global helper
 * (`this.__decorate`) when the file is loaded in a shared-helpers setup.
 */
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    // c: argument count — distinguishes class decoration (c < 3) from member decoration.
    // r: the value being decorated — the class itself, or the member's property descriptor.
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    // Prefer the reflect-metadata polyfill's Reflect.decorate when it is loaded.
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    // Fallback: apply decorators right-to-left; a decorator's return value (if any) replaces r.
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    // For member decoration (c > 3), install the resulting descriptor; always return r.
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
/**
 * TypeScript emit helper: records design-time type metadata for a decorated
 * member. Delegates to the reflect-metadata polyfill's `Reflect.metadata`
 * when it is loaded; otherwise the decorator is a no-op (returns undefined).
 */
var __metadata = (this && this.__metadata) || function (metadataKey, metadataValue) {
    const canReflect = typeof Reflect === "object" && typeof Reflect.metadata === "function";
    if (!canReflect)
        return undefined;
    return Reflect.metadata(metadataKey, metadataValue);
};
// CommonJS module bootstrap emitted by the TypeScript compiler:
// flag this module as transpiled-ESM for interop, and pre-declare the
// named export so it exists even before the class definition runs.
Object.defineProperty(exports, "__esModule", { value: true });
exports.RDKafkaHighLevelProducerConnection = void 0;
const kafka_javascript_1 = require("@confluentinc/kafka-javascript");
const prometheus_1 = require("@biorate/prometheus");
const helpers_1 = require("../helpers");
/**
 * Promise-based wrapper around @confluentinc/kafka-javascript's
 * HighLevelProducer. Each `*Promise` method adapts the underlying
 * callback-style API to a Promise; produce calls are additionally
 * instrumented with the Prometheus counter/histogram declared on the
 * prototype (see the decorator applications below the class).
 */
class RDKafkaHighLevelProducerConnection extends kafka_javascript_1.HighLevelProducer {
    /**
     * Produce a single message and resolve with its assigned offset.
     * Records one counter increment and one latency observation, labelled
     * with the topic and an HTTP-like status: 200 on success, 500 on error.
     *
     * @param topic - destination topic
     * @param partition - destination partition
     * @param message - message payload
     * @param key - message key
     * @param timestamp - message timestamp
     * @param headers - optional message headers; when omitted, the
     *   shorter produce() overload (without headers) is invoked
     * @returns Promise resolving with the produced message's offset
     */
    producePromise(topic, partition, message, key, timestamp, headers) {
        return new Promise((resolve, reject) => {
            // Start the latency clock before handing off to librdkafka.
            const elapsed = (0, helpers_1.timeDiff)();
            const done = (e, offset) => {
                const status = e ? 500 : 200;
                this.counter.labels({ topic, status }).inc(1);
                this.histogram.labels({ topic, status }).observe(elapsed());
                if (e)
                    reject(e);
                else
                    resolve(offset);
            };
            if (headers)
                this.produce(topic, partition, message, key, timestamp, headers, done);
            else
                this.produce(topic, partition, message, key, timestamp, done);
        });
    }
    /**
     * Connect to the Kafka cluster.
     * @param metadataOptions - options forwarded to connect()
     * @returns Promise resolving with this connection instance
     */
    connectPromise(metadataOptions) {
        return new Promise((resolve, reject) => {
            super.connect(metadataOptions, (e) => {
                if (e)
                    reject(e);
                else
                    resolve(this);
            });
        });
    }
    /**
     * Initialize transactions for this producer.
     * @param timeout - optional timeout; when falsy, the no-timeout
     *   overload is used
     */
    initTransactionsPromise(timeout) {
        return new Promise((resolve, reject) => {
            const done = (e) => {
                if (e)
                    reject(e);
                else
                    resolve();
            };
            if (timeout)
                super.initTransactions(timeout, done);
            else
                super.initTransactions(done);
        });
    }
    /** Begin a new transaction. */
    beginTransactionPromise() {
        return new Promise((resolve, reject) => {
            super.beginTransaction((e) => {
                if (e)
                    reject(e);
                else
                    resolve();
            });
        });
    }
    /**
     * Commit the current transaction.
     * @param timeout - optional timeout; when falsy, the no-timeout
     *   overload is used
     */
    commitTransactionPromise(timeout) {
        return new Promise((resolve, reject) => {
            const done = (e) => {
                if (e)
                    reject(e);
                else
                    resolve();
            };
            if (timeout)
                super.commitTransaction(timeout, done);
            else
                super.commitTransaction(done);
        });
    }
    /**
     * Abort the current transaction.
     * @param timeout - optional timeout; when falsy, the no-timeout
     *   overload is used
     */
    abortTransactionPromise(timeout) {
        return new Promise((resolve, reject) => {
            const done = (e) => {
                if (e)
                    reject(e);
                else
                    resolve();
            };
            if (timeout)
                super.abortTransaction(timeout, done);
            else
                super.abortTransaction(done);
        });
    }
    /**
     * Send consumer offsets as part of the current transaction.
     * @param offsets - topic/partition offsets to commit transactionally
     * @param consumer - the consumer whose group the offsets belong to
     * @param timeout - optional timeout; when falsy, the no-timeout
     *   overload is used
     */
    sendOffsetsToTransactionPromise(offsets, consumer, timeout) {
        return new Promise((resolve, reject) => {
            const done = (e) => {
                if (e)
                    reject(e);
                else
                    resolve();
            };
            if (timeout)
                super.sendOffsetsToTransaction(offsets, consumer, timeout, done);
            else
                super.sendOffsetsToTransaction(offsets, consumer, done);
        });
    }
}
// Decorator applications emitted by the TypeScript compiler for the two
// instrumented instance properties referenced by producePromise().
// Counter: total number of produce attempts, labelled by topic and outcome
// status (200 success / 500 error).
__decorate([
    (0, prometheus_1.counter)({
        name: 'kafka_producer_seconds_count',
        help: 'Kafka producer seconds count',
        labelNames: ['topic', 'status'],
    }),
    __metadata("design:type", prometheus_1.Counter)
], RDKafkaHighLevelProducerConnection.prototype, "counter", void 0);
// Histogram: produce latency distribution.
// NOTE(review): bucket values (5..10000) look like milliseconds even though
// the metric name says "seconds" — the actual unit depends on what
// helpers_1.timeDiff() returns; confirm against the helper.
__decorate([
    (0, prometheus_1.histogram)({
        name: 'kafka_producer_seconds',
        help: 'Kafka producer seconds bucket',
        labelNames: ['topic', 'status'],
        buckets: [5, 10, 20, 50, 100, 300, 500, 1000, 2000, 3000, 5000, 10000],
    }),
    __metadata("design:type", prometheus_1.Histogram)
], RDKafkaHighLevelProducerConnection.prototype, "histogram", void 0);
// Public CommonJS export of the connection class.
exports.RDKafkaHighLevelProducerConnection = RDKafkaHighLevelProducerConnection;
//# sourceMappingURL=high-level-producer.js.map