@message-queue-toolkit/kafka

AbstractKafkaPublisher.js
import { InternalError, stringValueSerializer } from '@lokalise/node-core';
import { MessageSchemaContainer } from '@message-queue-toolkit/core';
import {
  Producer,
  jsonSerializer,
  stringSerializer,
} from '@platformatic/kafka';
import { AbstractKafkaService } from './AbstractKafkaService.js';

export class AbstractKafkaPublisher extends AbstractKafkaService {
  topicsConfig;
  schemaContainers;
  producer;
  isInitiated;

  constructor(dependencies, options) {
    super(dependencies, options);
    this.isInitiated = false;
    this.topicsConfig = options.topicsConfig;
    if (this.topicsConfig.length === 0) throw new Error('At least one topic must be defined');

    // Build one schema container per topic so publish() can resolve the right schema for a message.
    this.schemaContainers = {};
    for (const { topic, schemas } of this.topicsConfig) {
      this.schemaContainers[topic] = new MessageSchemaContainer({
        messageSchemas: schemas,
        messageTypeField: this.options.messageTypeField,
        messageDefinitions: [],
      });
    }

    this.producer = new Producer({
      ...this.options.kafka,
      ...this.options,
      serializers: {
        key: stringSerializer,
        value: jsonSerializer,
        headerKey: stringSerializer,
        headerValue: stringSerializer,
      },
    });
  }

  async init() {
    if (this.isInitiated) return;
    try {
      // A successful listApis() call marks the producer as initialized.
      await this.producer.listApis();
      this.isInitiated = true;
    } catch (e) {
      throw new InternalError({
        message: 'Producer init failed',
        errorCode: 'KAFKA_PRODUCER_INIT_ERROR',
        cause: e,
      });
    }
  }

  async close() {
    if (!this.isInitiated) return;
    await this.producer.close();
    this.isInitiated = false;
  }

  async publish(topic, message, requestContext, options) {
    const messageProcessingStartTimestamp = Date.now();

    // Validate the message against the schemas registered for this topic before sending.
    const schemaResult = this.schemaContainers[topic]?.resolveSchema(message);
    if (!schemaResult) throw new Error(`Message schemas not found for topic: ${topic}`);
    if (schemaResult.error) throw schemaResult.error;

    await this.init(); // lazy init

    try {
      const parsedMessage = schemaResult.result.parse(message);
      const headers = {
        ...options?.headers,
        [this.resolveHeaderRequestIdField()]: requestContext?.reqId ?? '',
      };
      const kafkaMessage = { ...options, topic, value: parsedMessage, headers };
      await this.producer?.send({
        messages: [kafkaMessage],
      });
      this.handleMessageProcessed({
        message: kafkaMessage,
        processingResult: { status: 'published' },
        messageProcessingStartTimestamp,
      });
    } catch (error) {
      const errorDetails = {
        topic,
        publisher: this.constructor.name,
        message: stringValueSerializer(message),
      };
      this.handlerError(error, errorDetails);
      throw new InternalError({
        message: `Error while publishing to Kafka: ${error.message}`,
        errorCode: 'KAFKA_PUBLISH_ERROR',
        cause: error,
        details: errorDetails,
      });
    }
  }
}
//# sourceMappingURL=AbstractKafkaPublisher.js.map
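
For orientation, a minimal usage sketch follows. It is not part of the package: the zod schema, the EntityEventPublisher subclass, the entity-events topic, the dependencies object, and the kafka connection options (bootstrapBrokers, clientId) are all assumptions chosen to illustrate how topicsConfig, messageTypeField, and publish() fit together.

// Minimal usage sketch, not part of the package. Assumes zod schemas and a `dependencies`
// object of whatever shape AbstractKafkaService expects; broker and topic names are illustrative.
import { z } from 'zod';
import { AbstractKafkaPublisher } from '@message-queue-toolkit/kafka';

const ENTITY_CREATED_SCHEMA = z.object({
  type: z.literal('entity.created'),
  payload: z.object({ id: z.string() }),
});

class EntityEventPublisher extends AbstractKafkaPublisher {
  constructor(dependencies) {
    super(dependencies, {
      topicsConfig: [{ topic: 'entity-events', schemas: [ENTITY_CREATED_SCHEMA] }],
      messageTypeField: 'type',
      // Assumed @platformatic/kafka connection options, spread into the Producer by the constructor above.
      kafka: { bootstrapBrokers: ['localhost:9092'], clientId: 'entity-publisher' },
    });
  }
}

// const publisher = new EntityEventPublisher(dependencies);
// await publisher.publish(
//   'entity-events',
//   { type: 'entity.created', payload: { id: '123' } },
//   { reqId: 'request-id-from-the-incoming-call' },
// );
// await publisher.close();

Note that publish() calls init() lazily, so no explicit initialization step is needed before the first publish; close() should still be called on shutdown to release the producer.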