kafka-console
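// kafka-console CLI entry point (compiled CommonJS output; __importDefault
// below is the TypeScript interop helper for default imports).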
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.collect = collect;
exports.toInt = toInt;
const commander_1 = require("commander");
const kafka_1 = require("./utils/kafka");
const consume_1 = __importDefault(require("./commands/consume"));
const produce_1 = __importDefault(require("./commands/produce"));
const metadata_1 = __importDefault(require("./commands/metadata"));
const list_1 = __importDefault(require("./commands/list"));
const config_1 = __importDefault(require("./commands/config"));
const createTopic_1 = __importDefault(require("./commands/createTopic"));
const deleteTopic_1 = __importDefault(require("./commands/deleteTopic"));
const fetchTopicOffsets_1 = __importDefault(require("./commands/fetchTopicOffsets"));
const { version } = require('../package.json');
// Accumulates repeated option values into an array (commander coercion
// callback), e.g. several -h key:value headers collect into one list.
function collect(value, result) {
return result.concat([value]);
}
// Parses an option value as a base-10 integer; commander also passes the
// previous value as a second argument, which is unused here.
function toInt(value) {
return parseInt(value, 10);
}
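// Root command: global broker, TLS/SASL, and output options shared by every
// subcommand; defaults come from KAFKA_* environment variables.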
const commander = new commander_1.Command();
commander
.option('-b, --brokers <brokers>', 'bootstrap server host', process.env.KAFKA_BROKERS || 'localhost:9092')
.option('-l, --log-level <logLevel>', 'log level')
.option('-t, --timeout <timeout>', 'operation timeout', toInt, parseInt(process.env.KAFKA_TIMEOUT || '0', 10))
.option('-p, --pretty', 'pretty print', false)
.option('--ssl', 'enable ssl', false)
.option('--mechanism <mechanism>', 'sasl mechanism', process.env.KAFKA_MECHANISM)
.option('--username <username>', 'sasl username', process.env.KAFKA_USERNAME)
.option('--password <password>', 'sasl password', process.env.KAFKA_PASSWORD)
.option('--auth-id <authId>', 'sasl aws authorization identity', process.env.KAFKA_AUTH_ID)
.option('--access-key-id <accessKeyId>', 'sasl aws access key id', process.env.KAFKA_ACCESS_KEY_ID)
.option('--secret-access-key <secretAccessKey>', 'sasl aws secret access key', process.env.KAFKA_SECRET_ACCESS_KEY)
.option('--session-token <sessionToken>', 'sasl aws session token', process.env.KAFKA_SESSION_TOKEN)
.option('--oauth-bearer <oauthBearer>', 'sasl oauth bearer token', process.env.KAFKA_OAUTH_BEARER)
.version(version);
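// consume: subscribe to a topic as the given group, optionally from a
// timestamp offset, and stream messages to stdout (or --output).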
commander
.command('consume <topic>')
.requiredOption('-g, --group <group>', 'consumer group name', `kafka-console-consumer-${Date.now()}`)
.option('-d, --data-format <data-format>', 'message data format: json, js, or raw', 'json')
.option('-o, --output <filename>', 'write output to specified filename')
.option('-f, --from <from>', 'read messages from a specific timestamp (milliseconds or ISO 8601); set 0 to read from the beginning')
.option('-c, --count <count>', 'number of messages to read', toInt, Infinity)
.option('-s, --skip <skip>', 'number of messages to skip', toInt, 0)
.description('Consumes kafka topic events')
.action(consume_1.default);
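// produce: send messages read from stdin (or --input) to a topic, optionally
// pacing sends with --wait and attaching static headers.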
commander
.command('produce <topic>')
.option('-d, --data-format <data-format>', 'message data format: json, js, or raw', 'json')
.option('-i, --input <filename>', 'input filename')
.option('-w, --wait <wait>', 'time in ms to wait after sending each message', toInt, 0)
.option('-h, --header <header>', 'set a static header', collect, [])
.description('Produces kafka topic events')
.action(produce_1.default);
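// Administrative subcommands: cluster metadata, topic listing, per-resource
// config, topic create/delete, and topic offsets.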
commander
.command('metadata')
.description('Displays kafka server metadata')
.action(metadata_1.default);
commander
.command('list')
.alias('ls')
.option('-a, --all', 'include internal topics')
.description('Lists kafka topics')
.action(list_1.default);
commander
.command('config')
.requiredOption('-r, --resource <resource>', 'resource type', kafka_1.resourceParser)
.requiredOption('-n, --resourceName <resourceName>', 'resource name')
.description('Describes config for specific resource')
.action(config_1.default);
commander
.command('topic:create <topic>')
.description('Creates kafka topic')
.action(createTopic_1.default);
commander
.command('topic:delete <topic>')
.description('Deletes kafka topic')
.action(deleteTopic_1.default);
commander
.command('topic:offsets <topic> [timestamp]')
.description('Shows kafka topic offsets')
.option('-g, --group <group>', 'consumer group name', `kafka-console-consumer-${Date.now()}`)
.action(fetchTopicOffsets_1.default);
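// Append usage examples to the standard --help output.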
commander.on('--help', function () {
[
'',
'Examples:',
'',
' General consumer usage',
' $ npx kafka-console -b $KAFKA_BROKERS consume $KAFKA_TOPIC -g $KAFKA_TOPIC_GROUP --ssl --mechanism plain --username $KAFKA_USERNAME --password $KAFKA_PASSWORD',
'',
' Extracting consumer output with jq',
' $ npx kafka-console consume $KAFKA_TOPIC -g $KAFKA_TOPIC_GROUP -f ./formatter/avro.js | jq .value',
'',
' General producer usage',
' $ npx kafka-console produce $KAFKA_TOPIC -b $KAFKA_BROKERS --ssl --mechanism plain --username $KAFKA_USERNAME --password $KAFKA_PASSWORD',
'',
' Preparing producer payload json data with jq',
' $ cat payload.json | jq -r -c .[] | npx kafka-console produce $KAFKA_TOPIC -f ./formatter/avro.js',
'',
].forEach(msg => console.log(msg));
});
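// Parse argv; async command handlers reject on failure, so report the error
// and exit non-zero.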
commander
.parseAsync(process.argv)
.catch(e => {
console.error(e.message);
process.exit(1);
});
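// Also export the configured Command instance for programmatic use.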
exports.default = commander;