if (help || topicName === undefined) {
  console.log("Consumes and prints values of messages from a Kafka topic via the REST proxy API wrapper.");
  console.log();
  console.log("Usage: node console_consumer.js [--url <api-base-url>] --topic <topic> [--group <consumer-group-name>] [--message-limit <num-messages>] [--from-beginning] [--format <avro|binary>]");
  process.exit(help ? 0 : 1);
}

if (consumerGroup === undefined)
  consumerGroup = "console-consumer-" + Math.round(Math.random() * 100000);

var kafka = new KafkaRest({"url": api_url});
var consumed = 0;
var consumerConfig = {
  "format": format
};
if (fromBeginning) {
  consumerConfig['auto.offset.reset'] = 'smallest';
}
kafka.consumer(consumerGroup).join(consumerConfig, function(err, consumer_instance) {
  if (err) return console.log("Failed to create instance in consumer group: " + err);

  console.log("Consumer instance initialized: " + consumer_instance.toString());
  var stream = consumer_instance.subscribe(topicName);
  stream.on('data', function(msgs) {
    for (var i = 0; i < msgs.length; i++) {
      if (format == "binary") {
        // Message keys (if available) and values are decoded from base64 into Buffers. You'll need to decode them
        // based on whatever serialization format you used. By default here, we just try to decode to text.
        console.log(msgs[i].value.toString('utf8'));
        // Also available: msgs[i].key, msgs[i].partition
      } else {
        // Avro/JSON messages arrive as already-decoded JavaScript objects
        console.log(JSON.stringify(msgs[i].value));
      }
    }
  }); // end of 'data' handler
}); // end of join callback
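The consumed counter declared above is never used in this fragment; in the full console consumer it tracks messages against the --message-limit option. A rough sketch of that bookkeeping, which would sit inside the join() callback next to the 'data' handler shown above; messageLimit is an assumed variable, and the shutdown call assumes the kafka-rest consumer instance exposes a shutdown(callback) method.

// Sketch only: count messages and stop once an (assumed) messageLimit is reached.
stream.on('data', function(msgs) {
  consumed += msgs.length;
  if (messageLimit !== undefined && consumed >= messageLimit) {
    // Shutting down the instance releases it on the REST proxy side before exiting
    consumer_instance.shutdown(function() { process.exit(0); });
  }
});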
var binary = (format == "binary");
if (consumerGroup === undefined)
consumerGroup = "tweet-trending-consumer-" + Math.round(Math.random() * 100000);
var kafka = new KafkaRest({"url": api_url});
var consumer_instance;
// How often to report the top 10
var report_period = 10000;
// How much to discount the current weights for each report_period
var period_discount_rate = .99;
var consumerConfig = { "format" : format };
if (fromBeginning) {
  consumerConfig['auto.offset.reset'] = 'smallest';
}
kafka.consumer(consumerGroup).join(consumerConfig, function(err, ci) {
  if (err) return console.log("Failed to create instance in consumer group: " + err);
  consumer_instance = ci;

  var stream = consumer_instance.subscribe(topicName);
  stream.on('data', function(msgs) {
    for (var i = 0; i < msgs.length; i++) {
      // Binary messages need to be parsed from the decoded Buffer; Avro/JSON values arrive as objects
      var tweet = (binary ? JSON.parse(msgs[i].value.toString('utf8')) : msgs[i].value);
      processTweet(tweet);
    }
  });
  stream.on('error', function(err) {
    console.log("Consumer instance reported an error: " + err);
    shutdown();
  });
}); // end of join callback
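The error handler above calls a shutdown() helper that is not part of this fragment. A minimal sketch of what such a helper might look like, assuming the consumer_instance variable set above and a shutdown(callback) method on it; this is a hypothetical reconstruction, not the library's definitive implementation.

// Hypothetical helper: release the consumer instance on the REST proxy, then exit.
function shutdown() {
  if (!consumer_instance) return process.exit(1);
  consumer_instance.shutdown(function(err) {
    if (err) console.log("Error while shutting down consumer instance: " + err);
    process.exit(err ? 1 : 0);
  });
}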
_initConsumer() {
  // librdkafka configuration for the underlying node-rdkafka consumer
  const consumerParams = {
    'metadata.broker.list': this._kafkaHosts,
    'group.id': this._groupId,
    'enable.auto.commit': this._autoCommit,
    'offset_commit_cb': this._onOffsetCommit.bind(this),
  };
  if (this._fromOffset !== undefined) {
    consumerParams['auto.offset.reset'] = this._fromOffset;
  }
  if (this._fetchMaxBytes !== undefined) {
    consumerParams['fetch.message.max.bytes'] = this._fetchMaxBytes;
  }
  this._consumer = new kafka.KafkaConsumer(consumerParams);
  this._consumer.connect();
  // Mark this client ready once the underlying consumer has connected
  return this._consumer.once('ready', () => {
    this._consumerReady = true;
    this._checkIfReady();
  });
}
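_initConsumer() only connects the consumer; messages still have to be pulled from it. A small sketch of a companion method that does this; the method name _startConsuming and its parameters are assumptions, while subscribe(), consume(), and the 'data' event are standard node-rdkafka API.

// Hypothetical companion method: start flowing-mode consumption on the connected consumer.
_startConsuming(topic, onMessage) {
  this._consumer.subscribe([topic]);
  this._consumer.consume(); // flowing mode: one 'data' event per received message
  this._consumer.on('data', (message) => {
    // message.value is a Buffer; message.topic, message.partition and message.offset are also set
    onMessage(message.value, message);
  });
}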
constructor(conf: any, topicConf: any = {}, options: Options = {}) {
  this.dying = false
  this.dead = false
  this.topics = []

  conf['auto.commit.interval.ms'] =
    conf['auto.commit.interval.ms'] || DEFAULT_AUTO_COMMIT_INTERVAL
  if (!conf['rebalance_cb']) {
    conf['rebalance_cb'] = (err: any, assignment: any) => {
      if (err.code === ErrorCode.ERR__ASSIGN_PARTITIONS) {
        this.consumer.assign(assignment)
        let rebalanceLog = 'consumer rebalance : '
        for (const assign of assignment) {
          rebalanceLog += `{topic ${assign.topic}, partition: ${assign.partition}} `
        }
        this.logger.info(rebalanceLog)
      } else if (err.code === ErrorCode.ERR__REVOKE_PARTITIONS) {
        this.consumer.unassign()
      } else {
        this.logger.error(err)
      }
    }
  }
  // ... remainder of the constructor is not shown in this snippet
}
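The default rebalance_cb installed above follows node-rdkafka's usual pattern of calling assign() on ERR__ASSIGN_PARTITIONS and unassign() on ERR__REVOKE_PARTITIONS, and it is only set when the caller has not supplied one. A sketch of how a caller might pass their own callback instead; the class name ConsumerWrapper and the broker/group values are placeholders for illustration, and the callback body mirrors node-rdkafka's documented rebalance handling, where `this` inside rebalance_cb is the underlying consumer.

// Hypothetical usage: provide a custom rebalance_cb so the default above is not installed.
// A custom callback must handle the partition assignment itself.
const consumer = new ConsumerWrapper(
  {
    'metadata.broker.list': 'localhost:9092',
    'group.id': 'example-group',
    rebalance_cb: function(err: any, assignment: any) {
      if (err.code === ErrorCode.ERR__ASSIGN_PARTITIONS) {
        this.assign(assignment)
      } else if (err.code === ErrorCode.ERR__REVOKE_PARTITIONS) {
        this.unassign()
      }
    },
  },
  { 'auto.offset.reset': 'earliest' }
)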