How to use the node-rdkafka.KafkaConsumer function in node-rdkafka

To help you get started, we’ve selected a few node-rdkafka examples, based on popular ways it is used in public projects.

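In all of the examples below the pattern is the same: construct a KafkaConsumer with a global config object and an optional topic config, connect, then subscribe and consume once the 'ready' event fires. A minimal flowing-mode sketch (broker address, group id, and topic name are placeholders):

const Kafka = require('node-rdkafka');

// Placeholder broker, group, and topic values.
const consumer = new Kafka.KafkaConsumer({
    'metadata.broker.list': 'localhost:9092',
    'group.id': 'example-group',
}, {});

consumer.connect();

consumer.on('ready', () => {
    consumer.subscribe(['my-topic']);
    consumer.consume(); // flowing mode: messages arrive through the 'data' event
});

consumer.on('data', message => {
    // message.value is a Buffer
    console.log(`${message.topic}[${message.partition}]@${message.offset}`);
});

Flowing mode delivers messages as fast as they arrive; several of the examples below use the non-flowing variant instead.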

github scality / backbeat / tests / functional / ingestion / IngestionReader.js
const ingestionQP = new IngestionQueuePopulator({
    config: testConfig.extensions.ingestion,
    logger: dummyLogger,
});
const consumerParams = {
    'metadata.broker.list': [testConfig.kafka.hosts],
    'group.id': 'test-consumer-group-ingestion',
    // we manage stored offsets based on the highest
    // contiguous offset fully processed by a worker, so
    // disabling automatic offset store is needed
    'enable.auto.offset.store': false,
    // this function is called periodically based on
    // auto-commit of stored offsets
};
const consumer = new kafka.KafkaConsumer(consumerParams, {});

function setZookeeperInitState(ingestionReader, cb) {
    const path = `${ingestionReader.bucketInitPath}/isStatusComplete`;
    async.series([
        next => zkClient.mkdirp(path, next),
        next => zkClient.setData(path, Buffer.from('true'),
            -1, next),
    ], cb);
}

function checkEntryInQueue(kafkaEntries, expectedEntries, done) {
    // 2 entries per object, but the master key is filtered
    assert.strictEqual(kafkaEntries.length, expectedEntries.length);

    const retrievedEntries = kafkaEntries.map(entry => JSON.parse(entry.value));
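The test above disables 'enable.auto.offset.store' because backbeat tracks the highest contiguous offset fully processed by a worker itself. A hedged sketch of that manual-store pattern using node-rdkafka's offsetsStore (the processing call is a stand-in, not backbeat's code):

consumer.on('data', message => {
    processMessage(message); // stand-in for the worker's actual processing
    // Store offset + 1 only once the message is fully processed; the periodic
    // auto-commit of stored offsets then never commits past unprocessed work.
    consumer.offsetsStore([{
        topic: message.topic,
        partition: message.partition,
        offset: message.offset + 1,
    }]);
});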
github Optum / knack / packages / knack-consumer / index.js
return new Promise((resolve, reject) => {
		consumer = new Kafka.KafkaConsumer(config.consumerConfig, config.topicConfig);
		consumer.on('ready', () => {
			consumer.subscribe(topics);
			if (flowMode) {
				// Flowing mode
				// consume messages as soon as they are available
				consumer.consume();
			} else {
				// Non-flowing mode
				taskTimer = new TaskTimer(NON_FLOW_MODE_INTERVAL_MS);
				taskTimer.on('tick', () => {
					consumer.consume(NON_FLOW_MODE_MSG_NUM);
				});
				taskTimer.start();
			}

			log.info(`subscribed to topics: ${topics.join(', ')} with flowMode: ${flowMode || false}`);
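The knack consumer switches between the two consumption modes node-rdkafka supports: flowing (consume() with no arguments) and non-flowing (consume(n) called on a timer). The same non-flowing loop can be sketched without the TaskTimer dependency; the interval and batch size below are illustrative:

const INTERVAL_MS = 100;
const BATCH_SIZE = 10;

consumer.on('ready', () => {
    consumer.subscribe(['my-topic']); // placeholder topic
    setInterval(() => {
        // Pull up to BATCH_SIZE messages; they are still delivered
        // through the 'data' event, just in controlled batches.
        consumer.consume(BATCH_SIZE);
    }, INTERVAL_MS);
});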
github waldophotos / kafka-avro / lib / kafka-consumer.js
Consumer.prototype.getConsumer = Promise.method(function (opts, topicOpts) {
  if (!opts['metadata.broker.list']) {
    opts['metadata.broker.list'] = this.kafkaBrokerUrl;
  }

  log.info('getConsumer() :: Starting Consumer with opts', {opts});

  const consumer = new kafka.KafkaConsumer(opts, topicOpts);

  this._consumers.push(consumer);

  consumer.on('disconnect', function (args) {
    log.warn('getConsumer() :: Consumer disconnected. args', {args});
  });

  consumer.on('error', function (err) {
    log.error({err}, 'getConsumer() :: Consumer Error event fired');
  });

  // hack node-rdkafka
  consumer.__kafkaAvro_on = consumer.on;
  consumer.on = this._onWrapper.bind(this, consumer);

  return consumer;
});
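The "hack" at the end rebinds consumer.on so that kafka-avro can intercept 'data' subscriptions and hand listeners a decoded Avro payload. The wrapper itself is not shown in the excerpt; the sketch below is an assumption about its shape based on the rebinding, not kafka-avro's actual implementation:

// Hypothetical wrapper: forward non-'data' events untouched, decode 'data' payloads.
Consumer.prototype._onWrapper = function (consumer, eventName, listener) {
    if (eventName !== 'data') {
        return consumer.__kafkaAvro_on(eventName, listener);
    }
    return consumer.__kafkaAvro_on('data', message => {
        const decoded = this.deserialize(message.value); // stand-in decode step
        listener(message, decoded);
    });
};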
github scality / backbeat / tests / functional / metadataIngestion / ingestion.js
next => {
    kafkaConsumer = new kafka.KafkaConsumer(testKafkaConfig);
    return next();
},
next => {
github AliwareMQ / aliware-kafka-demos / kafka-nodejs-demo / vpc-ssl / consumer.js
const Kafka = require('node-rdkafka');
const config = require('./setting');
console.log(Kafka.features);
console.log(Kafka.librdkafkaVersion);
console.log(config);

var consumer = new Kafka.KafkaConsumer({
    /* 'debug': 'all', */
    'api.version.request': 'true',
    'bootstrap.servers': config['bootstrap_servers'],
    'security.protocol': 'sasl_ssl',
    'ssl.ca.location': './ca-cert',
    'sasl.mechanisms': 'PLAIN',
    'message.max.bytes': 32000,
    'fetch.max.bytes': 32000,
    'fetch.message.max.bytes': 32000,
    'max.partition.fetch.bytes': 32000,
    'sasl.username': config['sasl_plain_username'],
    'sasl.password': config['sasl_plain_password'],
    'group.id': config['consumer_id']
});
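With a SASL_SSL setup like this, authentication and TLS failures only surface after connect(), through events. A minimal sketch of the wiring (the topic key in config is an assumption about setting.js):

consumer.connect();

consumer.on('ready', () => {
    console.log('consumer ready');
    consumer.subscribe([config['topic_name']]); // assumed key in setting.js
    consumer.consume();
});

// Broker, auth, and TLS errors are reported via 'event.error'.
consumer.on('event.error', err => {
    console.error('consumer error:', err);
});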
github joway / node-kfk / src / consumer.ts
        if (err.code === ErrorCode.ERR__ASSIGN_PARTITIONS) {
          this.consumer.assign(assignment)
          let rebalanceLog = 'consumer rebalance : '
          for (const assign of assignment) {
            rebalanceLog += `{topic ${assign.topic}, partition: ${assign.partition}} `
          }
          this.logger.info(rebalanceLog)
        } else if (err.code === ErrorCode.ERR__REVOKE_PARTITIONS) {
          this.consumer.unassign()
        } else {
          this.logger.error(err)
        }
      }
    }

    this.consumer = new Kafka.KafkaConsumer(conf, topicConf)

    this.debug = options.debug === undefined ? false : options.debug
    this.logger = winston.createLogger({
      level: this.debug ? 'debug' : 'info',
      format: winston.format.simple(),
      transports: [new winston.transports.Console()],
    })
    this.logger.debug(`debug mode : ${this.debug}`)
  }
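The assign/unassign branching above is the body of a rebalance callback; node-rdkafka registers it through the rebalance_cb property of the consumer config. A sketch of that wiring in plain JavaScript (config values are illustrative):

const conf = {
    'metadata.broker.list': 'localhost:9092', // illustrative
    'group.id': 'example-group',
    'rebalance_cb': function (err, assignment) {
        if (err.code === Kafka.CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
            this.assign(assignment);   // take ownership of assigned partitions
        } else if (err.code === Kafka.CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
            this.unassign();           // release partitions on revocation
        }
    },
};
const consumer = new Kafka.KafkaConsumer(conf, {});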
github scality / backbeat / lib / BackbeatConsumer.js
_initConsumer() {
    const consumerParams = {
        'metadata.broker.list': this._kafkaHosts,
        'group.id': this._groupId,
        'enable.auto.commit': this._autoCommit,
        'offset_commit_cb': this._onOffsetCommit.bind(this),
    };
    if (this._fromOffset !== undefined) {
        consumerParams['auto.offset.reset'] = this._fromOffset;
    }
    if (this._fetchMaxBytes !== undefined) {
        consumerParams['fetch.message.max.bytes'] = this._fetchMaxBytes;
    }
    this._consumer = new kafka.KafkaConsumer(consumerParams);
    this._consumer.connect();
    return this._consumer.once('ready', () => {
        this._consumerReady = true;
        this._checkIfReady();
    });
}
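The offset_commit_cb entry registers a callback that node-rdkafka invokes after each offset commit, with the commit error (if any) and the list of committed topic partitions. _onOffsetCommit itself is not shown; the handler below is an illustrative stand-in, not BackbeatConsumer's code:

// Illustrative commit handler with node-rdkafka's (err, topicPartitions) signature.
function _onOffsetCommit(err, topicPartitions) {
    if (err) {
        console.error('offset commit failed:', err);
        return;
    }
    topicPartitions.forEach(tp => {
        console.log(`committed ${tp.topic}[${tp.partition}] at offset ${tp.offset}`);
    });
}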
github waldyrfelix / rocketseat-kafka / consumer.js
function createConsumer(onData) {
    const consumer = new Kafka.KafkaConsumer({
        'bootstrap.servers': process.env.KAFKA_URI,
        'security.protocol': 'SASL_SSL',
        'sasl.mechanisms': 'PLAIN',
        'sasl.username': process.env.KAFKA_KEY,
        'sasl.password': process.env.KAFKA_SECRET,
        'group.id': process.env.KAFKA_CONSUMER_GROUP
    }, {
        'auto.offset.reset': 'earliest'
    });

    return new Promise((resolve, reject) => {
        consumer
            .on('ready', () => resolve(consumer))
            .on('data', onData);

        consumer.connect();
    });
}
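As written, the promise only ever resolves: a connection or authentication failure would leave the caller hanging. One way to make the wrapper fail fast is to also reject on 'event.error'; this is a variation on the excerpt, not part of the original project:

return new Promise((resolve, reject) => {
    consumer
        .on('ready', () => resolve(consumer))
        .on('event.error', err => reject(err))
        .on('data', onData);

    consumer.connect();
});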
github AraiEzzra / DDKCORE / kafka / consumer.js
const Kafka = require('node-rdkafka');
let config = process.env.NODE_ENV === 'development'
    ? require('../config/default')
    : process.env.NODE_ENV === 'testnet'
        ? require('../config/testnet')
        : require('../config/mainnet');

var consumer = new Kafka.KafkaConsumer({
    'group.id': 'kafka',
    'metadata.broker.list': config.kafka.host + ':' + config.kafka.port
  }, {});

module.exports = consumer;
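Because the module exports an unconnected consumer, the importing code is responsible for connecting, subscribing, and consuming. A hedged usage sketch (the require path and topic name are placeholders):

const consumer = require('./kafka/consumer'); // placeholder path

consumer.connect();

consumer.on('ready', () => {
    consumer.subscribe(['example-topic']); // placeholder topic
    consumer.consume();
});

consumer.on('data', message => {
    console.log(message.value.toString());
});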