// Excerpt: the preceding if-condition (selecting which consumer API to use) is omitted here.
  log('Using HighLevelConsumer');
  consumer = new kafka.HighLevelConsumer(
    client,
    [{ topic: 'test' }],
    {
      fromOffset: false,
      groupId: uuid()
    }
  );
} else {
  log('Using ConsumerGroup');
  consumer = new kafka.ConsumerGroup(
    {
      host: process.env.ZOOKEEPER,
      fromOffset: 'latest',
      groupId: uuid()
    },
    ['test']
  );
}
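HighLevelConsumer is the older ZooKeeper-based API and was removed in kafka-node 4.x; on current versions only the ConsumerGroup branch applies, and it can talk to the brokers directly via kafkaHost instead of going through ZooKeeper. A minimal sketch of that, with the broker address and group id assumed rather than taken from the excerpt:

const kafka = require('kafka-node');

const consumer = new kafka.ConsumerGroup(
  {
    kafkaHost: 'localhost:9092', // assumed broker address
    groupId: 'example-group',    // assumed group id
    fromOffset: 'latest'
  },
  ['test']
);

consumer.on('message', function(message) {
  console.log(message.topic, message.value);
});
consumer.on('error', function(err) {
  console.error(err);
});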
consumer.on('error', function(err) {
  log('Error occurred in consumer:', err);
  var span = instana.currentSpan();
  span.disableAutoEnd();
  // simulating asynchronous follow up steps with setTimeout and request-promise
  setTimeout(function() {
    request('http://127.0.0.1:' + agentPort).finally(function() {
      // end the manually-managed span once the async follow-up completes
      span.end();
    });
  });
});
// Excerpt: the connection settings of this options object (kafkaHost, groupId) are omitted.
var options = {
  autoCommit: true,
  autoCommitIntervalMs: 5000,
  sessionTimeout: 15000,
  fetchMaxBytes: 10 * 1024 * 1024, // 10 MB
  // An array of partition assignment protocols ordered by preference;
  // 'roundrobin' or 'range' strings select the built-ins (custom assignment protocols can also be passed in)
  protocol: ["roundrobin"],
  // Offsets to use for new groups; other options are 'earliest' or 'none'
  // ('none' will emit an error if no offsets were saved); equivalent to the Java client's auto.offset.reset
  fromOffset: "latest",
  // How to recover from an OutOfRangeOffset error (where the saved offset is past server retention);
  // accepts the same values as fromOffset
  outOfRangeOffset: "earliest"
};
var consumerGroup = new kafka.ConsumerGroup(options, [
  envVariables.KAFKA_TOPICS_FIRENOC_CREATE,
  envVariables.KAFKA_TOPICS_FIRENOC_UPDATE,
  envVariables.KAFKA_TOPICS_FIRENOC_WORKFLOW,
  envVariables.KAFKA_TOPICS_RECEIPT_CREATE
]);
console.log("Consumer group created");
consumerGroup.on("message", function(message) {
console.log("consumer-topic", message.topic);
console.log("consumer-value", JSON.parse(message.value));
const value = JSON.parse(message.value);
let payloads = [];
const topic = envVariables.KAFKA_TOPICS_NOTIFICATION;
let smsRequest = {};
// Excerpt from a createTopics callback: bail out if the topics could not be created.
/* istanbul ignore next */
if (err) {
  this.logger.error("Unable to create topics!", topics, err);
  return reject(err);
}
const consumerOptions = Object.assign({
  id: "default-kafka-consumer",
  host: this.opts.host,
  groupId: this.nodeID,
  fromOffset: "latest",
  encoding: "buffer",
}, this.opts.consumer);

const Kafka = require("kafka-node");
this.consumer = new Kafka.ConsumerGroup(consumerOptions, topics);

/* istanbul ignore next */
this.consumer.on("error", e => {
  this.logger.error("Kafka Consumer error", e.message);
  this.logger.debug(e);
  if (!this.connected)
    reject(e);
});

this.consumer.on("message", message => {
  const topic = message.topic;
  const cmd = topic.split(".")[1];
  this.incomingMessage(cmd, message.value);
});
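Since consumerOptions sets encoding: "buffer", message.value arrives as a Node.js Buffer rather than a decoded string, so incomingMessage receives raw bytes. A sketch of the deserialization step, assuming a JSON wire format (an assumption; the transporter's actual format is not shown in this excerpt):

// message.value is a Buffer because of encoding: "buffer" above.
// Assuming the payload is JSON (not confirmed by the excerpt):
const payload = JSON.parse(message.value.toString("utf8"));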
// Excerpt: the earlier fields of this options object (kafkaHost, groupId, etc.) are omitted.
let consumerOptions = {
  // Offsets to use for new groups; other options are 'earliest' or 'none'
  // ('none' will emit an error if no offsets were saved);
  // equivalent to the Java client's auto.offset.reset.
  // From the Kafka documentation, this controls what to do when there is
  // no initial offset in ZooKeeper or the offset is out of range:
  // * smallest : automatically reset the offset to the smallest offset
  // * largest : automatically reset the offset to the largest offset
  // * anything else: throw an exception to the consumer
  fromOffset: 'earliest'
}
if (this.config.sslEnable) {
  consumerOptions = Object.assign({ssl: this.config.sslOptions[0]}, consumerOptions)
}
let topics = [topic]
let consumerId = 'kafka-logagent-consumer' + uuid.v4()
var consumerGroup = new ConsumerGroup(Object.assign({id: consumerId}, consumerOptions), topics)
this.config.consumerGroup = consumerGroup
consumerGroup.on('error', onError)
consumerGroup.on('message', function (message) {
  self.eventEmitter.emit('data.raw', message.value, {
    sourceName: 'kafka ' + kafkaHost,
    topic: message.topic,
    partition: message.partition,
    offset: message.offset
  })
})
consoleLogger.log('start consumer')
}
const kafka = require('kafka-node');
const morgan = require('morgan');
const app = express();
app.use(morgan('combined'));
const modName = process.env.MOD_NAME;
const funcHandler = process.env.FUNC_HANDLER;
const timeout = Number(process.env.FUNC_TIMEOUT || '180');
const funcPort = Number(process.env.FUNC_PORT || '8080');
const kafkaSvc = _.get(process.env, 'KUBELESS_KAFKA_SVC', 'kafka');
const kafkaNamespace = _.get(process.env, 'KUBELESS_KAFKA_NAMESPACE', 'kubeless');
const kafkaHost = `${kafkaSvc}.${kafkaNamespace}:9092`;
const groupId = `${modName}${funcHandler}`;
const kafkaConsumer = new kafka.ConsumerGroup({
kafkaHost,
groupId,
}, [process.env.TOPIC_NAME]);
const statistics = helper.prepareStatistics('method', client);
helper.routeLivenessProbe(app);
helper.routeMetrics(app, client);
const functionCallingCode = `
  try {
    Promise.resolve(module.exports.${funcHandler}(message)).then(() => {
      end();
    }).catch((err) => {
      // Catch asynchronous errors
      handleError(err);
    });
  } catch (err) {
    // Catch synchronous errors
    handleError(err);
  }
`;
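The generated snippet calls end() and handleError() helpers that the runtime defines elsewhere; they are not part of this excerpt. A hypothetical minimal shape, purely for illustration:

// Hypothetical helpers, for illustration only; the real runtime's versions are not shown here.
function end() {
  // mark the invocation as finished, e.g. clear the timeout and record latency
}
function handleError(err) {
  // report the failure, e.g. log it and increment an error counter
  console.error(err);
}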
consumerApi.getMessagesFromTopic = (kafkaHostURI, topicName, mainWindow, partitionId) => {
// Send back test data
const buffer = new MessageBuffer(1000);
let hasData = false;
let lastChecked = Date.now();
logger.log('consumerAPI getMessagesFromTopic "topicName":', topicName);
const consumerGroup = new kafka.ConsumerGroup(
{
kafkaHost: kafkaHostURI,
groupId: 'testingLab2',
fromOffset: 'latest',
outOfRangeOffset: 'latest',
},
topicName,
);
consumerGroup.connect();
consumerGroup
  .on('message', (message) => {
    const formattedMessage = {
      value: message.value.toString('utf8'),
      topicName: message.topic,
      partitionId: message.partition,
      // remaining fields and the rest of this handler are omitted in the excerpt
    };
  });
var async = require('async')
var ConsumerGroup = require('kafka-node').ConsumerGroup
var consumerOptions = {
  kafkaHost: 'localhost:9092',
  groupId: 'logagentGroup',
  sessionTimeout: 15000,
  protocol: ['roundrobin'],
  fromOffset: 'earliest',
  ssl: {rejectUnauthorized: false}
}

var topics = ['test']
var consumerGroup = new ConsumerGroup(Object.assign({id: 'logagent-consumer-example'}, consumerOptions), topics)
consumerGroup.on('error', onError)
consumerGroup.on('message', onMessage)
function onError (error) {
  console.error(error)
  console.error(error.stack)
}

function onMessage (message) {
  console.log('%s read msg="%s" Topic="%s" Partition=%s Offset=%d', this.client.clientId, message.value, message.topic, message.partition, message.offset)
}

process.once('SIGINT', function () {
  async.each([consumerGroup], function (consumer, callback) {
    consumer.close(true, callback)
  })
})
var async = require('async')
var ConsumerGroup = require('kafka-node').ConsumerGroup
var consumerOptions = {
  kafkaHost: 'localhost:9092',
  groupId: 'ExampleTestGroup',
  sessionTimeout: 15000,
  protocol: ['roundrobin'],
  fromOffset: 'earliest'
}

var topics = ['test']
var consumerGroup = new ConsumerGroup(Object.assign({id: 'logagent-consumer-example'}, consumerOptions), topics)
consumerGroup.on('error', onError)
consumerGroup.on('message', onMessage)
function onError (error) {
  console.error(error)
  console.error(error.stack)
}

function onMessage (message) {
  console.log('%s read msg %s Topic="%s" Partition=%s Offset=%d', this.client.clientId, message.value, message.topic, message.partition, message.offset)
}

process.once('SIGINT', function () {
  async.each([consumerGroup], function (consumer, callback) {
    consumer.close(true, callback)
  })
})
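In both shutdown handlers the consumer is closed with consumer.close(true, callback): in kafka-node's close(force, cb), passing true commits the current offsets before closing, so messages that were already processed are not redelivered after a restart.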
// Excerpt: the earlier fields of this options object (connection settings, group id, etc.) are omitted.
const options = {
  fetchMinBytes: 1,
  fetchMaxWaitMs: 100,
  autoCommit: autoCommit,
  autoCommitIntervalMs: 5000,
  connectRetryOptions: this.connectDirectlyToBroker ? DEFAULT_RETRY_OPTIONS : undefined,
  encoding: "buffer",
  keyEncoding: "buffer"
};
// overwrite default options
_options = _options || {};
Object.keys(_options).forEach(key => options[key] = _options[key]);

this._autoCommitEnabled = options.autoCommit;

this.consumer = new ConsumerGroup(options, topics);
this.client = this.consumer.client;
this.isConsumer = true;
this.pause();

this.targetTopics = topics;

this._getLogger().info("starting ConsumerGroup for topic: " + JSON.stringify(topics));

this._attachConsumerListeners(dontListenForSIGINT);
}
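The wrapper starts the consumer paused (this.pause() above, presumably delegating to the underlying consumer) so no messages are delivered before its listeners are attached. kafka-node consumers expose pause() and resume() for this kind of back-pressure; a minimal sketch, with all names assumed rather than taken from the wrapper:

const { ConsumerGroup } = require('kafka-node');

// Assumed connection settings, for illustration only.
const consumer = new ConsumerGroup(
  { kafkaHost: 'localhost:9092', groupId: 'sketch-group' },
  ['test']
);

consumer.on('message', (message) => {
  consumer.pause(); // stop fetching while a slow handler runs
  handleSlowly(message) // hypothetical async handler
    .then(() => consumer.resume()); // start fetching again once done
});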