exports.consume = function (topic, cb) {
  var options = {
    autoCommit: false,
    fromBeginning: true,
    fetchMaxWaitMs: 1000,
    fetchMaxBytes: 1024 * 1024
  };
  // Read from the topic's two default partitions.
  var topics = [
    { topic: topic, partition: 0 },
    { topic: topic, partition: 1 }
  ];
  var consumer = new kafka.Consumer(kafkaClient, topics, options);
  var offset = new kafka.Offset(kafkaClient);
  consumer.on('message', cb);
  consumer.on('error', function (err) {
    console.log(err);
    throw err;
  });
  // If the stored offset is out of range, fetch the valid boundary
  // offsets for the partition and rewind the consumer to the earliest one.
  consumer.on('offsetOutOfRange', function (topic) {
    topic.maxNum = 2; // ask for both boundary offsets: latest and earliest
    offset.fetch([topic], function (err, offsets) {
      var min = Math.min.apply(null, offsets[topic.topic][topic.partition]);
      consumer.setOffset(topic.topic, topic.partition, min);
    });
  });
};
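A short sketch of how this helper might be called, assuming the module exports it as shown and creates the kafkaClient it references (the path, topic name, and handler are illustrative):

// Hypothetical caller of the consume helper above.
var consumerModule = require('./consumer'); // path is an assumption
consumerModule.consume('pageviews', function (message) {
  // kafka-node delivers { topic, value, offset, partition, ... }
  console.log(message.topic, message.partition, message.offset, message.value);
});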
// Module setup (implied by the identifiers used below).
var express = require('express');
var kafka = require('kafka-node');
var cassandra = require('cassandra-driver');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server);

app.use(express.static(__dirname + '/public'));
app.use('/bower_components', express.static(__dirname + '/bower_components'));
app.get('/', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});
app.get('/historical', function (req, res) {
  res.sendFile(__dirname + '/public/historical.html');
});

// Kafka consumer config
var zkserver = 'localhost:2181'; // ZooKeeper address (kafka-node's Client connects through ZooKeeper)
var kafka_client_id = 'reporting-layer';
var kafkaClient = new kafka.Client(zkserver, kafka_client_id);
var consumer = new kafka.Consumer(
  kafkaClient,
  [
    { topic: 'bounceRate' },
    { topic: 'averageTime' },
    { topic: 'usersPerCategory' },
    { topic: 'hitsByMarketingChannels' },
    { topic: 'pagesByBounceRate' }
  ],
  { autoCommit: true }
);

// Cassandra configuration
var client = new cassandra.Client({ contactPoints: ['localhost'], keyspace: 'rajsarka' });
// Define the action to take when a websocket connection is established.
io.on('connection', function (socket) {
  console.log('A client is connected.');
  // Fetch conversion-summary data from Cassandra on request.
  socket.on('fetch-conversionSummaryChartData', function (query) {
    console.log('executing query: ' + query);
    client.execute(query, function (err, result) {
      if (err) {
        console.log(err);
        return; // don't process rows after a failed query
      }
      console.log('processing data');
      // The original handler is cut off here; pushing the rows back to the
      // requesting client is one plausible continuation (event name assumed).
      socket.emit('conversionSummaryChartData', result.rows);
    });
  });
});
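The browser side of this exchange is not shown; a minimal counterpart, assuming the emit sketched above and the standard socket.io client (query text and event names are assumptions):

// Hypothetical browser-side counterpart.
var socket = io();
socket.emit('fetch-conversionSummaryChartData',
  'SELECT * FROM conversion_summary'); // table name is an assumption
socket.on('conversionSummaryChartData', function (rows) {
  console.log('received ' + rows.length + ' rows');
});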
from(topic: string, offset?: number): Observable {
  let client = this.getClient();
  let consumer = new kafka.Consumer(client, [{
    topic
  }]/* TODO */);
  return new Observable(o => {
    consumer.on('message', m => {
      // '__done' is used as an in-band end-of-stream marker.
      if (m.value === '__done') {
        return o.complete();
      }
      o.next(m.value);
    });
    consumer.on('error', err => o.error(err));
    // Teardown: close the Kafka consumer when the subscriber unsubscribes.
    return () => consumer.close(() => {});
  });
}
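A sketch of consuming the stream that from() returns; the transport instance gk is hypothetical:

// Hypothetical usage: values flow until a '__done' marker completes the stream.
const sub = gk.from('someTopic').subscribe(
  v => console.log('got', v),
  err => console.error(err),
  () => console.log('stream finished')
);
// Unsubscribing runs the teardown above, which closes the Kafka consumer:
// sub.unsubscribe();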
to(topic: string, iO: Observable): Subscription {
  // (implementation elided in this snippet; see the sketch below the test)
}

// Test for to(); `kit` is the suite's test helper, and `client` and `gr`
// come from setup code that is not shown.
kit('publishes to a kafka topic', (done) => {
  let topic = 'gustavTest-publish';
  let consumer = new kafka.Consumer(client, [{
    topic
  }]);
  // Emit a single value shortly after subscription...
  let obs = new Observable(o => {
    setTimeout(() => o.next('hello'), 15);
  });
  gr.to(topic, obs);
  // ...and assert that the value comes back out of the topic.
  consumer.on('message', (message) => {
    expect(message.value).to.equal('hello');
    done();
  });
});
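The body of to() is elided above; a minimal sketch of such a publisher, assuming kafka-node's Producer API and the same getClient() helper that from() uses:

to(topic: string, iO: Observable): Subscription {
  // Sketch only: forward each value the observable emits to the topic.
  let producer = new kafka.Producer(this.getClient());
  return iO.subscribe(value => {
    // kafka-node batches sends as [{ topic, messages }]
    producer.send([{ topic, messages: value }], err => {
      if (err) { console.error(err); }
    });
  });
}

A production version would wait for the producer's 'ready' event before the first send and propagate send errors to the subscriber instead of logging them.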
var request = require('request-promise');
var kafka = require('kafka-node');
var uuid = require('uuid/v4');
// Assumption: log() uses printf-style formatting, as the debug module does.
var log = require('debug')('consumer');

var client = new kafka.Client(process.env.ZOOKEEPER + '/');
client.on('error', function (error) {
  log('Got a client error: %s', error);
});
var consumer;
if (process.env.CONSUMER_TYPE === 'plain') {
  log('Using Consumer');
  consumer = new kafka.Consumer(
    client,
    [
      { topic: 'test' }
    ],
    {
      fromOffset: false,
      groupId: uuid() // fresh group id each run, so no committed offsets apply
    }
  );
} else if (process.env.CONSUMER_TYPE === 'highLevel') {
  log('Using HighLevelConsumer');
  // Cut off in the original; completed to mirror the plain branch above.
  consumer = new kafka.HighLevelConsumer(
    client,
    [{ topic: 'test' }],
    { groupId: uuid() }
  );
}
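Whichever branch runs, the resulting consumer exposes the same events; a sketch of the common wiring (handler bodies are illustrative):

// Shared wiring for either consumer type.
consumer.on('message', function (message) {
  log('message on %s[%d] @ %d: %s',
    message.topic, message.partition, message.offset, message.value);
});
consumer.on('error', function (err) {
  log('Got a consumer error: %s', err);
});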
createConsumer() {
  // Resume from the last offset this subscription recorded;
  // fromOffset: true makes kafka-node honor the explicit offset.
  return new kafka.Consumer(
    this.client,
    [{
      topic: this.subscription.options.topic,
      offset: this.latestOffset
    }],
    {
      fromOffset: true
    }
  );
}
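How latestOffset stays current is not shown; one plausible companion, where the message handler records the next offset to resume from (names outside the snippet are assumptions):

// Hypothetical companion to createConsumer(): track the last offset seen
// so a re-created consumer resumes where the previous one stopped.
startConsuming() {
  const consumer = this.createConsumer();
  consumer.on('message', (message) => {
    this.latestOffset = message.offset + 1; // next offset to read
    this.handleMessage(message);            // hypothetical downstream handler
  });
}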
async getConsumer(topicId, config) {
  const client = await this.getKafkaClient(config);
  // One consumer group per topic; offsets are committed automatically.
  const consumer = new kafka_node_1.Consumer(client, [
    { topic: topicId },
  ], {
    groupId: topicId,
    autoCommit: true,
  });
  return consumer;
}
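A short usage sketch for getConsumer(); the topic id and handler are illustrative:

// Hypothetical usage of getConsumer().
const consumer = await this.getConsumer('events', config);
consumer.on('message', (message) => {
  console.log(message.topic, message.value);
});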
async createTopic(topicId, config) {
  // The snippet begins mid-method: a client derived from `config` and a
  // Promise supplying `resolve` are implied, so both are restored here.
  const client = await this.getKafkaClient(config) // assumed helper
  return new Promise((resolve) => {
    client.once('ready', () => {
      log.info('client is ready. creating consumer')
      // Attaching a consumer makes the broker auto-create the topic
      // (assuming auto.create.topics.enable is on broker-side).
      const consumer = new Consumer(
        client,
        [
          { topic: topicId }
        ]
      )
      resolve(consumer)
    })
  })
}
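And a sketch of calling createTopic(), closing the probe consumer once the topic exists (names are illustrative):

// Hypothetical usage: ensure the topic exists, then drop the probe consumer.
const probe = await this.createTopic('events', config);
probe.close(() => console.log('topic ensured; probe consumer closed'));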