import logger from "../config/logger";
import kafka from "kafka-node";
import envVariables from "../EnvironmentVariables";

const Producer = kafka.Producer;

// The environment-based client selection was left commented out in the source:
// if (process.env.NODE_ENV === "development") {
//   client = new kafka.Client();
//   console.log("local - ");
// } else {
//   client = new kafka.KafkaClient({ kafkaHost: envVariables.KAFKA_BROKER_HOST });
//   console.log("cloud - ");
// }
const client = new kafka.KafkaClient({ kafkaHost: envVariables.KAFKA_BROKER_HOST });

const producer = new Producer(client);

producer.on("ready", function() {
  logger.info("Producer is ready");
});

producer.on("error", function(err) {
  logger.error("Producer is in error state");
  logger.error(err.stack || err);
});
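// A hedged follow-up sketch, not part of the original snippet: once the producer
// above emits "ready", topics can be created explicitly instead of relying on the
// broker's auto-creation. The topic name is a placeholder.
producer.on("ready", function() {
  producer.createTopics(["example-topic"], true, function(err, data) {
    if (err) {
      logger.error(err);
      return;
    }
    logger.info("Topics created: " + JSON.stringify(data));
  });
});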
  private static getKafkaClient(config: IConfig): KafkaClient {
    // TODO: confirm the correct way to use the Kafka seed URLs, and review the other constructor options here
    const client = new KafkaClient({ kafkaHost: config.kafkaSeedUrls[0], connectTimeout: 6000, requestTimeout: 6000 });
    return client;
  }
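// A hedged usage sketch, not part of the class above: a client built this way is
// typically handed to a kafka-node Consumer. The broker address and topic name
// below are placeholder assumptions.
const kafka = require("kafka-node");

const client = new kafka.KafkaClient({ kafkaHost: "localhost:9092", connectTimeout: 6000, requestTimeout: 6000 });
const consumer = new kafka.Consumer(
  client,
  [{ topic: "example-topic", partition: 0 }],
  { autoCommit: true }
);

consumer.on("message", function(message) {
  console.log(message.value);
});
consumer.on("error", function(err) {
  console.error(err);
});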
function init() {
  client = new kafka.KafkaClient({
    kafkaHost: envHelper.sunbird_kafka_host,
    maxAsyncRequests: 100
  });
  producer = new kafka.HighLevelProducer(client);
  producer.on('ready', function () {
    console.log('Kafka Producer is connected and ready.');
  });
  producer.on('error', function (error) {
    console.error("Errored at kafka", error);
  });
}
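// A minimal sketch of sending through the HighLevelProducer created in init()
// (topic name and message shape are assumptions, not from the original source):
// unlike the plain Producer, the HighLevelProducer chooses the partition itself,
// so the payload only needs a topic and messages.
function send(message) {
  const payloads = [{ topic: "example-topic", messages: JSON.stringify(message) }];
  producer.send(payloads, function (err, data) {
    if (err) {
      console.error("Errored at kafka", err);
      return;
    }
    console.log("Message sent", data);
  });
}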
import kafka from "kafka-node";
import envVariables from "../envVariables";

const Producer = kafka.Producer;
let client;

if (process.env.NODE_ENV === "development") {
  // kafka.Client connects through ZooKeeper (localhost:2181 by default) and is
  // deprecated in newer kafka-node releases; KafkaClient talks to the broker directly.
  client = new kafka.Client();
  console.log("local Producer - ");
} else {
  client = new kafka.KafkaClient({ kafkaHost: envVariables.KAFKA_BROKER_HOST });
  console.log("cloud Producer - ");
}

const producer = new Producer(client);

producer.on("ready", function() {
  console.log("Producer is ready");
});

producer.on("error", function(err) {
  console.error("Producer is in error state");
  console.error(err);
});

export default producer;
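// A hedged usage sketch of the exported producer (the import path and topic name
// are assumptions): kafka-node's Producer.send takes an array of payloads and a callback.
import producer from "./kafkaProducer";

const payloads = [
  { topic: "example-topic", messages: ["hello kafka"], partition: 0 }
];

producer.send(payloads, function(err, data) {
  if (err) {
    console.error("Failed to send message", err);
    return;
  }
  console.log("Send result", data);
});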
OutputKafka.prototype.start = function(callback) {
  if (!this.kafkaHost) return;

  var client = new kafka.KafkaClient({ kafkaHost: this.kafkaHost });
  var Producer = kafka.Producer;

  var options = {};
  if (this.partition) {
    // Use the keyed partitioner so messages with the same key land on the same partition.
    options = { partitionerType: Producer.PARTITIONER_TYPES["keyed"] };
  }

  this.producer = new Producer(client, options);

  this.producer.on('error', (err) => {
    logger.warning('Kafka Client Error:', err);
    this.error_count++;
  });

  this.producer.on('ready', () => {
    console.log('Kafka Client Ready!');
    this.error_count = 0;
  });

  if (this.check_interval) {
    // ... (the original snippet is truncated here; a periodic health check follows)
  }
};
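// A hedged sketch, not from the original plugin: with the keyed partitioner
// configured above, kafka-node routes a payload to a partition based on its key.
// Topic, key and message values below are placeholders, and `producer` stands for
// the Producer instance built in start().
var kafka = require("kafka-node");
var KeyedMessage = kafka.KeyedMessage;

var keyedPayloads = [
  { topic: "example-topic", key: "user-42", messages: new KeyedMessage("user-42", "event payload") }
];

producer.send(keyedPayloads, function(err, result) {
  if (err) logger.warning('Kafka send error:', err);
});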
  init() {
    assert(this.config.kafkaHost, '[egg-kafka] kafkaHost is required on config');
    this.client = new kafka.KafkaClient(this.config);
    this.producers();
  }
  constructor(subscription) {
    this.subscription = subscription;
    this.subscription.options.requestTimeout = subscription.options.requestTimeout || 10000;
    this.subscription.options.connectTimeout = subscription.options.connectTimeout || 10000;
    this.client = new kafka.KafkaClient(this.subscription.client);
    this.offset = new kafka.Offset(this.client);
  }
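// A hedged usage sketch (broker address and topic name are placeholders): the
// kafka.Offset wrapper created in the constructor above is typically used to fetch
// the latest offsets per partition, so a subscriber can resume from the right position.
const kafka = require("kafka-node");
const client = new kafka.KafkaClient({ kafkaHost: "localhost:9092" });
const offset = new kafka.Offset(client);

offset.fetchLatestOffsets(["example-topic"], function(err, offsets) {
  if (err) {
    console.error(err);
    return;
  }
  // offsets is keyed by topic, then partition, e.g. offsets["example-topic"][0]
  console.log(offsets);
});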
  constructor(subscriptionModel) {
    super(subscriptionModel);
    subscriptionModel.options.requestTimeout = subscriptionModel.options.requestTimeout || 10000;
    subscriptionModel.options.connectTimeout = subscriptionModel.options.connectTimeout || 10000;
    this.options = subscriptionModel.options;
    this.client = new kafka_node_1.KafkaClient(subscriptionModel.client);
    this.offset = new kafka_node_1.Offset(this.client);
  }
  receiveMessage() {