// Snippet: cron task that registers a Kafka prepare consumer for any newly added participant
// (module-level requires such as Logger, DAO, Utility and Consumer are omitted from this fragment).
const registerNewPrepareHandlersTask = async () => {
let handlerType = 'prepare'
Logger.debug(`lib.Kafka.Cron.registerNewHandlers running task for handlerType: ${handlerType}...`)
let participantNamesList = null
try {
participantNamesList = await DAO.retrieveAllParticipants()
} catch (err) {
Logger.error(`lib.Kafka.Cron.registerNewHandlers is unable to retrieve new participants: ${err}`)
}
if (participantNamesList && Array.isArray(participantNamesList)) {
for (let participantName of participantNamesList) {
      // let's check to see if there is a Prepare Consumer for this participant
let kafkaPrepareTopic = Utility.transformAccountToTopicName(participantName, TransferEventType.TRANSFER, TransferEventAction.PREPARE)
let isConsumerForPrepareTopicExist = false
try {
if (Consumer.getConsumer(kafkaPrepareTopic)) {
isConsumerForPrepareTopicExist = true
}
} catch (err) {
Logger.debug(`lib.Kafka.Cron.registerNewHandlers - participant ${participantName} for topic ${kafkaPrepareTopic} does not exist: ${err}`)
isConsumerForPrepareTopicExist = false
}
      if (!isConsumerForPrepareTopicExist) {
        // no consumer exists yet for this participant's prepare topic;
        // the handler registration call itself is omitted from this snippet
        Logger.info(`lib.Kafka.Cron.registerNewHandlers - prepare handler needs to be registered for participant: ${participantName}`)
      }
    }
  }
}
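// Usage sketch (not part of the original source): the task above would typically be run on a
// schedule so that newly on-boarded participants get a prepare consumer without a restart.
// The `cron` package and the 30-second schedule are assumptions for illustration only.
const { CronJob } = require('cron')
const registerNewPrepareHandlersJob = new CronJob('*/30 * * * * *', async () => {
  await registerNewPrepareHandlersTask()
})
registerNewPrepareHandlersJob.start()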
// Snippet: Kafka consumer handler for bulk transfer prepare messages. The opening below is a
// reconstruction; the handler signature, `location` object and metrics histogram are assumptions
// based on how they are used in the body, and module-level requires (Logger, Util, Kafka, Metrics,
// Config, Enum, Validator, BulkTransferService, ...) are omitted from this fragment.
const bulkPrepare = async (error, messages) => {
  const location = { module: 'BulkPrepareHandler', method: '', path: '' }
  const histTimerEnd = Metrics.getHistogram('transfer_bulk_prepare', 'Consume a prepare bulk transfer message from the kafka topic and process it accordingly', ['success', 'fspId']).startTimer()
  if (error) {
    throw error
  }
  let message = {}
  try {
    if (Array.isArray(messages)) {
      message = messages[0]
    } else {
      message = messages
    }
const messageId = message.value.id
const payload = message.value.content.payload
const headers = message.value.content.headers
const action = message.value.metadata.event.action
const bulkTransferId = payload.bulkTransferId
const kafkaTopic = message.topic
let consumer
Logger.info(Util.breadcrumb(location, { method: 'bulkPrepare' }))
try {
consumer = Kafka.Consumer.getConsumer(kafkaTopic)
} catch (err) {
Logger.info(`No consumer found for topic ${kafkaTopic}`)
Logger.error(err)
histTimerEnd({ success: false, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
}
const actionLetter = action === TransferEventAction.BULK_PREPARE ? Enum.actionLetter.bulkPrepare : Enum.actionLetter.unknown
let params = { message, kafkaTopic, consumer }
Logger.info(Util.breadcrumb(location, { path: 'dupCheck' }))
const { isDuplicateId, isResend } = await BulkTransferService.checkDuplicate(bulkTransferId, payload.hash)
if (isDuplicateId && isResend) { // TODO: handle resend
Logger.info(Util.breadcrumb(location, `resend`))
Logger.info(Util.breadcrumb(location, `notImplemented`))
return true
}
if (isDuplicateId && !isResend) { // TODO: handle modified request
Logger.error(Util.breadcrumb(location, `callbackErrorModified1--${actionLetter}4`))
Logger.info(Util.breadcrumb(location, `notImplemented`))
return true
}
let { isValid, reasons, payerParticipantId, payeeParticipantId } = await Validator.validateBulkTransfer(payload, headers)
if (isValid) {
Logger.info(Util.breadcrumb(location, { path: 'isValid' }))
try {
Logger.info(Util.breadcrumb(location, `saveBulkTransfer`))
const participants = { payerParticipantId, payeeParticipantId }
/* const state = */ await BulkTransferService.bulkPrepare(payload, participants)
} catch (err) { // TODO: handle insert error
Logger.info(Util.breadcrumb(location, `callbackErrorInternal1--${actionLetter}5`))
Logger.info(Util.breadcrumb(location, `notImplemented`))
return true
}
} else { // TODO: handle validation failure
Logger.error(Util.breadcrumb(location, { path: 'validationFailed' }))
try {
Logger.info(Util.breadcrumb(location, `saveInvalidRequest`))
await BulkTransferService.bulkPrepare(payload, { payerParticipantId, payeeParticipantId }, reasons.toString(), false)
} catch (err) { // TODO: handle insert error
Logger.info(Util.breadcrumb(location, `callbackErrorInternal2--${actionLetter}7`))
Logger.info(Util.breadcrumb(location, `notImplemented`))
return true
}
Logger.info(Util.breadcrumb(location, `callbackErrorGeneric--${actionLetter}8`))
Logger.info(Util.breadcrumb(location, `notImplemented`))
return true // TODO: store invalid bulk transfer to database and produce callback notification to payer
}
} catch (err) {
Logger.error(`${Util.breadcrumb(location)}::${err.message}--BP0`)
histTimerEnd({ success: false, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
throw err
}
}
// Separate fragment: error-handling tail of a promise chain that rejects transfers;
// the chain itself is truncated in this snippet.
  .catch(e => {
    Logger.error('Error rejecting transfers', e)
  })
}
// Snippet: Kafka producer helper that reuses a cached producer per topic. The opening below is a
// reconstruction; the function name and parameters are assumptions based on how they are used in
// the body, and module-level state (listOfProducers, Logger, Producer, ErrorHandler) is omitted.
const produceMessage = async (messageProtocol, topicConf, config) => {
  try {
    let producer
    if (listOfProducers[topicConf.topicName]) {
producer = listOfProducers[topicConf.topicName]
} else {
Logger.info('Producer::start::topic=' + topicConf.topicName)
producer = new Producer(config)
Logger.info('Producer::connect::start')
await producer.connect()
Logger.info('Producer::connect::end')
listOfProducers[topicConf.topicName] = producer
}
Logger.info(`Producer.sendMessage::messageProtocol:'${JSON.stringify(messageProtocol)}'`)
await producer.sendMessage(messageProtocol, topicConf)
Logger.info('Producer::end')
return true
} catch (err) {
Logger.error(err)
throw ErrorHandler.Factory.createInternalServerFSPIOPError(`Producer error has occurred for ${topicConf.topicName}`, err)
}
}
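// Usage sketch (not from the original source): how the producer helper above might be called.
// The messageProtocol fields, topic name and `kafkaProducerConfig` below are illustrative
// assumptions, not the exact Mojaloop message format or configuration.
const exampleMessageProtocol = {
  id: 'b51ec534-ee48-4575-b6a9-ead2955b8069',
  from: 'payerfsp',
  to: 'payeefsp',
  type: 'application/json',
  content: { headers: {}, payload: {} },
  metadata: { event: { type: 'prepare', action: 'prepare' } }
}
const exampleTopicConf = { topicName: 'topic-transfer-prepare' }
produceMessage(exampleMessageProtocol, exampleTopicConf, kafkaProducerConfig)
  .then(() => Logger.info('example message produced'))
  .catch(err => Logger.error(err))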
// Snippet: verify the SMTP connection at startup; only report readiness when verification succeeds.
transporter.verify(function (error, success) {
  if (error) {
    Logger.error(error)
  } else {
    Logger.info('Server is ready to take our messages')
  }
})
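// Context sketch (assumption, not from the original source): the `transporter` verified above
// would be created with nodemailer before the verify call; host and credentials are placeholders.
const nodemailer = require('nodemailer')
const transporter = nodemailer.createTransport({
  host: 'smtp.example.com',
  port: 587,
  secure: false,
  auth: { user: 'smtp-user@example.com', pass: 'smtp-password' }
})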
// Snippet: registers the Kafka consumer handler for bulk transfer prepare messages.
const registerBulkPrepareHandler = async () => {
try {
const bulkPrepareHandler = {
command: bulkPrepare,
topicName: Util.transformGeneralTopicName(TransferEventType.BULK, TransferEventAction.PREPARE),
config: Util.getKafkaConfig(Util.ENUMS.CONSUMER, TransferEventType.BULK.toUpperCase(), TransferEventAction.PREPARE.toUpperCase())
}
bulkPrepareHandler.config.rdkafkaConf['client.id'] = bulkPrepareHandler.topicName
await Kafka.Consumer.createHandler(bulkPrepareHandler.topicName, bulkPrepareHandler.config, bulkPrepareHandler.command)
return true
} catch (e) {
Logger.error(e)
throw e
}
}
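// Usage sketch (not from the original source): wiring the registration into service startup.
// The wrapper name `registerAllHandlers` is an assumption for illustration.
const registerAllHandlers = async () => {
  await registerBulkPrepareHandler()
  return true
}
registerAllHandlers().catch(err => Logger.error(err))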
// Separate fragment: error-handling tail of a promise chain that processes a TransferExecuted
// event; the chain itself is truncated in this snippet.
  .catch(err => {
    Logger.error('Error handling TransferExecuted event', err)
  }))
}
// Separate fragment: error-handling tail of a promise chain that rejects tokens;
// the chain itself is truncated in this snippet.
  .catch(e => {
    Logger.error('Error rejecting tokens', e)
  })
}