'use strict'
const Producer = require('@mojaloop/central-services-stream').Util.Producer
const Logger = require('@mojaloop/central-services-logger')
const Uuid = require('uuid4')
const Utility = require('@mojaloop/central-services-shared').Util.Kafka
const Enum = require('@mojaloop/central-services-shared').Enum
const Config = require('../../../src/lib/config')
const TransferState = Enum.Transfers.TransferState
const TransferInternalState = Enum.Transfers.TransferInternalState
const TransferEventType = Enum.Events.Event.Type
const TransferEventAction = Enum.Events.Event.Action
const amount = parseFloat(Number(Math.floor(Math.random() * 100 * 100) / 100 + 100).toFixed(2)) // decimal amount between 100.00 and 199.99
const expiration = new Date((new Date()).getTime() + (24 * 60 * 60 * 1000)) // tomorrow
const Time = require('@mojaloop/central-services-shared').Util.Time
const transfer = {
  transferId: Uuid(),
  payerFsp: 'dfsp1',
  payeeFsp: 'dfsp2',
  amount: {
    currency: 'USD',
    amount
  },
  ilpPacket: 'AYIBgQAAAAAAAASwNGxldmVsb25lLmRmc3AxLm1lci45T2RTOF81MDdqUUZERmZlakgyOVc4bXFmNEpLMHlGTFGCAUBQU0svMS4wCk5vbmNlOiB1SXlweUYzY3pYSXBFdzVVc05TYWh3CkVuY3J5cHRpb246IG5vbmUKUGF5bWVudC1JZDogMTMyMzZhM2ItOGZhOC00MTYzLTg0NDctNGMzZWQzZGE5OGE3CgpDb250ZW50LUxlbmd0aDogMTM1CkNvbnRlbnQtVHlwZTogYXBwbGljYXRpb24vanNvbgpTZW5kZXItSWRlbnRpZmllcjogOTI4MDYzOTEKCiJ7XCJmZWVcIjowLFwidHJhbnNmZXJDb2RlXCI6XCJpbnZvaWNlXCIsXCJkZWJpdE5hbWVcIjpcImFsaWNlIGNvb3BlclwiLFwiY3JlZGl0TmFtZVwiOlwibWVyIGNoYW50XCIsXCJkZWJpdElkZW50aWZpZXJcIjpcIjkyODA2MzkxXCJ9IgA',
  condition: '47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU',
  expiration: Time.getUTCString(expiration),
  extensionList: {
    // The original extension entries were lost in this excerpt; these key/value
    // pairs are placeholders in the standard FSPIOP extension shape.
    extension: [
      {
        key: 'key1',
        value: 'value1'
      }
    ]
  }
}

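// A minimal sketch of publishing the fixture above as a transfer prepare
// message. The messageProtocol shape and the function name are illustrative
// (ours, not from the source); the produceGeneralMessage call mirrors the one
// in the admin excerpt further below.
const publishPrepareFixture = async () => {
  const messageProtocol = {
    id: transfer.transferId,
    from: transfer.payerFsp,
    to: transfer.payeeFsp,
    type: 'application/json',
    content: { headers: {}, payload: transfer },
    metadata: {
      event: {
        id: Uuid(),
        type: TransferEventType.PREPARE,
        action: TransferEventAction.PREPARE,
        createdAt: new Date(),
        state: { status: 'success', code: 0 }
      }
    }
  }
  return Utility.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, TransferEventType.TRANSFER, TransferEventAction.PREPARE, messageProtocol, Enum.Events.EventStatus.SUCCESS)
}
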
'use strict'
/**
* @module src/models/transfer/facade/
*/
const Db = require('../../lib/db')
const Enum = require('@mojaloop/central-services-shared').Enum
const TransferEventAction = Enum.Events.Event.Action
const TransferInternalState = Enum.Transfers.TransferInternalState
const TransferExtensionModel = require('./transferExtension')
const ParticipantFacade = require('../participant/facade')
const Time = require('@mojaloop/central-services-shared').Util.Time
const MLNumber = require('@mojaloop/ml-number')
const Config = require('../../lib/config')
const _ = require('lodash')
const ErrorHandler = require('@mojaloop/central-services-error-handling')
const Logger = require('@mojaloop/central-services-logger')
// Alphabetically ordered list of error texts used below. Only UnsupportedActionText
// was declared in this excerpt; the account-related texts are referenced further
// down and are restored here with assumed wording.
const AccountInactiveErrorText = 'Account is inactive' // assumed wording
const AccountNotSettlementTypeErrorText = 'Account is not SETTLEMENT type' // assumed wording
const ParticipantAccountCurrencyMismatchText = 'Provided account does not match the specified participant or currency' // assumed wording
const UnsupportedActionText = 'Unsupported action'
const getById = async (id) => {
  try {
    /** @namespace Db.transfer **/
    // ... (the transfer query body is elided in this excerpt)
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}

// Excerpt from the bulk fulfil handler. The function signature and the
// consumerCommit / fromSwitch flags are restored to make the fragment
// readable; they are assumed to match the handler module. Helpers such as
// location, Util, Comparators, BulkTransferService, Consumer and Kafka come
// from that module's imports, elided in this excerpt.
const consumerCommit = true // assumed handler default
const fromSwitch = true // assumed handler default
const bulkFulfil = async (error, messages) => {
  if (error) {
    throw ErrorHandler.Factory.reformatFSPIOPError(error)
  }
  let message = {}
  try {
    if (Array.isArray(messages)) {
      message = messages[0]
    } else {
      message = messages
    }
    const messageId = message.value.id
    const payload = message.value.content.payload
    const headers = message.value.content.headers
    const action = message.value.metadata.event.action
    const bulkTransferId = payload.bulkTransferId
    const kafkaTopic = message.topic
    Logger.info(Util.breadcrumb(location, { method: 'bulkFulfil' }))
    const actionLetter = action === Enum.Events.Event.Action.BULK_COMMIT ? Enum.Events.ActionLetter.bulkCommit : Enum.Events.ActionLetter.unknown
    const params = { message, kafkaTopic, decodedPayload: payload, consumer: Consumer, producer: Producer }
    Logger.info(Util.breadcrumb(location, { path: 'dupCheck' }))
    const { hasDuplicateId, hasDuplicateHash } = await Comparators.duplicateCheckComparator(bulkTransferId, payload.hash, BulkTransferService.getBulkTransferFulfilmentDuplicateCheck, BulkTransferService.saveBulkTransferFulfilmentDuplicateCheck)
    if (hasDuplicateId && hasDuplicateHash) { // TODO: handle resend :: GET /bulkTransfer
      Logger.info(Util.breadcrumb(location, `resend--${actionLetter}1`))
      Logger.error(Util.breadcrumb(location, 'notImplemented'))
      return true
    }
    if (hasDuplicateId && !hasDuplicateHash) {
      Logger.error(Util.breadcrumb(location, `callbackErrorModified--${actionLetter}2`))
      const fspiopError = ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.MODIFIED_REQUEST)
      const eventDetail = { functionality: Enum.Events.Event.Type.NOTIFICATION, action }
      await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, fspiopError: fspiopError.toApiErrorObject(Config.ERROR_HANDLING), eventDetail, fromSwitch })
      throw fspiopError
    }
    // ... (the non-duplicate fulfil path is elided in this excerpt)
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
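// Illustrative contract of duplicateCheckComparator as used above (a sketch,
// not the library implementation): an id hit with a matching hash is a resend;
// an id hit with a different hash is a modified request.
const duplicateCheckSketch = async (id, hash, getDuplicateDataFunc, saveDuplicateDataFunc) => {
  const record = await getDuplicateDataFunc(id)
  if (!record) {
    // First sighting of this id: persist the hash for future comparisons
    await saveDuplicateDataFunc(id, hash)
    return { hasDuplicateId: false, hasDuplicateHash: false }
  }
  return { hasDuplicateId: true, hasDuplicateHash: record.hash === hash }
}
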
// Excerpt from the admin transfer handler: validates the matched settlement
// account before producing a record-funds message. The enclosing function in
// the source is elided; this wrapper name and parameter list are illustrative.
// createRecordFundsMessageProtocol, setPayerPayeeFundsInOut, Kafka and
// KafkaProducer come from that module, elided in this excerpt.
const recordFunds = async ({ name, currency, id, isAccountActive, enums, transferId, payload, params }) => {
  try {
    const accounts = await ParticipantFacade.getAllAccountsByNameAndCurrency(name, currency, isAccountActive)
    // Locate the account whose participantCurrencyId matches the requested id
    const accountMatched = accounts[accounts.map(account => account.participantCurrencyId).findIndex(i => i === id)]
    if (!accountMatched) {
      throw ErrorHandler.Factory.createInternalServerFSPIOPError(ParticipantAccountCurrencyMismatchText)
    } else if (!accountMatched.accountIsActive) {
      throw ErrorHandler.Factory.createInternalServerFSPIOPError(AccountInactiveErrorText)
    } else if (accountMatched.ledgerAccountTypeId !== enums.ledgerAccountType.SETTLEMENT) {
      throw ErrorHandler.Factory.createInternalServerFSPIOPError(AccountNotSettlementTypeErrorText)
    }
    transferId && (payload.transferId = transferId)
    const messageProtocol = createRecordFundsMessageProtocol(setPayerPayeeFundsInOut(name, payload, enums))
    messageProtocol.metadata.request = {
      params: params,
      enums: enums
    }
    return await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, KafkaProducer, Enum.Events.Event.Type.ADMIN, Enum.Events.Event.Action.TRANSFER, messageProtocol, Enum.Events.EventStatus.SUCCESS)
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
// Excerpt from the bulk processing handler; the signature is restored to make
// the fragment readable (assumed to match the handler module). decodePayload,
// location, Util, Consumer and Kafka come from that module's imports, elided here.
const bulkProcessing = async (error, messages) => {
  if (error) {
    throw ErrorHandler.Factory.reformatFSPIOPError(error)
  }
  let message = {}
  try {
    if (Array.isArray(messages)) {
      message = messages[0]
    } else {
      message = messages
    }
    const payload = decodePayload(message.value.content.payload)
    const headers = message.value.content.headers
    const eventType = message.value.metadata.event.type
    const action = message.value.metadata.event.action
    const state = message.value.metadata.event.state
    const transferId = payload.transferId || (message.value.content.uriParams && message.value.content.uriParams.id)
    const kafkaTopic = message.topic
    Logger.info(Util.breadcrumb(location, { method: 'bulkProcessing' }))
    const actionLetter = action === Enum.Events.Event.Action.BULK_PREPARE ? Enum.Events.ActionLetter.bulkPrepare
      : (action === Enum.Events.Event.Action.BULK_COMMIT ? Enum.Events.ActionLetter.bulkCommit
          : (action === Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED ? Enum.Events.ActionLetter.bulkTimeoutReceived
              : (action === Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED ? Enum.Events.ActionLetter.bulkTimeoutReserved
                  : (action === Enum.Events.Event.Action.PREPARE_DUPLICATE ? Enum.Events.ActionLetter.bulkPrepareDuplicate
                      : (action === Enum.Events.Event.Action.FULFIL_DUPLICATE ? Enum.Events.ActionLetter.bulkFulfilDuplicate
                          : Enum.Events.ActionLetter.unknown)))))
    const params = { message, kafkaTopic, decodedPayload: payload, consumer: Consumer, producer: Producer }
    const eventDetail = { functionality: Enum.Events.Event.Type.NOTIFICATION, action }
    /**
     * Acquiring bulk transfer info by transferId below needs to be improved.
     * Currently, if an individual transfer fulfil is attempted as part of another
     * bulk, bulkTransferInfo refers to the original bulkTransferId where that
     * individual transfer was initially added. This leads to an error which can
     * be hard to trace back and whose cause is hard to determine. Instead, the
     * acquired bulkTransferInfo.bulkTransferId needs to be compared to the
     * bulkTransferId currently being processed, and an error needs to be thrown
     * when these do not match. The underlying problem is that, as part of the
     * reused chain prepare-position-bulk-processing / fulfil-position-bulk-processing,
     * ...
     */
    // ... (the remaining bulkProcessing logic is elided in this excerpt)
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
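// A sketch of the guard suggested in the comment above: compare the acquired
// bulkTransferInfo.bulkTransferId with the bulkTransferId being processed and
// fail fast on a mismatch (the helper name and error text are ours).
const assertSameBulkTransfer = (bulkTransferInfo, bulkTransferId) => {
  if (bulkTransferInfo.bulkTransferId !== bulkTransferId) {
    throw ErrorHandler.Factory.createInternalServerFSPIOPError('Individual transfer does not belong to the currently processed bulk transfer')
  }
}
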
const registerPositionHandler = async () => {
  try {
    const positionHandler = {
      command: positions,
      topicName: Kafka.transformGeneralTopicName(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, Enum.Events.Event.Type.POSITION, Enum.Events.Event.Action.PREPARE),
      config: Kafka.getKafkaConfig(Config.KAFKA_CONFIG, Enum.Kafka.Config.CONSUMER, Enum.Events.Event.Type.TRANSFER.toUpperCase(), Enum.Events.Event.Action.POSITION.toUpperCase())
    }
    positionHandler.config.rdkafkaConf['client.id'] = `${positionHandler.config.rdkafkaConf['client.id']}-${Uuid()}`
    await Consumer.createHandler(positionHandler.topicName, positionHandler.config, positionHandler.command)
    return true
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
const registerBulkProcessingHandler = async () => {
  try {
    const bulkProcessingHandler = {
      command: bulkProcessing,
      topicName: Kafka.transformGeneralTopicName(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, Enum.Events.Event.Type.BULK, Enum.Events.Event.Action.PROCESSING),
      config: Kafka.getKafkaConfig(Config.KAFKA_CONFIG, Enum.Kafka.Config.CONSUMER, Enum.Events.Event.Type.BULK.toUpperCase(), Enum.Events.Event.Action.PROCESSING.toUpperCase())
    }
    bulkProcessingHandler.config.rdkafkaConf['client.id'] = bulkProcessingHandler.topicName
    await Consumer.createHandler(bulkProcessingHandler.topicName, bulkProcessingHandler.config, bulkProcessingHandler.command)
    return true
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
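// A sketch of wiring both registrations at service startup, following the
// pattern of the handlers above (the aggregate function name is ours).
const registerAllHandlers = async () => {
  try {
    await registerPositionHandler()
    await registerBulkProcessingHandler()
    return true
  } catch (err) {
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}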
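// The nested actionLetter ternary in the bulkProcessing excerpt above can be
// flattened into a lookup map; a sketch (not part of the source):
const getBulkActionLetter = (action) => ({
  [Enum.Events.Event.Action.BULK_PREPARE]: Enum.Events.ActionLetter.bulkPrepare,
  [Enum.Events.Event.Action.BULK_COMMIT]: Enum.Events.ActionLetter.bulkCommit,
  [Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED]: Enum.Events.ActionLetter.bulkTimeoutReceived,
  [Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED]: Enum.Events.ActionLetter.bulkTimeoutReserved,
  [Enum.Events.Event.Action.PREPARE_DUPLICATE]: Enum.Events.ActionLetter.bulkPrepareDuplicate,
  [Enum.Events.Event.Action.FULFIL_DUPLICATE]: Enum.Events.ActionLetter.bulkFulfilDuplicate
}[action] || Enum.Events.ActionLetter.unknown)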