* Valentin Genev
* Rajiv Mothilal
* Miguel de Barros
* Nikolay Anastasov
--------------
******/
'use strict'
const Producer = require('@mojaloop/central-services-stream').Util.Producer
const Logger = require('@mojaloop/central-services-logger')
const Uuid = require('uuid4')
const Utility = require('@mojaloop/central-services-shared').Util.Kafka
const Enum = require('@mojaloop/central-services-shared').Enum
const Config = require('../../../src/lib/config')
const TransferState = Enum.Transfers.TransferState
const TransferInternalState = Enum.Transfers.TransferInternalState
const TransferEventType = Enum.Events.Event.Type
const TransferEventAction = Enum.Events.Event.Action
const amount = parseFloat(Number(Math.floor(Math.random() * 100 * 100) / 100 + 100).toFixed(2)) // random decimal amount between 100.00 and 199.99
const expiration = new Date((new Date()).getTime() + (24 * 60 * 60 * 1000)) // tomorrow
const Time = require('@mojaloop/central-services-shared').Util.Time
const transfer = {
transferId: Uuid(),
payerFsp: 'dfsp1',
payeeFsp: 'dfsp2',
amount: {
currency: 'USD',
amount
},
ilpPacket: 'AYIBgQAAAAAAAASwNGxldmVsb25lLmRmc3AxLm1lci45T2RTOF81MDdqUUZERmZlakgyOVc4bXFmNEpLMHlGTFGCAUBQU0svMS4wCk5vbmNlOiB1SXlweUYzY3pYSXBFdzVVc05TYWh3CkVuY3J5cHRpb246IG5vbmUKUGF5bWVudC1JZDogMTMyMzZhM2ItOGZhOC00MTYzLTg0NDctNGMzZWQzZGE5OGE3CgpDb250ZW50LUxlbmd0aDogMTM1CkNvbnRlbnQtVHlwZTogYXBwbGljYXRpb24vanNvbgpTZW5kZXItSWRlbnRpZmllcjogOTI4MDYzOTEKCiJ7XCJmZWVcIjowLFwidHJhbnNmZXJDb2RlXCI6XCJpbnZvaWNlXCIsXCJkZWJpdE5hbWVcIjpcImFsaWNlIGNvb3BlclwiLFwiY3JlZGl0TmFtZVwiOlwibWVyIGNoYW50XCIsXCJkZWJpdElkZW50aWZpZXJcIjpcIjkyODA2MzkxXCJ9IgA',
await PositionService.changeParticipantPosition(transferInfo.participantCurrencyId, isReversal, transferInfo.amount, transferStateChange)
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
}
} else if (eventType === Enum.Events.Event.Type.POSITION && [Enum.Events.Event.Action.REJECT, Enum.Events.Event.Action.ABORT].includes(action)) {
Logger.info(Utility.breadcrumb(location, { path: action }))
const transferInfo = await TransferService.getTransferInfoToChangePosition(transferId, Enum.Accounts.TransferParticipantRoleType.PAYER_DFSP, Enum.Accounts.LedgerEntryType.PRINCIPLE_VALUE)
let transferStateId
if (action === Enum.Events.Event.Action.REJECT) {
Logger.info(Utility.breadcrumb(location, `receivedReject--${actionLetter}5`))
transferStateId = Enum.Transfers.TransferInternalState.ABORTED_REJECTED
} else { // action === Enum.Events.Event.Action.ABORT
Logger.info(Utility.breadcrumb(location, `receivedError--${actionLetter}5`))
transferStateId = Enum.Transfers.TransferInternalState.ABORTED_ERROR
}
const isReversal = true
const transferStateChange = {
transferId: transferInfo.transferId,
transferStateId,
reason: transferInfo.reason
}
await PositionService.changeParticipantPosition(transferInfo.participantCurrencyId, isReversal, transferInfo.amount, transferStateChange)
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
} else if (eventType === Enum.Events.Event.Type.POSITION && [Enum.Events.Event.Action.TIMEOUT_RESERVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {
Logger.info(Utility.breadcrumb(location, { path: 'timeout' }))
span.setTags({ transactionId: transferId })
const transferInfo = await TransferService.getTransferInfoToChangePosition(transferId, Enum.Accounts.TransferParticipantRoleType.PAYER_DFSP, Enum.Accounts.LedgerEntryType.PRINCIPLE_VALUE)
if (result[i].bulkTransferId === null) { // regular transfer
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
// event & type set above when `const metadata` is initialized to NOTIFICATION / TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
} else { // individual transfer from a bulk
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
message.metadata.event.type = Enum.Events.Event.Type.BULK_PROCESSING
message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.BULK_PROCESSING, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
}
} catch (err) {
const fspiopError = ErrorHandler.Factory.reformatFSPIOPError(err)
const state = new EventSdk.EventStateMetadata(EventSdk.EventStatusType.failed, fspiopError.apiErrorCode.code, fspiopError.apiErrorCode.message)
await span.error(fspiopError, state)
await span.finish(fspiopError.message, state)
throw fspiopError
} finally {
if (!span.isFinished) {
await span.finish()
}
}
const individualTransfersStream = IndividualTransferModel.find({ messageId }).cursor()
// enable async/await operations for the stream
const streamReader = AwaitifyStream.createReader(individualTransfersStream)
let doc
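// For each individual transfer read from the stream, copy the bulk-level payerFsp, payeeFsp
// and expiration onto the individual payload and rename transferAmount to amount, so the record
// resembles a single-transfer prepare payload before it is associated with the bulk and produced.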
while ((doc = await streamReader.readAsync()) !== null) {
const individualTransfer = doc.payload
individualTransfer.payerFsp = payload.payerFsp
individualTransfer.payeeFsp = payload.payeeFsp
individualTransfer.amount = individualTransfer.transferAmount
delete individualTransfer.transferAmount
individualTransfer.expiration = payload.expiration
const bulkTransferAssociationRecord = {
transferId: individualTransfer.transferId,
bulkTransferId: payload.bulkTransferId,
bulkProcessingStateId: Enum.Transfers.BulkProcessingState.RECEIVED
}
await BulkTransferService.bulkTransferAssociationCreate(bulkTransferAssociationRecord)
const dataUri = encodePayload(JSON.stringify(individualTransfer), headers[Enum.Http.Headers.GENERAL.CONTENT_TYPE.value])
const metadata = Util.StreamingProtocol.createMetadataWithCorrelatedEventState(message.value.metadata.event.id, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.BULK_PREPARE, Enum.Events.EventStatus.SUCCESS.status, Enum.Events.EventStatus.SUCCESS.code, Enum.Events.EventStatus.SUCCESS.description)
const msg = {
value: Util.StreamingProtocol.createMessage(messageId, headers[Enum.Http.Headers.FSPIOP.DESTINATION], headers[Enum.Http.Headers.FSPIOP.SOURCE], metadata, headers, dataUri)
}
params = { message: msg, kafkaTopic, consumer: Consumer, producer: Producer }
const eventDetail = { functionality: Enum.Events.Event.Type.PREPARE, action: Enum.Events.Event.Action.BULK_PREPARE }
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
}
} catch (err) { // TODO: handle individual transfers streaming error
Logger.info(Util.breadcrumb(location, `callbackErrorInternal2--${actionLetter}6`))
Logger.error(Util.breadcrumb(location, 'notImplemented'))
return true
* the reason why it occurred. Instead, the acquired bulkTransferInfo.bulkTransferId
* needs to be compared to the original bulkTransferId currently being processed and an error
* needs to be thrown when these do not match. The underlying problem is that as part of
* the reused chain prepare-position-bulk-processing / fulfil-position-bulk-processing,
* the bulkTransferId is not being transmitted!
*
* TODO: Add bulkTransferId field to messages from PrepareHandler and PositionHandler
* and compare the transmitted bulkTransferId to the below bulkTransferInfo.bulkTransferId
* (not in scope of #967)
*/
const bulkTransferInfo = await BulkTransferService.getBulkTransferState(transferId) // TODO: This is not ideal, as the transferId might be from another bulk
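// A minimal, hypothetical sketch of the check described above (explicitly out of scope of #967),
// assuming the prepare and position handlers were extended to carry bulkTransferId on the message:
// if (payload.bulkTransferId && payload.bulkTransferId !== bulkTransferInfo.bulkTransferId) {
//   throw ErrorHandler.Factory.createInternalServerFSPIOPError('bulkTransferId of the message does not match the bulk transfer being processed')
// }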
let criteriaState, incompleteBulkState, completedBulkState, bulkTransferState, processingStateId, errorCode, errorDescription
let produceNotification = false
if ([Enum.Transfers.BulkTransferState.RECEIVED, Enum.Transfers.BulkTransferState.PENDING_PREPARE].includes(bulkTransferInfo.bulkTransferStateId)) {
criteriaState = Enum.Transfers.BulkTransferState.RECEIVED
incompleteBulkState = Enum.Transfers.BulkTransferState.PENDING_PREPARE
completedBulkState = Enum.Transfers.BulkTransferState.ACCEPTED
if (action === Enum.Events.Event.Action.PREPARE_DUPLICATE && state.status === Enum.Events.EventState.ERROR) {
processingStateId = Enum.Transfers.BulkProcessingState.RECEIVED_DUPLICATE
errorCode = payload.errorInformation.errorCode
errorDescription = payload.errorInformation.errorDescription
} else if (action === Enum.Events.Event.Action.BULK_PREPARE && state.status === Enum.Events.EventState.ERROR) {
processingStateId = Enum.Transfers.BulkProcessingState.RECEIVED_INVALID
errorCode = payload.errorInformation.errorCode
errorDescription = payload.errorInformation.errorDescription
} else if (action === Enum.Events.Event.Action.BULK_PREPARE && state.status === Enum.Events.EventState.SUCCESS) {
processingStateId = Enum.Transfers.BulkProcessingState.ACCEPTED
} else if ([Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {
incompleteBulkState = Enum.Transfers.BulkTransferState.EXPIRING
completedBulkState = Enum.Transfers.BulkTransferState.COMPLETED
headers,
message
}, EventSdk.AuditEventAction.start)
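// Route each expired transfer by its internal state and by whether it belongs to a bulk:
// EXPIRED_PREPARED transfers are notified to the payer via the notification (or bulk-processing)
// topic with the message re-addressed from the switch, while RESERVED_TIMEOUT transfers are sent
// to the position topic (keyed by the payer FSP) so the position handler can process the timeout.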
if (result[i].bulkTransferId === null) { // regular transfer
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
// event & type set above when `const metadata` is initialized to NOTIFICATION / TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
} else { // individual transfer from a bulk
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
message.metadata.event.type = Enum.Events.Event.Type.BULK_PROCESSING
message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.BULK_PROCESSING, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
}
} catch (err) {
const fspiopError = ErrorHandler.Factory.reformatFSPIOPError(err)
const state = new EventSdk.EventStateMetadata(EventSdk.EventStatusType.failed, fspiopError.apiErrorCode.code, fspiopError.apiErrorCode.message)
await span.error(fspiopError, state)
await span.finish(fspiopError.message, state)
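/**
 * Handles an admin transfer request whose transferId has already been seen: looks up the latest
 * transfer state change and logs whether the original request is already finalized
 * (COMMITTED / ABORTED_REJECTED) or still in progress (RECEIVED_PREPARE / RESERVED), then
 * resolves to true so the caller can acknowledge the duplicate.
 */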
const transferExists = async (payload, transferId) => {
Logger.info(`AdminTransferHandler::${payload.action}::dupcheck::existsMatching`)
const currentTransferState = await TransferService.getTransferStateChange(transferId)
if (!currentTransferState || !currentTransferState.enumeration) {
Logger.info(`AdminTransferHandler::${payload.action}::dupcheck::existsMatching::transfer state not found`)
} else {
const transferStateEnum = currentTransferState.enumeration
if (transferStateEnum === Enum.Transfers.TransferState.COMMITTED || transferStateEnum === Enum.Transfers.TransferInternalState.ABORTED_REJECTED) {
Logger.info(`AdminTransferHandler::${payload.action}::dupcheck::existsMatching::request already finalized`)
} else if (transferStateEnum === Enum.Transfers.TransferInternalState.RECEIVED_PREPARE || transferStateEnum === Enum.Transfers.TransferState.RESERVED) {
Logger.info(`AdminTransferHandler::${payload.action}::dupcheck::existsMatching::previous request still in progress, do nothing`)
}
}
return true
}
extension = extensions.map(ext => {
return { key: ext.key, value: ext.value }
})
} else {
extension = extensions.filter(ext => {
return ext.isFulfilment
}).map(ext => {
return { key: ext.key, value: ext.value }
})
}
}
if (extension && extension.length > 0) {
result.extensionList = { extension }
}
}
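// Include the individual transfer in the payee view only when its processing state is consistent
// with the overall bulk state: ACCEPTED members of an ACCEPTED bulk, or members of a COMPLETED
// bulk that have already moved past the PROCESSING state.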
if ((bulkTransfer.bulkTransferStateId === Enum.Transfers.BulkTransferState.ACCEPTED &&
transfer.bulkProcessingStateId === Enum.Transfers.BulkProcessingState.ACCEPTED) ||
(bulkTransfer.bulkTransferStateId === Enum.Transfers.BulkTransferState.COMPLETED &&
transfer.bulkProcessingStateId > Enum.Transfers.BulkProcessingState.PROCESSING)) {
payeeIndividualTransfers.push(result)
}
return resolve(result)
})
}))
})
.transacting(trx)
await knex('transferFulfilment')
.insert({
transferId,
ilpFulfilment: 0,
completedDate: transactionTimestamp,
isValid: 1,
settlementWindowId: null,
createdDate: transactionTimestamp
})
.transacting(trx)
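// Only RECORD_FUNDS_IN and RECORD_FUNDS_OUT_COMMIT actions may be committed here: the transfer
// is moved to the COMMITTED state and only the credit leg of the position is updated
// (drUpdated: false, crUpdated: true); any other admin action is rejected.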
if (payload.action === Enum.Transfers.AdminTransferAction.RECORD_FUNDS_IN ||
payload.action === Enum.Transfers.AdminTransferAction.RECORD_FUNDS_OUT_COMMIT) {
const param1 = {
transferId: payload.transferId,
transferStateId: enums.transferState.COMMITTED,
reason: payload.reason,
createdDate: transactionTimestamp,
drUpdated: false,
crUpdated: true
}
await TransferFacade.transferStateAndPositionUpdate(param1, enums, trx)
} else {
throw new Error('Action not allowed for reconciliationTransferCommit')
}
if (doCommit) {
await trx.commit()
}
'use strict'
/**
* @module src/handlers/transfers
*/
const Logger = require('@mojaloop/central-services-logger')
const EventSdk = require('@mojaloop/event-sdk')
const TransferService = require('../../domain/transfer')
const Util = require('@mojaloop/central-services-shared').Util
const Kafka = require('@mojaloop/central-services-shared').Util.Kafka
const Producer = require('@mojaloop/central-services-stream').Util.Producer
const Consumer = require('@mojaloop/central-services-stream').Util.Consumer
const Validator = require('./validator')
const Enum = require('@mojaloop/central-services-shared').Enum
const TransferState = Enum.Transfers.TransferState
const TransferEventType = Enum.Events.Event.Type
const TransferEventAction = Enum.Events.Event.Action
const TransferObjectTransform = require('../../domain/transfer/transform')
const Metrics = require('@mojaloop/central-services-metrics')
const Config = require('../../lib/config')
const decodePayload = require('@mojaloop/central-services-shared').Util.StreamingProtocol.decodePayload
const Comparators = require('@mojaloop/central-services-shared').Util.Comparators
const ErrorHandler = require('@mojaloop/central-services-error-handling')
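// Default flags passed to Kafka.proceed throughout the handlers below; they are assumed to mean:
// commit the consumed message, mark error callbacks as originating from the switch, and route the
// produced message to the FSPIOP destination header, respectively.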
const consumerCommit = true
const fromSwitch = true
const toDestination = true
/**
* @function TransferPrepareHandler
*