const toTransfer = (t) => {
// TODO: Validate 't' to confirm whether it's from the DB transferReadModel or from the saveTransferPrepare
if (t.isTransferReadModel) {
Logger.debug('In aggregate transfer transform for isTransferReadModel')
return Util.omitNil(fromTransferReadModel(t)) // TODO: Remove this once the DB validation is done for 't'
} else if (t.isSaveTransferPrepared) {
Logger.debug('In aggregate transfer transform for isSaveTransferPrepared')
return Util.omitNil(fromSaveTransferPrepared(t)) // TODO: Remove this once the DB validation is done for 't'
} else if (t.savePayeeTransferResponseExecuted) {
Logger.debug('In aggregate transfer transform for isSavePayeeTransferResponseExecuted')
return Util.omitNil(fromSavePayeeTransferResponseExecuted(t)) // TODO: Remove this once the DB validation is done for 't'
} else throw ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.INTERNAL_SERVER_ERROR, `Unable to transform to transfer: ${t}`)
}
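// Illustrative sketch only, not part of the original source: one way the validation TODO
// above could be addressed before transforming 't'. It assumes exactly one of the shape
// flags already checked in toTransfer must be set; the helper name is hypothetical.
const validateTransferSource = (t) => {
  const shapeFlags = [t && t.isTransferReadModel, t && t.isSaveTransferPrepared, t && t.savePayeeTransferResponseExecuted]
  if (shapeFlags.filter(Boolean).length !== 1) {
    throw ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.INTERNAL_SERVER_ERROR, `Unable to transform to transfer: ${t}`)
  }
  return t
}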
criteriaState = null // debugging breakpoint line
return true
}
if (produceNotification) {
if (eventType === Enum.Events.Event.Type.BULK_PROCESSING && action === Enum.Events.Event.Action.BULK_PREPARE) {
Logger.info(Util.breadcrumb(location, `bulkPrepare--${actionLetter}2`))
const payeeBulkResponse = Object.assign({}, { messageId: message.value.id, headers }, getBulkTransferByIdResult.payeeBulkTransfer)
const payeeIndividualTransfers = payeeBulkResponse.individualTransferResults.filter(individualTransfer => {
return !individualTransfer.errorInformation
})
if (payeeIndividualTransfers.length) {
payeeBulkResponse.individualTransferResults = payeeIndividualTransfers
const BulkTransferResultModel = BulkTransferModels.getBulkTransferResultModel()
await (new BulkTransferResultModel(payeeBulkResponse)).save()
const payload = Util.omitNil({
bulkTransferId: payeeBulkResponse.bulkTransferId,
bulkQuoteId: getBulkTransferByIdResult.bulkQuoteId,
payerFsp: getBulkTransferByIdResult.payerFsp,
payeeFsp: getBulkTransferByIdResult.payeeFsp,
expiration: getBulkTransferByIdResult.expiration,
extensionList: payeeBulkResponse.extensionList
})
const metadata = Util.StreamingProtocol.createMetadataWithCorrelatedEvent(params.message.value.metadata.event.id, params.message.value.metadata.type, params.message.value.metadata.action, Enum.Events.EventStatus.SUCCESS)
params.message.value = Util.StreamingProtocol.createMessage(params.message.value.id, payeeBulkResponse.destination, payeeBulkResponse.headers[Enum.Http.Headers.FSPIOP.SOURCE], metadata, payeeBulkResponse.headers, payload)
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
} else {
// TODO: handle use case when no individual transfer has been accepted:
// Switch to finalized bulk state and notify payer with PUT /bulkTransfers/{id}
// const payerBulkResponse = Object.assign({}, { messageId: message.value.id, headers }, getBulkTransferByIdResult.payerBulkTransfer)
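// Illustrative sketch only, not part of the original source: one possible continuation of the
// TODO above for when every individual transfer carries errorInformation, mirroring the
// accepted-transfers branch; the payerBulkTransfer fields used here are assumptions.
// const payerPayload = Util.omitNil({
//   bulkTransferId: payerBulkResponse.bulkTransferId,
//   bulkTransferState: payerBulkResponse.bulkTransferState,
//   extensionList: payerBulkResponse.extensionList
// })
// const payerMetadata = Util.StreamingProtocol.createMetadataWithCorrelatedEvent(params.message.value.metadata.event.id, params.message.value.metadata.type, params.message.value.metadata.action, Enum.Events.EventStatus.SUCCESS)
// params.message.value = Util.StreamingProtocol.createMessage(params.message.value.id, payerBulkResponse.destination, payerBulkResponse.headers[Enum.Http.Headers.FSPIOP.SOURCE], payerMetadata, payerBulkResponse.headers, payerPayload)
// await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })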
const fromTransferAggregate = (t) => {
const cleanProperties = Util.omitNil({
transferId: t.transferId,
amount: formatAmount(t.amount),
// transferState: Util.omitNil(t.transferState),
transferState: t.transferState,
completedTimestamp: t.completedTimestamp,
ilpPacket: t.ilpPacket,
condition: t.condition,
fulfilment: t.fulfilment,
expiration: t.expirationDate,
extensionList: formatExtensionList(t.extensionList)
})
return Util.mergeAndOmitNil(Util.pick(t, transferProperties), cleanProperties)
}
exports.update = async (record) => {
const fields = {
transferId: record.transferId,
value: record.value
}
try {
return await Db.ilpPacket.update({ transferId: record.transferId }, Util.omitNil(fields))
} catch (err) {
throw ErrorHandler.Factory.reformatFSPIOPError(err)
}
}
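// Illustrative usage sketch, not part of the original source: the caller passes the transferId
// and the raw ILP packet value; Util.omitNil drops any undefined field before the DB update.
// The wrapper name and the rawIlpPacketValue argument are hypothetical.
const exampleIlpPacketUpdate = async (transferId, rawIlpPacketValue) => {
  return exports.update({ transferId, value: rawIlpPacketValue })
}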
const transformTransferToFulfil = (transfer) => {
try {
const result = {
completedTimestamp: transfer.completedTimestamp,
transferState: transfer.transferStateEnumeration
}
if (transfer.fulfilment !== '0') result.fulfilment = transfer.fulfilment
const extension = transformExtensionList(transfer.extensionList)
if (extension.length > 0) {
result.extensionList = { extension }
}
return Util.omitNil(result)
} catch (err) {
throw ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.INTERNAL_SERVER_ERROR, `Unable to transform to fulfil response: ${err}`)
}
}
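// Illustrative sketch, not part of the original source: a caller-side guard before building
// the fulfil response. The TRANSFER_ID_NOT_FOUND error code is an assumption about the
// available FSPIOP error enums; the wrapper name is hypothetical.
const toFulfilResponseOrThrow = (transfer) => {
  if (!transfer) {
    throw ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.TRANSFER_ID_NOT_FOUND, 'Transfer not found')
  }
  return transformTransferToFulfil(transfer)
}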
}
} else if (eventType === Enum.Events.Event.Type.BULK_PROCESSING && [Enum.Events.Event.Action.BULK_COMMIT, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {
Logger.info(Util.breadcrumb(location, `bulkFulfil--${actionLetter}3`))
const participants = await BulkTransferService.getParticipantsById(bulkTransferInfo.bulkTransferId)
const normalizedKeys = Object.keys(headers).reduce((keys, k) => { keys[k.toLowerCase()] = k; return keys }, {})
const payeeBulkResponseHeaders = Util.Headers.transformHeaders(headers, { httpMethod: headers[normalizedKeys[Enum.Http.Headers.FSPIOP.HTTP_METHOD]], sourceFsp: Enum.Http.Headers.FSPIOP.SWITCH.value, destinationFsp: participants.payeeFsp })
delete payeeBulkResponseHeaders[normalizedKeys[Enum.Http.Headers.FSPIOP.SIGNATURE]]
const payerBulkResponse = Object.assign({}, { messageId: message.value.id, headers: Util.clone(headers) }, getBulkTransferByIdResult.payerBulkTransfer)
const payeeBulkResponse = Object.assign({}, { messageId: message.value.id, headers: payeeBulkResponseHeaders }, getBulkTransferByIdResult.payeeBulkTransfer)
const BulkTransferResultModel = BulkTransferModels.getBulkTransferResultModel()
await (new BulkTransferResultModel(payerBulkResponse)).save()
await (new BulkTransferResultModel(payeeBulkResponse)).save()
const payerParams = Util.clone(params)
const payeeParams = Util.clone(params)
const payerPayload = Util.omitNil({
bulkTransferId: payerBulkResponse.bulkTransferId,
bulkTransferState: payerBulkResponse.bulkTransferState,
completedTimestamp: payerBulkResponse.completedTimestamp,
extensionList: payerBulkResponse.extensionList
})
const payerMetadata = Util.StreamingProtocol.createMetadataWithCorrelatedEvent(params.message.value.metadata.event.id, payerParams.message.value.metadata.type, payerParams.message.value.metadata.action, Enum.Events.EventStatus.SUCCESS)
payerParams.message.value = Util.StreamingProtocol.createMessage(params.message.value.id, participants.payerFsp, payerBulkResponse.headers[normalizedKeys[Enum.Http.Headers.FSPIOP.SOURCE]], payerMetadata, payerBulkResponse.headers, payerPayload)
const payeePayload = Util.omitNil({
bulkTransferId: payeeBulkResponse.bulkTransferId,
bulkTransferState: payeeBulkResponse.bulkTransferState,
completedTimestamp: payeeBulkResponse.completedTimestamp,
extensionList: payeeBulkResponse.extensionList
})
const payeeMetadata = Util.StreamingProtocol.createMetadataWithCorrelatedEvent(params.message.value.metadata.event.id, payeeParams.message.value.metadata.type, payeeParams.message.value.metadata.action, Enum.Events.EventStatus.SUCCESS)
payeeParams.message.value = Util.StreamingProtocol.createMessage(params.message.value.id, participants.payeeFsp, Enum.Http.Headers.FSPIOP.SWITCH.value, payeeMetadata, payeeBulkResponse.headers, payeePayload)
if ([Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {
exports.update = async (transferId, bulkTransferId, bulkTransferAssociation) => {
try {
const record = LibUtil.omitNil({
bulkProcessingStateId: bulkTransferAssociation.bulkProcessingStateId,
lastProcessedDate: bulkTransferAssociation.lastProcessedDate || Time.getUTCString(new Date()),
errorCode: bulkTransferAssociation.errorCode,
errorDescription: bulkTransferAssociation.errorDescription
})
return Db.bulkTransferAssociation.update({ transferId, bulkTransferId }, record)
} catch (err) {
throw new Error(err.message)
}
}
eventDetail.action = Enum.Events.Event.Action.BULK_COMMIT
}
await Kafka.proceed(Config.KAFKA_CONFIG, payerParams, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
await Kafka.proceed(Config.KAFKA_CONFIG, payeeParams, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
} else if (eventType === Enum.Events.Event.Type.BULK_PROCESSING && [Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {