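// Timeout handling: scan transfers that have passed their expiration, mark them expired and
// produce a notification or position message per expired transfer.
// Note: `timeoutSegment` used below is assumed to be loaded earlier in this handler
// (not shown in this snippet), e.g. via a timeout-segment lookup on TimeoutService.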
const intervalMin = timeoutSegment ? timeoutSegment.value : 0
const segmentId = timeoutSegment ? timeoutSegment.segmentId : 0
const cleanup = await TimeoutService.cleanupTransferTimeout()
const latestTransferStateChange = await TimeoutService.getLatestTransferStateChange()
const intervalMax = (latestTransferStateChange && parseInt(latestTransferStateChange.transferStateChangeId)) || 0
const result = await TimeoutService.timeoutExpireReserved(segmentId, intervalMin, intervalMax)
const fspiopError = ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.TRANSFER_EXPIRED).toApiErrorObject(Config.ERROR_HANDLING)
if (!Array.isArray(result)) {
result[0] = result
}
for (let i = 0; i < result.length; i++) {
const span = EventSdk.Tracer.createSpan('cl_transfer_timeout')
try {
const state = Utility.StreamingProtocol.createEventState(Enum.Events.EventStatus.FAILURE.status, fspiopError.errorInformation.errorCode, fspiopError.errorInformation.errorDescription)
const metadata = Utility.StreamingProtocol.createMetadataWithCorrelatedEvent(result[i].transferId, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, state)
const headers = Utility.Http.SwitchDefaultHeaders(result[i].payerFsp, Enum.Http.HeaderResources.TRANSFERS, Enum.Http.Headers.FSPIOP.SWITCH.value)
const message = Utility.StreamingProtocol.createMessage(result[i].transferId, result[i].payeeFsp, result[i].payerFsp, metadata, headers, fspiopError, { id: result[i].transferId }, 'application/vnd.interoperability.transfers+json;version=1.0')
span.setTags(Utility.EventFramework.getTransferSpanTags({ payload: message.content.payload, headers }, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.TIMEOUT_RECEIVED))
await span.audit({
state,
metadata,
headers,
message
}, EventSdk.AuditEventAction.start)
if (result[i].bulkTransferId === null) { // regular transfer
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
// event & type set above when `const metadata` is initialized to NOTIFICATION / TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
} else { // individual transfer from a bulk
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
message.metadata.event.type = Enum.Events.Event.Type.BULK_PROCESSING
message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.BULK_PROCESSING, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
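// Bulk fulfil: process one individual transfer fulfilment from a bulk. Variables such as
// `individualTransferFulfil`, `payload`, `headers`, `messageId` and `kafkaTopic` come from
// the surrounding handler scope and are not defined in this snippet.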
const transferId = individualTransferFulfil.transferId
delete individualTransferFulfil.transferId
const bulkTransferAssociationRecord = {
transferId,
bulkTransferId: payload.bulkTransferId,
bulkProcessingStateId: Enum.Transfers.BulkProcessingState.PROCESSING
}
await BulkTransferService.bulkTransferAssociationUpdate(transferId, bulkTransferId, bulkTransferAssociationRecord)
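// An individual transfer is committed only if the bulk state is valid, no error information
// was supplied and a fulfilment is present; otherwise it is aborted.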
if (state === Enum.Transfers.BulkTransferState.INVALID ||
individualTransferFulfil.errorInformation ||
!individualTransferFulfil.fulfilment) {
individualTransferFulfil.transferState = Enum.Transfers.TransferState.ABORTED
} else {
individualTransferFulfil.transferState = Enum.Transfers.TransferState.COMMITTED
}
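// Re-encode the individual fulfilment as a data URI and wrap it in a streaming-protocol
// message so it can be routed onward via Kafka.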
const dataUri = encodePayload(JSON.stringify(individualTransferFulfil), headers[Enum.Http.Headers.GENERAL.CONTENT_TYPE.value])
const metadata = Util.StreamingProtocol.createMetadataWithCorrelatedEventState(message.value.metadata.event.id, Enum.Events.Event.Type.FULFIL, Enum.Events.Event.Action.COMMIT, Enum.Events.EventStatus.SUCCESS.status, Enum.Events.EventStatus.SUCCESS.code, Enum.Events.EventStatus.SUCCESS.description) // TODO: switch action to 'bulk-fulfil' flow
const msg = {
value: Util.StreamingProtocol.createMessage(messageId, headers[Enum.Http.Headers.FSPIOP.DESTINATION], headers[Enum.Http.Headers.FSPIOP.SOURCE], metadata, headers, dataUri, { id: transferId })
}
params = { message: msg, kafkaTopic, consumer: Consumer, producer: Producer }
const eventDetail = { functionality: Enum.Events.Event.Type.FULFIL, action: Enum.Events.Event.Action.BULK_COMMIT }
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, histTimerEnd, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
}
} catch (err) { // TODO: handle individual transfers streaming error
Logger.info(Util.breadcrumb(location, `callbackErrorInternal2--${actionLetter}6`))
Logger.error(Util.breadcrumb(location, 'notImplemented'))
return true
}
} else { // TODO: handle validation failure
Logger.error(Util.breadcrumb(location, { path: 'validationFailed' }))
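// Bulk prepare: stream the individual transfers of a bulk, persist an association record for
// each and produce a BULK_PREPARE message per transfer. `streamReader`, `payload`, `headers`
// and `messageId` come from the surrounding handler scope.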
let doc
while ((doc = await streamReader.readAsync()) !== null) {
const individualTransfer = doc.payload
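// Copy bulk-level details (payer, payee, expiration) onto the individual transfer and
// rename transferAmount to amount before persisting the association.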
individualTransfer.payerFsp = payload.payerFsp
individualTransfer.payeeFsp = payload.payeeFsp
individualTransfer.amount = individualTransfer.transferAmount
delete individualTransfer.transferAmount
individualTransfer.expiration = payload.expiration
const bulkTransferAssociationRecord = {
transferId: individualTransfer.transferId,
bulkTransferId: payload.bulkTransferId,
bulkProcessingStateId: Enum.Transfers.BulkProcessingState.RECEIVED
}
await BulkTransferService.bulkTransferAssociationCreate(bulkTransferAssociationRecord)
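// Wrap the individual transfer in a streaming-protocol message and hand it to the prepare
// flow (functionality PREPARE, action BULK_PREPARE) via Kafka.proceed.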
const dataUri = encodePayload(JSON.stringify(individualTransfer), headers[Enum.Http.Headers.GENERAL.CONTENT_TYPE.value])
const metadata = Util.StreamingProtocol.createMetadataWithCorrelatedEventState(message.value.metadata.event.id, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.BULK_PREPARE, Enum.Events.EventStatus.SUCCESS.status, Enum.Events.EventStatus.SUCCESS.code, Enum.Events.EventStatus.SUCCESS.description)
const msg = {
value: Util.StreamingProtocol.createMessage(messageId, headers[Enum.Http.Headers.FSPIOP.DESTINATION], headers[Enum.Http.Headers.FSPIOP.SOURCE], metadata, headers, dataUri)
}
params = { message: msg, kafkaTopic, consumer: Consumer, producer: Producer }
const eventDetail = { functionality: Enum.Events.Event.Type.PREPARE, action: Enum.Events.Event.Action.BULK_PREPARE }
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
}
} catch (err) { // TODO: handle individual transfers streaming error
Logger.info(Util.breadcrumb(location, `callbackErrorInternal2--${actionLetter}6`))
Logger.error(Util.breadcrumb(location, 'notImplemented'))
return true
}
} else { // TODO: handle validation failure
Logger.error(Util.breadcrumb(location, { path: 'validationFailed' }))
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
} else {
// TODO: handle use case when no individual transfer has been accepted:
// Switch to finalize bulk state and notify payer with PUT /bulkTransfers/{id}
// const payerBulkResponse = Object.assign({}, { messageId: message.value.id, headers }, getBulkTransferByIdResult.payerBulkTransfer)
Logger.info(Util.breadcrumb(location, `noTransfers--${actionLetter}1`))
Logger.error(Util.breadcrumb(location, 'notImplemented'))
return true
}
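// Bulk processing: once the individual transfers of a bulk have been processed, build the
// payer and payee bulk callbacks, persist them and notify both participants.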
} else if (eventType === Enum.Events.Event.Type.BULK_PROCESSING && [Enum.Events.Event.Action.BULK_COMMIT, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED].includes(action)) {
Logger.info(Util.breadcrumb(location, `bulkFulfil--${actionLetter}3`))
const participants = await BulkTransferService.getParticipantsById(bulkTransferInfo.bulkTransferId)
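// Header names are case-insensitive, so build a lowercase lookup first, then derive the
// payee callback headers and drop the original FSPIOP signature.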
const normalizedKeys = Object.keys(headers).reduce((keys, k) => { keys[k.toLowerCase()] = k; return keys }, {})
const payeeBulkResponseHeaders = Util.Headers.transformHeaders(headers, { httpMethod: headers[normalizedKeys[Enum.Http.Headers.FSPIOP.HTTP_METHOD]], sourceFsp: Enum.Http.Headers.FSPIOP.SWITCH.value, destinationFsp: participants.payeeFsp })
delete payeeBulkResponseHeaders[normalizedKeys[Enum.Http.Headers.FSPIOP.SIGNATURE]]
const payerBulkResponse = Object.assign({}, { messageId: message.value.id, headers: Util.clone(headers) }, getBulkTransferByIdResult.payerBulkTransfer)
const payeeBulkResponse = Object.assign({}, { messageId: message.value.id, headers: payeeBulkResponseHeaders }, getBulkTransferByIdResult.payeeBulkTransfer)
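// Persist both callback payloads through the bulk transfer result model before producing
// the notifications.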
const BulkTransferResultModel = BulkTransferModels.getBulkTransferResultModel()
await (new BulkTransferResultModel(payerBulkResponse)).save()
await (new BulkTransferResultModel(payeeBulkResponse)).save()
const payerParams = Util.clone(params)
const payeeParams = Util.clone(params)
const payerPayload = Util.omitNil({
bulkTransferId: payerBulkResponse.bulkTransferId,
bulkTransferState: payerBulkResponse.bulkTransferState,
completedTimestamp: payerBulkResponse.completedTimestamp,
extensionList: payerBulkResponse.extensionList
})
const payerMetadata = Util.StreamingProtocol.createMetadataWithCorrelatedEvent(params.message.value.metadata.event.id, payerParams.message.value.metadata.type, payerParams.message.value.metadata.action, Enum.Events.EventStatus.SUCCESS)
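// Fulfil handling: load the transfer and validate that the callback source matches the
// payee FSP before proceeding.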
Logger.info(Util.breadcrumb(location, { path: 'getById' }))
const transfer = await TransferService.getById(transferId)
const transferStateEnum = transfer && transfer.transferStateEnumeration
if (!transfer) {
Logger.error(Util.breadcrumb(location, `callbackInternalServerErrorNotFound--${actionLetter}1`))
const fspiopError = ErrorHandler.Factory.createInternalServerFSPIOPError('transfer not found')
const eventDetail = { functionality, action: TransferEventAction.COMMIT }
/**
* TODO: BULK-Handle at BulkProcessingHandler (not in scope of #967)
* HOWTO: The list of individual transfers being committed should contain
* non-existing transferId
*/
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, fspiopError: fspiopError.toApiErrorObject(Config.ERROR_HANDLING), eventDetail, fromSwitch })
throw fspiopError
} else if (headers[Enum.Http.Headers.FSPIOP.SOURCE].toLowerCase() !== transfer.payeeFsp.toLowerCase()) {
/**
* If fulfilment request is coming from a source not matching transfer payee fsp,
* don't proceed with the request, but rather send an error callback to the original payee fsp.
* This is also the reason why we need to retrieve the transfer info upfront now.
*/
Logger.info(Util.breadcrumb(location, `callbackErrorSourceNotMatchingPayeeFsp--${actionLetter}2`))
const fspiopError = ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.VALIDATION_ERROR, `${Enum.Http.Headers.FSPIOP.SOURCE} does not match payee fsp`)
const toDestination = transfer.payeeFsp // overriding global boolean declaration with a string value for local use only
const eventDetail = { functionality, action: TransferEventAction.COMMIT }
/**
* TODO: BULK-Handle at BulkProcessingHandler (not in scope of #967)
* HOWTO: For regular transfers, send the fulfil from non-payee dfsp.
* Not sure if it will apply to bulk, as it could/should be captured
* at BulkPrepareHandler. To be verified as part of a future story.
*/
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, fspiopError: fspiopError.toApiErrorObject(Config.ERROR_HANDLING), eventDetail, fromSwitch, toDestination })
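// Validation helpers: check the FSPIOP-Source / FSPIOP-Destination headers against the
// payer and payee recorded for the (bulk) transfer; failures are collected in the outer
// `reasons` array.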
const validateFspiopSourceMatchesPayer = (payload, headers) => {
const matched = (headers && headers[Enum.Http.Headers.FSPIOP.SOURCE] === payload.payerFsp)
if (!matched) {
reasons.push('FSPIOP-Source header should match Payer')
return false
}
return true
}
const validateFspiopSourceAndDestination = async (payload, headers) => {
const participant = await BulkTransferService.getParticipantsById(payload.bulkTransferId)
const matchedPayee = (headers && headers[Enum.Http.Headers.FSPIOP.SOURCE] === participant.payeeFsp)
const matchedPayer = (headers && headers[Enum.Http.Headers.FSPIOP.DESTINATION] === participant.payerFsp)
if (!matchedPayee) {
reasons.push('FSPIOP-Source header should match Payee')
return false
}
if (!matchedPayer) {
reasons.push('FSPIOP-Destination header should match Payer')
return false
}
return true
}
const validateParticipantByName = async (participantName) => {