// NOTE(review): stray scraped banner text (Snyk advertisement) — not part of the program.
// Commented out so the file can parse; it should be deleted from the real source.
// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
result[0] = result
}
for (let i = 0; i < result.length; i++) {
const span = EventSdk.Tracer.createSpan('cl_transfer_timeout')
try {
const state = Utility.StreamingProtocol.createEventState(Enum.Events.EventStatus.FAILURE.status, fspiopError.errorInformation.errorCode, fspiopError.errorInformation.errorDescription)
const metadata = Utility.StreamingProtocol.createMetadataWithCorrelatedEvent(result[i].transferId, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, state)
const headers = Utility.Http.SwitchDefaultHeaders(result[i].payerFsp, Enum.Http.HeaderResources.TRANSFERS, Enum.Http.Headers.FSPIOP.SWITCH.value)
const message = Utility.StreamingProtocol.createMessage(result[i].transferId, result[i].payeeFsp, result[i].payerFsp, metadata, headers, fspiopError, { id: result[i].transferId }, 'application/vnd.interoperability.transfers+json;version=1.0')
span.setTags(Utility.EventFramework.getTransferSpanTags({ payload: message.content.payload, headers }, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.TIMEOUT_RECEIVED))
await span.audit({
state,
metadata,
headers,
message
}, EventSdk.AuditEventAction.start)
if (result[i].bulkTransferId === null) { // regular transfer
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
// event & type set above when `const metadata` is initialized to NOTIFICATION / TIMEOUT_RECEIVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, message, state, null, span)
} else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
message.metadata.event.type = Enum.Events.Event.Type.POSITION
message.metadata.event.action = Enum.Events.Event.Action.TIMEOUT_RESERVED
await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
}
} else { // individual transfer from a bulk
if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
message.to = message.from
message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
message.metadata.event.type = Enum.Events.Event.Type.BULK_PROCESSING
}
let message = {}
let prepareBatch = []
let contextFromMessage
let span
try {
if (Array.isArray(messages)) {
prepareBatch = Array.from(messages)
message = Object.assign(message, Utility.clone(prepareBatch[0]))
} else {
prepareBatch = [Object.assign({}, Utility.clone(messages))]
message = Object.assign({}, messages)
}
contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_position', contextFromMessage)
await span.audit(message, EventSdk.AuditEventAction.start)
const payload = decodePayload(message.value.content.payload)
const eventType = message.value.metadata.event.type
const action = message.value.metadata.event.action
const transferId = payload.transferId || (message.value.content.uriParams && message.value.content.uriParams.id)
if (!transferId) {
const fspiopError = ErrorHandler.Factory.createInternalServerFSPIOPError('transferId is null or undefined')
Logger.error(fspiopError)
throw fspiopError
}
const kafkaTopic = message.topic
Logger.info(Utility.breadcrumb(location, { method: 'positions' }))
const actionLetter = action === Enum.Events.Event.Action.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === Enum.Events.Event.Action.COMMIT ? Enum.Events.ActionLetter.commit
: (action === Enum.Events.Event.Action.REJECT ? Enum.Events.ActionLetter.reject
: (action === Enum.Events.Event.Action.ABORT ? Enum.Events.ActionLetter.abort
let message = {}
if (Array.isArray(messages)) {
message = messages[0]
} else {
message = messages
}
const parentSpanService = 'cl_transfer_prepare'
const contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
const span = EventSdk.Tracer.createChildSpanFromContext(parentSpanService, contextFromMessage)
try {
const payload = decodePayload(message.value.content.payload)
const headers = message.value.content.headers
const action = message.value.metadata.event.action
const transferId = payload.transferId
span.setTags({ transactionId: transferId })
await span.audit(message, EventSdk.AuditEventAction.start)
const kafkaTopic = message.topic
Logger.info(Util.breadcrumb(location, { method: 'prepare' }))
const actionLetter = action === TransferEventAction.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === TransferEventAction.BULK_PREPARE ? Enum.Events.ActionLetter.bulkPrepare
: Enum.Events.ActionLetter.unknown)
let functionality = action === TransferEventAction.PREPARE ? TransferEventType.NOTIFICATION
: (action === TransferEventAction.BULK_PREPARE ? TransferEventType.BULK_PROCESSING
: Enum.Events.ActionLetter.unknown)
const params = { message, kafkaTopic, decodedPayload: payload, span, consumer: Consumer, producer: Producer }
Logger.info(Util.breadcrumb(location, { path: 'dupCheck' }))
const { hasDuplicateId, hasDuplicateHash } = await Comparators.duplicateCheckComparator(transferId, payload, TransferService.getTransferDuplicateCheck, TransferService.saveTransferDuplicateCheck)
if (hasDuplicateId && hasDuplicateHash) {
Logger.info(Util.breadcrumb(location, 'handleResend'))