// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Scans for expired transfers and produces the corresponding timeout
 * notification / position messages to Kafka.
 *
 * Flow:
 *  1. Read the current timeout segment to derive the [intervalMin, intervalMax]
 *     window of transfer-state changes to examine.
 *  2. Clean up stale transfer-timeout rows, then expire reserved transfers in
 *     that window via TimeoutService.timeoutExpireReserved.
 *  3. For each expired transfer: EXPIRED_PREPARED rows emit a NOTIFICATION
 *     (TIMEOUT_RECEIVED), RESERVED_TIMEOUT rows emit a POSITION
 *     (TIMEOUT_RESERVED); members of a bulk transfer use the BULK_* actions
 *     and topics instead.
 *
 * @returns {Promise<{intervalMin: number, cleanup: *, intervalMax: number, result: Array}>}
 * @throws {FSPIOPError} reformatted error when any step fails
 */
const timeout = async () => {
  try {
    const timeoutSegment = await TimeoutService.getTimeoutSegment()
    const intervalMin = timeoutSegment ? timeoutSegment.value : 0
    const segmentId = timeoutSegment ? timeoutSegment.segmentId : 0
    const cleanup = await TimeoutService.cleanupTransferTimeout()
    const latestTransferStateChange = await TimeoutService.getLatestTransferStateChange()
    // parseInt may yield NaN; `|| 0` guards both a missing row and a bad id
    const intervalMax = (latestTransferStateChange && parseInt(latestTransferStateChange.transferStateChangeId)) || 0
    let result = await TimeoutService.timeoutExpireReserved(segmentId, intervalMin, intervalMax)
    const fspiopError = ErrorHandler.Factory.createFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.TRANSFER_EXPIRED).toApiErrorObject(Config.ERROR_HANDLING)
    // Normalise a single-row result to an array. The previous code assigned
    // `result[0] = result` on a const object, which left `result.length`
    // undefined and silently skipped the processing loop below.
    if (!Array.isArray(result)) {
      result = [result]
    }
    for (let i = 0; i < result.length; i++) {
      const span = EventSdk.Tracer.createSpan('cl_transfer_timeout')
      try {
        const state = Utility.StreamingProtocol.createEventState(Enum.Events.EventStatus.FAILURE.status, fspiopError.errorInformation.errorCode, fspiopError.errorInformation.errorDescription)
        const metadata = Utility.StreamingProtocol.createMetadataWithCorrelatedEvent(result[i].transferId, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, state)
        const headers = Utility.Http.SwitchDefaultHeaders(result[i].payerFsp, Enum.Http.HeaderResources.TRANSFERS, Enum.Http.Headers.FSPIOP.SWITCH.value)
        const message = Utility.StreamingProtocol.createMessage(result[i].transferId, result[i].payeeFsp, result[i].payerFsp, metadata, headers, fspiopError, { id: result[i].transferId }, 'application/vnd.interoperability.transfers+json;version=1.0')
        span.setTags(Utility.EventFramework.getTransferSpanTags({ payload: message.content.payload, headers }, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.TIMEOUT_RECEIVED))
        await span.audit({
          state,
          metadata,
          headers,
          message
        }, EventSdk.AuditEventAction.start)
        if (result[i].bulkTransferId === null) { // regular transfer
          if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
            // Notification is routed back to the original sender, from the switch
            message.to = message.from
            message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
            // event & type set above when `const metadata` is initialized to NOTIFICATION / TIMEOUT_RECEIVED
            await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.NOTIFICATION, Enum.Events.Event.Action.TIMEOUT_RECEIVED, message, state, null, span)
          } else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
            message.metadata.event.type = Enum.Events.Event.Type.POSITION
            message.metadata.event.action = Enum.Events.Event.Action.TIMEOUT_RESERVED
            // keyed by payerFsp so the payer's reserved position can be released
            await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
          }
        } else { // individual transfer from a bulk
          if (result[i].transferStateId === Enum.Transfers.TransferInternalState.EXPIRED_PREPARED) {
            message.to = message.from
            message.from = Enum.Http.Headers.FSPIOP.SWITCH.value
            message.metadata.event.type = Enum.Events.Event.Type.BULK_PROCESSING
            message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED
            await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.BULK_PROCESSING, Enum.Events.Event.Action.BULK_TIMEOUT_RECEIVED, message, state, null, span)
          } else if (result[i].transferStateId === Enum.Transfers.TransferInternalState.RESERVED_TIMEOUT) {
            message.metadata.event.type = Enum.Events.Event.Type.POSITION
            message.metadata.event.action = Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED
            await Kafka.produceGeneralMessage(Config.KAFKA_CONFIG, Producer, Enum.Kafka.Topics.POSITION, Enum.Events.Event.Action.BULK_TIMEOUT_RESERVED, message, state, result[i].payerFsp, span)
          }
        }
      } catch (err) {
        const fspiopError = ErrorHandler.Factory.reformatFSPIOPError(err)
        const state = new EventSdk.EventStateMetadata(EventSdk.EventStatusType.failed, fspiopError.apiErrorCode.code, fspiopError.apiErrorCode.message)
        await span.error(fspiopError, state)
        await span.finish(fspiopError.message, state)
        throw fspiopError
      } finally {
        if (!span.isFinished) {
          await span.finish()
        }
      }
    }
    return {
      intervalMin,
      cleanup,
      intervalMax,
      result
    }
  } catch (err) {
    // The previous catch referenced `params`, `consumerCommit`, `eventDetail`
    // and `fromSwitch`, none of which exist in this scope — any failure here
    // crashed with a ReferenceError instead of surfacing the real error.
    throw ErrorHandler.Factory.reformatFSPIOPError(err)
  }
}
// ============================================================================================
// NOTE(review): fragment — this span begins mid-function (inside the try of an
// unseen fulfil-style handler); its enclosing declaration is not visible here,
// likely the result of a corrupted paste. Code kept byte-identical.
// Success path: transform the transfer to a fulfil payload, forward it via
// Kafka, record the metric and commit.
Util.breadcrumb(location, { path: 'validationPassed' })
Logger.info(Util.breadcrumb(location, `callbackMessage--${actionLetter}4`))
message.value.content.payload = TransferObjectTransform.toFulfil(transfer)
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, eventDetail, fromSwitch })
histTimerEnd({ success: true, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
return true
} catch (err) {
// Failure path: record the metric, log, attach the error to the span.
// NOTE(review): returns true even on error — presumably to commit the Kafka
// offset and avoid redelivery; confirm against the consumer's retry policy.
histTimerEnd({ success: false, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
const fspiopError = ErrorHandler.Factory.reformatFSPIOPError(err)
Logger.error(`${Util.breadcrumb(location)}::${err.message}--G0`)
const state = new EventSdk.EventStateMetadata(EventSdk.EventStatusType.failed, fspiopError.apiErrorCode.code, fspiopError.apiErrorCode.message)
await span.error(fspiopError, state)
await span.finish(fspiopError.message, state)
return true
} finally {
// Guarantee the tracing span is closed exactly once on every exit path.
if (!span.isFinished) {
await span.finish()
}
}
}
// NOTE(review): fragment of a GetTransferHandler body — the enclosing function
// declaration (with `error`/`messages` parameters) is not visible in this
// chunk. Code kept byte-identical.
const location = { module: 'GetTransferHandler', method: '', path: '' }
// Start a latency histogram timer; stopped with success/failure labels later.
const histTimerEnd = Metrics.getHistogram(
'transfer_get',
'Consume a get transfer message from the kafka topic and process it accordingly',
['success', 'fspId']
).startTimer()
if (error) {
throw ErrorHandler.Factory.reformatFSPIOPError(error)
}
// Consumers may deliver a batch; only the first message is processed here.
let message = {}
if (Array.isArray(messages)) {
message = messages[0]
} else {
message = messages
}
// Continue the distributed trace from the incoming Kafka message.
const contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
const span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_get', contextFromMessage)
try {
await span.audit(message, EventSdk.AuditEventAction.start)
const metadata = message.value.metadata
const action = metadata.event.action
const transferId = message.value.content.uriParams.id
const kafkaTopic = message.topic
Logger.info(Util.breadcrumb(location, { method: `getTransfer:${action}` }))
const actionLetter = Enum.Events.ActionLetter.get
const params = { message, kafkaTopic, span, consumer: Consumer, producer: Producer }
const eventDetail = { functionality: TransferEventType.NOTIFICATION, action: TransferEventAction.GET }
Util.breadcrumb(location, { path: 'validationFailed' })
if (!await Validator.validateParticipantByName(message.value.from)) {
Logger.info(Util.breadcrumb(location, `breakParticipantDoesntExist--${actionLetter}1`))
// NOTE(review): `error` is necessarily falsy here (a truthy `error` already
// threw above), so this throws a reformatted `undefined` rather than a
// participant-not-found FSPIOP error — looks like a splice/copy bug; verify
// against the canonical handler.
throw ErrorHandler.Factory.reformatFSPIOPError(error)
}
// NOTE(review): fragment of a positions handler body — the enclosing function
// declaration is not visible. Code kept byte-identical.
let message = {}
let prepareBatch = []
let contextFromMessage
let span
try {
// Normalise single-message vs batch input; clone to avoid mutating the
// consumer-owned objects.
if (Array.isArray(messages)) {
prepareBatch = Array.from(messages)
message = Object.assign(message, Utility.clone(prepareBatch[0]))
} else {
prepareBatch = [Object.assign({}, Utility.clone(messages))]
message = Object.assign({}, messages)
}
contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_position', contextFromMessage)
await span.audit(message, EventSdk.AuditEventAction.start)
const payload = decodePayload(message.value.content.payload)
const eventType = message.value.metadata.event.type
const action = message.value.metadata.event.action
// transferId may come from the payload or, for GET-style requests, the URI.
const transferId = payload.transferId || (message.value.content.uriParams && message.value.content.uriParams.id)
if (!transferId) {
const fspiopError = ErrorHandler.Factory.createInternalServerFSPIOPError('transferId is null or undefined')
Logger.error(fspiopError)
throw fspiopError
}
const kafkaTopic = message.topic
Logger.info(Utility.breadcrumb(location, { method: 'positions' }))
// NOTE(review): the nested ternary below is truncated mid-expression and the
// next two lines belong to a different (catch) context — corrupted splice;
// the missing branches/parentheses must be restored from the original file.
const actionLetter = action === Enum.Events.Event.Action.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === Enum.Events.Event.Action.COMMIT ? Enum.Events.ActionLetter.commit
: (action === Enum.Events.Event.Action.REJECT ? Enum.Events.ActionLetter.reject
histTimerEnd({ success: false, fspId: Config.INSTRUMENTATION_METRICS_LABELS.fspId })
throw ErrorHandler.Factory.reformatFSPIOPError(error)
}
// NOTE(review): duplicate of the positions-handler fragment above — this whole
// span repeats earlier content; likely a corrupted paste. Kept byte-identical.
let message = {}
let prepareBatch = []
let contextFromMessage
let span
try {
// Normalise single-message vs batch input; clone to avoid mutating the
// consumer-owned objects.
if (Array.isArray(messages)) {
prepareBatch = Array.from(messages)
message = Object.assign(message, Utility.clone(prepareBatch[0]))
} else {
prepareBatch = [Object.assign({}, Utility.clone(messages))]
message = Object.assign({}, messages)
}
contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_position', contextFromMessage)
await span.audit(message, EventSdk.AuditEventAction.start)
const payload = decodePayload(message.value.content.payload)
const eventType = message.value.metadata.event.type
const action = message.value.metadata.event.action
const transferId = payload.transferId || (message.value.content.uriParams && message.value.content.uriParams.id)
if (!transferId) {
const fspiopError = ErrorHandler.Factory.createInternalServerFSPIOPError('transferId is null or undefined')
Logger.error(fspiopError)
throw fspiopError
}
const kafkaTopic = message.topic
Logger.info(Utility.breadcrumb(location, { method: 'positions' }))
// NOTE(review): ternary truncated mid-expression — remaining branches and
// closing parentheses are missing from this chunk.
const actionLetter = action === Enum.Events.Event.Action.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === Enum.Events.Event.Action.COMMIT ? Enum.Events.ActionLetter.commit
// NOTE(review): duplicate of the GetTransferHandler fragment above (without the
// `location` line); enclosing declaration not visible. Kept byte-identical.
// Start a latency histogram timer; stopped with success/failure labels later.
const histTimerEnd = Metrics.getHistogram(
'transfer_get',
'Consume a get transfer message from the kafka topic and process it accordingly',
['success', 'fspId']
).startTimer()
if (error) {
throw ErrorHandler.Factory.reformatFSPIOPError(error)
}
// Consumers may deliver a batch; only the first message is processed here.
let message = {}
if (Array.isArray(messages)) {
message = messages[0]
} else {
message = messages
}
// Continue the distributed trace from the incoming Kafka message.
const contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
const span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_get', contextFromMessage)
try {
await span.audit(message, EventSdk.AuditEventAction.start)
const metadata = message.value.metadata
const action = metadata.event.action
const transferId = message.value.content.uriParams.id
const kafkaTopic = message.topic
Logger.info(Util.breadcrumb(location, { method: `getTransfer:${action}` }))
const actionLetter = Enum.Events.ActionLetter.get
const params = { message, kafkaTopic, span, consumer: Consumer, producer: Producer }
const eventDetail = { functionality: TransferEventType.NOTIFICATION, action: TransferEventAction.GET }
Util.breadcrumb(location, { path: 'validationFailed' })
// Unknown participant: commit the offset and stop (no notification produced).
if (!await Validator.validateParticipantByName(message.value.from)) {
Logger.info(Util.breadcrumb(location, `breakParticipantDoesntExist--${actionLetter}1`))
await Kafka.proceed(Config.KAFKA_CONFIG, params, { consumerCommit, histTimerEnd })
}
// NOTE(review): third copy of the positions-handler fragment — repeated
// content from a corrupted paste. Kept byte-identical.
let message = {}
let prepareBatch = []
let contextFromMessage
let span
try {
// Normalise single-message vs batch input; clone to avoid mutating the
// consumer-owned objects.
if (Array.isArray(messages)) {
prepareBatch = Array.from(messages)
message = Object.assign(message, Utility.clone(prepareBatch[0]))
} else {
prepareBatch = [Object.assign({}, Utility.clone(messages))]
message = Object.assign({}, messages)
}
contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
span = EventSdk.Tracer.createChildSpanFromContext('cl_transfer_position', contextFromMessage)
await span.audit(message, EventSdk.AuditEventAction.start)
const payload = decodePayload(message.value.content.payload)
const eventType = message.value.metadata.event.type
const action = message.value.metadata.event.action
const transferId = payload.transferId || (message.value.content.uriParams && message.value.content.uriParams.id)
if (!transferId) {
const fspiopError = ErrorHandler.Factory.createInternalServerFSPIOPError('transferId is null or undefined')
Logger.error(fspiopError)
throw fspiopError
}
const kafkaTopic = message.topic
Logger.info(Utility.breadcrumb(location, { method: 'positions' }))
// NOTE(review): ternary truncated mid-expression after the ABORT branch —
// remaining branches and closing parentheses are missing from this chunk.
const actionLetter = action === Enum.Events.Event.Action.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === Enum.Events.Event.Action.COMMIT ? Enum.Events.ActionLetter.commit
: (action === Enum.Events.Event.Action.REJECT ? Enum.Events.ActionLetter.reject
: (action === Enum.Events.Event.Action.ABORT ? Enum.Events.ActionLetter.abort
// NOTE(review): fragment of a prepare handler body — enclosing declaration not
// visible, and the span is cut off at the end of this chunk (the duplicate
// handling after `handleResend` is missing). Kept byte-identical.
let message = {}
if (Array.isArray(messages)) {
message = messages[0]
} else {
message = messages
}
const parentSpanService = 'cl_transfer_prepare'
// Continue the distributed trace from the incoming Kafka message.
const contextFromMessage = EventSdk.Tracer.extractContextFromMessage(message.value)
const span = EventSdk.Tracer.createChildSpanFromContext(parentSpanService, contextFromMessage)
try {
const payload = decodePayload(message.value.content.payload)
const headers = message.value.content.headers
const action = message.value.metadata.event.action
const transferId = payload.transferId
span.setTags({ transactionId: transferId })
await span.audit(message, EventSdk.AuditEventAction.start)
const kafkaTopic = message.topic
Logger.info(Util.breadcrumb(location, { method: 'prepare' }))
const actionLetter = action === TransferEventAction.PREPARE ? Enum.Events.ActionLetter.prepare
: (action === TransferEventAction.BULK_PREPARE ? Enum.Events.ActionLetter.bulkPrepare
: Enum.Events.ActionLetter.unknown)
// NOTE(review): the fallback below assigns an ActionLetter where an event
// *type* is expected — suspected copy/paste bug; confirm the intended
// fallback against the canonical handler before relying on it.
let functionality = action === TransferEventAction.PREPARE ? TransferEventType.NOTIFICATION
: (action === TransferEventAction.BULK_PREPARE ? TransferEventType.BULK_PROCESSING
: Enum.Events.ActionLetter.unknown)
const params = { message, kafkaTopic, decodedPayload: payload, span, consumer: Consumer, producer: Producer }
Logger.info(Util.breadcrumb(location, { path: 'dupCheck' }))
// Duplicate detection: same id + same hash means a resend of the same request.
const { hasDuplicateId, hasDuplicateHash } = await Comparators.duplicateCheckComparator(transferId, payload, TransferService.getTransferDuplicateCheck, TransferService.saveTransferDuplicateCheck)
if (hasDuplicateId && hasDuplicateHash) {
Logger.info(Util.breadcrumb(location, 'handleResend'))