} else if (type === 'ofs-delta') {
offsetToObject[offset] = {
type,
offset
}
} else if (type === 'ref-delta') {
offsetToObject[offset] = {
type,
offset
}
}
})
times['offsets'] = Math.floor(marky.stop('offsets').duration)
log('Computing CRCs')
marky.mark('crcs')
// We need to know the lengths of the slices to compute the CRCs.
const offsetArray = Object.keys(offsetToObject).map(Number)
for (const [i, start] of offsetArray.entries()) {
const end =
i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]
const o = offsetToObject[start]
const crc = crc32.buf(pack.slice(start, end)) >>> 0
o.end = end
o.crc = crc
}
times['crcs'] = Math.floor(marky.stop('crcs').duration)
// We don't have the hashes yet. But we can generate them using the .readSlice function!
const p = new GitPackIndex({
pack: Promise.resolve(pack),
packfileSha,
crcs,
hashes,
offsets,
getExternalRefDelta
})
marky.mark('hashes')
const hashes = []
for (let i = 0; i < size; i++) {
const hash = reader.slice(20).toString('hex')
hashes[i] = hash
}
log(`hashes ${marky.stop('hashes').duration}`)
// Skip over CRCs
reader.seek(reader.tell() + 4 * size)
marky.mark('offsets')
// Get offsets
const offsets = new Map()
for (let i = 0; i < size; i++) {
offsets.set(hashes[i], reader.readUInt32BE())
}
log(`offsets ${marky.stop('offsets').duration}`)
const packfileSha = reader.slice(20).toString('hex')
log(`fromIdx ${marky.stop('fromIdx').duration}`)
return new GitPackIndex({
hashes,
crcs: {},
offsets,
packfileSha,
getExternalRefDelta
})
}
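// Hedged, generic sketch of the marky pattern used above: marky.stop(name)
// returns a PerformanceEntry-like object ({ name, entryType, startTime,
// duration }), and its duration in milliseconds is what the snippets log or
// store. `parseIndex` here is a hypothetical stand-in for the work being timed.
const marky = require('marky')

marky.mark('fromIdx')
parseIndex()
const entry = marky.stop('fromIdx')
console.log(`fromIdx took ${entry.duration.toFixed(2)} ms`)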
function computeTransformsPartTwo(nationalId, outAnimation) {
marky.mark('computeTransformsPartTwo()');
// reeeaaally fling it away when animating out. looks better
var slideInY = outAnimation ? screenHeight * 1.1 : screenHeight * 0.6;
var fgTransform = `translateY(${slideInY}px)`;
marky.stop('computeTransformsPartTwo()');
return {
fgTransform
};
}
exports.endIteration = function (testCase) {
var entry = marky.stop(testCase.name);
results.tests[testCase.name].iterations.push(entry.duration);
};
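// Hedged sketch (not in the original benchmark harness): endIteration above
// pairs naturally with a helper that opens the measurement under the same
// test-case name; `startIteration` is a hypothetical counterpart, and marky is
// assumed to be the same module endIteration uses.
exports.startIteration = function (testCase) {
marky.mark(testCase.name);
};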
descriptor.value = async function(this: void, ...args: any[]) {
if (ProfilingEnabled) {
mark(propertyKey)
}
let result = await functionToMeasure.apply(this, args)
if (ProfilingEnabled) {
let measurement = stop(propertyKey)
console.log(`method '${measurement.name}' took ${measurement.duration.toFixed(2)} ms`)
}
return result
}
}
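// Hedged standalone sketch of the same mark/stop pattern without the decorator
// machinery; `measured` is a hypothetical helper, and mark/stop are marky's
// named exports, as in the decorator above.
const { mark, stop } = require('marky')

async function measured(name, fn) {
mark(name)
try {
return await fn()
} finally {
const entry = stop(name)
console.log(`'${entry.name}' took ${entry.duration.toFixed(2)} ms`)
}
}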
async persistChunk(chunk) {
Marky.mark('saveToTangle');
try {
let failedChunkIndex = this.failedChunks.indexOf(chunk["index"]);
if (failedChunkIndex !== -1) {
this.failedChunks.splice(failedChunkIndex, 1);
}
let trytesMessage = this.iota.utils.toTrytes(JSON.stringify({[this.ChunkContentKey]: chunk["content"]}));
let address = await this.getNewIotaAddress();
let transaction = await this.sendTransaction(address, trytesMessage);
Marky.stop('saveToTangle');
chunk["hash"] = transaction["hash"];
chunk["persisted"] = true;
this.chunkBundle[chunk["index"]] = chunk;
this.successfulChunks += 1;
return true;
} catch (err) {
Marky.stop('saveToTangle');
if (this.failedChunks.indexOf(chunk["index"]) === -1) {
this.failedChunks.push(chunk["index"]);
}
console.warn(err.message, chunk);
}
}
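// Hedged sketch (not part of the original class): the 'saveToTangle'
// measurements recorded above could be read back through marky's getEntries()
// API; `saveToTangleStats` is a hypothetical helper, and Marky is assumed to
// be the same marky module the class above imports.
function saveToTangleStats() {
const durations = Marky.getEntries()
.filter(entry => entry.name === 'saveToTangle')
.map(entry => entry.duration);
const totalMs = durations.reduce((sum, d) => sum + d, 0);
return {
count: durations.length,
averageMs: durations.length ? totalMs / durations.length : 0
};
}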
lastPercent = percent
const o = offsetToObject[offset]
if (o.oid) continue
try {
p.readDepth = 0
p.externalReadDepth = 0
marky.mark('readSlice')
const { type, object } = await p.readSlice({ start: offset })
const time = marky.stop('readSlice').duration
times.readSlice += time
callsToReadSlice += p.readDepth
callsToGetExternal += p.externalReadDepth
timeByDepth[p.readDepth] += time
objectsByDepth[p.readDepth] += 1
marky.mark('hash')
const oid = await shasum(GitObject.wrap({ type, object }))
times.hash += marky.stop('hash').duration
o.oid = oid
hashes.push(oid)
offsets.set(oid, offset)
crcs[oid] = o.crc
} catch (err) {
log('ERROR', err)
continue
}
}
marky.mark('sort')
hashes.sort()
times['sort'] = Math.floor(marky.stop('sort').duration)
const totalElapsedTime = marky.stop('total').duration
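// Hedged sketch (not from the original source): one way to report the
// per-phase durations accumulated in `times` against the total measured above,
// using the same `log` function the surrounding code calls.
for (const [phase, ms] of Object.entries(times)) {
log(`${phase}: ${ms} ms (${((100 * ms) / totalElapsedTime).toFixed(1)}%)`)
}
log(`total: ${Math.floor(totalElapsedTime)} ms`)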
async read (filepath, options = {}) {
try {
marky.mark(filepath)
let buffer = await this._readFile(filepath, options)
readFileLog(`${filepath} ${marky.stop(filepath).duration}`)
return buffer
} catch (err) {
return null
}
}
/**