Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
// NOTE(review): fragment — the enclosing try/catch and callback open before
// this excerpt; `tempFile`, `e`, and `filename` are defined outside it.
// Failure path: remove the temp file before rethrowing the original error.
await fs.unlink(tempFile)
throw e
}
})
// Update commit parentage information for this commit
// (presumably records this commit's parent links in the cross-repo
// database via the configured gitservers — TODO confirm against callee)
await discoverAndUpdateCommit({
xrepoDatabase,
repository,
commit,
gitserverUrls: fetchConfiguration().gitServers,
ctx,
})
// Remove input
// Delete the processed input file once the commit metadata update succeeds.
await fs.unlink(filename)
}
// NOTE(review): fragment — the promise chain this continues opens outside
// this excerpt; `tempHomePath` and `tempNodeSourcePath` are defined there.
.then(async output => {
// Typedoc output.
console.log(output.stdout);
// Clean up temp home markdown file. (Nothing needs to wait for this.)
// NOTE(review): deliberately not awaited — fire-and-forget cleanup, so a
// rejection here would be an unhandled rejection. Confirm that is acceptable.
fs.unlink(tempHomePath);
// Clean up temp node index.d.ts file if it exists.
// NOTE(review): exists-then-unlink is racy (TOCTOU); presumably harmless
// for a temp file owned by this process — confirm.
if (await fs.exists(tempNodeSourcePath)) {
fs.unlink(tempNodeSourcePath);
}
})
// Write out TOC file. Do this after Typedoc step to prevent Typedoc
// NOTE(review): fragment — the comment above is cut off and the lines below
// belong to an Express request handler from a different snippet; `errorLog`,
// `log`, `tmpFilename`, `req`, and `res` are defined outside this excerpt.
const locals = {
config: config.get(),
section: 'invalid',
user: req.user,
errors: [ errorLog ]
}
// Error path: delete the temp file, then answer 422 — raw error log for
// clients that do not accept HTML, a rendered error page otherwise.
return fs.unlink(tmpFilename).then(() => {
if (!req.accepts('text/html')) {
return res.status(422).send(errorLog)
} else {
res.send(pug.renderFile('templates/views/errors/invalid.jade', locals))
}
})
} else {
// Success path: delete the temp file, then send `log` as a plain-text
// .gff attachment named after the requested display id.
return fs.unlink(tmpFilename).then(() => {
res.status(200)
res.header('Content-Disposition', 'attachment; filename="' + req.params.displayId + '.gff"')
res.header('content-type', 'text/plain').send(log)
})
// res.header('content-type', 'text/plain').send(log);
}
})
})
// NOTE(review): fragment — `result`, `tarballUrl`, `tarballFile`, `tmpFile`,
// `exists`, `pkg`, `debug`, and `bytes` are defined outside this excerpt.
if (result.status !== 200) {
throw new Error(`Download ${tarballUrl} status: ${result.status} error, should be 200`);
}
// make sure tarball file is not exists again
// (re-check after the download completes: a parallel task downloading the
// same package may have already produced the final file)
exists = await fs.exists(tarballFile);
if (!exists) {
try {
await fs.rename(tmpFile, tarballFile);
} catch (err) {
if (err.code === 'EPERM') {
// Error: EPERM: operation not permitted, rename
// NOTE(review): presumably a Windows-style failure to rename over an
// existing file — treated as benign only if the destination now exists.
exists = await fs.exists(tarballFile);
if (exists) {
// parallel execution case same file exists, ignore rename error
// clean tmpFile
await fs.unlink(tmpFile);
} else {
// rename error
throw err;
}
} else {
// rename error
throw err;
}
}
} else {
// clean tmpFile
await fs.unlink(tmpFile);
}
// Log the saved size; fs.stat also implicitly asserts the tarball exists.
const stat = await fs.stat(tarballFile);
debug('[%s@%s] saved %s %s => %s',
pkg.name, pkg.version, bytes(stat.size), tarballUrl, tarballFile);
// Gzip `path` into `tmp`; `done` fires on the first stream error or on
// successful finish of the write stream.
fs.createReadStream(path)
.on('error', done)
.pipe(zlib.createGzip())
.on('error', done)
.pipe(fs.createWriteStream(tmp))
.on('error', done)
.on('finish', done)
}
// NOTE(review): `yield` below implies this runs inside a generator (co-style
// control flow); the generator function itself opens outside this excerpt,
// as do `compress`, `stats`, `file`, and `ignoreStatError`.
compress.stats = yield fs.stat(tmp).catch(ignoreStatError)
// if the gzip size is larger than the original file,
// don't bother gzipping
if (compress.stats.size > stats.size) {
delete file.compress
yield fs.unlink(tmp)
} else {
// otherwise, rename to the correct path
yield fs.rename(tmp, compress.path)
}
return file
}
}
/**
 * Generate a GRD resource file listing every file found under `targetDir`.
 *
 * @param {string} name - resource bundle name, passed through to getGrdString
 * @param {string} grdName - filename for the generated GRD file
 * @param {string} idPrefix - resource-id prefix, passed through to getGrdString
 * @param {string} targetDir - directory to scan; trailing path.sep optional
 * @returns {Promise<void>} resolves once the GRD file has been written
 */
async function createDynamicGDR (name, grdName, idPrefix, targetDir) {
  // normalize path so relative path ignores leading path.sep
  // (kept in a local so the caller's argument is not mutated)
  const dir = targetDir.endsWith(path.sep) ? targetDir : targetDir + path.sep
  const gdrPath = path.join(dir, grdName)
  // remove previously generated file; a missing file is expected, but any
  // other failure (e.g. EACCES) must not be silently swallowed
  try {
    await fs.unlink(gdrPath)
  } catch (e) {
    if (e.code !== 'ENOENT') throw e
  }
  // build file list from target dir
  const filePaths = await getFileListDeep(dir)
  // strip the directory prefix so entries are relative to `dir`
  const relativeFilePaths = filePaths.map(filePath => filePath.replace(dir, ''))
  // get gdr string
  const gdrFileContents = getGrdString(name, idPrefix, relativeFilePaths)
  // write to file
  await fs.writeFile(gdrPath, gdrFileContents, { encoding: 'utf8' })
}
// NOTE(review): the lines below are fragments of several unrelated promise
// chains; each chain's opening callback is outside this excerpt.
}).then(() => {
console.log("Removing symlink");
return Fs.unlink(addonTargetDir);
});
})
}).then(() => {
console.log('unlinking:' + fileName)
// NOTE(review): these unlinks are neither awaited nor returned — floating
// promises whose rejections would be unhandled. Confirm fire-and-forget
// cleanup is intended here.
fs.unlink(fileName)
console.log('unlinking:' + archiveName)
fs.unlink(archiveName)
}).catch((err) => {
if (!req.accepts('text/html')) {
// NOTE(review): registers a cleanup hook that deletes the tarball on error.
this.ctx.onErrorCatch(async () => await fs.unlink(tarballPath));
await this.UpdateMTime(packageId);
async saveDeveloper(developer) {
if (developer) {
await mzfs.writeFile(await this.getDeveloperFilePath(), JSON.stringify(developer), 'utf8');
return developer;
}
try {
await mzfs.unlink(await this.getDeveloperFilePath());
} catch (err) {
}
return null;
}
}