// Callback-based variant. The fragment sat inside a Promise executor; the
// wrapper, the fs/pickle requires, and the function name are assumed context
// so the snippet runs on its own.
const fs = require('fs');
const pickle = require('chromium-pickle-js');

function readArchiveHeaderFromFd (fd) {
  return new Promise((resolve, reject) => {
    // First read: an 8-byte pickle containing the size of the header pickle.
    const headerSizeBuffer = Buffer.alloc(8);
    fs.read(fd, headerSizeBuffer, 0, 8, null, (headerReadErr, headerBytesRead) => {
      if (headerReadErr) {
        return reject(headerReadErr);
      }
      const size = pickle.createFromBuffer(headerSizeBuffer).createIterator().readUInt32();
      // Second read: the header pickle itself, holding the JSON index.
      const headerBuffer = Buffer.alloc(size);
      fs.read(fd, headerBuffer, 0, size, null, (pickleReadErr, pickleReadBytes) => {
        if (pickleReadErr) {
          return reject(pickleReadErr);
        }
        const header = pickle.createFromBuffer(headerBuffer).createIterator().readString();
        return resolve({
          header: JSON.parse(header),
          size
        });
      });
    });
  });
}
// Assumed context: createFromBuffer comes from "chromium-pickle-js"; open, read
// and close are promisified fs helpers; AsarFilesystem is the caller's archive model.
export async function readAsar(archive: string): Promise<AsarFilesystem> {
const fd = await open(archive, "r")
let size: number
let headerBuf
try {
const sizeBuf = Buffer.allocUnsafe(8)
if ((await read(fd, sizeBuf, 0, 8, null as any)).bytesRead !== 8) {
throw new Error("Unable to read header size")
}
const sizePickle = createFromBuffer(sizeBuf)
size = sizePickle.createIterator().readUInt32()
headerBuf = Buffer.allocUnsafe(size)
if ((await read(fd, headerBuf, 0, size, null as any)).bytesRead !== size) {
throw new Error("Unable to read header")
}
}
finally {
await close(fd)
}
const headerPickle = createFromBuffer(headerBuf!)
const header = headerPickle.createIterator().readString()
return new AsarFilesystem(archive, JSON.parse(header), size)
}
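For context, a minimal usage sketch of readAsar follows; the archive path and the header.files lookup are illustrative assumptions, not part of the snippet above.

// Hypothetical usage of readAsar; "app.asar" is a placeholder path and the
// header.files shape assumes the standard asar index layout.
async function listEntries() {
  const asarFs = await readAsar("app.asar")
  // The parsed header is a JSON tree whose root "files" object maps entry
  // names to metadata (offset/size, or a nested "files" object for directories).
  console.log(Object.keys(asarFs.header.files))
}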
// Synchronous variant. The archive starts with an 8-byte pickle holding the
// header size, followed by a pickle holding the JSON header itself.
// Assumed module-level requires for this snippet: Node's fs and chromium-pickle-js.
const fs = require('fs')
const pickle = require('chromium-pickle-js')

module.exports.readArchiveHeaderSync = function (archive) {
  const fd = fs.openSync(archive, 'r')
  let size
  let headerBuf
  try {
    // Buffer.alloc gives zero-filled buffers; the legacy new Buffer(n)
    // constructor is deprecated and unsafe.
    const sizeBuf = Buffer.alloc(8)
    if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
      throw new Error('Unable to read header size')
    }
    const sizePickle = pickle.createFromBuffer(sizeBuf)
    size = sizePickle.createIterator().readUInt32()
    headerBuf = Buffer.alloc(size)
    if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
      throw new Error('Unable to read header')
    }
  } finally {
    fs.closeSync(fd)
  }
  const headerPickle = pickle.createFromBuffer(headerBuf)
  const header = headerPickle.createIterator().readString()
  return {header: JSON.parse(header), headerSize: size}
}
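A quick usage sketch for the synchronous reader; the module path and archive name are placeholders.

// Hypothetical usage; './asar-header' and 'app.asar' are placeholder names.
const { readArchiveHeaderSync } = require('./asar-header')

const { header, headerSize } = readArchiveHeaderSync('app.asar')
console.log(`header pickle is ${headerSize} bytes`)
console.log(Object.keys(header.files))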
// Older variant of the same reader (pre-ES6 style). Buffer.alloc is used in
// place of the deprecated new Buffer(n) constructor. Note that, unlike the
// version above, a failed read here throws before fs.closeSync runs, leaking
// the file descriptor.
module.exports.readArchiveHeader = function(archive) {
  var fd = fs.openSync(archive, 'r');
  var sizeBuf = Buffer.alloc(8);
  if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8)
    throw new Error('Unable to read header size');
  var sizePickle = pickle.createFromBuffer(sizeBuf);
  var size = sizePickle.createIterator().readUInt32();
  var headerBuf = Buffer.alloc(size);
  if (fs.readSync(fd, headerBuf, 0, size, null) !== size)
    throw new Error('Unable to read header');
  fs.closeSync(fd);
  var headerPickle = pickle.createFromBuffer(headerBuf);
  var header = headerPickle.createIterator().readString();
  return { header: JSON.parse(header), headerSize: size };
};
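The two-step read mirrors how the header is produced. Below is a minimal writer-side sketch, assuming chromium-pickle-js's createEmpty/writeUInt32/writeString/toBuffer API; the output path and helper name are hypothetical, and a real archive would append file contents after the header.

// Sketch of the inverse operation: serialize a header object and prepend the
// 8-byte size pickle that the readers above consume first.
const fs = require('fs');
const pickle = require('chromium-pickle-js');

function writeArchiveHeaderSync (dest, header) {
  const headerPickle = pickle.createEmpty();
  headerPickle.writeString(JSON.stringify(header));
  const headerBuf = headerPickle.toBuffer();

  // The first 8 bytes are a pickle whose payload is a single UInt32: the
  // byte length of the header pickle that follows.
  const sizePickle = pickle.createEmpty();
  sizePickle.writeUInt32(headerBuf.length);
  const sizeBuf = sizePickle.toBuffer();

  fs.writeFileSync(dest, Buffer.concat([sizeBuf, headerBuf]));
}

// 'out.asar' is a placeholder path.
writeArchiveHeaderSync('out.asar', { files: {} });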