const check = t => {
const entries = fs.readdirSync(dir)
t.equal(entries.length, 1)
t.equal(entries[0], 'd')
Object.keys(expect).forEach(f => {
const file = dir + '/' + f
t.equal(fs.readFileSync(file, 'utf8'), expect[f])
})
t.end()
}
const unpack = new Unpack({ cwd: dir, strip: 8 })
const data = fs.readFileSync(tars + '/long-paths.tar')
// while we're at it, why not use gzip too?
const zip = new z.Gzip()
zip.pipe(unpack)
unpack.on('close', _ => check(t))
zip.end(data)
})
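The test above writes a plain tarball into a minizlib Gzip stream (imported as z) and pipes the compressed output into tar's Unpack, which recognizes the gzipped input and inflates it while extracting. As a smaller, self-contained sketch of the same minizlib API (assuming only that minizlib is installed), a buffer can be round-tripped through Gzip and Gunzip using the Minipass concat() helper:

const zlib = require('minizlib')

const input = Buffer.from('hello, gzip world')

// end() returns the stream itself; concat() resolves with the complete
// output once the stream has finished.
new zlib.Gzip({ level: 9 }).end(input).concat()
  .then(zipped => new zlib.Gunzip().end(zipped).concat())
  .then(unzipped => console.log(unzipped.toString())) // 'hello, gzip world'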
const bodyIsStream = Minipass.isStream(body)
const bodyIsPromise = body &&
typeof body === 'object' &&
typeof body.then === 'function'
if (body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)) {
headers['content-type'] = headers['content-type'] || 'application/json'
body = JSON.stringify(body)
} else if (body && !headers['content-type']) {
headers['content-type'] = 'application/octet-stream'
}
if (opts.gzip) {
headers['content-encoding'] = 'gzip'
if (bodyIsStream) {
const gz = new zlib.Gzip()
body.on('error', /* istanbul ignore next: unlikely and hard to test */
err => gz.emit('error', err))
body = body.pipe(gz)
} else if (!bodyIsPromise) {
body = new zlib.Gzip().end(body).concat()
}
}
if (opts.query) {
const q = typeof opts.query === 'string'
? qs.parse(opts.query)
: opts.query
const parsed = new url.URL(uri)
Object.keys(q).forEach(key => {
if (q[key] !== undefined) {
parsed.searchParams.set(key, q[key])
}
})
uri = url.format(parsed)
}
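For the non-stream, non-promise branch above, the body is gzipped in one shot with minizlib and the matching content-encoding header is set. A hypothetical standalone helper showing just that step (gzipBody is illustrative, not part of any library):

const zlib = require('minizlib')

// Illustrative helper: gzip a string or Buffer body and record the header
// a server would need in order to decode it.
async function gzipBody (body, headers = {}) {
  headers['content-encoding'] = 'gzip'
  const zipped = await new zlib.Gzip().end(body).concat()
  return { body: zipped, headers }
}

// gzipBody(JSON.stringify({ hello: 'world' }))
//   .then(({ body }) => console.log(body.length, 'gzipped bytes'))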
t.test('pipe into a slow gzip reader', t => {
const out = []
const mp2 = new miniz.Unzip()
const p = new Pack({ cwd: files, gzip: true }).add('long-path').end()
p.pause()
class SlowStream extends EE {
write (chunk) {
mp2.write(chunk)
setTimeout(_ => {
this.emit('drain')
p.resume()
})
return false
}
end (chunk) {
return mp2.end(chunk)
}
}
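The SlowStream above is a bare EventEmitter subclass that simulates a consumer applying backpressure while a gzipped Pack stream is fed into minizlib's Unzip. A minimal sketch of the same incremental decompression, assuming a ./example.tar.gz file exists:

const fs = require('fs')
const zlib = require('minizlib')

// Write gzipped bytes into Unzip a chunk at a time and collect the output.
const unzip = new zlib.Unzip()
const chunks = []
unzip.on('data', c => chunks.push(c))
unzip.on('end', () => console.log('inflated', Buffer.concat(chunks).length, 'bytes'))

fs.createReadStream('./example.tar.gz')
  .on('data', c => unzip.write(c))
  .on('end', () => unzip.end())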
gname: '',
devmaj: 0,
devmin: 0,
atime: null,
ctime: null,
nullBlock: false
}
t.match(h, expect)
t.equal(data.length, 2048)
t.match(data.slice(1024).toString(), /^\0{1024}$/)
const syncgz = new PackSync({ cwd: files, portable: true, gzip: true })
.add('dir').end().read()
t.equal(syncgz[9], 255, 'gzip OS flag set to "unknown"')
const sync = new miniz.Gunzip().end(zipped).read()
t.equal(sync.slice(512).toString(), data.slice(512).toString())
const hs = new Header(sync)
t.match(hs, expect)
const expect2 = {
type: 'File',
cksumValid: true,
needPax: false,
path: 'dir/x',
mode: 0o644,
size: 0,
mtime: mtime,
cksum: Number,
linkpath: '',
uname: '',
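The assertions above gunzip a synchronously packed archive, compare it against the uncompressed output, and match the leading 512-byte block with tar's Header class. A condensed sketch of that flow (paths are illustrative and assume a ./some-dir to archive):

const tar = require('tar')
const zlib = require('minizlib')

// Pack synchronously with gzip, inflate with minizlib, then decode the
// first 512-byte header block.
const zipped = new tar.Pack.Sync({ cwd: '.', gzip: true })
  .add('some-dir').end().read()
const raw = new zlib.Gunzip().end(zipped).read()
const header = new tar.Header(raw)
console.log(header.path, header.type, header.size)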
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.portable = !!opt.portable
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist
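The constructor above wires a minizlib Gzip between Pack's entry stream and its own output whenever a gzip option is given. In practice that means a caller can stream a compressed archive directly; a short sketch under the assumption that a ./some-dir directory exists:

const fs = require('fs')
const tar = require('tar')

// gzip may be true or an options object; it is handed to minizlib's Gzip.
new tar.Pack({ cwd: '.', gzip: { level: 9 } })
  .add('some-dir')
  .end()
  .pipe(fs.createWriteStream('some-dir.tgz'))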
function fastPathGzip(bufferOrString, options) {
return streamToBuffer(new Gzip(options), bufferOrString)
}
static gzip(bufferOrString, options) {
options = utils.createOptions(options, DEFAULT_GZIP_CONFIG)
if (useGzipFastPath) {
try {
return streamToBuffer(new minizlib.Gzip(options), bufferOrString)
} catch (e) {
useGzipFastPath = false
}
}
return fallbackGzip(bufferOrString, options)
}
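The wrapper above tries the minizlib fast path first and permanently falls back if construction throws. A self-contained sketch of that pattern (gzipBuffer and useFastPath are illustrative names), using core zlib as the fallback:

const minizlib = require('minizlib')
const nodeZlib = require('zlib')

// Prefer minizlib's buffering Gzip stream; fall back to core gzipSync if
// the fast path throws (for example on unsupported options).
let useFastPath = true

function gzipBuffer (input, options) {
  if (useFastPath) {
    try {
      return new minizlib.Gzip(options).end(input).concat()
    } catch (e) {
      useFastPath = false
    }
  }
  return Promise.resolve(nodeZlib.gzipSync(input, options))
}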
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
if (this.portable)
opt.gzip.portable = true
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist
this[JOBS] = 0
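The main difference from the earlier constructor is that portable: true is copied onto the gzip options before minizlib's Gzip is created, which is what the OS-flag assertion in the test above relies on. A short sketch of that behaviour (directory name is illustrative):

const tar = require('tar')

// With portable + gzip, byte 9 of the gzip header (the OS field) is
// expected to be 255 ("unknown"), keeping archives platform-neutral.
const buf = new tar.Pack.Sync({ cwd: '.', portable: true, gzip: true })
  .add('some-dir').end().read()
console.log('gzip OS flag byte:', buf[9]) // 255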
module.exports = function tar (cacheDir, destDir) {
debug(() => `untar ${cacheDir} ${destDir}`);
const gunzip = new zlib.Gunzip();
const extractStream = tarStream.extract();
extractStream.on('entry', (header, stream, next) => {
if (header.type !== 'file') {
stream.resume();
stream.on('end', next);
return;
}
const name = header.name.slice(header.name.indexOf('/'));
if (!cacheDir) {
writeOne(destDir + name, stream, next);
return;
}
if (!destDir) {
writeOne(cacheDir + name, stream, next);
return;
}